def _get_repos(p):
    """Recursively yield ``(repo_name, scm_info)`` pairs for repositories under *p*.

    Skips directories without read+execute access, removed-repo directories
    (matching ``REMOVED_REPO_PAT``) and hidden (dot) directories.  Directories
    that are not themselves repositories are descended into when ``recursive``
    is set.  ``log``, ``skip_removed_repos``, ``recursive`` and ``path`` are
    taken from the enclosing scope.
    """
    # Need both read (list) and execute (traverse) permission on the directory.
    if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
        log.warning('ignoring repo path without access: %s', p)
        return
    # Missing write access is survivable — warn but keep scanning.
    if not os.access(p, os.W_OK):
        log.warning('repo path without write access: %s', p)
    for dirpath in os.listdir(p):
        # plain files can never be repositories
        if os.path.isfile(os.path.join(p, dirpath)):
            continue
        cur_path = os.path.join(p, dirpath)
        # skip removed repos
        if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
            continue
        # skip .<something> dirs
        if dirpath.startswith('.'):
            continue
        try:
            scm_info = get_scm(cur_path)
            # NOTE(review): the name is computed relative to `path` (outer
            # scope), not the current `p` — presumably the scan root; confirm
            # against the enclosing function.
            yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
        except VCSError:
            if not recursive:
                continue
            # check if this dir contains other repos for recursive scan
            rec_path = os.path.join(p, dirpath)
            if not os.path.islink(rec_path) and os.path.isdir(rec_path):
                for inner_scm in _get_repos(rec_path):
                    yield inner_scm
def __init__(self, path, log=None, params={}):
    """Open an SQLite connection wrapper.

    :param path: database file path, or ``':memory:'`` for an in-memory DB
    :param log: optional logger passed through to ``ConnectionWrapper``
    :param params: connection options; recognized keys are ``timeout``
        (seconds, default 10), ``cursor`` (``'eager'`` by default) and
        ``extensions`` (list of loadable SQLite extensions)
    :raises TracError: if the file or its directory is missing or not
        readable+writable
    """
    assert have_pysqlite > 0
    self.cnx = None
    if path != ':memory:':
        if not os.access(path, os.F_OK):
            raise TracError(_('Database "%(path)s" not found.', path=path))
        dbdir = os.path.dirname(path)
        # SQLite needs write access to the directory too (journal files).
        if not os.access(path, os.R_OK + os.W_OK) or \
           not os.access(dbdir, os.R_OK + os.W_OK):
            raise TracError(
                _('The user %(user)s requires read _and_ write '
                  'permissions to the database file %(path)s '
                  'and the directory it is located in.',
                  user=getuser(), path=path))
    self._active_cursors = weakref.WeakKeyDictionary()
    timeout = int(params.get('timeout', 10.0))
    self._eager = params.get('cursor', 'eager') == 'eager'
    # eager is default, can be turned off by specifying ?cursor=
    if isinstance(path, unicode):  # needed with 2.4.0
        path = path.encode('utf-8')
    cnx = sqlite.connect(path, detect_types=sqlite.PARSE_DECLTYPES,
                         check_same_thread=sqlite_version < (3, 3, 1),
                         timeout=timeout)
    # load extensions
    extensions = params.get('extensions', [])
    if len(extensions) > 0:
        cnx.enable_load_extension(True)
        for ext in extensions:
            cnx.load_extension(ext)
        # re-disable extension loading once done, for safety
        cnx.enable_load_extension(False)
    ConnectionWrapper.__init__(self, cnx, log)
def revert(self):
    """Undo a previously applied configuration change.

    Reverses :meth:`apply`: the new file is re-created at ``__newpath`` from
    the live config, and the live config is restored from the ``__oldpath``
    backup.  Children are reverted first.  Only valid in the APPLIED state;
    transitions to REVERTED on success.
    """
    if self.__state != self.__STATE['APPLIED']:
        raise Error("Attempt to revert configuration from state %s" %
                    self.__state)
    for child in self.__children:
        child.revert()
    log("Reverting changes to %s configuration" % self.__path)
    # Remove existing new configuration
    if os.access(self.__newpath, os.F_OK):
        os.unlink(self.__newpath)
    # Revert new configuration.
    # (hard-link + unlink implements an atomic-ish move without copying data)
    if os.access(self.__path, os.F_OK):
        os.link(self.__path, self.__newpath)
        os.unlink(self.__path)
    # Revert to old configuration.
    if os.access(self.__oldpath, os.F_OK):
        os.link(self.__oldpath, self.__path)
        os.unlink(self.__oldpath)
    # Leave .*.xapi-new as an aid to debugging.
    self.__state = self.__STATE['REVERTED']
def check_validate(self, tail_file):
    """Verify that *tail_file* is an existing, readable, regular file.

    :param tail_file: path to the file that will be tailed
    :raises TailError: when the path is missing, unreadable, or a directory
    """
    exists = os.access(tail_file, os.F_OK)
    if not exists:
        raise TailError("File '%s' does not exist" % (tail_file))
    readable = os.access(tail_file, os.R_OK)
    if not readable:
        raise TailError("File '%s' not readable" % (tail_file))
    is_directory = os.path.isdir(tail_file)
    if is_directory:
        raise TailError("Dir '%s' is not a file" % (tail_file))
def Initialize(config=None, token=None):
    """Initialize or update a GRR configuration.

    Interactive (raw_input-driven) setup: optionally imports an old config,
    generates server keys unless present/imported, then configures base
    options, users and binaries.  Raises IOError when the writeback config
    file is not writable.  (Python 2 code.)
    """
    print "Checking write access on config %s" % config["Config.writeback"]
    if not os.access(config.parser.filename, os.W_OK):
        raise IOError("Config not writeable (need sudo?)")
    print "\nStep 0: Importing Configuration from previous installation."
    options_imported = 0
    prev_config_file = config.Get("ConfigUpdater.old_config", default=None)
    if prev_config_file and os.access(prev_config_file, os.R_OK):
        print "Found config file %s." % prev_config_file
        if raw_input("Do you want to import this configuration?"
                     " [yN]: ").upper() == "Y":
            options_imported = ImportConfig(prev_config_file, config)
    else:
        print "No old config file found."
    print "\nStep 1: Key Generation"
    if config.Get("PrivateKeys.server_key", default=None):
        if options_imported > 0:
            print ("Since you have imported keys from another installation in the "
                   "last step,\nyou probably do not want to generate new keys now.")
        # Default answer is "N": keys are only regenerated on an explicit "Y".
        if (raw_input("You already have keys in your config, do you want to"
                      " overwrite them? [yN]: ").upper() or "N") == "Y":
            flags.FLAGS.overwrite = True
            GenerateKeys(config)
    else:
        GenerateKeys(config)
    print "\nStep 2: Setting Basic Configuration Parameters"
    ConfigureBaseOptions(config)
    AddUsers(token=token)
    ManageBinaries(config, token=token)
    print "\nGRR Initialization complete!\n"
def main():
    """Redeploy the server tree under TARGET_DIR from scratch.

    Stops a possibly running server (via its stop_server.bat, if present),
    wipes and recreates TARGET_DIR, deploys platform-specific components and
    removes leftover example/readme files from the target tree.
    """
    logging.basicConfig(level=logging.DEBUG)
    file_name = os.path.join(TARGET_DIR, 'stop_server.bat')
    if os.access(file_name, os.F_OK):
        logging.info('Trying to stop possibly running server...')
        # shell=True so the .bat file is run by the command interpreter
        subprocess.call(file_name, stderr=subprocess.PIPE, shell=True)
    # Start from a clean target directory.
    if os.access(TARGET_DIR, os.F_OK):
        shutil.rmtree(TARGET_DIR)
    makedirs(TARGET_DIR, exist_ok=True)
    if IS_WINDOWS:
        deploy_wnmp()
        deploy_dokuwiki('nginx/www')
    else:
        deploy_dokuwiki()
    # Prune files that should not ship in a deployment.
    for pattern in [
            'example.nginx.conf',
            'readme.txt', ]:
        for path in glob.glob(os.path.join(TARGET_DIR, os.path.normpath(pattern))):
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.unlink(path)
def apply(self):
    """Atomically swap the staged configuration into place.

    Children are applied first.  The live config at ``__path`` is backed up
    to ``__oldpath`` (hard-link + unlink), then the staged file at
    ``__newpath`` replaces it.  Only valid in the NOT-APPLIED state;
    transitions to APPLIED on success.
    """
    if self.__state != self.__STATE['NOT-APPLIED']:
        raise Error("Attempt to apply configuration from state %s" %
                    self.__state)
    for child in self.__children:
        child.apply()
    log("Applying changes to %s configuration" % self.__path)
    # Remove previous backup.
    if os.access(self.__oldpath, os.F_OK):
        os.unlink(self.__oldpath)
    # Save current configuration.
    if os.access(self.__path, os.F_OK):
        os.link(self.__path, self.__oldpath)
        os.unlink(self.__path)
    # Apply new configuration.
    assert(os.path.exists(self.__newpath))
    os.link(self.__newpath, self.__path)
    # Remove temporary file.
    os.unlink(self.__newpath)
    self.__state = self.__STATE['APPLIED']
def _find_jar(url=None):
    """Find loci_tools.jar; if necessary, download it to a writeable location.

    :param url: optional override for the download URL (defaults to the
        openmicroscopy.org 5.1.0 artifact)
    :returns: absolute path to loci_tools.jar
    :raises IOError: when no writeable candidate location exists
    """
    # First pass: is the jar already present in any known location?
    for loc in _gen_jar_locations():
        if os.path.isfile(os.path.join(loc, 'loci_tools.jar')):
            return os.path.join(loc, 'loci_tools.jar')

    warn('loci_tools.jar not found, downloading')
    # Second pass: pick the first location we can actually write to.
    for loc in _gen_jar_locations():
        # check if dir exists and has write access:
        if os.path.exists(loc) and os.access(loc, os.W_OK):
            break
        # if directory is pims and it does not exist, so make it (if allowed)
        if os.path.basename(loc) == 'pims' and \
           os.access(os.path.dirname(loc), os.W_OK):
            os.mkdir(loc)
            break
    else:
        # for/else: no break means no candidate was writeable
        raise IOError('No writeable location found. In order to use the '
                      'Bioformats reader, please download '
                      'loci_tools.jar to the pims program folder or one of '
                      'the locations provided by _gen_jar_locations().')

    from six.moves.urllib.request import urlretrieve
    if url is None:
        url = ('http://downloads.openmicroscopy.org/bio-formats/5.1.0/' +
               'artifacts/loci_tools.jar')
    urlretrieve(url, os.path.join(loc, 'loci_tools.jar'))
    return os.path.join(loc, 'loci_tools.jar')
def GenerateCapacitor (dim, cap, dest):
    """Generate capacitor footprints and 3D shapes into <dest>.pretty/.3dshapes.

    Emits one footprint per soldering method ("Hand", "Reflow", "Wave") found
    in *cap*; when "Hand" is absent but "Reflow" present, derives hand pads
    from the reflow pads.  (Python 2 code: has_key(), 0755 octal literal.)
    """
    dirmod = dest + ".pretty"
    # NOTE(review): R_OK is used as an existence check before mkdir — an
    # existing-but-unreadable directory would make mkdir fail; F_OK/isdir
    # would be the conventional test.
    if not os.access (dirmod, os.R_OK):
        os.mkdir (dirmod, 0755)
    dir3d = dest + ".3dshapes"
    if not os.access (dir3d, os.R_OK):
        os.mkdir (dir3d, 0755)
    for mod in [ "Hand", "Reflow", "Wave" ]:
        if cap.has_key (mod):
            _GenerateCapacitor (dirmod, dir3d, dim, mod, cap, cap [mod])
    if (not cap.has_key ("Hand")) and (cap.has_key ("Reflow")):
        # Automatically generate hand soldering pads
        # Use "Reflow" pads but add 25% size to them and move pads
        # from each out so that the internal cleaning G stays the same
        # NOTE(review): `pads` aliases cap["Reflow"] — the += / *= below
        # mutate the caller's Reflow entry in place; a copy may have been
        # intended. TODO confirm against callers.
        pads = cap ["Reflow"]
        pads ['C'] += pads ['Y'] * 0.25
        pads ['Z'] += pads ['Y'] * 0.25 * 2
        pads ['X'] *= 1.25
        pads ['Y'] *= 1.25
        _GenerateCapacitor (dest + ".pretty", dest + ".3dshapes", dim, "Hand", cap, pads)
def showSettings():
    """Print the current SVN/Trac/PostgreSQL settings from `env`.

    Each path is annotated with 'o' (exists), 'x' (missing) or 'A'
    (auto-detected, not checked).  (Python 2 print statements.)
    """
    BAD = "x"
    OK = "o"
    AUTO = "A"
    svn_repos_isavaile = BAD
    trac_project_isavaile = BAD
    trac_templ_isavaile = BAD
    # F_OK: only existence is checked, not readability.
    if os.access(env['svn']['repos'], os.F_OK):
        svn_repos_isavaile = OK
    if os.access(env['trac']['project'], os.F_OK):
        trac_project_isavaile = OK
    if os.access(env['trac']['template'], os.F_OK):
        trac_templ_isavaile = OK
    print "Current settings: "
    print "A = auto detect, o = valid path, x = invalid path"
    print "[SVN] "
    print " admin:\t"+AUTO+"\t"+env['svn']['admin']
    print " repos:\t"+svn_repos_isavaile+"\t"+env['svn']['repos']
    print " "
    print "[TRAC] "
    print " templ:\t"+trac_templ_isavaile+"\t"+env['trac']['template']
    print " admin:\t"+AUTO+"\t"+env['trac']['admin']
    print " project:\t"+trac_project_isavaile+"\t"+env['trac']['project']
    print " "
    print "[PGSQL]"
    print " create:\t"+AUTO+"\t"+env['pgsql']['create']
    print " drop:\t"+AUTO+"\t"+env['pgsql']['drop']
    print " "
def create_class_file(name, backup, **kwargs):
    """
    Return a $FILETYPEStripper() instance, corresponding to the filetype
    of the given file, or None when the file is unusable or unsupported.

    :param str name: name of the file to be parsed
    :param bool backup: shall the file be backuped?
    """
    if not os.path.isfile(name):  # check if the file exists
        logging.error('%s is not a valid file', name)
        return None
    elif not os.access(name, os.R_OK):  # check read permissions
        # (fixed duplicated word in the original log message: "is is")
        logging.error('%s is not readable', name)
        return None

    mime = mimetypes.guess_type(name)[0]
    if not mime:
        logging.info('Unable to find mimetype of %s', name)
        return None
    # Collapse vendor-specific mimetypes onto the generic keys used by
    # strippers.STRIPPERS.
    if mime.startswith('application/vnd.oasis.opendocument'):
        mime = 'application/opendocument'  # opendocument fileformat
    elif mime.startswith('application/vnd.openxmlformats-officedocument'):
        mime = 'application/officeopenxml'  # office openxml

    is_writable = os.access(name, os.W_OK)
    try:
        stripper_class = strippers.STRIPPERS[mime]
    except KeyError:
        logging.info('Don\'t have stripper for %s format', mime)
        return None
    return stripper_class(name, mime, backup, is_writable, **kwargs)
def __init__(self, fullpath, owner=None):
    """Build a filesystem-item descriptor for *fullpath*.

    Runs the configured access checks (404 on failure), then records name,
    paths, mimetype, access bits and — when stat succeeds — owner, group,
    size, timestamps and permission strings.  (Python 2 code: 0777 literal.)
    """
    for check in self.checks:
        if not check( fullpath ):
            raise http.Http404('Path not found `%s` or IADMIN_FM_ROOT not configured in settings' % fullpath)
    self.name = self.basename = os.path.basename(fullpath) or ROOT_NAME
    self.parent = os.path.dirname(fullpath)
    self.absolute_path = fullpath # file system absolute path
    self.relative_path = utils.relpath(fullpath, utils.get_document_root())
    self.path = self.relative_path.split('/')
    self.mime = mimetypes.guess_type(self.absolute_path, False)[0] or ''
    self.can_read = os.access(self.absolute_path, os.R_OK)
    self.can_write = os.access(self.absolute_path, os.W_OK)
    self.is_link = os.path.islink(self.absolute_path)
    try:
        itemstat = os.stat(self.absolute_path)
        self.user = getpwuid(itemstat.st_uid)[0]
        self.group = getgrgid(itemstat.st_gid)[0]
        self.size = itemstat.st_size
        self.ctime = datetime.fromtimestamp(itemstat.st_ctime)
        self.mtime = datetime.fromtimestamp(itemstat.st_mtime)
        # zero-pad the octal permission triplet, e.g. "0755"
        octs = "%04d" % int(oct(itemstat.st_mode & 0777))
        self.perms_numeric = octs
        self.perms = "%s%s%s" % (perms[int(octs[1])], perms[int(octs[2])],
                                 perms[int(octs[3])])
    # NOTE(review): bare except silently maps any stat/pwd/grp failure
    # (e.g. broken symlink, unknown uid) to empty metadata.
    except:
        self.user = self.group = self.perms_numeric = self.perms = ''
        self.size = self.ctime = self.mtime = None
def one_detail(self, files):
    """Collect size, type, access bits and mtime details for one path.

    :param files: path of the file/directory to inspect
    :returns: dict with keys size, is_file, R_OK, W_OK, X_OK, time,
        time_of_last_modification, name.  (Python 2 code.)
    """
    file_set = {}
    file_stat = os.stat(files)
    file_set['size'] = file_stat[stat.ST_SIZE]
    mode = file_stat[stat.ST_MODE]
    if stat.S_ISREG(mode):
        file_set['is_file'] = True
    else:
        file_set['is_file'] = False
    if os.access(files, os.R_OK):
        file_set['R_OK'] = True
    else:
        file_set['R_OK'] = False
    if os.access(files, os.W_OK):
        file_set['W_OK'] = True
    else:
        file_set['W_OK'] = False
    if os.access(files, os.X_OK):
        file_set['X_OK'] = True
    else:
        file_set['X_OK'] = False
    file_set['time'] = os.path.getmtime(files)
    x = time.localtime(file_set['time'])
    file_set['time_of_last_modification'] = time.strftime('%Y-%m-%d %H:%M:%S', x)
    # NOTE(review): a fixed +13 is added to the hour — presumably a hardcoded
    # timezone offset; TODO confirm intent.  Also, the boundary check uses
    # `> 24`, so an hour of exactly 24 is left as-is (likely should be >= 24),
    # and the date part is never rolled over.
    [tmp1,tmp2] = file_set['time_of_last_modification'].split()
    nima = str(int(tmp2.split(':')[0]) + 13)
    if int(nima) > 24:
        nima = str(int(nima) -24)
    tmp2 = nima + ':' + tmp2.split(':')[1] + ':' + tmp2.split(':')[2]
    file_set['time_of_last_modification'] = tmp1 + " " + tmp2
    print file_set['time_of_last_modification']
    file_set['name'] = files
    return file_set
def checkRepoPath(repoRoot, filePath):
    """Validate *filePath* inside the rdiff-backup repository *repoRoot*.

    :raises DoesNotExistError: when repoRoot is not a repository, or the
        path exists neither live nor as an increment
    :raises AccessDeniedError: when any component of the path is a symlink
    """
    # Make sure repoRoot is a valid rdiff-backup repository
    dataPath = joinPaths(repoRoot, rdiffDataDirName)
    if not os.access(dataPath, os.F_OK) or not os.path.isdir(dataPath):
        raise DoesNotExistError()
    # Make sure there are no symlinks in the path
    # (walk upwards, stripping one component per iteration)
    pathToCheck = joinPaths(repoRoot, filePath)
    while True:
        pathToCheck = pathToCheck.rstrip("/")
        if os.path.islink(pathToCheck):
            raise AccessDeniedError()
        (pathToCheck, file) = os.path.split(pathToCheck)
        if not file:
            break
    # Make sure that the folder/file exists somewhere - either in the current folder, or in the incrementsDir
    if not os.access(joinPaths(repoRoot, filePath), os.F_OK):
        (parentFolder, filename) = os.path.split(joinPaths(repoRoot, rdiffIncrementsDirName, filePath))
        try:
            increments = os.listdir(parentFolder)
        except OSError:
            increments = []
        # NOTE(review): Python 2 semantics — filter() returns a list here.
        # On Python 3 it returns a lazy object that is always truthy, which
        # would defeat the `if not increments` test below.
        increments = filter(lambda x: x.startswith(filename), increments)
        if not increments:
            raise DoesNotExistError()
def generate_install_media(self, force_download=False,
                           customize_or_icicle=False):
    """
    Method to generate the install media for RedHat based operating
    systems that install from floppy.  If force_download is False (the
    default), then the original media will only be fetched if it is not
    cached locally.  If force_download is True, then the original media
    will be downloaded regardless of whether it is cached locally.
    """
    self.log.info("Generating install media")

    if not force_download:
        if os.access(self.jeos_filename, os.F_OK):
            # if we found a cached JEOS, we don't need to do anything here;
            # we'll copy the JEOS itself later on
            return
        elif os.access(self.modified_floppy_cache, os.F_OK):
            self.log.info("Using cached modified media")
            shutil.copyfile(self.modified_floppy_cache, self.output_floppy)
            return

    self._get_original_floppy(self.url + "/images/bootnet.img",
                              force_download)
    self._copy_floppy()
    try:
        self._modify_floppy()
        if self.cache_modified_media:
            self.log.info("Caching modified media for future use")
            shutil.copyfile(self.output_floppy, self.modified_floppy_cache)
    finally:
        # always clean up scratch floppy state, even if modification failed
        self._cleanup_floppy()
def onDatabaseDirectoryChanged(self,databaseDirectory):
    """React to a change of the DICOM database directory.

    Creates the directory if needed, validates read/write access, opens the
    ctkDICOM.sql database there and propagates the directory to the browser
    or to QSettings.  Shows a message box on any failure.
    """
    if not hasattr(slicer, 'dicomDatabase') or not slicer.dicomDatabase:
        slicer.dicomDatabase = ctk.ctkDICOMDatabase()
    DICOM.setDatabasePrecacheTags(self.dicomBrowser)
    databaseFilepath = databaseDirectory + "/ctkDICOM.sql"
    messages = ""
    if not os.path.exists(databaseDirectory):
        try:
            os.mkdir(databaseDirectory)
        except OSError:
            messages += "Directory does not exist and cannot be created. "
    else:
        if not os.access(databaseDirectory, os.W_OK):
            messages += "Directory not writable. "
        if not os.access(databaseDirectory, os.R_OK):
            messages += "Directory not readable. "
    if messages != "":
        self.messageBox('The database file path "%s" cannot be used. %s\nPlease pick a different database directory using the LocalDatabase button in the DICOM Browser.' % (databaseFilepath,messages))
    else:
        slicer.dicomDatabase.openDatabase(databaseDirectory + "/ctkDICOM.sql", "SLICER")
        if not slicer.dicomDatabase.isOpen:
            self.messageBox('The database file path "%s" cannot be opened.\nPlease pick a different database directory using the LocalDatabase button in the DICOM Browser.' % databaseFilepath)
            self.dicomDatabase = None
        else:
            if self.dicomBrowser:
                # keep the browser's directory in sync
                if self.dicomBrowser.databaseDirectory != databaseDirectory:
                    self.dicomBrowser.databaseDirectory = databaseDirectory
            else:
                # no browser yet: persist the choice for later sessions
                settings = qt.QSettings()
                settings.setValue('DatabaseDirectory', databaseDirectory)
                settings.sync()
    if slicer.dicomDatabase:
        slicer.app.setDICOMDatabase(slicer.dicomDatabase)
def main():
    """Parse command-line arguments and dispatch a netCDF file operation.

    Validates that the input file is readable, the output file's directory is
    writable and the requested command exists on `Commands` before invoking it.
    """
    def _get_commands():
        # public methods of Commands act as available sub-commands
        return([m for m in dir(Commands) if not m.startswith('__')])
    parser = argparse.ArgumentParser(description='Performs operations' +
                                     ' on a netCDF file.',
                                     epilog='OUTFILE = INFILE will replace the ' +
                                     'INFILE with the OUTFILE.',
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('command', help="possible commands: {}"
                        .format(_get_commands()), metavar='COMMAND')
    parser.add_argument('arguments', help='arguments for commands' +
                        '\n "compress": ARG = compression_level (int), default=9' +
                        '\n "delvar": ARG = variable_to_delete (str)',
                        metavar="ARG", nargs='*')
    parser.add_argument('fin', help='input file', metavar='INFILE')
    parser.add_argument('fout', help='output file', metavar='OUTFILE')
    args = vars(parser.parse_args())
    # check input file
    if not os.access(args['fin'], os.R_OK):
        parser.error("Can't open {} for reading".format(args['fin']))
    # check output file: the *output* file's directory must be writable
    # (bug fix: this previously inspected the input file's directory)
    outpath = os.path.dirname(args['fout']) or '.'
    if not os.access(outpath, os.W_OK):
        parser.error("can't write output file {}".format(args['fout']))
    # check command
    if not hasattr(Commands, args['command']):
        parser.error("Command {} not implemented".format(args['command']))
    else:
        getattr(Commands, args['command'])([args, parser])
def which(name, flags=os.X_OK):
    """Search PATH for executable files with the given name.

    On newer versions of MS-Windows, the PATHEXT environment variable will be
    set to the list of file extensions for files considered executable. This
    will normally include things like ".EXE". This fuction will also find files
    with the given name ending with any of these extensions.

    On MS-Windows the only flag that has any meaning is os.F_OK. Any other
    flags will be ignored.

    @type name: C{str}
    @param name: The name for which to search.

    @type flags: C{int}
    @param flags: Arguments to L{os.access}.

    @rtype: C{list}
    @return: A list of the full paths to files found, in the
    order in which they were found.
    """
    result = []
    # Materialize the extension list: on Python 3 filter() returns a one-shot
    # iterator that would be exhausted after the first PATH entry.
    exts = [e for e in os.environ.get('PATHEXT', '').split(os.pathsep) if e]
    path = os.environ.get('PATH', None)
    if path is None:
        return []
    # Reuse the PATH value fetched above instead of reading the environment a
    # second time (the original re-read os.environ here).
    for p in path.split(os.pathsep):
        p = os.path.join(p, name)
        if os.access(p, flags):
            result.append(p)
        for e in exts:
            pext = p + e
            if os.access(pext, flags):
                result.append(pext)
    return result
def check(self):
    """Validate prerequisites and open the Ceph cluster connection.

    Verifies the rados module is importable, that config/keyring paths are
    configured, exist and are readable, then connects and builds the chart
    definitions.

    :return: True when the module is ready to collect, False otherwise.
    """
    if not CEPH:
        self.error('rados module is needed to use ceph.chart.py')
        return False
    if not (self.config_file and self.keyring_file):
        self.error('config_file and/or keyring_file is not defined')
        return False
    # Verify files and permissions
    if not (os.access(self.config_file, os.F_OK)):
        self.error('{0} does not exist'.format(self.config_file))
        return False
    if not (os.access(self.keyring_file, os.F_OK)):
        self.error('{0} does not exist'.format(self.keyring_file))
        return False
    if not (os.access(self.config_file, os.R_OK)):
        self.error('Ceph plugin does not read {0}, define read permission.'.format(self.config_file))
        return False
    if not (os.access(self.keyring_file, os.R_OK)):
        self.error('Ceph plugin does not read {0}, define read permission.'.format(self.keyring_file))
        return False
    try:
        self.cluster = rados.Rados(conffile=self.config_file,
                                   conf=dict(keyring=self.keyring_file))
        self.cluster.connect()
    except rados.Error as error:
        self.error(error)
        return False
    self.create_definitions()
    return True
def getExpandedPath(playerPath):
    """Resolve *playerPath* to an executable path.

    Returns the path unchanged when it is already executable; otherwise joins
    it onto each (realpath-resolved) entry of $PATH and returns the first
    executable candidate.  Returns None when nothing matches.
    """
    if os.access(playerPath, os.X_OK):
        return playerPath
    for entry in os.environ['PATH'].split(':'):
        candidate = os.path.join(os.path.realpath(entry), playerPath)
        if os.access(candidate, os.X_OK):
            return candidate
def _generate_ssh_keys(self):
    """Create the ssh key directory and an RSA keypair if not already present.

    Runs ``ssh-keygen`` with an empty passphrase when the private key file
    is not readable.
    """
    # NOTE(review): W_OK is used as an existence probe — if the directory
    # exists but is not writable, makedirs will raise. os.path.isdir would
    # be the conventional check; TODO confirm intent.
    if (not os.access(self.ssh_key_dir, os.W_OK)):
        os.makedirs(self.ssh_key_dir)
    # Create ssh keys on localhost.
    if (not os.access(self.ssh_private_key_file, os.R_OK)):
        # communicate() waits for ssh-keygen to finish before returning.
        subprocess.Popen('ssh-keygen -t rsa -f ' + self.ssh_private_key_file +
                         ' -N ""', shell=True).communicate()
def fetch (self):
    """Fetch the archive into the destination directory.

    Downloads (or copies, for local URLs) into a ``.part`` file first, then
    renames it to the final archive name.  Calls ``self.err`` on any
    filename, permission or empty-download problem.

    :returns: the fetched file's full path
    """
    if not self.url.filename():
        self.err(_('Filename error'))
    if not os.access(self.destdir, os.W_OK):
        self.err(_('Access denied to write to destination directory: "%s"') % (self.destdir))
    archive_file = os.path.join(self.destdir, self.url.filename())
    if os.path.exists(archive_file) and not os.access(archive_file, os.W_OK):
        self.err(_('Access denied to destination file: "%s"') % (archive_file))
    # download into a temp name so a partial fetch never shadows the target
    partial_file = archive_file + '.part'
    if self.url.is_local_file():
        self.fetchLocalFile(partial_file)
    else:
        self.fetchRemoteFile(partial_file)
    # a zero-byte result is treated as a failed fetch
    if os.stat(partial_file).st_size == 0:
        os.remove(partial_file)
        self.err(_('A problem occured. Please check the archive address and/or permissions again.'))
    move(partial_file, archive_file)
    return archive_file
def on_ok(widget):
    """Handler for the file-save dialog's OK button.

    Validates write permission on the chosen file (or its directory when the
    file does not exist yet), asks for overwrite confirmation on existing
    files, and otherwise proceeds via ``on_continue``.  ``dialog`` and
    ``on_continue`` come from the enclosing scope.
    """
    file_path = dialog.get_filename()
    file_path = decode_filechooser_file_paths((file_path,))[0]
    if os.path.exists(file_path):
        # check if we have write permissions
        if not os.access(file_path, os.W_OK):
            file_name = os.path.basename(file_path)
            dialogs.ErrorDialog(_('Cannot overwrite existing file "%s"') % \
                file_name, _('A file with this name already exists and you '
                'do not have permission to overwrite it.'))
            return
        # ask before clobbering an existing, writable file
        dialog2 = dialogs.FTOverwriteConfirmationDialog(
            _('This file already exists'), _('What do you want to do?'),
            propose_resume=False, on_response=(on_continue, file_path),
            transient_for=dialog)
        dialog2.set_destroy_with_parent(True)
    else:
        # new file: the containing directory must be writable
        dirname = os.path.dirname(file_path)
        if not os.access(dirname, os.W_OK):
            dialogs.ErrorDialog(_('Directory "%s" is not writable') % \
                dirname, _('You do not have permission to create files in '
                'this directory.'))
            return
        on_continue(0, file_path)
def check_outdir(options):
    '''
    Check if the output directory exists and if the current user has
    reading and writing access; offer to create a missing directory.
    Normalizes options.outdir to an absolute path ending in "/".
    (Python 2 code: print statements, raw_input.)
    '''
    if not os.path.isdir(options.outdir):
        print options.outdir+" is not an existing directory.\n"
        create = raw_input("Create directory? [y/n]: ")
        if create.lower() == "y":
            try:
                os.makedirs(options.outdir)
            except:
                raise IOError("\nCould not create the specified directory." \
                              " \nExiting...")
            print ""
        elif create.lower() == "n":
            print "\nExiting..."
            sys.exit()
        # NOTE(review): this condition is always True for any answer that is
        # neither "y" nor "n" (`!= "y" or != "n"` cannot both be False), so
        # it behaves as a plain `else` catch-all for invalid input.
        elif create != "y" or create != "n":
            print "Not a valid option. \nExiting..."
            sys.exit()
    if not os.access(options.outdir, os.R_OK) or \
       not os.access(options.outdir, os.W_OK):
        print "The actual user does not have READING and/or WRITE access to" \
              " %s directory. \nExiting..." % options.outdir
        sys.exit(1)
    options.outdir = os.path.abspath(options.outdir)
    if options.outdir[-1] != "/":
        options.outdir = options.outdir+"/"
    return options
def which (filename):
    """Locate *filename* on the executable search path.

    Checks the name itself first, then every PATH entry, also trying the
    ``.exe``, ``.sh`` and ``.bat`` suffixes.  Returns the first executable
    match or None.

    Source http://mail.python.org/pipermail/python-list/2002-August/157829.html
    """
    # An already-executable path wins outright.
    if os.access(filename, os.X_OK):
        return filename
    # dict.has_key() was removed in Python 3; `in` is the equivalent
    # spelling on both Python 2 and 3.
    if 'PATH' not in os.environ or os.environ['PATH'] == '':
        p = os.defpath
    else:
        p = os.environ['PATH']
    pathlist = p.split (os.pathsep)
    for path in pathlist:
        f = os.path.join(path, filename)
        if os.access(f, os.X_OK):
            return f
        # Try common executable/script suffixes in the original order.
        for ext in ('.exe', '.sh', '.bat'):
            fAlternative = f + ext
            if os.access(fAlternative, os.X_OK):
                return fAlternative
    return None
def __init__(self, cur_options, command, blast_mat_root=None, extra_env="",
             params=None, InputHandler=None, SuppressStderr=None,
             SuppressStdout=None, WorkingDir=None,
             HALT_EXEC=False):
    """ Initialize blast

    When *blast_mat_root* is given, the command is wrapped so that BLASTMAT
    (and any *extra_env*) is exported first — used for CGI calls.  Otherwise
    verifies that blast is installed and that BLASTMAT (or an .ncbirc file)
    is available before accepting the command as-is.
    """
    # update options
    self._parameters.update(cur_options)
    # check if need to set env variable (for cgi calls)
    if blast_mat_root:
        self._command = "export BLASTMAT=%s;%s%s" % (blast_mat_root,
                                                     extra_env, command)
    else:
        # Determine if blast is installed and raise an ApplicationError
        # if not -- this is done here so the user will get the most
        # informative error message available.
        self._error_on_missing_application(params)

        # Otherwise raise error about $BLASTMAT not being set
        if not ('BLASTMAT' in environ or \
                access(path.expanduser("~/.ncbirc"), F_OK) or \
                access(".ncbirc", F_OK)):
            ## SHOULD THIS BE CHANGED TO RAISE AN ApplicationError?
            raise RuntimeError(blastmat_error_message)
        self._command = command

    super(Blast, self).__init__(params=params,
        InputHandler=InputHandler, SuppressStderr=SuppressStderr,
        SuppressStdout=SuppressStdout, WorkingDir=WorkingDir,
        HALT_EXEC=HALT_EXEC)
def init_entry(self, key):
    """Ensure we can access a cache file. Create a lock for it if needed.

    Return whether the cache file exists yet or not.

    :raises CacheError: if an existing entry is not a regular file, or if
        the file (or, for a new entry, its parent directory) is not
        readable and writable.
    """
    cache_path = self.cache_path(key)
    exists = os.path.exists(cache_path)
    if exists:
        if not os.path.isfile(cache_path):
            raise CacheError("Cache file is not a file: %s" % cache_path)
        if not os.access(cache_path, os.R_OK | os.W_OK):
            raise CacheError("Cannot access cache file: %s" % cache_path)
    else:
        # if the file is hierarchical, make parent directories
        parent = os.path.dirname(cache_path)
        if parent.rstrip(os.path.sep) != self.root:
            mkdirp(parent)
        if not os.access(parent, os.R_OK | os.W_OK):
            raise CacheError("Cannot access cache directory: %s" % parent)
    # ensure lock is created for this key
    self._get_lock(key)
    return exists
def validate_key_repository(self, requires_write=False):
    """Validate permissions on the key repository directory.

    Requires read + search access (plus write access when *requires_write*
    is set); logs an error when access is insufficient and a warning when
    the repository is world-readable.  Returns True when access is valid.
    """
    # NOTE(lbragstad): We shouldn't need to check if the directory was
    # passed in as None because we don't set allow_no_values to True.

    # Keystone needs to read and search the repository itself.
    repo = self.key_repository
    is_valid = os.access(repo, os.R_OK) and os.access(repo, os.X_OK)
    if requires_write:
        is_valid = is_valid and os.access(repo, os.W_OK)

    if not is_valid:
        LOG.error(
            'Either [%(config_group)s] key_repository does not exist '
            'or Keystone does not have sufficient permission to '
            'access it: %(key_repo)s',
            {'key_repo': self.key_repository,
             'config_group': self.config_group})
    else:
        # Warn (but do not fail) when the repository is world-readable
        # or world-searchable.
        stat_info = os.stat(self.key_repository)
        world_access = stat.S_IROTH | stat.S_IXOTH
        if stat_info.st_mode & world_access:
            LOG.warning(
                'key_repository is world readable: %s',
                self.key_repository)

    return is_valid
def __init__(self, name, *args, **kwargs):
    """Initialize the VPCS module.

    Resolves the VPCS binary (config value, then CWD and $PATH fallback),
    warns when it is missing or not executable, and loads console/UDP port
    ranges and host settings from the module's config section.
    """
    # get the VPCS location
    config = Config.instance()
    vpcs_config = config.get_section_config(name.upper())
    self._vpcs = vpcs_config.get("vpcs_path")
    if not self._vpcs or not os.path.isfile(self._vpcs):
        paths = [os.getcwd()] + os.environ["PATH"].split(os.pathsep)
        # look for VPCS in the current working directory and $PATH
        for path in paths:
            try:
                if "vpcs" in os.listdir(path) and os.access(os.path.join(path, "vpcs"), os.X_OK):
                    self._vpcs = os.path.join(path, "vpcs")
                    break
            except OSError:
                # unreadable/nonexistent PATH entries are simply skipped
                continue
    if not self._vpcs:
        log.warning("VPCS binary couldn't be found!")
    elif not os.access(self._vpcs, os.X_OK):
        log.warning("VPCS is not executable")
    # a new process start when calling IModule
    IModule.__init__(self, name, *args, **kwargs)
    self._vpcs_instances = {}
    self._console_start_port_range = vpcs_config.get("console_start_port_range", 4501)
    self._console_end_port_range = vpcs_config.get("console_end_port_range", 5000)
    self._allocated_udp_ports = []
    self._udp_start_port_range = vpcs_config.get("udp_start_port_range", 20501)
    self._udp_end_port_range = vpcs_config.get("udp_end_port_range", 21000)
    self._host = vpcs_config.get("host", kwargs["host"])
    self._console_host = vpcs_config.get("console_host", kwargs["console_host"])
    self._projects_dir = kwargs["projects_dir"]
    self._tempdir = kwargs["temp_dir"]
    self._working_dir = self._projects_dir
def run_quick_save(self, current_name=None):
    """
    Display a dialog which asks the user where a file should be saved. The
    value of target_path in the returned dictionary is an absolute path.

    :param set current_name: The name of the file to save.
    :return: A dictionary with target_uri and target_path keys representing the path choosen.
    :rtype: dict
    """
    self.set_action(Gtk.FileChooserAction.SAVE)
    self.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
    self.add_button(Gtk.STOCK_SAVE, Gtk.ResponseType.ACCEPT)
    self.set_do_overwrite_confirmation(True)
    if current_name:
        self.set_current_name(current_name)
    self.show_all()
    response = self.run()
    # user cancelled — nothing to return
    if response == Gtk.ResponseType.CANCEL:
        return None
    target_path = self.get_filename()
    # verify writability: the file itself when overwriting, otherwise its
    # containing directory when creating a new file
    if os.path.isfile(target_path):
        if not os.access(target_path, os.W_OK):
            gui_utilities.show_dialog_error("Can not write to the selected file", self.parent)
            return None
    elif not os.access(os.path.dirname(target_path), os.W_OK):
        gui_utilities.show_dialog_error("Can not create the selected file", self.parent)
        return None
    target_uri = self.get_uri()
    return {"target_uri": target_uri, "target_path": target_path}
def determine_flags(target, target_os, target_cpu, target_cc, cc_bin,
                    ccache, root_dir, pkcs11_lib):
    # pylint: disable=too-many-branches,too-many-statements,too-many-arguments,too-many-locals

    """
    Return the configure.py flags as well as make/test running prefixes
    for the given CI build target.  Returns (None, None, None) on an
    unknown OS; a None test command means "build only, do not run tests".
    """
    is_cross_target = target.startswith('cross-')

    if target_os not in ['linux', 'osx', 'windows']:
        print('Error unknown OS %s' % (target_os))
        return (None, None, None)

    if is_cross_target:
        # cross builds retarget the OS: osx hosts build for iOS,
        # cross-win32 builds for MinGW
        if target_os == 'osx':
            target_os = 'ios'
        elif target == 'cross-win32':
            target_os = 'mingw'

    make_prefix = []
    test_prefix = []
    test_cmd = [os.path.join(root_dir, 'botan-test')]

    # subset of the test suite used where running everything is too slow
    # (valgrind, emulated cross targets)
    fast_tests = ['block', 'aead', 'hash', 'stream', 'mac', 'modes',
                  'hmac_drbg', 'hmac_drbg_unit',
                  'tls', 'ffi',
                  'rsa_sign', 'rsa_verify', 'dh_kat', 'ecdsa_sign',
                  'curve25519_scalar',
                  'simd_32', 'os_utils', 'util', 'util_dates']

    install_prefix = os.path.join(tempfile.gettempdir(), 'botan-install')
    flags = ['--prefix=%s' % (install_prefix),
             '--cc=%s' % (target_cc),
             '--os=%s' % (target_os)]

    if target_cc == 'msvc':
        flags += ['--ack-vc2013-deprecated']

    if target_cpu != None:
        flags += ['--cpu=%s' % (target_cpu)]

    if target in ['shared', 'mini-shared']:
        flags += ['--disable-static']

    if target in ['static', 'mini-static', 'fuzzers'] or target_os in ['ios', 'mingw']:
        flags += ['--disable-shared']

    if target in ['mini-static', 'mini-shared']:
        flags += ['--minimized-build', '--enable-modules=system_rng,sha2_32,sha2_64,aes']

    if target == 'shared' and target_os != 'osx':
        # Enabling amalgamation build for shared is somewhat arbitrary, but we want to test it
        # somewhere. In addition the majority of the Windows builds are shared, and MSVC is
        # much faster compiling via the amalgamation than individual files.
        flags += ['--amalgamation']

    if target in ['bsi', 'nist']:
        # Arbitrarily test disable static on module policy builds
        # tls is optional for bsi/nist but add it so verify tests work with these minimized configs
        flags += ['--module-policy=%s' % (target), '--enable-modules=tls', '--disable-static']

    if target == 'docs':
        flags += ['--with-doxygen', '--with-sphinx', '--with-rst2man']
        test_cmd = None

    if target == 'coverage':
        flags += ['--with-coverage-info', '--test-mode']
    if target == 'valgrind':
        flags += ['--with-valgrind']
        test_prefix = ['valgrind', '--error-exitcode=9', '-v', '--leak-check=full', '--show-reachable=yes']
        test_cmd += fast_tests
    if target == 'fuzzers':
        flags += ['--unsafe-fuzzer-mode']

    if target in ['fuzzers', 'coverage', 'valgrind']:
        flags += ['--with-debug-info']
    if target in ['fuzzers', 'coverage']:
        flags += ['--build-fuzzers=test']
    if target in ['fuzzers', 'sanitizer']:
        # On VC iterator debugging comes from generic debug mode
        if target_cc == 'msvc':
            flags += ['--with-debug-info']
        else:
            flags += ['--with-sanitizers']
    if target in ['valgrind', 'sanitizer', 'fuzzers']:
        flags += ['--disable-modules=locking_allocator']

    if target == 'parallel':
        flags += ['--with-openmp']

    if target == 'sonar':
        if target_os != 'linux' or target_cc != 'clang':
            raise Exception('Only Linux/clang supported in Sonar target currently')

        flags += ['--cc-abi-flags=-fprofile-instr-generate -fcoverage-mapping',
                  '--disable-shared']
        make_prefix = [os.path.join(root_dir, 'build-wrapper-linux-x86/build-wrapper-linux-x86-64'),
                       '--out-dir', 'bw-outputs']

    if is_cross_target:
        if target_os == 'ios':
            make_prefix = ['xcrun', '--sdk', 'iphoneos']
            test_cmd = None
            if target == 'cross-arm32':
                flags += ['--cpu=armv7', '--cc-abi-flags=-arch armv7 -arch armv7s -stdlib=libc++']
            elif target == 'cross-arm64':
                flags += ['--cpu=arm64', '--cc-abi-flags=-arch arm64 -stdlib=libc++']
            else:
                raise Exception("Unknown cross target '%s' for iOS" % (target))
        elif target == 'cross-win32':
            cc_bin = 'i686-w64-mingw32-g++'
            flags += ['--cpu=x86_32', '--cc-abi-flags=-static', '--ar-command=i686-w64-mingw32-ar']
            test_cmd = [os.path.join(root_dir, 'botan-test.exe')]
            # No runtime prefix required for Wine
        else:
            # Build everything but restrict what is run
            test_cmd += fast_tests

            if target == 'cross-arm32':
                flags += ['--cpu=armv7']
                cc_bin = 'arm-linux-gnueabihf-g++'
                test_prefix = ['qemu-arm', '-L', '/usr/arm-linux-gnueabihf/']
            elif target == 'cross-arm64':
                flags += ['--cpu=aarch64']
                cc_bin = 'aarch64-linux-gnu-g++'
                test_prefix = ['qemu-aarch64', '-L', '/usr/aarch64-linux-gnu/']
            elif target == 'cross-ppc32':
                flags += ['--cpu=ppc32']
                cc_bin = 'powerpc-linux-gnu-g++'
                test_prefix = ['qemu-ppc', '-L', '/usr/powerpc-linux-gnu/']
            elif target == 'cross-ppc64':
                flags += ['--cpu=ppc64', '--with-endian=little']
                cc_bin = 'powerpc64le-linux-gnu-g++'
                test_prefix = ['qemu-ppc64le', '-L', '/usr/powerpc64le-linux-gnu/']
            else:
                raise Exception("Unknown cross target '%s' for Linux" % (target))
    else:
        # Flags specific to native targets
        if target_os in ['osx', 'linux']:
            flags += ['--with-bzip2', '--with-sqlite', '--with-zlib']

        if target_os == 'osx':
            # Test Boost on OS X
            flags += ['--with-boost']
            # Travis has 10.12 as default image
            flags += ['--with-os-features=getentropy']
        elif target_os == 'linux':
            flags += ['--with-lzma']

        if target_os == 'linux':
            if target not in ['sanitizer', 'valgrind', 'mini-shared', 'mini-static']:
                # Avoid OpenSSL when using dynamic checkers, or on OS X where it sporadically
                # is not installed on the CI image
                flags += ['--with-openssl']

        if target in ['sonar', 'coverage']:
            flags += ['--with-tpm']
            test_cmd += ['--run-long-tests', '--run-online-tests']
            # only pass the PKCS#11 library when it is actually readable
            if pkcs11_lib and os.access(pkcs11_lib, os.R_OK):
                test_cmd += ['--pkcs11-lib=%s' % (pkcs11_lib)]

    if ccache is None:
        flags += ['--cc-bin=%s' % (cc_bin)]
    elif ccache == 'clcache':
        # clcache wraps the compiler itself; no separate cc_bin needed
        flags += ['--cc-bin=%s' % (ccache)]
    else:
        flags += ['--cc-bin=%s %s' % (ccache, cc_bin)]

    if test_cmd is None:
        run_test_command = None
    else:
        run_test_command = test_prefix + test_cmd

    return flags, run_test_command, make_prefix
def is_executable_file(self, file_path):
    """Return True when *file_path* names a regular file the current user may execute."""
    # Reject directories, missing paths, devices, etc. before probing permissions.
    if not os.path.isfile(file_path):
        return False
    return os.access(file_path, os.X_OK)
def is_exe(fpath):
    """Tell whether *fpath* names an executable regular file."""
    file_ok = os.path.isfile(fpath)
    # Short-circuit: only consult the execute bit for actual regular files.
    return file_ok and os.access(fpath, os.X_OK)
# Enigma2 A/V settings initialisation (Python-2-style code: note the bare
# "print" statements further down).  Builds every config.av.* ConfigElement:
# resolution auto-selection (autores_*), aspect ratio and display policies,
# colour format, TV system, A/V delays, HDMI colorspace/colorimetry, audio
# source, 3D surround, auto-volume, AC3/AAC downmix and AAC transcoding, and
# scaler sharpness.  Feature support is discovered by probing files under
# /proc/stb/*; the findings are recorded in SystemInfo[...] and notifier
# callbacks are attached that write chosen values back to the matching
# /proc/stb interface.  Finally switches the encoder input and applies the
# configured video mode.
# NOTE(review): relies on module-level names defined elsewhere in this file's
# imports (config, SystemInfo, iAVSwitch, eAVSwitch, getBoxType, about,
# ConfigSelection, ConfigYesNo, NoSave, ngettext, _, ...).
# NOTE(review): several /proc reads use open() without try/finally; an IOError
# mid-read would leak the handle — presumably tolerated on this platform.
def InitAVSwitch(): if getBoxType() == 'vuduo' or getBoxType().startswith('ixuss'): config.av.yuvenabled = ConfigBoolean(default=False) else: config.av.yuvenabled = ConfigBoolean(default=True) config.av.osd_alpha = ConfigSlider(default=255, increment = 5, limits=(20,255)) # Make openATV compatible with some plugins who still use config.av.osd_alpha colorformat_choices = {"cvbs": _("CVBS"), "rgb": _("RGB"), "svideo": _("S-Video")} # when YUV is not enabled, don't let the user select it if config.av.yuvenabled.value: colorformat_choices["yuv"] = _("YPbPr") config.av.autores = ConfigSelection(choices={"disabled": _("Disabled"), "simple": _("Simple"), "native": _("Native"), "all": _("All resolutions"), "hd": _("only HD")}, default="disabled") config.av.autores_preview = NoSave(ConfigYesNo(default=False)) config.av.autores_1080i_deinterlace = ConfigYesNo(default=False) config.av.autores_24p = ConfigSelection(choices= { "24,24": _("24p/24p"), "24,50": _("24p/50p"), "24,60": _("24p/60p"), "50,24": _("50p/24p"), "60,24": _("60p/24p"), "50,50": _("50p/50p"), "50,60": _("50p/60p"), "60,50": _("60p/50p"), "60,60": _("60p/60p") }, default="50,24") # first value <=720p , second value > 720p config.av.autores_unknownres = ConfigSelection(choices={"next": _("next higher Resolution"), "highest": _("highest Resolution")}, default="next") choicelist = [] for i in range(5, 16): choicelist.append(("%d" % i, ngettext("%d second", "%d seconds", i) % i)) config.av.autores_label_timeout = ConfigSelection(default = "5", choices = [("0", _("Not Shown"))] + choicelist) config.av.autores_delay = ConfigSelectionNumber(min = 0, max = 3000, stepwidth = 50, default = 400, wraparound = True) config.av.autores_deinterlace = ConfigYesNo(default=False) config.av.autores_sd = ConfigSelection(choices={"720p50": _("720p50"), "720p": _("720p"), "1080i50": _("1080i50"), "1080i": _("1080i")}, default="720p50") config.av.autores_480p24 = ConfigSelection(choices={"480p24": _("480p 24Hz"), "720p24": _("720p 
24Hz"), "1080p24": _("1080p 24Hz")}, default="1080p24") config.av.autores_720p24 = ConfigSelection(choices={"720p24": _("720p 24Hz"), "1080p24": _("1080p 24Hz"), "1080i50": _("1080i 50Hz"), "1080i": _("1080i 60Hz")}, default="720p24") config.av.autores_1080p24 = ConfigSelection(choices={"1080p24": _("1080p 24Hz"), "1080p25": _("1080p 25Hz"), "1080i50": _("1080p 50Hz"), "1080i": _("1080i 60Hz")}, default="1080p24") config.av.autores_1080p25 = ConfigSelection(choices={"1080p25": _("1080p 25Hz"), "1080p50": _("1080p 50Hz"), "1080i50": _("1080i 50Hz")}, default="1080p25") config.av.autores_1080p30 = ConfigSelection(choices={"1080p30": _("1080p 30Hz"), "1080p60": _("1080p 60Hz"), "1080i": _("1080i 60Hz")}, default="1080p30") config.av.smart1080p = ConfigSelection(choices={"false": _("off"), "true": _("1080p50: 24p/50p/60p"), "2160p50": _("2160p50: 24p/50p/60p"), "1080i50": _("1080i50: 24p/50i/60i"), "720p50": _("720p50: 24p/50p/60p")}, default="false") config.av.colorformat = ConfigSelection(choices=colorformat_choices, default="rgb") config.av.aspectratio = ConfigSelection(choices={ "4_3_letterbox": _("4:3 Letterbox"), "4_3_panscan": _("4:3 PanScan"), "16_9": _("16:9"), "16_9_always": _("16:9 always"), "16_10_letterbox": _("16:10 Letterbox"), "16_10_panscan": _("16:10 PanScan"), "16_9_letterbox": _("16:9 Letterbox")}, default = "16_9") config.av.aspect = ConfigSelection(choices={ "4:3": _("4:3"), "16:9": _("16:9"), "16:10": _("16:10"), "auto": _("Automatic")}, default = "16:9") policy2_choices = { # TRANSLATORS: (aspect ratio policy: black bars on top/bottom) in doubt, keep english term. 
"letterbox": _("Letterbox"), # TRANSLATORS: (aspect ratio policy: cropped content on left/right) in doubt, keep english term "panscan": _("Pan&scan"), # TRANSLATORS: (aspect ratio policy: display as fullscreen, even if this breaks the aspect) "scale": _("Just scale")} if os.path.exists("/proc/stb/video/policy2_choices"): f = open("/proc/stb/video/policy2_choices") if "auto" in f.readline(): # TRANSLATORS: (aspect ratio policy: always try to display as fullscreen, when there is no content (black bars) on left/right, even if this breaks the aspect. policy2_choices.update({"auto": _("Auto")}) f.close() config.av.policy_169 = ConfigSelection(choices=policy2_choices, default = "letterbox") policy_choices = { # TRANSLATORS: (aspect ratio policy: black bars on left/right) in doubt, keep english term. "panscan": _("Pillarbox"), # TRANSLATORS: (aspect ratio policy: cropped content on left/right) in doubt, keep english term "letterbox": _("Pan&scan"), # TRANSLATORS: (aspect ratio policy: display as fullscreen, with stretching the left/right) # "nonlinear": _("Nonlinear"), # TRANSLATORS: (aspect ratio policy: display as fullscreen, even if this breaks the aspect) "bestfit": _("Just scale")} if os.path.exists("/proc/stb/video/policy_choices"): f = open("/proc/stb/video/policy_choices") if "auto" in f.readline(): # TRANSLATORS: (aspect ratio policy: always try to display as fullscreen, when there is no content (black bars) on left/right, even if this breaks the aspect. 
policy_choices.update({"auto": _("Auto")}) f.close() config.av.policy_43 = ConfigSelection(choices=policy_choices, default = "panscan") config.av.tvsystem = ConfigSelection(choices = {"pal": _("PAL"), "ntsc": _("NTSC"), "multinorm": _("multinorm")}, default="pal") config.av.wss = ConfigEnableDisable(default = True) config.av.generalAC3delay = ConfigSelectionNumber(-1000, 1000, 5, default = 0) config.av.generalPCMdelay = ConfigSelectionNumber(-1000, 1000, 5, default = 0) config.av.vcrswitch = ConfigEnableDisable(default = False) #config.av.aspect.setValue('16:9') config.av.aspect.addNotifier(iAVSwitch.setAspect) config.av.wss.addNotifier(iAVSwitch.setWss) config.av.policy_43.addNotifier(iAVSwitch.setPolicy43) config.av.policy_169.addNotifier(iAVSwitch.setPolicy169) def setColorFormat(configElement): if config.av.videoport and config.av.videoport.value in ("YPbPr", "Scart-YPbPr"): iAVSwitch.setColorFormat(3) elif config.av.videoport and config.av.videoport.value in ("RCA"): iAVSwitch.setColorFormat(0) else: if getBoxType() == 'et6x00': map = {"cvbs": 3, "rgb": 3, "svideo": 2, "yuv": 3} elif getBoxType() == 'gbquad' or getBoxType() == 'gbquadplus' or getBoxType().startswith('et'): map = {"cvbs": 0, "rgb": 3, "svideo": 2, "yuv": 3} else: map = {"cvbs": 0, "rgb": 1, "svideo": 2, "yuv": 3} iAVSwitch.setColorFormat(map[configElement.value]) config.av.colorformat.addNotifier(setColorFormat) def setAspectRatio(configElement): map = {"4_3_letterbox": 0, "4_3_panscan": 1, "16_9": 2, "16_9_always": 3, "16_10_letterbox": 4, "16_10_panscan": 5, "16_9_letterbox" : 6} iAVSwitch.setAspectRatio(map[configElement.value]) iAVSwitch.setInput("ENCODER") # init on startup if (getBoxType() in ('gbquad', 'gbquadplus', 'et5x00', 'ixussone', 'ixusszero', 'axodin', 'axodinc', 'starsatlx', 'galaxym6', 'geniuse3hd', 'evoe3hd', 'axase3', 'axase3c', 'omtimussos1', 'omtimussos2', 'gb800seplus', 'gb800ueplus', 'gbultrase', 'gbultraue' , 'twinboxlcd' )) or about.getModelString() == 'et6000': 
detected = False else: detected = eAVSwitch.getInstance().haveScartSwitch() SystemInfo["ScartSwitch"] = detected if os.path.exists("/proc/stb/hdmi/bypass_edid_checking"): f = open("/proc/stb/hdmi/bypass_edid_checking", "r") can_edidchecking = f.read().strip().split(" ") f.close() else: can_edidchecking = False SystemInfo["Canedidchecking"] = can_edidchecking if can_edidchecking: def setEDIDBypass(configElement): try: f = open("/proc/stb/hdmi/bypass_edid_checking", "w") f.write(configElement.value) f.close() except: pass config.av.bypass_edid_checking = ConfigSelection(choices={ "00000000": _("off"), "00000001": _("on")}, default = "00000001") config.av.bypass_edid_checking.addNotifier(setEDIDBypass) else: config.av.bypass_edid_checking = ConfigNothing() if os.path.exists("/proc/stb/video/hdmi_colorspace"): f = open("/proc/stb/video/hdmi_colorspace", "r") have_colorspace = f.read().strip().split(" ") f.close() else: have_colorspace = False SystemInfo["havecolorspace"] = have_colorspace if have_colorspace: def setHDMIColorspace(configElement): try: f = open("/proc/stb/video/hdmi_colorspace", "w") f.write(configElement.value) f.close() except: pass if getBoxType() in ('vusolo4k'): config.av.hdmicolorspace = ConfigSelection(choices={ "Edid(Auto)": _("Auto"), "Hdmi_Rgb": _("RGB")}, default = "Edid(Auto)") else: config.av.hdmicolorspace = ConfigSelection(choices={ "auto": _("auto"), "rgb": _("rgb"), "420": _("420"), "422": _("422"), "444": _("444")}, default = "auto") config.av.hdmicolorspace.addNotifier(setHDMIColorspace) else: config.av.hdmicolorspace = ConfigNothing() if os.path.exists("/proc/stb/video/hdmi_colorimetry"): f = open("/proc/stb/video/hdmi_colorimetry", "r") have_colorimetry = f.read().strip().split(" ") f.close() else: have_colorimetry = False SystemInfo["havecolorimetry"] = have_colorimetry if have_colorimetry: def setHDMIColorimetry(configElement): sleep(0.1) #workaround for 
"http://www.opena.tv/image-betabereich/28098-menue-leer-nach-bild-grundeinstellungen-speichern-nein.html#post249179" try: f = open("/proc/stb/video/hdmi_colorimetry", "w") f.write(configElement.value) f.close() except: pass config.av.hdmicolorimetry = ConfigSelection(choices={ "auto": _("auto"), "bt2020ncl": _("BT 2020 NCL"), "bt2020cl": _("BT 2020 CL"), "bt709": _("BT 709")}, default = "auto") config.av.hdmicolorimetry.addNotifier(setHDMIColorimetry) else: config.av.hdmicolorimetry = ConfigNothing() if os.path.exists("/proc/stb/hdmi/audio_source"): f = open("/proc/stb/hdmi/audio_source", "r") can_audiosource = f.read().strip().split(" ") f.close() else: can_audiosource = False SystemInfo["Canaudiosource"] = can_audiosource if can_audiosource: def setAudioSource(configElement): try: f = open("/proc/stb/hdmi/audio_source", "w") f.write(configElement.value) f.close() except: pass config.av.audio_source = ConfigSelection(choices={ "pcm": _("PCM"), "spdif": _("SPDIF")}, default="pcm") config.av.audio_source.addNotifier(setAudioSource) else: config.av.audio_source = ConfigNothing() if os.path.exists("/proc/stb/audio/3d_surround_choices"): f = open("/proc/stb/audio/3d_surround_choices", "r") can_3dsurround = f.read().strip().split(" ") f.close() else: can_3dsurround = False SystemInfo["Can3DSurround"] = can_3dsurround if can_3dsurround: def set3DSurround(configElement): f = open("/proc/stb/audio/3d_surround", "w") f.write(configElement.value) f.close() choice_list = [("none", _("off")), ("hdmi", _("HDMI")), ("spdif", _("SPDIF")), ("dac", _("DAC"))] config.av.surround_3d = ConfigSelection(choices = choice_list, default = "none") config.av.surround_3d.addNotifier(set3DSurround) else: config.av.surround_3d = ConfigNothing() if os.path.exists("/proc/stb/audio/3d_surround_speaker_position_choices"): f = open("/proc/stb/audio/3d_surround_speaker_position_choices", "r") can_3dsurround_speaker = f.read().strip().split(" ") f.close() else: can_3dsurround_speaker = False 
SystemInfo["Can3DSpeaker"] = can_3dsurround_speaker if can_3dsurround_speaker: def set3DSurroundSpeaker(configElement): f = open("/proc/stb/audio/3d_surround_speaker_position", "w") f.write(configElement.value) f.close() choice_list = [("center", _("center")), ("wide", _("wide")), ("extrawide", _("extra wide"))] config.av.surround_3d_speaker = ConfigSelection(choices = choice_list, default = "center") config.av.surround_3d_speaker.addNotifier(set3DSurroundSpeaker) else: config.av.surround_3d_speaker = ConfigNothing() if os.path.exists("/proc/stb/audio/avl_choices"): f = open("/proc/stb/audio/avl_choices", "r") can_autovolume = f.read().strip().split(" ") f.close() else: can_autovolume = False SystemInfo["CanAutoVolume"] = can_autovolume if can_autovolume: def setAutoVolume(configElement): f = open("/proc/stb/audio/avl", "w") f.write(configElement.value) f.close() choice_list = [("none", _("off")), ("hdmi", _("HDMI")), ("spdif", _("SPDIF")), ("dac", _("DAC"))] config.av.autovolume = ConfigSelection(choices = choice_list, default = "none") config.av.autovolume.addNotifier(setAutoVolume) else: config.av.autovolume = ConfigNothing() try: can_pcm_multichannel = os.access("/proc/stb/audio/multichannel_pcm", os.W_OK) except: can_pcm_multichannel = False SystemInfo["supportPcmMultichannel"] = can_pcm_multichannel if can_pcm_multichannel: def setPCMMultichannel(configElement): open("/proc/stb/audio/multichannel_pcm", "w").write(configElement.value and "enable" or "disable") config.av.pcm_multichannel = ConfigYesNo(default = False) config.av.pcm_multichannel.addNotifier(setPCMMultichannel) try: f = open("/proc/stb/audio/ac3_choices", "r") file = f.read()[:-1] f.close() can_downmix_ac3 = "downmix" in file except: can_downmix_ac3 = False SystemInfo["CanPcmMultichannel"] = False SystemInfo["CanDownmixAC3"] = can_downmix_ac3 if can_downmix_ac3: def setAC3Downmix(configElement): f = open("/proc/stb/audio/ac3", "w") f.write(configElement.value and "downmix" or "passthrough") 
f.close() if SystemInfo.get("supportPcmMultichannel", False) and not configElement.value: SystemInfo["CanPcmMultichannel"] = True else: SystemInfo["CanPcmMultichannel"] = False if can_pcm_multichannel: config.av.pcm_multichannel.setValue(False) config.av.downmix_ac3 = ConfigYesNo(default = True) config.av.downmix_ac3.addNotifier(setAC3Downmix) try: f = open("/proc/stb/audio/aac_choices", "r") file = f.read()[:-1] f.close() can_downmix_aac = "downmix" in file except: can_downmix_aac = False SystemInfo["CanDownmixAAC"] = can_downmix_aac if can_downmix_aac: def setAACDownmix(configElement): f = open("/proc/stb/audio/aac", "w") f.write(configElement.value and "downmix" or "passthrough") f.close() config.av.downmix_aac = ConfigYesNo(default = True) config.av.downmix_aac.addNotifier(setAACDownmix) if os.path.exists("/proc/stb/audio/aac_transcode_choices"): f = open("/proc/stb/audio/aac_transcode_choices", "r") can_aactranscode = f.read().strip().split(" ") f.close() else: can_aactranscode = False SystemInfo["CanAACTranscode"] = can_aactranscode if can_aactranscode: def setAACTranscode(configElement): f = open("/proc/stb/audio/aac_transcode", "w") f.write(configElement.value) f.close() choice_list = [("off", _("off")), ("ac3", _("AC3")), ("dts", _("DTS"))] config.av.transcodeaac = ConfigSelection(choices = choice_list, default = "off") config.av.transcodeaac.addNotifier(setAACTranscode) else: config.av.transcodeaac = ConfigNothing() if os.path.exists("/proc/stb/vmpeg/0/pep_scaler_sharpness"): def setScaler_sharpness(config): myval = int(config.value) try: print "[VideoMode] setting scaler_sharpness to: %0.8X" % myval f = open("/proc/stb/vmpeg/0/pep_scaler_sharpness", "w") f.write("%0.8X" % myval) f.close() f = open("/proc/stb/vmpeg/0/pep_apply", "w") f.write("1") f.close() except IOError: print "couldn't write pep_scaler_sharpness" if getBoxType() in ('gbquad', 'gbquadplus'): config.av.scaler_sharpness = ConfigSlider(default=5, limits=(0,26)) else: 
config.av.scaler_sharpness = ConfigSlider(default=13, limits=(0,26)) config.av.scaler_sharpness.addNotifier(setScaler_sharpness) else: config.av.scaler_sharpness = NoSave(ConfigNothing()) config.av.edid_override = ConfigYesNo(default = False) iAVSwitch.setConfiguredMode()
# Fragment of an SDK relocation script: for each executable named on the
# command line, remember its permissions, chmod u+rwx when it is not already
# readable and writable, then open it "r+b" for in-place binary patching of
# the install prefix / dynamic-loader path.  On Python 3 the prefix and
# loader-path arguments are encoded to bytes so they can be spliced into the
# binaries' contents; on Python 2 they stay str.  ETXTBSY on open means the
# binary is currently executing, which gets its own user-facing hint.
# NOTE(review): the leading sys.exit(-1) terminates an error branch that
# begins before this excerpt.
sys.exit(-1) # In python > 3, strings may also contain Unicode characters. So, convert # them to bytes if sys.version_info < (3, ): new_prefix = sys.argv[1] new_dl_path = sys.argv[2] else: new_prefix = sys.argv[1].encode() new_dl_path = sys.argv[2].encode() executables_list = sys.argv[3:] for e in executables_list: perms = os.stat(e)[stat.ST_MODE] if os.access(e, os.W_OK | os.R_OK): perms = None else: os.chmod(e, perms | stat.S_IRWXU) try: f = open(e, "r+b") except IOError: exctype, ioex = sys.exc_info()[:2] if ioex.errno == errno.ETXTBSY: print("Could not open %s. File used by another process.\nPlease "\ "make sure you exit all processes that might use any SDK "\ "binaries." % e) else: print("Could not open %s: %s(%d)" % (e, ioex.strerror, ioex.errno)) sys.exit(-1)
# Model Optimizer argument post-processing.  Deduces the source framework
# (TF / Caffe / MXNet / Kaldi / ONNX, or a MOC front end) from the namespace,
# validates that the framework-specific input-model options are present,
# derives the output model name, verifies the Inference Engine / nGraph Python
# API is importable, normalises data_type (FP16 -> FP32 + compress_fp16),
# transform, mean/scale and shape options, creates/validates the output
# directory, and finally loads the framework-specific extension front ends.
# Raises mo's Error for any missing or inconsistent option combination;
# returns the (mutated) argv namespace.
# NOTE(review): depends on many project helpers (get_moc_frontends,
# deduce_framework_by_namespace, check_requirements, import_extensions, ...)
# imported elsewhere in this module.
def arguments_post_parsing(argv: argparse.Namespace): moc_front_end, available_moc_front_ends = get_moc_frontends(argv) is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx =\ deduce_framework_by_namespace(argv) if not moc_front_end else [False, False, False, False, False] if not any([is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx]): frameworks = ['tf', 'caffe', 'mxnet', 'kaldi', 'onnx'] frameworks = list(set(frameworks + available_moc_front_ends)) if argv.framework not in frameworks: if argv.use_legacy_frontend: raise Error( 'Framework {} is not a valid target when using the --use_legacy_frontend flag. ' 'The following legacy frameworks are available: {}' + refer_to_faq_msg(15), argv.framework, frameworks) else: raise Error( 'Framework {} is not a valid target. Please use --framework with one from the list: {}. ' + refer_to_faq_msg(15), argv.framework, frameworks) if is_tf and not argv.input_model and not argv.saved_model_dir and not argv.input_meta_graph: raise Error( 'Path to input model or saved model dir is required: use --input_model, --saved_model_dir or ' '--input_meta_graph') elif is_mxnet and not argv.input_model and not argv.input_symbol and not argv.pretrained_model_name: raise Error( 'Path to input model or input symbol or pretrained_model_name is required: use --input_model or ' '--input_symbol or --pretrained_model_name') elif is_caffe and not argv.input_model and not argv.input_proto: raise Error( 'Path to input model or input proto is required: use --input_model or --input_proto' ) elif (is_kaldi or is_onnx) and not argv.input_model: raise Error('Path to input model is required: use --input_model.') log.debug(str(argv)) log.debug("Model Optimizer started") model_name = "<UNKNOWN_NAME>" if argv.model_name: model_name = argv.model_name elif argv.input_model: model_name = get_model_name(argv.input_model) elif is_tf and argv.saved_model_dir: model_name = "saved_model" elif is_tf and argv.input_meta_graph: model_name = get_model_name(argv.input_meta_graph) elif 
is_mxnet and argv.input_symbol: model_name = get_model_name(argv.input_symbol) argv.model_name = model_name log.debug('Output model name would be {}{{.xml, .bin}}'.format( argv.model_name)) # if --input_proto is not provided, try to retrieve another one # by suffix substitution from model file name if is_caffe and not argv.input_proto: argv.input_proto = replace_ext(argv.input_model, '.caffemodel', '.prototxt') if not argv.input_proto: raise Error( "Cannot find prototxt file: for Caffe please specify --input_proto - a " + "protobuf file that stores topology and --input_model that stores " + "pretrained weights. " + refer_to_faq_msg(20)) log.info('Deduced name for prototxt: {}'.format(argv.input_proto)) if not argv.silent: print_argv(argv, is_caffe, is_tf, is_mxnet, is_kaldi, is_onnx, argv.model_name) # This try-except is additional reinsurance that the IE # dependency search does not break the MO pipeline def raise_ie_not_found(): raise Error( "Could not find the Inference Engine or nGraph Python API.\n" "Consider building the Inference Engine and nGraph Python APIs from sources or try to install OpenVINO (TM) Toolkit using \"install_prerequisites.{}\"" .format("bat" if sys.platform == "windows" else "sh")) try: if not find_ie_version(silent=argv.silent): raise_ie_not_found() except Exception as e: raise_ie_not_found() if 'data_type' in argv and argv.data_type in ['FP16', 'half']: argv.data_type = 'FP32' argv.compress_fp16 = True else: argv.compress_fp16 = False # This is just to check that transform key is valid and transformations are available check_available_transforms(parse_transform(argv.transform)) if argv.legacy_ir_generation and len(argv.transform) != 0: raise Error( "--legacy_ir_generation and --transform keys can not be used at the same time." 
) # For C++ frontends there are no specific Python installation requirements, check only generic ones if moc_front_end: ret_code = check_requirements() else: ret_code = check_requirements(framework=argv.framework) if ret_code: raise Error( 'check_requirements exited with return code {}'.format(ret_code)) if is_tf and argv.tensorflow_use_custom_operations_config is not None: argv.transformations_config = argv.tensorflow_use_custom_operations_config if is_caffe and argv.mean_file and argv.mean_values: raise Error( 'Both --mean_file and mean_values are specified. Specify either mean file or mean values. ' + refer_to_faq_msg(17)) elif is_caffe and argv.mean_file and argv.mean_file_offsets: values = get_tuple_values(argv.mean_file_offsets, t=int, num_exp_values=2) mean_file_offsets = np.array([int(x) for x in values[0].split(',')]) if not all([offset >= 0 for offset in mean_file_offsets]): raise Error( "Negative value specified for --mean_file_offsets option. " "Please specify positive integer values in format '(x,y)'. " + refer_to_faq_msg(18)) argv.mean_file_offsets = mean_file_offsets if argv.scale and argv.scale_values: raise Error( 'Both --scale and --scale_values are defined. Specify either scale factor or scale values per input ' + 'channels. ' + refer_to_faq_msg(19)) if argv.scale and argv.scale < 1.0: log.error( "The scale value is less than 1.0. This is most probably an issue because the scale value specifies " "floating point value which all input values will be *divided*.", extra={'is_warning': True}) if argv.input_model and (is_tf and argv.saved_model_dir): raise Error('Both --input_model and --saved_model_dir are defined. ' 'Specify either input model or saved model directory.') if is_tf: if argv.saved_model_tags is not None: if ' ' in argv.saved_model_tags: raise Error( 'Incorrect saved model tag was provided. 
Specify --saved_model_tags with no spaces in it' ) argv.saved_model_tags = argv.saved_model_tags.split(',') argv.output = argv.output.split(',') if argv.output else None argv.placeholder_shapes, argv.placeholder_data_types = get_placeholder_shapes( argv.input, argv.input_shape, argv.batch) mean_values = parse_tuple_pairs(argv.mean_values) scale_values = parse_tuple_pairs(argv.scale_values) mean_scale = get_mean_scale_dictionary(mean_values, scale_values, argv.input) argv.mean_scale_values = mean_scale if not os.path.exists(argv.output_dir): try: os.makedirs(argv.output_dir) except PermissionError as e: raise Error( "Failed to create directory {}. Permission denied! " + refer_to_faq_msg(22), argv.output_dir) from e else: if not os.access(argv.output_dir, os.W_OK): raise Error( "Output directory {} is not writable for current user. " + refer_to_faq_msg(22), argv.output_dir) log.debug("Placeholder shapes : {}".format(argv.placeholder_shapes)) if hasattr(argv, 'extensions') and argv.extensions and argv.extensions != '': extensions = argv.extensions.split(',') else: extensions = None argv.freeze_placeholder_with_value, argv.input = get_freeze_placeholder_values( argv.input, argv.freeze_placeholder_with_value) if is_tf: from mo.front.tf.register_custom_ops import get_front_classes import_extensions.load_dirs(argv.framework, extensions, get_front_classes) elif is_caffe: send_framework_info('caffe') from mo.front.caffe.register_custom_ops import get_front_classes import_extensions.load_dirs(argv.framework, extensions, get_front_classes) elif is_mxnet: send_framework_info('mxnet') from mo.front.mxnet.register_custom_ops import get_front_classes import_extensions.load_dirs(argv.framework, extensions, get_front_classes) elif is_kaldi: send_framework_info('kaldi') from mo.front.kaldi.register_custom_ops import get_front_classes import_extensions.load_dirs(argv.framework, extensions, get_front_classes) elif is_onnx: send_framework_info('onnx') from 
mo.front.onnx.register_custom_ops import get_front_classes import_extensions.load_dirs(argv.framework, extensions, get_front_classes) return argv
# CI build driver (botan-style ci_build script).  Parses options, picks the
# Python interpreter(s), compiler binary and compiler cache, validates the
# root dir, then assembles `cmds` — the ordered command sequence for the
# requested target: pylint runs for 'lint', otherwise configure.py + make +
# the test runner, plus target-specific extras (fuzzer corpus tests, Python
# binding tests, install, sonar scan, lcov/codecov coverage reporting) — and
# either prints them (--dry-run) or executes them via run_cmd.  Returns a
# process exit code (0 on success/skip, 1 on usage or tool-missing errors).
# NOTE(review): helpers (parse_args, have_prog, determine_flags, run_cmd) are
# defined elsewhere in this script.
def main(args=None): # pylint: disable=too-many-branches,too-many-statements,too-many-locals,too-many-return-statements """ Parse options, do the things """ if os.getenv('COVERITY_SCAN_BRANCH') == '1': print('Skipping build COVERITY_SCAN_BRANCH set in environment') return 0 (options, args) = parse_args(args or sys.argv) if len(args) != 2: print('Usage: %s [options] target' % (args[0])) return 1 target = args[1] py_interp = 'python' use_python2 = have_prog('python2') if options.use_python3 is None: use_python3 = have_prog('python3') else: use_python3 = options.use_python3 if use_python3: py_interp = 'python3' if options.cc_bin is None: if options.cc == 'gcc': options.cc_bin = 'g++' elif options.cc == 'clang': options.cc_bin = 'clang++' elif options.cc == 'msvc': options.cc_bin = 'cl' else: print('Error unknown compiler %s' % (options.cc)) return 1 if options.compiler_cache is None and options.cc != 'msvc': # Autodetect ccache, unless using clang profiling - ccache seems to misbehave there if have_prog('ccache') and target not in ['sonar']: options.compiler_cache = 'ccache' if options.compiler_cache == 'clcache' and target in ['sanitizer']: # clcache doesn't support /Zi so using it just adds overhead with # no benefit options.compiler_cache = None if target == 'sonar' and os.getenv('SONAR_TOKEN') is None: print('Skipping Sonar scan due to missing SONAR_TOKEN env variable') return 0 root_dir = options.root_dir if os.access(root_dir, os.R_OK) != True: raise Exception('Bad root dir setting, dir %s not readable' % (root_dir)) cmds = [] if target == 'lint': if not use_python2 and not use_python3: raise Exception('No python interpreters found cannot lint') pylint_rc = '--rcfile=%s' % (os.path.join(root_dir, 'src/configs/pylint.rc')) pylint_flags = [pylint_rc, '--reports=no', '--score=no'] # Some disabled rules specific to Python2 # superfluous-parens: needed for Python3 compatible print statements # too-many-locals: variable counting differs from pylint3 py2_flags = 
'--disable=superfluous-parens,too-many-locals' py_scripts = [ 'configure.py', 'src/python/botan2.py', 'src/scripts/ci_build.py', 'src/scripts/install.py', 'src/scripts/dist.py', 'src/scripts/cleanup.py', 'src/scripts/build_docs.py', 'src/scripts/website.py', 'src/scripts/bench.py', 'src/scripts/test_python.py', 'src/scripts/test_cli.py', 'src/scripts/python_unittests.py', 'src/scripts/python_unittests_unix.py'] for target in py_scripts: target_path = os.path.join(root_dir, target) if use_python2: cmds.append(['python2', '-m', 'pylint'] + pylint_flags + [py2_flags, target_path]) if use_python3: cmds.append(['python3', '-m', 'pylint'] + pylint_flags + [target_path]) else: config_flags, run_test_command, make_prefix = determine_flags( target, options.os, options.cpu, options.cc, options.cc_bin, options.compiler_cache, root_dir, options.pkcs11_lib) cmds.append([py_interp, os.path.join(root_dir, 'configure.py')] + config_flags) make_cmd = [options.make_tool] if root_dir != '.': make_cmd += ['-C', root_dir] if options.build_jobs > 1: make_cmd += ['-j%d' % (options.build_jobs)] make_cmd += ['-k'] if target == 'docs': cmds.append(make_cmd + ['docs']) else: if options.compiler_cache == 'ccache': cmds.append(['ccache', '--show-stats']) elif options.compiler_cache == 'clcache': cmds.append(['clcache', '-s']) make_targets = ['libs', 'cli', 'tests'] if target in ['coverage', 'fuzzers']: make_targets += ['fuzzers', 'fuzzer_corpus_zip'] cmds.append(make_prefix + make_cmd + make_targets) if options.compiler_cache == 'ccache': cmds.append(['ccache', '--show-stats']) elif options.compiler_cache == 'clcache': cmds.append(['clcache', '-s']) if run_test_command != None: cmds.append(run_test_command) if target in ['coverage', 'fuzzers']: cmds.append([py_interp, os.path.join(root_dir, 'src/scripts/test_fuzzers.py'), os.path.join(root_dir, 'fuzzer_corpus'), os.path.join(root_dir, 'build/fuzzer')]) if target in ['shared', 'coverage'] and options.os != 'windows': botan_exe = 
os.path.join(root_dir, 'botan-cli.exe' if options.os == 'windows' else 'botan') test_scripts = ['cli_tests.py', 'test_cli.py'] for script in test_scripts: cmds.append([py_interp, os.path.join(root_dir, 'src/scripts', script), botan_exe]) python_tests = os.path.join(root_dir, 'src/scripts/test_python.py') if target in ['shared', 'coverage']: if use_python2: cmds.append(['python2', '-b', python_tests]) if use_python3: cmds.append(['python3', '-b', python_tests]) if target in ['shared', 'static', 'bsi', 'nist']: cmds.append(make_cmd + ['install']) if target in ['sonar']: cmds.append(['llvm-profdata', 'merge', '-sparse', 'default.profraw', '-o', 'botan.profdata']) cmds.append(['llvm-cov', 'show', './botan-test', '-instr-profile=botan.profdata', '>', 'build/cov_report.txt']) sonar_config = os.path.join(root_dir, os.path.join(root_dir, 'src/configs/sonar-project.properties')) cmds.append(['sonar-scanner', '-Dproject.settings=%s' % (sonar_config), '-Dsonar.login=$SONAR_TOKEN']) if target in ['coverage']: if not have_prog('lcov'): print('Error: lcov not found in PATH (%s)' % (os.getenv('PATH'))) return 1 if not have_prog('gcov'): print('Error: gcov not found in PATH (%s)' % (os.getenv('PATH'))) return 1 cov_file = 'coverage.info' raw_cov_file = 'coverage.info.raw' cmds.append(['lcov', '--capture', '--directory', options.root_dir, '--output-file', raw_cov_file]) cmds.append(['lcov', '--remove', raw_cov_file, '/usr/*', '--output-file', cov_file]) cmds.append(['lcov', '--list', cov_file]) if have_prog('coverage'): cmds.append(['coverage', 'run', '--branch', '--rcfile', os.path.join(root_dir, 'src/configs/coverage.rc'), python_tests]) if have_prog('codecov'): # If codecov exists assume we are on Travis and report to codecov.io cmds.append(['codecov']) else: # Otherwise generate a local HTML report cmds.append(['genhtml', cov_file, '--output-directory', 'lcov-out']) cmds.append(make_cmd + ['clean']) cmds.append(make_cmd + ['distclean']) for cmd in cmds: if options.dry_run: 
print('$ ' + ' '.join(cmd)) else: run_cmd(cmd, root_dir) return 0
def _is_executable(path): return os.path.isfile(path) and os.access(path, os.X_OK)
def _nameserver(self): if os.path.isfile('result/{}/infrastructure/terraform.tfstate'.format(self.grid_name)) and os.access('result/{}/infrastructure/terraform.tfstate'.format(self.grid_name), os.R_OK): with open('result/{}/infrastructure/terraform.tfstate'.format(self.grid_name), 'r') as json_file: json_data = json.load(json_file) for module in json_data['modules']: for resource, value in module['resources'].iteritems(): if resource == 'azure_instance.terminal': return value['primary']['attributes']['ip_address']
def _build_command(self, binary, *other_args):
    '''
    Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
    a command line as an array that can be passed to subprocess.Popen.

    The argument order is deliberate: sshpass wrapper (if any), the binary
    itself, generic options, configured ssh_args, per-setting options, and
    finally caller-supplied extras.  The built list is also stored on
    self._command as a side effect.
    '''

    self._command = []

    ## First, the command name.

    # If we want to use password authentication, we have to set up a pipe to
    # write the password to sshpass.

    if self._play_context.password:
        if not self._sshpass_available():
            raise AnsibleError(
                "to use the 'ssh' connection type with passwords, you must install the sshpass program"
            )

        # NOTE(review): the read end of this pipe is handed to sshpass via
        # -d<fd>; presumably the write end is filled in elsewhere — confirm.
        self.sshpass_pipe = os.pipe()
        self._command += ['sshpass', '-d{0}'.format(self.sshpass_pipe[0])]

    self._command += [binary]

    ## Next, additional arguments based on the configuration.

    # sftp batch mode allows us to correctly catch failed transfers, but can
    # be disabled if the client side doesn't support the option.
    if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
        self._command += ['-b', '-']

    self._command += ['-C']

    if self._play_context.verbosity > 3:
        self._command += ['-vvv']
    elif binary == 'ssh':
        # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q.
        self._command += ['-q']

    # Next, we add [ssh_connection]ssh_args from ansible.cfg.

    if self._play_context.ssh_args:
        args = self._split_ssh_args(self._play_context.ssh_args)
        self._add_args("ansible.cfg set ssh_args", args)

    # Now we add various arguments controlled by configuration file settings
    # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
    # a combination thereof.

    if not C.HOST_KEY_CHECKING:
        self._add_args(
            "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
            ("-o", "StrictHostKeyChecking=no"))

    if self._play_context.port is not None:
        self._add_args("ANSIBLE_REMOTE_PORT/remote_port/ansible_port set",
                       ("-o", "Port={0}".format(self._play_context.port)))

    key = self._play_context.private_key_file
    if key:
        self._add_args(
            "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
            ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key))))

    # Without a password, restrict ssh to key/GSSAPI-style auth so it fails
    # fast instead of prompting interactively.
    if not self._play_context.password:
        self._add_args("ansible_password/ansible_ssh_pass not set", (
            "-o", "KbdInteractiveAuthentication=no",
            "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
            "-o", "PasswordAuthentication=no"))

    user = self._play_context.remote_user
    if user:
        self._add_args(
            "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set",
            ("-o", "User={0}".format(
                to_bytes(self._play_context.remote_user))))

    self._add_args(
        "ANSIBLE_TIMEOUT/timeout set",
        ("-o", "ConnectTimeout={0}".format(self._play_context.timeout)))

    # Add in any common or binary-specific arguments from the PlayContext
    # (i.e. inventory or task settings or overrides on the command line).

    for opt in ['ssh_common_args', binary + '_extra_args']:
        attr = getattr(self._play_context, opt, None)
        if attr is not None:
            args = self._split_ssh_args(attr)
            self._add_args("PlayContext set %s" % opt, args)

    # Check if ControlPersist is enabled and add a ControlPath if one hasn't
    # already been set.

    controlpersist, controlpath = self._persistence_controls(self._command)

    if controlpersist:
        self._persistent = True

        if not controlpath:
            cpdir = unfrackpath('$HOME/.ansible/cp')

            # The directory must exist and be writable.
            makedirs_safe(cpdir, 0o700)
            if not os.access(cpdir, os.W_OK):
                raise AnsibleError("Cannot write to ControlPath %s" % cpdir)

            args = ("-o", "ControlPath={0}".format(
                to_bytes(C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir))))
            self._add_args("found only ControlPersist; added ControlPath", args)

    ## Finally, we add any caller-supplied extras.
    if other_args:
        self._command += other_args

    return self._command
def find_command(command):
    """Search PATH (default '/usr/bin:/usr/sbin') for *command*.

    Returns the first matching absolute path.  Only regular files are
    accepted: os.access(..., X_OK) alone is also true for traversable
    directories, so the previous version could return a directory that
    happened to share the command's name.

    Raises:
        IOError: with errno.ENOENT when the command cannot be found.
    """
    for directory in os.getenv('PATH', '/usr/bin:/usr/sbin').split(os.pathsep):
        # 'directory' replaces the old name 'dir', which shadowed a builtin.
        candidate = os.path.join(directory, command)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    raise IOError(errno.ENOENT, 'Command not found: %r' % command)
def ftopen(file, mode, forceremote=False, nooverwrite=False, db=None, \
           chanid=None, starttime=None, download=False):
    """
    ftopen(file, mode, forceremote=False, nooverwrite=False, db=None)
                                        -> FileTransfer object
                                        -> file object
    Method will attempt to open file locally, falling back to remote access
            over mythprotocol if necessary.
    'forceremote' will force a FileTransfer object if possible.
    'file' takes a standard MythURI:
                myth://<group>@<host>:<port>/<path>
         or a 3-tuple of (host, storagegroup, filename).
    'mode' takes a 'r' or 'w'
    'nooverwrite' will refuse to open a file writable,
                if a local file is found.
    'chanid'/'starttime' select a RecordFileTransfer; 'download' selects a
    DownloadFileTransfer; otherwise a plain FileTransfer is used remotely.

    Raises TypeError for a bad mode, MythError for unparseable input, and
    MythFileError for refused/unsafe writes.
    """
    db = DBCache(db)
    log = MythLog('Python File Transfer', db=db)
    reuri = re.compile(\
        'myth://((?P<group>.*)@)?(?P<host>[\[\]a-zA-Z0-9_\-\.]*)(:[0-9]*)?/(?P<file>.*)')
    reip = re.compile('(?:\d{1,3}\.){3}\d{1,3}')

    if mode not in ('r','w'):
        raise TypeError("File I/O must be of type 'r' or 'w'")

    # Choose the remote-transfer factory up front; all three share the
    # (host, file, storagegroup) call signature.
    if chanid and starttime:
        protoopen = lambda host, file, storagegroup: \
            RecordFileTransfer(host, file, storagegroup,\
                               mode, chanid, starttime, db)
    elif download:
        protoopen = lambda host, lfile, storagegroup: \
            DownloadFileTransfer(host, lfile, storagegroup, \
                                 mode, file, db)
    else:
        protoopen = lambda host, file, storagegroup: \
            FileTransfer(host, file, storagegroup, mode, db)

    # process URI (myth://<group>@<host>[:<port>]/<path/to/file>)
    match = None
    try:
        match = reuri.match(file)
    except:
        # NOTE(review): bare except — presumably guards against 'file'
        # being a tuple rather than a string; the tuple case is handled
        # by the elif below.
        pass

    if match:
        host = match.group('host')
        filename = match.group('file')
        sgroup = match.group('group')
        if sgroup is None:
            sgroup = 'Default'
    elif len(file) == 3:
        host, sgroup, filename = file
    else:
        raise MythError('Invalid FileTransfer input string: '+file)

    # get full system name
    host = host.strip('[]')
    if reip.match(host) or check_ipv6(host):
        host = db._gethostfromaddr(host)

    # user forced to remote access
    if forceremote:
        if (mode == 'w') and (filename.find('/') != -1):
            raise MythFileError(MythError.FILE_FAILED_WRITE, file,
                                'attempting remote write outside base path')
        if nooverwrite and FileOps(host, db=db).fileExists(filename, sgroup):
            raise MythFileError(MythError.FILE_FAILED_WRITE, file,
                                'refusing to overwrite existing file')
        return protoopen(host, filename, sgroup)

    if mode == 'w':
        # check for pre-existing file
        path = FileOps(host, db=db).fileExists(filename, sgroup)
        sgs = list(db.getStorageGroup(groupname=sgroup))
        if path is not None:
            if nooverwrite:
                raise MythFileError(MythError.FILE_FAILED_WRITE, file,
                                    'refusing to overwrite existing file')
            # Reuse the storage group the existing file already lives in.
            for sg in sgs:
                if sg.dirname in path:
                    if sg.local:
                        return open(sg.dirname+filename, mode)
                    else:
                        return protoopen(host, filename, sgroup)

        # prefer local storage for new files
        for i,v in reversed(list(enumerate(sgs))):
            if not v.local:
                sgs.pop(i)
            else:
                # free bytes = f_bsize * f_bfree from statvfs
                st = os.statvfs(v.dirname)
                v.free = st[0]*st[3]
        if len(sgs) > 0:
            # choose path with most free space
            sg = sorted(sgs, key=lambda sg: sg.free, reverse=True)[0]
            # create folder if it does not exist
            if filename.find('/') != -1:
                path = sg.dirname+filename.rsplit('/',1)[0]
                if not os.access(path, os.F_OK):
                    os.makedirs(path)
            log(log.FILE, log.INFO, 'Opening local file (w)',
                sg.dirname+filename)
            return open(sg.dirname+filename, mode)
        # fallback to remote write
        else:
            if filename.find('/') != -1:
                raise MythFileError(MythError.FILE_FAILED_WRITE, file,
                                    'attempting remote write outside base path')
            return protoopen(host, filename, sgroup)
    else:
        # search for file in local directories
        sg = findfile(filename, sgroup, db)
        if sg is not None:
            # file found, open local
            log(log.FILE, log.INFO, 'Opening local file (r)',
                sg.dirname+filename)
            return open(sg.dirname+filename, mode)
        else:
            # file not found, open remote
            return protoopen(host, filename, sgroup)
def migrate(self):
    """Run the migration script with the "dns" argument.

    A no-op when the script is absent or lacks the execute bit.
    """
    script = self.SCRIPT
    runnable = os.path.isfile(script) and os.access(script, os.X_OK)
    if not runnable:
        return
    subprocess.check_call([sys.executable, script, "dns"])
# Derive companion file names from the control file's base name.
basename = os.path.splitext(os.path.basename(ctrl_file))[0]
machine_file = basename + ".machines"
rank_file = basename + ".ranks"
log_file = basename + ".log"
try:
    # Make sure the delay files are up to date, and generate new ones
    # if they're not.
    procs = {}
    success = True
    for station in stations:
        path = urlparse.urlparse(json_input['delay_directory']).path
        delay_file = path + '/' + exper + '_' + station + '.del'
        # Regenerate when the delay file is unreadable or older than the
        # VEX file it was derived from.
        if not os.access(delay_file, os.R_OK) or \
           os.stat(delay_file).st_mtime < os.stat(vex_file).st_mtime:
            args = ['generate_delay_model', vex_file, station, delay_file]
            # NOTE(review): the actual Popen is commented out, so 'procs'
            # stays empty and the loop below never runs — regeneration is
            # currently disabled; confirm this is intentional.
            #procs[station] = subprocess.Popen(args, stdout=subprocess.PIPE)
            pass
        continue
    for station in procs:
        output = procs[station].communicate()[0]
        procs[station].wait()
        if procs[station].returncode != 0:
            print "Delay model couldn't be generated for " + station + ":"
            print output
            path = urlparse.urlparse(json_input['delay_directory']).path
            delay_file = path + '/' + exper + '_' + station + '.del'
            # Remove the partial output so a later run retries cleanly.
            os.remove(delay_file)
            success = False
    # NOTE(review): the except/finally clause for this try block is not
    # visible in this chunk — it appears truncated here.
def access(self, path, mode):
    """FUSE access(2) handler.

    Maps *path* into the backing filesystem and signals EACCES to the
    kernel when the requested *mode* is not permitted there.
    """
    target = self._full_path(path)
    permitted = os.access(target, mode)
    if permitted:
        return
    raise FuseOSError(errno.EACCES)
def cmd_exists(cmd):
    """Return True when some directory on PATH holds an entry named *cmd*
    for which the current user has execute permission."""
    for directory in os.environ["PATH"].split(os.pathsep):
        if os.access(os.path.join(directory, cmd), os.X_OK):
            return True
    return False
def grace_find_months(base_dir, PROC, DREL, DSET='GSM'):
    """
    Parses date index file from grace_date.py
    Finds the months available for a GRACE/GRACE-FO product
    Finds the all months missing from the product

    Arguments
    ---------
    base_dir: working data directory
    PROC: GRACE data processing center
        CSR: University of Texas Center for Space Research
        GFZ: German Research Centre for Geosciences (GeoForschungsZentrum)
        JPL: Jet Propulsion Laboratory
        CNES: French Centre National D'Etudes Spatiales
    DREL: GRACE/GRACE-FO data release

    Keyword arguments
    -----------------
    DSET: GRACE/GRACE-FO dataset
        GAA: non-tidal atmospheric correction
        GAB: non-tidal oceanic correction
        GAC: combined non-tidal atmospheric and oceanic correction
        GAD: ocean bottom pressure product
        GSM: corrected monthly static gravity field product

    Returns
    -------
    start: First month in a GRACE/GRACE-FO dataset
    end: Last month in a GRACE/GRACE-FO dataset
    missing: missing months in a GRACE/GRACE-FO dataset
    months: all available months in a GRACE/GRACE-FO dataset
    time: center dates of all available months in a GRACE/GRACE-FO dataset
    """
    #-- Directory of exact product (using date index from GSM)
    grace_dir = os.path.join(base_dir, PROC, DREL, DSET)
    #-- check that GRACE/GRACE-FO date file exists; build it if not
    date_file = os.path.join(grace_dir,'{0}_{1}_DATES.txt'.format(PROC, DREL))
    if not os.access(date_file, os.F_OK):
        grace_date(base_dir,PROC=PROC,DREL=DREL,DSET=DSET,OUTPUT=True)

    #-- read GRACE/GRACE-FO date ascii file from grace_date.py
    #-- skip the header row and extract dates (decimal format) and months
    date_input = np.loadtxt(date_file, skiprows=1)
    tdec = date_input[:,0]
    #-- fix: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    #-- it was simply an alias for the builtin int
    months = date_input[:,1].astype(int)

    #-- array of all possible months (or in case of CNES RL01/2: 10-day sets)
    #-- note: arange's stop is exclusive, so the final month is never
    #-- counted as missing
    all_months = np.arange(1,months.max(),dtype=int)
    #-- missing months (values in all_months but not in months)
    missing = sorted(set(all_months)-set(months))

    #-- If CNES RL01/2: simply convert into numpy array
    #-- else: remove months 1-3 and convert into numpy array
    #-- fix: use boolean 'and' instead of bitwise '&' between comparisons
    if (PROC == 'CNES') and (DREL in ('RL01','RL02')):
        missing = np.array(missing,dtype=int)
    else:
        missing = np.array(missing[3:],dtype=int)

    return {'time':tdec, 'start':months[0], 'end':months[-1],
        'months':months, 'missing':missing}
def ensure_writable(self) -> None:
    '''Make sure there's a place for the data to go.

    Creates DATA_PATH if necessary, then verifies it is writable and
    traversable before claiming success.

    Raises:
        PermissionError: if DATA_PATH is not writable/traversable.
    '''
    os.makedirs(self.DATA_PATH, exist_ok=True)
    # Bug fix: the os.access() result used to be discarded, so the
    # "Verified" log line below was emitted even for an unwritable path.
    if not os.access(self.DATA_PATH, os.W_OK | os.X_OK):
        raise PermissionError(
            'Scan data path is not writable: %s' % self.DATA_PATH)
    logging.info('Verified scan write permissions')
#convert to array for file_function points_array = num.array(points, num.float) points_array = ensure_absolute(points_array) #print 'points_array', points_array dir_name, base = os.path.split(sww_file) #need to get current directory so when path and file #are "joined" below the directory is correct if dir_name == '': dir_name = getcwd() if access(sww_file, R_OK): if verbose: log.critical('File %s exists' % sww_file) else: msg = 'File "%s" could not be opened: no read permission' % sww_file raise Exception(msg) sww_files = get_all_swwfiles(look_in_dir=dir_name, base_name=base, verbose=verbose) # fudge to get SWW files in 'correct' order, oldest on the left sww_files.sort() if verbose: log.critical('sww files=%s' % sww_files)
# No options... print help. if numOpts < 2: parser.print_help() # List parameters elif options.params: printGetParameters() # Get parameter details elif options.paramDetail: printGetParameterDetails(options.paramDetail) # Submit job elif options.email and not options.jobid: params = {} if len(args) > 0: if os.access(args[0], os.R_OK): # Read file into content params[u'sequence'] = readFile(args[0]) else: # Argument is a sequence id params[u'sequence'] = args[0] elif options.sequence: # Specified via option if os.access(options.sequence, os.R_OK): # Read file into content params[u'sequence'] = readFile(options.sequence) else: # Argument is a sequence id params[u'sequence'] = options.sequence # Booleans need to be represented as 1/0 rather than True/False if options.matrix: params['matrix'] = options.matrix if options.gapopen: params['gapopen'] = options.gapopen if options.gapext:
def process(self, params):
    """Process raw spectrometer data into a CSV and report the outcome.

    params: sequence of [input_path, output_path, csv_name].  The literal
    "spec_temp_data_loc" in either path is replaced by self.temp_data_loc.
    Sends a status string back to self.client via utils.send in every exit
    path (success variants or a specific process error code).

    NOTE(review): paths are joined with "\\\\" throughout, so this code is
    Windows-only as written.
    """
    input_path = params[0]
    output_path = params[1]
    csv_name = params[2]
    print("**********")
    print(input_path)
    print(output_path)
    print(csv_name)

    logfile_for_reading = None  # We'll find it in the data folder.

    if input_path == "spec_temp_data_loc":
        input_path = self.temp_data_loc
    if output_path == "spec_temp_data_loc":
        # Writing to the temp location: clear out any stale files first.
        for file in os.listdir(self.temp_data_loc):
            os.remove(os.path.join(self.temp_data_loc, file))
        output_path = self.temp_data_loc

    # check if the input directory exists. if not, send an error back
    if not os.path.exists(input_path):
        utils.send(self.client, "processerrornodirectory", [])
        return

    # Look through files in data directory until you find a log file
    for potential_log in os.listdir(input_path):
        if ".txt" in potential_log:
            try:
                with open(input_path + "\\" + potential_log, "r") as f:
                    firstline = f.readline()
                    if "#AutoSpec log" in firstline or "# Tanager log" in firstline:
                        logfile_for_reading = input_path + "\\" + potential_log
                        break
            except OSError as e:
                traceback.print_exc()

    if logfile_for_reading is None:
        print("ERROR: No logfile found in data directory")

    # Refuse to clobber an existing output file, except the scratch name.
    if os.path.isfile(output_path + "\\" + csv_name) and csv_name != "proc_temp.csv":
        utils.send(self.client, "processerrorfileexists", [])
        return
    elif os.path.isfile(output_path + "\\" + csv_name):
        writeable = os.access(output_path, os.W_OK)
        if not writeable:
            utils.send(self.client, "processerrorcannotwrite", [])
            return
        os.remove(output_path + "\\" + csv_name)

    writeable = os.access(output_path, os.W_OK)
    if not writeable:
        utils.send(self.client, "processerrorcannotwrite", [])
        return
    else:
        # If the specified output path is in the C drive, we can write straight to it. Otherwise,
        # we're going to temporarily store the file in the temp data location
        if output_path[0:3] != "C:\\":
            temp_output_path = self.temp_data_loc
        else:
            temp_output_path = output_path
        datafile = temp_output_path + "\\" + csv_name

        #Don't give warnings about all the temp files that get dropped into the save directroy
        print("*************************************************")
        print(input_path)
        print(self.spec_controller.save_dir)
        if input_path == self.spec_controller.save_dir:
            self.data_files_to_ignore.append(csv_name)
            batches = int(
                len(self.data_files_to_ignore) /
                self.process_controller.batch_size) + 1
            base = csv_name.split(".csv")[0]
            for i in range(batches):
                ignore_file = f"{base}_{i}.csv"
                print(ignore_file)
                self.data_files_to_ignore.append(ignore_file)

        try:
            self.process_controller.process(input_path, temp_output_path,
                                            csv_name, self.watchdog_monitor)
        except Exception as e:
            self.process_controller.reset()
            utils.send(self.client, "processerror", [])
            traceback.print_exc()
            return

        # Check that the expected file arrived fine after processing.
        # This sometimes wasn't happening if you fed ViewSpecPro data without
        # taking a white referencetra or optimizing.
        # Polls for up to ~200 seconds at 0.2 s intervals.
        saved = False
        t0 = time.perf_counter()
        t = time.perf_counter()
        while t - t0 < 200 and not saved:
            saved = os.path.isfile(datafile)
            time.sleep(0.2)
            t = time.perf_counter()

        corrected = False
        if not saved:
            print("Datafile not saved.")
            print(datafile)

        if saved:
            # Load headers from the logfile, then apply correction
            if logfile_for_reading is not None:
                print("Loading headers from log file")
                warnings = self.set_headers(datafile, logfile_for_reading)
                print(
                    "Applying correction for non-Lambertian behavior of Spectralon"
                )
                try:
                    self.corrector.correct(
                        datafile
                    )  # applies a correction based on measured BRDF for spectralon
                    corrected = True
                except Exception as e:
                    # NOTE(review): 'raise e' makes the two lines after it
                    # unreachable — probably a leftover debugging change.
                    raise e
                    traceback.print_exc()
                    print("Warning! correction not applied")
            else:
                print("Warning! No log file found!")
                self.tsv_to_csv(datafile)  # still replace tabs with commas
                warnings = "no log found"
            print("done")

            final_datafile = (
                output_path + "\\" + csv_name
            )  # May or may not be the same loc as temporary.
            data_base = ".".join(csv_name.split(
                ".")[0:-1])  # E.g. for a csv name of foo.csv, returns foo
            final_logfile = (
                output_path + "\\" + data_base + "_log"
            )  # We're going to copy the logfile along with it,
            # givnig it a sensible name e.g. foo_log.txt

            # But first we have to make sure there isn't an existing file with that name.
            i = 1
            logfile_base = final_logfile
            while os.path.isfile(final_logfile + ".txt"):
                final_logfile = logfile_base + "_" + str(i)
                i += 1
            final_logfile += ".txt"

            # Ok, now copy!
            if logfile_for_reading is not None:
                os.system("copy " + logfile_for_reading + " " + final_logfile)
                if output_path == self.spec_controller.save_dir:
                    self.data_files_to_ignore.append(
                        final_logfile.split("\\")[-1])

            # If we need to move the data to get it to its final destination, do it!
            if temp_output_path != output_path:
                tempfilename = datafile
                os.system("move " + tempfilename + " " + final_datafile)

            # Read data to send to control computer
            spec_data = ""
            with open(final_datafile, "r") as f:
                spec_data = f.read()
            log_data = ""
            with open(final_logfile, "r") as f:
                log_data = f.read()

            # If the output directory is the same (or within) the data directory,
            # there's no need to alert the user to an unexpected file being introduced
            # since clearly it was expected.
            if self.spec_controller.save_dir is not None and self.spec_controller.save_dir != "":
                if self.spec_controller.save_dir in final_datafile:
                    expected = final_datafile.split(
                        self.spec_controller.save_dir)[1].split("\\")[1]
                    self.spec_controller.hopefully_saved_files.append(
                        expected)

            if corrected == True and logfile_for_reading is not None:
                utils.send(self.client, "processsuccess", [])
            elif logfile_for_reading is not None:
                utils.send(self.client, "processsuccessnocorrection", [])
            else:
                utils.send(self.client, "processsuccessnolog", [])
        # We don't actually know for sure that processing failed because of failing
        # to optimize or white reference, but ViewSpecPro sometimes silently fails if
        # you haven't been doing those things.
        else:
            utils.send(self.client, "processerrorwropt", [])
def _generate_figures(plot_quantity, file_loc, report, reportname, surface,
                      leg_label, f_list, gauges, locations, elev,
                      gauge_index, production_dirs, time_min, time_max,
                      time_unit, title_on, label_id, generate_fig, verbose):
    """ Generate figures based on required quantities and gauges for
    each sww file

    Writes per-gauge CSV time series and max/min comparison CSVs, optional
    pylab figures (and LaTeX \\includegraphics snippets when report=True),
    and returns (texfile2, elev_output).

    NOTE(review): indentation below was reconstructed from a mangled
    source; the overall nesting should be verified against the original.
    """
    from os import sep, altsep, getcwd, mkdir, access, F_OK, environ

    if generate_fig is True:
        from pylab import ion, hold, plot, axis, figure, legend, savefig, \
             xlabel, ylabel, title, close, subplot

        if surface is True:
            import pylab as p1
            import mpl3d.mplot3d as p3

    # Set up the LaTeX report file (one per label when a single label_id
    # is given, a shared one otherwise).
    if report == True:
        texdir = getcwd() + sep + 'report' + sep
        if access(texdir, F_OK) == 0:
            mkdir(texdir)
        if len(label_id) == 1:
            label_id1 = label_id[0].replace(sep, '')
            label_id2 = label_id1.replace('_', '')
            texfile = texdir + reportname + '%s' % label_id2
            texfile2 = reportname + '%s' % label_id2
            texfilename = texfile + '.tex'
            fid = open(texfilename, 'w')

            if verbose: log.critical('Latex output printed to %s' % texfilename)
        else:
            texfile = texdir + reportname
            texfile2 = reportname
            texfilename = texfile + '.tex'
            fid = open(texfilename, 'w')

            if verbose: log.critical('Latex output printed to %s' % texfilename)
    else:
        texfile = ''
        texfile2 = ''

    # n0 = longest time series over all sww files; arrays are sized
    # (timesteps, gauges, files).
    p = len(f_list)
    n = []
    n0 = 0
    for i in range(len(f_list)):
        n.append(len(f_list[i].get_time()))
        if n[i] > n0:
            n0 = n[i]
    n0 = int(n0)
    m = len(locations)
    model_time = num.zeros((n0, m, p), num.float)
    stages = num.zeros((n0, m, p), num.float)
    elevations = num.zeros((n0, m, p), num.float)
    momenta = num.zeros((n0, m, p), num.float)
    xmom = num.zeros((n0, m, p), num.float)
    ymom = num.zeros((n0, m, p), num.float)
    speed = num.zeros((n0, m, p), num.float)
    bearings = num.zeros((n0, m, p), num.float)
    due_east = 90.0 * num.ones((n0, 1), num.float)
    due_west = 270.0 * num.ones((n0, 1), num.float)
    depths = num.zeros((n0, m, p), num.float)
    eastings = num.zeros((n0, m, p), num.float)
    min_stages = []
    max_stages = []
    min_momentums = []
    max_momentums = []
    max_xmomentums = []
    max_ymomentums = []
    min_xmomentums = []
    min_ymomentums = []
    max_speeds = []
    min_speeds = []
    max_depths = []
    model_time_plot3d = num.zeros((n0, m), num.float)
    stages_plot3d = num.zeros((n0, m), num.float)
    eastings_plot3d = num.zeros((n0, m), num.float)
    # NOTE(review): 'is' comparison against string literals relies on
    # interning; == would be the safe spelling.
    if time_unit is 'mins': scale = 60.0
    if time_unit is 'hours': scale = 3600.0

    ##### loop over each swwfile #####
    for j, f in enumerate(f_list):
        if verbose: log.critical('swwfile %d of %d' % (j, len(f_list)))

        starttime = f.starttime
        comparefile = file_loc[j] + sep + 'gauges_maxmins' + '.csv'
        fid_compare = open(comparefile, 'w')
        file0 = file_loc[j] + 'gauges_t0.csv'
        fid_0 = open(file0, 'w')

        ##### loop over each gauge #####
        for k in gauge_index:
            if verbose: log.critical('Gauge %d of %d' % (k, len(gauges)))

            g = gauges[k]
            min_stage = 10
            max_stage = 0
            max_momentum = max_xmomentum = max_ymomentum = 0
            min_momentum = min_xmomentum = min_ymomentum = 100
            max_speed = 0
            min_speed = 0
            max_depth = 0
            gaugeloc = str(locations[k])
            thisfile = file_loc[j] + sep + 'gauges_time_series' + '_' \
                       + gaugeloc + '.csv'
            if j == 0:
                fid_out = open(thisfile, 'w')
                s = 'Time, Stage, Momentum, Speed, Elevation, xmom, ymom, Bearing \n'
                fid_out.write(s)

            #### generate quantities #######
            for i, t in enumerate(f.get_time()):
                if time_min <= t <= time_max:
                    w = f(t, point_id=k)[0]
                    z = f(t, point_id=k)[1]
                    uh = f(t, point_id=k)[2]
                    vh = f(t, point_id=k)[3]
                    depth = w - z
                    # NOTE(review): 'm' is reused here as momentum magnitude,
                    # shadowing m = len(locations) above.
                    m = sqrt(uh * uh + vh * vh)
                    if depth < 0.001:
                        vel = 0.0
                    else:
                        vel = m / (depth + 1.e-6 / depth)
                    bearing = calc_bearing(uh, vh)
                    model_time[i, k, j] = (t + starttime) / scale  #t/60.0
                    stages[i, k, j] = w
                    elevations[i, k, j] = z
                    xmom[i, k, j] = uh
                    ymom[i, k, j] = vh
                    momenta[i, k, j] = m
                    speed[i, k, j] = vel
                    bearings[i, k, j] = bearing
                    depths[i, k, j] = depth
                    thisgauge = gauges[k]
                    eastings[i, k, j] = thisgauge[0]
                    s = '%.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f,\n' \
                        % (t, w, m, vel, z, uh, vh, bearing)
                    fid_out.write(s)
                    if t == 0:
                        s = '%.2f, %.2f, %.2f\n' % (g[0], g[1], w)
                        fid_0.write(s)
                    if t / 60.0 <= 13920: tindex = i

                    # Track running extremes for this gauge/file.
                    if w > max_stage: max_stage = w
                    if w < min_stage: min_stage = w
                    if m > max_momentum: max_momentum = m
                    if m < min_momentum: min_momentum = m
                    if uh > max_xmomentum: max_xmomentum = uh
                    if vh > max_ymomentum: max_ymomentum = vh
                    if uh < min_xmomentum: min_xmomentum = uh
                    if vh < min_ymomentum: min_ymomentum = vh
                    if vel > max_speed: max_speed = vel
                    if vel < min_speed: min_speed = vel
                    if z > 0 and depth > max_depth: max_depth = depth

            s = '%.2f, %.2f, %.2f, %.2f, %s\n' \
                % (max_stage, min_stage, z, thisgauge[0], leg_label[j])
            fid_compare.write(s)
            max_stages.append(max_stage)
            min_stages.append(min_stage)
            max_momentums.append(max_momentum)
            max_xmomentums.append(max_xmomentum)
            max_ymomentums.append(max_ymomentum)
            min_xmomentums.append(min_xmomentum)
            min_ymomentums.append(min_ymomentum)
            min_momentums.append(min_momentum)
            max_depths.append(max_depth)
            max_speeds.append(max_speed)
            min_speeds.append(min_speed)

        #### finished generating quantities for each swwfile #####

        model_time_plot3d[:, :] = model_time[:, :, j]
        stages_plot3d[:, :] = stages[:, :, j]
        eastings_plot3d[:, ] = eastings[:, :, j]

        if surface is True:
            log.critical('Printing surface figure')
            for i in range(2):
                fig = p1.figure(10)
                ax = p3.Axes3D(fig)
                if len(gauges) > 80:
                    ax.plot_surface(model_time[:, :, j], eastings[:, :, j],
                                    stages[:, :, j])
                else:
                    ax.plot3D(num.ravel(eastings[:, :, j]),
                              num.ravel(model_time[:, :, j]),
                              num.ravel(stages[:, :, j]))
                ax.set_xlabel('time')
                ax.set_ylabel('x')
                ax.set_zlabel('stage')
                fig.add_axes(ax)
                p1.show()
                surfacefig = 'solution_surface%s' % leg_label[j]
                p1.savefig(surfacefig)
                p1.close()

    #### finished generating quantities for all swwfiles #####

    # x profile for given time
    if surface is True:
        figure(11)
        plot(eastings[tindex, :, j], stages[tindex, :, j])
        xlabel('x')
        ylabel('stage')
        profilefig = 'solution_xprofile'
        # NOTE(review): the literal 'profilefig' is saved, not the variable.
        savefig('profilefig')

    elev_output = []
    if generate_fig is True:
        # Shared axes limits so all gauges/files are plotted comparably.
        depth_axis = axis(
            [starttime / scale, time_max / scale, -0.1,
             max(max_depths) * 1.1])
        stage_axis = axis([
            starttime / scale, time_max / scale,
            min(min_stages), max(max_stages) * 1.1
        ])
        vel_axis = axis([
            starttime / scale, time_max / scale,
            min(min_speeds), max(max_speeds) * 1.1
        ])
        mom_axis = axis([
            starttime / scale, time_max / scale,
            min(min_momentums), max(max_momentums) * 1.1
        ])
        xmom_axis = axis([
            starttime / scale, time_max / scale,
            min(min_xmomentums), max(max_xmomentums) * 1.1
        ])
        ymom_axis = axis([
            starttime / scale, time_max / scale,
            min(min_ymomentums), max(max_ymomentums) * 1.1
        ])
        cstr = ['g', 'r', 'b', 'c', 'm', 'y', 'k']
        nn = len(plot_quantity)
        no_cols = 2

        if len(label_id) > 1:
            graphname_report = []
        pp = 1
        div = 11.
        cc = 0

        for k in gauge_index:
            g = gauges[k]
            count1 = 0
            if report == True and len(label_id) > 1:
                s = '\\begin{figure}[ht] \n' \
                    '\\centering \n' \
                    '\\begin{tabular}{cc} \n'
                fid.write(s)
            if len(label_id) > 1:
                graphname_report = []

            #### generate figures for each gauge ####
            for j, f in enumerate(f_list):
                ion()
                hold(True)
                count = 0
                where1 = 0
                where2 = 0
                word_quantity = ''
                if report == True and len(label_id) == 1:
                    s = '\\begin{figure}[hbt] \n' \
                        '\\centering \n' \
                        '\\begin{tabular}{cc} \n'
                    fid.write(s)

                for which_quantity in plot_quantity:
                    count += 1
                    where1 += 1
                    figure(count, frameon=False)
                    if which_quantity == 'depth':
                        plot(model_time[0:n[j] - 1, k, j],
                             depths[0:n[j] - 1, k, j],
                             '-',
                             c=cstr[j])
                        units = 'm'
                        axis(depth_axis)
                    if which_quantity == 'stage':
                        # Dry gauges (positive bed elevation) are plotted as
                        # depth rather than absolute stage.
                        if elevations[0, k, j] <= 0:
                            plot(model_time[0:n[j] - 1, k, j],
                                 stages[0:n[j] - 1, k, j],
                                 '-',
                                 c=cstr[j])
                            axis(stage_axis)
                        else:
                            plot(model_time[0:n[j] - 1, k, j],
                                 depths[0:n[j] - 1, k, j],
                                 '-',
                                 c=cstr[j])
                            #axis(depth_axis)
                        units = 'm'
                    if which_quantity == 'momentum':
                        plot(model_time[0:n[j] - 1, k, j],
                             momenta[0:n[j] - 1, k, j],
                             '-',
                             c=cstr[j])
                        axis(mom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'xmomentum':
                        plot(model_time[0:n[j] - 1, k, j],
                             xmom[0:n[j] - 1, k, j],
                             '-',
                             c=cstr[j])
                        axis(xmom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'ymomentum':
                        plot(model_time[0:n[j] - 1, k, j],
                             ymom[0:n[j] - 1, k, j],
                             '-',
                             c=cstr[j])
                        axis(ymom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'speed':
                        plot(model_time[0:n[j] - 1, k, j],
                             speed[0:n[j] - 1, k, j],
                             '-',
                             c=cstr[j])
                        axis(vel_axis)
                        units = 'm / sec'
                    if which_quantity == 'bearing':
                        plot(model_time[0:n[j] - 1, k, j],
                             bearings[0:n[j] - 1, k, j], '-',
                             model_time[0:n[j] - 1, k, j],
                             due_west[0:n[j] - 1], '-.',
                             model_time[0:n[j] - 1, k, j],
                             due_east[0:n[j] - 1], '-.')
                        units = 'degrees from North'
                        #ax = axis([time_min, time_max, 0.0, 360.0])
                        legend(('Bearing', 'West', 'East'))

                    if time_unit is 'mins': xlabel('time (mins)')
                    if time_unit is 'hours': xlabel('time (hours)')
                    #if which_quantity == 'stage' \
                    #   and elevations[0:n[j]-1,k,j] > 0:
                    #    ylabel('%s (%s)' %('depth', units))
                    #else:
                    #    ylabel('%s (%s)' %(which_quantity, units))
                        #ylabel('%s (%s)' %('wave height', units))
                    ylabel('%s (%s)' % (which_quantity, units))
                    if len(label_id) > 1:
                        legend((leg_label), loc='upper right')

                    #gaugeloc1 = gaugeloc.replace(' ','')
                    #gaugeloc2 = gaugeloc1.replace('_','')
                    gaugeloc2 = str(locations[k]).replace(' ', '')
                    graphname = '%sgauge%s_%s' % (file_loc[j], gaugeloc2,
                                                  which_quantity)
                    if report == True and len(label_id) > 1:
                        figdir = getcwd() + sep + 'report_figures' + sep
                        if access(figdir, F_OK) == 0:
                            mkdir(figdir)
                        latex_file_loc = figdir.replace(sep, altsep)

                        # storing files in production directory
                        graphname_latex = '%sgauge%s%s' \
                                          % (latex_file_loc, gaugeloc2,
                                             which_quantity)
                        # giving location in latex output file
                        graphname_report_input = '%sgauge%s%s' % \
                            ('..' + altsep + 'report_figures' + altsep,
                             gaugeloc2, which_quantity)
                        graphname_report.append(graphname_report_input)

                        # save figures in production directory for report
                        savefig(graphname_latex)

                    if report == True:
                        figdir = getcwd() + sep + 'report_figures' + sep
                        if access(figdir, F_OK) == 0:
                            mkdir(figdir)
                        latex_file_loc = figdir.replace(sep, altsep)

                        if len(label_id) == 1:
                            # storing files in production directory
                            graphname_latex = '%sgauge%s%s%s' % \
                                              (latex_file_loc, gaugeloc2,
                                               which_quantity, label_id2)
                            # giving location in latex output file
                            graphname_report = '%sgauge%s%s%s' % \
                                               ('..' + altsep +
                                                'report_figures' + altsep,
                                                gaugeloc2, which_quantity,
                                                label_id2)
                            s = '\includegraphics' \
                                '[width=0.49\linewidth, height=50mm]{%s%s}' % \
                                (graphname_report, '.png')
                            fid.write(s)
                            if where1 % 2 == 0:
                                s = '\\\\ \n'
                                where1 = 0
                            else:
                                s = '& \n'
                            fid.write(s)
                            savefig(graphname_latex)

                    if title_on == True:
                        title('%s scenario: %s at %s gauge' % \
                              (label_id, which_quantity, gaugeloc2))
                        #title('Gauge %s (MOST elevation %.2f, ' \
                        #      'ANUGA elevation %.2f)' % \
                        #      (gaugeloc2, elevations[10,k,0],
                        #       elevations[10,k,1]))

                    savefig(graphname)  # save figures with sww file

                if report == True and len(label_id) == 1:
                    # Build a human-readable list of plotted quantities for
                    # the figure caption.
                    for i in range(nn - 1):
                        if nn > 2:
                            if plot_quantity[i] == 'stage' \
                               and elevations[0,k,j] > 0:
                                word_quantity += 'depth' + ', '
                            else:
                                word_quantity += plot_quantity[i] + ', '
                        else:
                            if plot_quantity[i] == 'stage' \
                               and elevations[0,k,j] > 0:
                                word_quantity += 'depth' + ', '
                            else:
                                word_quantity += plot_quantity[i]

                    if plot_quantity[nn - 1] == 'stage' and elevations[0, k, j] > 0:
                        word_quantity += ' and ' + 'depth'
                    else:
                        word_quantity += ' and ' + plot_quantity[nn - 1]

                    caption = 'Time series for %s at %s location ' \
                              '(elevation %.2fm)' % \
                              (word_quantity, locations[k], elev[k])
                    if elev[k] == 0.0:
                        caption = 'Time series for %s at %s location ' \
                                  '(elevation %.2fm)' % \
                                  (word_quantity, locations[k],
                                   elevations[0,k,j])

                    # NOTE(review): gauges[0]/gauges[1] here index gauges,
                    # not gauges[k] — differs from the branch below; confirm.
                    east = gauges[0]
                    north = gauges[1]
                    elev_output.append(
                        [locations[k], east, north, elevations[0, k, j]])
                    label = '%sgauge%s' % (label_id2, gaugeloc2)
                    s = '\end{tabular} \n' \
                        '\\caption{%s} \n' \
                        '\label{fig:%s} \n' \
                        '\end{figure} \n \n' % (caption, label)
                    fid.write(s)
                    cc += 1
                    if cc % 6 == 0:
                        fid.write('\\clearpage \n')
                    savefig(graphname_latex)

            if report == True and len(label_id) > 1:
                for i in range(nn - 1):
                    if nn > 2:
                        if plot_quantity[i] == 'stage' and elevations[0, k, j] > 0:
                            word_quantity += 'depth' + ','
                        else:
                            word_quantity += plot_quantity[i] + ', '
                    else:
                        if plot_quantity[i] == 'stage' and elevations[0, k, j] > 0:
                            word_quantity += 'depth'
                        else:
                            word_quantity += plot_quantity[i]

                where1 = 0
                count1 += 1
                index = j * len(plot_quantity)
                for which_quantity in plot_quantity:
                    where1 += 1
                    s = '\includegraphics' \
                        '[width=0.49\linewidth, height=50mm]{%s%s}' % \
                        (graphname_report[index], '.png')
                    index += 1
                    fid.write(s)
                    if where1 % 2 == 0:
                        s = '\\\\ \n'
                        where1 = 0
                    else:
                        s = '& \n'
                    fid.write(s)

                word_quantity += ' and ' + plot_quantity[nn - 1]
                label = 'gauge%s' % (gaugeloc2)
                caption = 'Time series for %s at %s location ' \
                          '(elevation %.2fm)' % \
                          (word_quantity, locations[k], elev[k])
                if elev[k] == 0.0:
                    caption = 'Time series for %s at %s location ' \
                              '(elevation %.2fm)' % \
                              (word_quantity, locations[k],
                               elevations[0,k,j])

                thisgauge = gauges[k]
                east = thisgauge[0]
                north = thisgauge[1]
                elev_output.append(
                    [locations[k], east, north, elevations[0, k, j]])
                s = '\end{tabular} \n' \
                    '\\caption{%s} \n' \
                    '\label{fig:%s} \n' \
                    '\end{figure} \n \n' % (caption, label)
                fid.write(s)
                if float((k + 1) / div - pp) == 0.:
                    fid.write('\\clearpage \n')
                    pp += 1

                #### finished generating figures ###

    close('all')

    return texfile2, elev_output
import importlib
from .common import epsilon
from .common import floatx
from .common import set_epsilon
from .common import set_floatx
from .common import cast_to_floatx
from .common import image_data_format
from .common import set_image_data_format

# Set Keras base dir path given KERAS_HOME env variable, if applicable.
# Otherwise either ~/.keras or /tmp.
if 'KERAS_HOME' in os.environ:
    _keras_dir = os.environ.get('KERAS_HOME')
else:
    _keras_base_dir = os.path.expanduser('~')
    # Fall back to /tmp when the home directory is not writable
    # (e.g. restricted service accounts).
    if not os.access(_keras_base_dir, os.W_OK):
        _keras_base_dir = '/tmp'
    _keras_dir = os.path.join(_keras_base_dir, '.keras')

# Default backend: TensorFlow.
_BACKEND = 'tensorflow'

# Attempt to read Keras config file.
_config_path = os.path.expanduser(os.path.join(_keras_dir, 'keras.json'))
if os.path.exists(_config_path):
    try:
        with open(_config_path) as f:
            _config = json.load(f)
    except ValueError:
        # A corrupt keras.json falls back to built-in defaults.
        _config = {}
    # NOTE(review): this chunk cuts off here; the remaining _config reads
    # (epsilon, backend, image_data_format) are outside the visible source.
    _floatx = _config.get('floatx', floatx())
def collect(self):
    """
    Collect per-interface network statistics.

    Primary source is ``self.PROC`` (the /proc/net/dev-style file); when
    that file is not readable, falls back to psutil counters.  Each
    counter is published as a derivative metric; ``rx_bytes``/``tx_bytes``
    are additionally converted to every unit in
    ``self.config['byte_unit']``.

    Returns None in all cases.
    """
    # Initialize results: {device: {counter_name: raw_value}}
    results = {}

    if os.access(self.PROC, os.R_OK):
        # Build the interface-matching regular expression once, before
        # scanning the file.  Raw strings so \s, \d, \w are real regex
        # escapes rather than (deprecated) string escapes.
        greed = ''
        if self.config['greedy'].lower() == 'true':
            greed = r'\S*'

        exp = (r'^(?:\s*)((?:%s)%s):(?:\s*)'
               r'(?P<rx_bytes>\d+)(?:\s*)'
               r'(?P<rx_packets>\w+)(?:\s*)'
               r'(?P<rx_errors>\d+)(?:\s*)'
               r'(?P<rx_drop>\d+)(?:\s*)'
               r'(?P<rx_fifo>\d+)(?:\s*)'
               r'(?P<rx_frame>\d+)(?:\s*)'
               r'(?P<rx_compressed>\d+)(?:\s*)'
               r'(?P<rx_multicast>\d+)(?:\s*)'
               r'(?P<tx_bytes>\d+)(?:\s*)'
               r'(?P<tx_packets>\w+)(?:\s*)'
               r'(?P<tx_errors>\d+)(?:\s*)'
               r'(?P<tx_drop>\d+)(?:\s*)'
               r'(?P<tx_fifo>\d+)(?:\s*)'
               r'(?P<tx_frame>\d+)(?:\s*)'
               r'(?P<tx_compressed>\d+)(?:\s*)'
               r'(?P<tx_multicast>\d+)(?:.*)$'
               ) % (('|'.join(self.config['interfaces'])), greed)
        reg = re.compile(exp)

        # BUGFIX: use a context manager (and don't shadow the `file`
        # builtin) so the proc file is closed even if parsing raises.
        with open(self.PROC) as proc_file:
            for line in proc_file:
                match = reg.match(line)
                if match:
                    device = match.group(1)
                    results[device] = match.groupdict()
    else:
        if not psutil:
            self.log.error('Unable to import psutil')
            self.log.error('No network metrics retrieved')
            return None

        # Fallback path: per-NIC counters from psutil.
        network_stats = psutil.network_io_counters(True)
        for device in network_stats.keys():
            network_stat = network_stats[device]
            results[device] = {}
            results[device]['rx_bytes'] = network_stat.bytes_recv
            results[device]['tx_bytes'] = network_stat.bytes_sent
            results[device]['rx_packets'] = network_stat.packets_recv
            results[device]['tx_packets'] = network_stat.packets_sent

    for device in results:
        stats = results[device]
        for s, v in stats.items():
            # Get Metric Name, e.g. "eth0.rx_bytes"
            metric_name = '.'.join([device, s])
            # Counters are monotonically increasing: publish the rate of
            # change, with rollover protection at MAX_COUNTER.
            metric_value = self.derivative(metric_name, long(v),
                                           diamond.collector.MAX_COUNTER)

            # Convert rx_bytes and tx_bytes into each configured unit
            if s == 'rx_bytes' or s == 'tx_bytes':
                convertor = diamond.convertor.binary(value=metric_value,
                                                     unit='byte')
                for u in self.config['byte_unit']:
                    # Publish Converted Metric
                    self.publish(metric_name.replace('bytes', u),
                                 convertor.get(unit=u), 2)
            else:
                # Publish Metric Derivative
                self.publish(metric_name, metric_value)
    return None
#!/usr/bin/python
"""Generate man pages from the .pod sources under src/tools via pod2man."""

import os
import subprocess
import sys

cwd = os.path.dirname(__file__)

# Locate pod2man (shipped with Perl) on PATH.
pod2man = None
for path in os.environ.get('PATH', '/bin:/usr/bin').split(':'):
    sfile = os.path.join(path, 'pod2man')
    if os.access(sfile, os.X_OK):
        pod2man = sfile
        break

if not pod2man:
    sys.stderr.write('Perl is not installed\n')
    sys.exit(1)

sys.stdout.write('Generating man pages...\n')
# BUGFIX: os.path.join instead of '%s/../src/tools' % cwd -- the old form
# produced the bogus root-relative path '/../src/tools' when the script is
# run from its own directory (cwd == '').
for root, _dirs, lfiles in os.walk(os.path.join(cwd, '..', 'src', 'tools')):
    for sfile in lfiles:
        if sfile.endswith('.pod'):
            ifile = os.path.join(root, sfile)
            # BUGFIX: strip the suffix instead of str.replace, which would
            # hit the first '.pod' occurrence anywhere in the path.
            ofile = ifile[:-len('.pod')] + '.1'
            subprocess.check_call(
                (pod2man, '--release=Katie 4.12.0',
                 '--center=Katie Manual',
                 '--section=1', '--utf8', ifile, ofile))
def _parse_config(self, args=None):
    """Parse command-line *args* and merge them with the config file.

    Populates ``self.options`` with the effective configuration:
    command-line values win over config-file values, which win over
    defaults.  Also resolves the rc-file location, the addons path and
    the server-wide module list, and saves the config when -s is given.

    NOTE(review): Python 2 code (``basestring``, list-returning ``map``).
    """
    if args is None:
        args = []
    opt, args = self.parser.parse_args(args)

    # Small helper: abort option parsing with *msg* when *cond* is true.
    def die(cond, msg):
        if cond:
            self.parser.error(msg)

    # Ensures no illegitimate argument is silently discarded (avoids insidious "hyphen to dash" problem)
    die(args, "unrecognized parameters: '%s'" % " ".join(args))

    die(bool(opt.syslog) and bool(opt.logfile),
        "the syslog and logfile options are exclusive")

    die(opt.translate_in and (not opt.language or not opt.db_name),
        "the i18n-import option cannot be used without the language (-l) and the database (-d) options")

    die(opt.overwrite_existing_translations and not (opt.translate_in or opt.update),
        "the i18n-overwrite option cannot be used without the i18n-import option or without the update option")

    die(opt.translate_out and (not opt.db_name),
        "the i18n-export option cannot be used without the database (-d) option")

    # Check if the config file exists (-c used, but not -s)
    die(not opt.save and opt.config and not os.access(opt.config, os.R_OK),
        "The config file '%s' selected with -c/--config doesn't exist or is not readable, "\
        "use -s/--save if you want to generate it"% opt.config)

    # place/search the config file on Win32 near the server installation
    # (../etc from the server)
    # if the server is run by an unprivileged user, he has to specify location of a config file where he has the rights to write,
    # else he won't be able to save the configurations, or even to start the server...
    # TODO use appdirs
    if os.name == 'nt':
        rcfilepath = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'odoo.conf')
    else:
        rcfilepath = os.path.expanduser('~/.odoorc')
        # Legacy rc-file name kept for backward compatibility.
        old_rcfilepath = os.path.expanduser('~/.openerp_serverrc')

        die(os.path.isfile(rcfilepath) and os.path.isfile(old_rcfilepath),
            "Found '.odoorc' and '.openerp_serverrc' in your path. Please keep only one of "\
            "them, preferrably '.odoorc'.")

        if not os.path.isfile(rcfilepath) and os.path.isfile(old_rcfilepath):
            rcfilepath = old_rcfilepath

    # Precedence: explicit config_file attr > -c option > env var > default.
    self.rcfile = os.path.abspath(
        self.config_file or opt.config or os.environ.get('OPENERP_SERVER') or rcfilepath)
    self.load()

    # Verify that we want to log or not, if not the output will go to stdout
    if self.options['logfile'] in ('None', 'False'):
        self.options['logfile'] = False
    # the same for the pidfile
    if self.options['pidfile'] in ('None', 'False'):
        self.options['pidfile'] = False

    # if defined dont take the configfile value even if the defined value is None
    keys = ['xmlrpc_interface', 'xmlrpc_port', 'longpolling_port', 'db_name', 'db_user', 'db_password', 'db_host',
            'db_port', 'db_template', 'logfile', 'pidfile', 'smtp_port',
            'email_from', 'smtp_server', 'smtp_user', 'smtp_password',
            'db_maxconn', 'import_partial', 'addons_path',
            'xmlrpc', 'syslog', 'without_demo',
            'dbfilter', 'log_level', 'log_db',
            'log_db_level', 'geoip_database', 'dev_mode', 'shell_interface'
    ]

    for arg in keys:
        # Copy the command-line argument (except the special case for log_handler, due to
        # action=append requiring a real default, so we cannot use the my_default workaround)
        if getattr(opt, arg):
            self.options[arg] = getattr(opt, arg)
        # ... or keep, but cast, the config file value.
        elif isinstance(self.options[arg], basestring) and self.casts[arg].type in optparse.Option.TYPE_CHECKER:
            self.options[arg] = optparse.Option.TYPE_CHECKER[self.casts[arg].type](self.casts[arg], arg, self.options[arg])

    # log_handler accumulates: config-file entries (comma separated string)
    # plus every --log-handler given on the command line.
    if isinstance(self.options['log_handler'], basestring):
        self.options['log_handler'] = self.options['log_handler'].split(',')
    self.options['log_handler'].extend(opt.log_handler)

    # if defined but None take the configfile value
    keys = [
        'language', 'translate_out', 'translate_in', 'overwrite_existing_translations',
        'dev_mode', 'shell_interface', 'smtp_ssl', 'load_language',
        'stop_after_init', 'logrotate', 'without_demo', 'xmlrpc', 'syslog',
        'list_db', 'proxy_mode',
        'test_file', 'test_enable', 'test_commit', 'test_report_directory',
        'osv_memory_count_limit', 'osv_memory_age_limit', 'max_cron_threads', 'unaccent',
        'data_dir',
    ]

    # Worker/limit options only make sense on POSIX (prefork model).
    posix_keys = [
        'workers',
        'limit_memory_hard', 'limit_memory_soft',
        'limit_time_cpu', 'limit_time_real', 'limit_request', 'limit_time_real_cron'
    ]

    if os.name == 'posix':
        keys += posix_keys
    else:
        self.options.update(dict.fromkeys(posix_keys, None))

    # Copy the command-line arguments...
    for arg in keys:
        if getattr(opt, arg) is not None:
            self.options[arg] = getattr(opt, arg)
        # ... or keep, but cast, the config file value.
        elif isinstance(self.options[arg], basestring) and self.casts[arg].type in optparse.Option.TYPE_CHECKER:
            self.options[arg] = optparse.Option.TYPE_CHECKER[self.casts[arg].type](self.casts[arg], arg, self.options[arg])

    self.options['root_path'] = os.path.abspath(os.path.expanduser(os.path.expandvars(os.path.join(os.path.dirname(__file__), '..'))))
    # No addons_path configured: derive the default from root_path.
    if not self.options['addons_path'] or self.options['addons_path']=='None':
        default_addons = []
        base_addons = os.path.join(self.options['root_path'], 'addons')
        if os.path.exists(base_addons):
            default_addons.append(base_addons)
        main_addons = os.path.abspath(os.path.join(self.options['root_path'], '../addons'))
        if os.path.exists(main_addons):
            default_addons.append(main_addons)
        self.options['addons_path'] = ','.join(default_addons)
    else:
        # Normalize every configured entry to an absolute, expanded path.
        self.options['addons_path'] = ",".join(
            os.path.abspath(os.path.expanduser(os.path.expandvars(x.strip())))
            for x in self.options['addons_path'].split(','))

    # init/update: comma-separated module lists turned into {name: 1} dicts.
    self.options['init'] = opt.init and dict.fromkeys(opt.init.split(','), 1) or {}
    self.options['demo'] = (dict(self.options['init'])
                            if not self.options['without_demo'] else {})
    self.options['update'] = opt.update and dict.fromkeys(opt.update.split(','), 1) or {}
    self.options['translate_modules'] = opt.translate_modules and map(lambda m: m.strip(), opt.translate_modules.split(',')) or ['all']
    self.options['translate_modules'].sort()

    # 'all' in --dev expands to the full feature set.
    dev_split = opt.dev_mode and map(str.strip, opt.dev_mode.split(',')) or []
    self.options['dev_mode'] = 'all' in dev_split and dev_split + ['pdb', 'reload', 'qweb', 'werkzeug', 'xml'] or dev_split

    if opt.pg_path:
        self.options['pg_path'] = opt.pg_path

    if self.options.get('language', False):
        # Locale codes are at most 5 chars, e.g. fr_BE.
        if len(self.options['language']) > 5:
            raise Exception('ERROR: The Lang name must take max 5 chars, Eg: -lfr_BE')

    if opt.save:
        self.save()

    # Mirror the final values into the global odoo.conf module.
    odoo.conf.addons_paths = self.options['addons_path'].split(',')
    if opt.server_wide_modules:
        odoo.conf.server_wide_modules = map(lambda m: m.strip(), opt.server_wide_modules.split(','))
    else:
        odoo.conf.server_wide_modules = ['web','web_kanban']
def url_save(url, filepath, bar, refer=None, is_part=False, faker=False,
             headers=None, timeout=None):
    """Download *url* into *filepath*, with skip/overwrite/resume handling.

    Args:
        url: source URL.
        filepath: destination path; data is first written to
            ``filepath + '.download'`` and renamed on completion (unless the
            total size is unknown, in which case it writes in place).
        bar: progress-bar object (or a falsy value to disable progress).
        refer: optional Referer header value.
        is_part: True when this file is one part of a multi-part download.
        faker: use the module-level ``fake_headers`` instead of *headers*.
        headers: optional dict of extra request headers.
        timeout: optional socket timeout in seconds.
    """
    tmp_headers = headers.copy() if headers is not None else {}
    # When a referer specified with param refer, the key must be 'Referer' for the hack here
    if refer is not None:
        tmp_headers['Referer'] = refer
    file_size = url_size(url, faker=faker, headers=tmp_headers)

    if os.path.exists(filepath):
        if file_size == os.path.getsize(filepath):
            # Same size on disk: nothing to do.
            if not is_part:
                if bar:
                    bar.done()
                print('Skipping %s: file already exists' %
                      tr(os.path.basename(filepath)))
            else:
                if bar:
                    bar.update_received(file_size)
            return
        else:
            if not is_part:
                if bar:
                    bar.done()
                print('Overwriting %s' % tr(os.path.basename(filepath)), '...')
    elif not os.path.exists(os.path.dirname(filepath)):
        os.mkdir(os.path.dirname(filepath))

    # Unknown total size: no temp file, write directly to the destination.
    temp_filepath = filepath + '.download' if file_size != float('inf') \
        else filepath
    received = 0
    open_mode = 'wb'

    if received < file_size:
        if faker:
            # BUGFIX: copy the shared fake_headers template instead of
            # rebinding to it -- the Range/Referer mutations below would
            # otherwise leak into the module-level dict and poison every
            # subsequent request.
            tmp_headers = fake_headers.copy()

        if received:
            tmp_headers['Range'] = 'bytes=' + str(received) + '-'
        if refer:
            tmp_headers['Referer'] = refer

        if timeout:
            response = urlopen_with_retry(
                request.Request(url, headers=tmp_headers), timeout=timeout)
        else:
            response = urlopen_with_retry(
                request.Request(url, headers=tmp_headers))

        # Work out how many bytes this response will deliver: prefer the
        # Content-Range header (partial responses), fall back to
        # Content-Length, else "unknown".
        try:
            range_start = int(response.headers['content-range'][6:]
                              .split('/')[0].split('-')[0])
            end_length = int(
                response.headers['content-range'][6:].split('/')[1])
            range_length = end_length - range_start
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            content_length = response.headers['content-length']
            range_length = int(
                content_length) if content_length is not None else float('inf')

        # Server did not honour the resume request: restart from scratch.
        if file_size != received + range_length:
            received = 0
            if bar:
                bar.received = 0
            open_mode = 'wb'

        with open(temp_filepath, open_mode) as output:
            while True:
                buffer = None
                try:
                    buffer = response.read(1024 * 256)
                except socket.timeout:
                    pass
                if not buffer:
                    if received == file_size:  # Download finished
                        break
                    # Unexpected termination. Retry request from the
                    # current offset.
                    tmp_headers['Range'] = 'bytes=' + str(received) + '-'
                    response = urlopen_with_retry(
                        request.Request(url, headers=tmp_headers))
                    continue
                output.write(buffer)
                received += len(buffer)
                if bar:
                    bar.update_received(len(buffer))

    assert received == os.path.getsize(temp_filepath), '%s == %s == %s' % (
        received, os.path.getsize(temp_filepath), temp_filepath)

    if os.access(filepath, os.W_OK):
        os.remove(
            filepath
        )  # on Windows rename could fail if destination filepath exists
    os.rename(temp_filepath, filepath)
def async_enable_logging(hass: core.HomeAssistant, verbose: bool=False,
                         log_rotate_days=None) -> None:
    """Set up the logging.

    Configures the root logger, optionally installs colorlog formatting,
    and attaches an async file handler for errors when the error-log path
    (or its directory) is writable.

    This method must be run in the event loop.
    """
    logging.basicConfig(level=logging.INFO)
    fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
           "[%(name)s] %(message)s")
    colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
    datefmt = '%Y-%m-%d %H:%M:%S'

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('urllib3').setLevel(logging.WARNING)
    logging.getLogger('aiohttp.access').setLevel(logging.WARNING)

    try:
        # colorlog is optional; replace the formatter that basicConfig
        # installed on the first (stream) handler.
        from colorlog import ColoredFormatter
        logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
            colorfmt,
            datefmt=datefmt,
            reset=True,
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red',
            }
        ))
    except ImportError:
        # No colorlog installed: keep the plain formatter.
        pass

    # Log errors to a file if we have write access to file or config dir
    err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    err_path_exists = os.path.isfile(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
            (not err_path_exists and
             os.access(hass.config.config_dir, os.W_OK)):
        if log_rotate_days:
            # Rotate at midnight, keeping log_rotate_days backups.
            err_handler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when='midnight', backupCount=log_rotate_days)
        else:
            # delay=True: don't create/truncate the file until first write.
            err_handler = logging.FileHandler(
                err_log_path, mode='w', delay=True)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))

        # Wrap the file handler so writes happen off the event loop.
        async_handler = AsyncHandler(hass.loop, err_handler)

        @asyncio.coroutine
        def async_stop_async_handler(event):
            """Cleanup async handler."""
            logging.getLogger('').removeHandler(async_handler)
            yield from async_handler.async_close(blocking=True)

        hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_CLOSE, async_stop_async_handler)

        logger = logging.getLogger('')
        logger.addHandler(async_handler)
        logger.setLevel(logging.INFO)
    else:
        _LOGGER.error(
            "Unable to setup error log %s (access denied)", err_log_path)
def assert_file(file):
    """Return True when *file* exists (plain os.F_OK existence check).

    Despite the name, this does not raise -- it simply reports existence.
    """
    exists = os.access(file, os.F_OK)
    return exists
        else:
            # No explicit remote path configured: stage the script in /tmp.
            # NOTE(review): this `else` closes an `if` from the part of
            # __init__ that lies above this chunk -- indentation inferred.
            self.remote_path = '/tmp/{}'.format(self.script_name)

    def run(self):
        # Upload the local script (preserving its mode bits) and execute it
        # remotely, with sudo when the task was flagged as privileged.
        fabric.api.put(self.script_path, self.remote_path,
                       mirror_local_mode=True, use_sudo=self.sudo)
        if self.sudo:
            fabric.api.sudo(self.remote_path)
        else:
            fabric.api.run(self.remote_path)


# Auto-register one ScriptTask per executable file in SCRIPT_DIR, exposing
# each as a module-level name so fabric discovers them as tasks.
__all__ = []
tasks = {}

for filename in os.listdir(SCRIPT_DIR):
    if filename in [
        "__init__.py",
    ]:
        continue
    filename = os.path.join(SCRIPT_DIR, filename)
    # Only executable regular files become tasks.
    if os.path.isfile(filename) and os.access(filename, os.X_OK):
        # SUDO_PREFIX is a regex (defined above this chunk) marking
        # tasks that must run under sudo.
        task_is_sudo = bool(SUDO_PREFIX.search(filename))
        task = ScriptTask(filename, sudo=task_is_sudo)
        tasks[task.name] = task

# Python 2 (iteritems): publish every task into the module namespace.
for task_name, task in tasks.iteritems():
    globals()[task_name] = task
    __all__.append(task_name)