def get_sys_path(rcpath, app_name, section_name=None):
    """Return a folder path if it exists.

    First checks whether `rcpath` is an existing system path; if so, returns
    it expanded and absolute. Otherwise looks up `rcpath` as a variable name
    in the `app_name` rcfiles, or exclusively within `section_name` if given.

    Parameters
    ----------
    rcpath: str
        Existing folder path, or the name of a variable in the `app_name`
        rcfile whose value is an existing path.

    app_name: str
        Name of the application whose rcfile configuration files are
        searched.

    section_name: str
        Name of a section in the `app_name` rcfile to search exclusively
        for variable names.

    Returns
    -------
    sys_path: str
        An expanded, absolute file or folder path, if the path exists.

    Raises
    ------
    IOError
        If the proposed sys_path does not exist.
    """
    # first check if it is an existing path
    if op.exists(rcpath):
        return op.realpath(op.expanduser(rcpath))

    # look for the rcfile
    settings = rcfile(app_name, section_name)

    # look for the variable within the rcfile configurations
    try:
        sys_path = op.expanduser(settings[rcpath])
    except KeyError:
        raise IOError('Could not find an existing variable with name {0} in'
                      ' section {1} of {2}rc config setup. Maybe it is a'
                      ' folder that could not be found.'.format(rcpath,
                                                                section_name,
                                                                app_name))
    # found the variable, now check if it is an existing path
    else:
        if not op.exists(sys_path):
            raise IOError('Could not find the path {3} indicated by the '
                          'variable {0} in section {1} of {2}rc config '
                          'setup.'.format(rcpath, section_name, app_name,
                                          sys_path))

        # expand the path and return
        return op.realpath(op.expanduser(sys_path))
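
# A minimal usage sketch for get_sys_path above (app and variable names are
# hypothetical): the first call resolves an existing path directly, the
# second looks up a variable named 'data_dir' in the 'myapp' rcfile.
base = get_sys_path('/tmp', 'myapp')
data = get_sys_path('data_dir', 'myapp', section_name='paths')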
def get_field(field, directory, partitions, mesh, surface="average"):
    """
    Get the values of a field from Elmer's output on either the top or
    bottom surface, or averaged throughout a vertical column.

    Parameters:
    ==========
    field:      name of the field to get, e.g. "beta", "pressure", "velod 1"
    directory:  path to the files output by Elmer
    partitions: the number of partitions of the Elmer mesh
    mesh:       a matplotlib.tri object encapsulating the original Triangle
                mesh used to generate the Elmer mesh
    surface:    either "top", "bottom" or "average"; the layer we want to
                get the field from

    Outputs:
    =======
    q: the desired field, reconciled to the node ordering of `mesh`
    """
    filename = "Test_Robin_Beta.result"
    data = get_variable(field, expanduser(directory),
                        expanduser(filename), partitions)
    x, y, q = get_layer(data, surface)
    permutation = reconcile_elmer_with_mesh(mesh.x, mesh.y, x, y)
    return q[permutation]
def load_config(usr_cfg, def_cfg=None):
    cfg = ConfigObj()
    cfg.merge(ConfigObj(def_cfg, interpolation=False))
    cfg.merge(ConfigObj(expanduser(usr_cfg), interpolation=False,
                        encoding='utf-8'))
    cfg.filename = expanduser(usr_cfg)
    return cfg
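
# A minimal usage sketch (file names are hypothetical): user values override
# the defaults, and ConfigObj.write() saves the merged result back to the
# user's file, since cfg.filename points there.
cfg = load_config('~/.config/myapp/config.ini', def_cfg='defaults.ini')
cfg['editor'] = 'vim'
cfg.write()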
def __init__(self, base):
    self.base = base
    self.cfg = [expanduser("~/.config/nitrogen/bg-saved.cfg"),
                expanduser("~/.config/nitrogen/nitrogen.cfg")]
    # This is the path to where crunchbox stores all this plugin's cfg
    self.class_name = self.__class__.__name__
    self.plugin_cfg_dir = expanduser("~/.config/crunchbox/configs/"
                                     + self.class_name)
def config_location():
    if platform.system() == 'Windows':
        return os.getenv('USERPROFILE') + '\\AppData\\Local\\dbcli\\pgcli\\'
    elif 'XDG_CONFIG_HOME' in os.environ:
        return '%s/pgcli/' % expanduser(os.environ['XDG_CONFIG_HOME'])
    else:
        return expanduser('~/.config/pgcli/')
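
# A minimal sketch of how the directory returned above is typically consumed
# (the 'config' file name is an assumption, not taken from the source):
config_file = os.path.join(config_location(), 'config')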
def __init__(self):
    # http://standards.freedesktop.org/basedir-spec/latest/ar01s03.html
    self.app_dir = join(getenv('XDG_DATA_HOME',
                               expanduser('~/.local/share')), appname)
    if not isdir(self.app_dir):
        makedirs(self.app_dir)

    self.plugin_dir = join(self.app_dir, 'plugins')
    if not isdir(self.plugin_dir):
        mkdir(self.plugin_dir)

    self.home = expanduser('~')
    self.respath = dirname(__file__)
    self.filename = join(getenv('XDG_CONFIG_HOME', expanduser('~/.config')),
                         appname, '%s.ini' % appname)
    if not isdir(dirname(self.filename)):
        makedirs(dirname(self.filename))

    self.config = RawConfigParser()
    try:
        self.config.readfp(codecs.open(self.filename, 'r', 'utf-8'))
    except:
        self.config.add_section('config')

    if not self.get('outdir') or not isdir(self.get('outdir')):
        self.set('outdir', expanduser('~'))
def read_process_write(self, input, input_filename, output_filename):
    """Reads input, executes any processing and writes output."""
    # Read input if needed.
    if input is None and not self._no_input:
        if input_filename is None:
            raise RuntimeError('No file to read from.')
        input_filename = self.input_root + input_filename
        input_filename = path.expanduser(input_filename)
        logger.info("%s reading data from file %s."
                    % (self.__class__.__name__, input_filename))
        input = self.read_input(input_filename)

    # Analyse.
    if self._no_input:
        if input is not None:
            # This should never happen.  Just here to catch bugs.
            raise RuntimeError("Somehow `input` was set.")
        output = self.process()
    else:
        output = self.process(input)

    # Write output if needed.
    if self.output_root != 'None' and output is not None:
        if output_filename is None:
            raise RuntimeError('No file to write to.')
        output_filename = self.output_root + output_filename
        output_filename = path.expanduser(output_filename)
        logger.info("%s writing data to file %s."
                    % (self.__class__.__name__, output_filename))
        output_dirname = os.path.dirname(output_filename)
        if not os.path.isdir(output_dirname):
            os.makedirs(output_dirname)
        self.write_output(output_filename, output)

    return output
def load_config(config_path=DEFAULT_CONFIG):
    logger = logging.getLogger(__name__)
    try:
        config = yaml.load(open(expanduser(config_path)))
    except IOError as e:
        if e.errno == errno.ENOENT and config_path == DEFAULT_CONFIG:
            if isfile(expanduser('~/.ntfy.json')):
                logger.error('~/.ntfy.json no longer supported, '
                             'use {}'.format(DEFAULT_CONFIG))
            logger.info('{} not found'.format(config_path))
            config = {}
        else:
            logger.error('Failed to open {}'.format(config_path),
                         exc_info=True)
            exit(1)
    except ValueError as e:
        logger.error('Failed to load {}'.format(config_path), exc_info=True)
        exit(1)

    if 'backend' in config:
        logger.warning(
            "The 'backend' config option is deprecated, use 'backends'")
        if 'backends' in config:
            logger.warning("Both 'backend' and 'backends' in config, "
                           "ignoring 'backend'.")
        else:
            config['backends'] = [config['backend']]

    return config
def link(target, lnk, force=False):
    """
    Creates symbolic link 'lnk' pointing to 'target'.
    """
    if system() not in ('Linux', 'Windows', 'MSYS_NT-6.1'):
        print("{} operating system is not supported.".format(system()))
        return

    isdir = False
    lnk = path.normpath(path.expandvars(path.expanduser(lnk)))

    if path.isdir(target):
        isdir = True
    target = path.normpath(path.expandvars(path.expanduser(target)))

    if isdir:
        print("\n{} -> {} : DIR".format(lnk, target))
    else:
        print("\n{} -> {} : FILE".format(lnk, target))

    if path.isdir(lnk) or path.isfile(lnk):
        if not force:
            print("'{}': link exists".format(lnk))
            return
        else:
            remove(lnk)

    if system() in ('Linux', 'MSYS_NT-6.1'):
        Popen(['ln', '-s', target, lnk]).wait()
    elif system() == 'Windows':
        if isdir:
            CreateSymbolicLink(lnk, target, 1)
        else:
            CreateSymbolicLink(lnk, target, 0)
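
# A minimal usage sketch (the dotfile paths are hypothetical); force=True
# removes an existing file or link at the destination before re-linking:
link('~/dotfiles/vimrc', '~/.vimrc', force=True)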
def find_config_files(metadata_or_path, additional_files=None,
                      ignore_system_config=False,
                      exclusive_config_files=None):
    """Find files to load variables from.  Note that order here determines
    clobbering.  Later files clobber earlier ones.  order is
    user-wide < cwd < recipe dir < additional files"""
    files = [
        os.path.abspath(os.path.expanduser(config_file))
        for config_file in (exclusive_config_files or [])
    ]

    if not ignore_system_config and not exclusive_config_files:
        if cc_conda_build.get('config_file'):
            system_path = abspath(expanduser(expandvars(
                cc_conda_build['config_file'])))
        else:
            system_path = os.path.join(expanduser('~'),
                                       "conda_build_config.yaml")
        if os.path.isfile(system_path):
            files.append(system_path)

    cwd = os.path.join(os.getcwd(), 'conda_build_config.yaml')
    if os.path.isfile(cwd):
        files.append(cwd)

    if hasattr(metadata_or_path, 'path'):
        recipe_config = os.path.join(metadata_or_path.path,
                                     "conda_build_config.yaml")
    else:
        recipe_config = os.path.join(metadata_or_path,
                                     "conda_build_config.yaml")
    if os.path.isfile(recipe_config):
        files.append(recipe_config)

    if additional_files:
        files.extend([os.path.expanduser(additional_file)
                      for additional_file in additional_files])

    return files
def connect_key(hostname, username, filename):
    import paramiko
    import os.path

    my_hostname = hostname
    my_username = username
    my_keyfile = filename

    if "~" in my_keyfile:
        from os.path import expanduser
        home = expanduser("~")
        my_keyfile = my_keyfile.rstrip().replace("~", home)

    if "$HOME" in my_keyfile:
        # expanduser() does not resolve environment variables;
        # expandvars() does.
        from os.path import expandvars
        my_keyfile = expandvars(my_keyfile.rstrip())

    if not os.path.isfile(my_keyfile):
        raise ValueError('Key file not found')

    sshcon = paramiko.SSHClient()
    # no known_hosts error
    sshcon.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    sshcon.connect(my_hostname, username=my_username,
                   key_filename=my_keyfile)
    return sshcon
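
# A minimal usage sketch (host, user and key path are hypothetical):
client = connect_key('gateway.example.com', 'deploy', '~/.ssh/id_rsa')
stdin, stdout, stderr = client.exec_command('uptime')
print(stdout.read().decode())
client.close()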
def main():
    from sys import argv
    if len(argv) < 4:
        print "Insufficient arguments!"
        print "Proper Usage: %s [db_path] [experiment_path] [offset_file]" % argv[0]
        return

    db_path = path.expanduser(argv[1])
    db_man = io.ImageDbManager(db_path, readonly=False)

    experiment_path = path.expanduser(argv[2])
    meta_man = io.MetadataManager(experiment_path)

    offset_path = path.expanduser(argv[3])
    offsets = json.load(open(offset_path))

    for i, offset in enumerate(offsets):
        print "Updating image %d/%d" % (i, len(offsets))
        key = offset['vsi_path'][2:]
        try:
            image = db_man.get_image(key)
            image.region_map_offset = offset['pad_size']

            metadata = meta_man.get_entry_by_attribute('vsiPath', key)
            region_map = io.load_mhd(path.join(
                experiment_path, metadata['registeredAtlasLabelsPath']))[0]
            hemisphere_map = io.load_mhd(path.join(
                experiment_path,
                metadata['registeredHemisphereLabelsPath']))[0]

            image.region_map = numpy.rot90(region_map, k=2)
            image.hemisphere_map = numpy.rot90(hemisphere_map, k=2)
            db_man.add_image(image)
        except:
            print "Failed to update image with key: %s" % key
def pad_update(self):
    modified = bool(int(vim.eval("b:pad_modified")))
    if modified:
        old_path = expanduser(vim.current.buffer.name)
        new_path = expanduser(join(self.save_dir, pad_timestamp()))
        vim.command("bw")
        move(old_path, new_path)
def test_default_directories3(alfred3):
    """Default directories (Alfred 3)"""
    from os.path import expanduser
    _test_default_directories(
        expanduser('~/Library/Application Support/Alfred 3/Workflow Data/'),
        expanduser('~/Library/Caches/com.runningwithcrayons.Alfred-3/'
                   'Workflow Data/'))
def shell(self):
    if hasattr(self, 'services'):
        self.services.start_interactive_mode()

    import sys
    import time
    time.sleep(0.5)

    if path.exists(path.join(
            path.expanduser("~"),
            ".ipython/profile_default/security/",
            self.ipython_kernel_filename)):
        call([
            sys.executable, "-m", "IPython", "console",
            "--existing", "--no-confirm-exit",
            path.join(
                path.expanduser("~"),
                ".ipython/profile_default/security/",
                self.ipython_kernel_filename
            )
        ])
    else:
        call([
            sys.executable, "-m", "IPython", "console",
            "--existing", self.ipython_kernel_filename
        ])

    self.services.stop_interactive_mode()
def main(argv):
    manifestPath = op.expanduser(argv[1])
    moduleDepPath = op.expanduser(argv[2])
    otbDir = op.expanduser(argv[3])
    appManifest = argv[4]
    csvAppDepends = argv[5]
    # app_dir = op.join(otbDir, "Applications")

    # Standard manifest parsing: extract simple and full dependencies
    [groups, moduleList, sourceList] = \
        manifestParser.parseManifest(manifestPath)
    depList = manifestParser.parseDependList(moduleDepPath)
    fullDepList = manifestParser.buildFullDep(depList)

    [appGroups, appModuleList, appSourceList] = \
        manifestParser.parseManifest(appManifest)

    # add application sources to sourceList
    for item in appSourceList:
        sourceList[item] = appSourceList[item]

    appDependsList = manifestParser.buildSimpleDep(otbDir, appModuleList,
                                                   sourceList)
    # manifestParser.printDepList(appDependsList)
    manifestParser.outputCSVEdgeList(appDependsList, csvAppDepends)
def populate_store(self, store):
    """
    @description: load the treeview from the google2ubuntu.xml file or
    from the default.xml file

    @param: store the listStore that will be modified
    """
    config = expanduser('~') + '/.config/google2ubuntu/google2ubuntu.xml'
    path = os.path.dirname(os.path.abspath(__file__)).strip('librairy')
    lang = locale.getlocale()[0]
    if os.path.isdir(path + 'config/' + lang) == False:
        lang = 'en_EN'
    default = path + 'config/' + lang + '/default.xml'

    try:
        if os.path.isfile(config):
            tree = ET.parse(config)
        else:
            if os.path.exists(expanduser('~')
                              + '/.config/google2ubuntu') == False:
                os.makedirs(expanduser('~') + '/.config/google2ubuntu')
            os.system('cp -r /usr/share/google2ubuntu/modules '
                      + expanduser('~') + '/.config/google2ubuntu')
            tree = ET.parse(default)

        root = tree.getroot()
        for entry in root.findall('entry'):
            Type = entry.get('name')
            Key = entry.find('key').text
            Command = entry.find('command').text
            store.append([Key, Command, Type])
    except Exception:
        print 'Error while reading config file'
def loggerInit():
    debuglog = logging.getLogger('GGPODebug')
    debuglog.setLevel(logging.INFO)
    fh = logging.handlers.RotatingFileHandler(
        os.path.join(expanduser("~"), 'fightcade-debug.log'),
        mode='a', maxBytes=500000, backupCount=10)
    if Settings.value(Settings.DEBUG_LOG):
        fh.setLevel(logging.INFO)
    else:
        fh.setLevel(logging.ERROR)
    ch = logging.StreamHandler()
    ch.setLevel(logging.ERROR)
    debuglog.addHandler(fh)
    debuglog.addHandler(ch)

    def handle_exception(exc_type, exc_value, exc_traceback):
        if issubclass(exc_type, KeyboardInterrupt):
            sys.__excepthook__(exc_type, exc_value, exc_traceback)
            return
        debuglog.error("<Uncaught exception>",
                       exc_info=(exc_type, exc_value, exc_traceback))

    sys.excepthook = handle_exception

    if __name__ == "__main__":
        raise RuntimeError("Test unhandled")

    userlog = logging.getLogger('GGPOUser')
    userlog.setLevel(logging.INFO)
    fh = logging.handlers.RotatingFileHandler(
        os.path.join(expanduser("~"), 'fightcade.log'),
        mode='a', maxBytes=500000, backupCount=10)
    fh.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s - %(message)s',
                                  "%Y-%m-%d %H:%M")
    fh.setFormatter(formatter)
    userlog.addHandler(fh)
def get_drives():
    drives = []
    if platform == 'win':
        bitmask = windll.kernel32.GetLogicalDrives()
        GetVolumeInformationW = windll.kernel32.GetVolumeInformationW
        for letter in string.ascii_uppercase:
            if bitmask & 1:
                name = create_unicode_buffer(64)
                # get name of the drive
                drive = letter + u':'
                res = GetVolumeInformationW(drive + sep, name, 64,
                                            None, None, None, None, 0)
                drives.append((drive, name.value))
            bitmask >>= 1
    elif platform == 'linux':
        drives.append((sep, sep))
        drives.append((expanduser(u'~'), '~/'))
        places = (sep + u'mnt', sep + u'media')
        for place in places:
            if isdir(place):
                for directory in walk(place).next()[1]:
                    drives.append((place + sep + directory, directory))
    elif platform == 'macosx' or platform == 'ios':
        drives.append((expanduser(u'~'), '~/'))
        vol = sep + u'Volume'
        if isdir(vol):
            for drive in walk(vol).next()[1]:
                drives.append((vol + sep + drive, drive))
    return drives
def find_executable(executable):
    # dir_paths is referenced as a module-level variable in other code
    global dir_paths
    if sys.platform == 'win32':
        dir_paths = [join(config.build_prefix, 'Scripts'),
                     join(config.build_prefix, 'Library\\bin'),
                     join(cc.root_dir, 'Scripts'),
                     join(cc.root_dir, 'Library\\bin'),
                     'C:\\cygwin\\bin']
    else:
        dir_paths = [join(config.build_prefix, 'bin'),
                     join(cc.root_dir, 'bin')]

    dir_paths.extend(os.environ['PATH'].split(os.pathsep))

    for dir_path in dir_paths:
        if sys.platform == 'win32':
            for ext in '.exe', '.bat', '':
                path = join(dir_path, executable + ext)
                if isfile(path):
                    return path
        else:
            path = join(dir_path, executable)
            if isfile(expanduser(path)):
                return expanduser(path)
    return None
def convert(source, destination, resolutions, dpi):
    # expand user path in case '~' is used in source / destination
    source = path.expanduser(source)
    destination = path.expanduser(destination)

    # get files in current directory
    files = [path.join(source, file) for file in listdir(source)
             if path.isfile(path.join(source, file))]

    # iterate through each resolution depending on the mode supplied
    for res in resolutions:
        # iterate through all files
        for file in files:
            # load image
            im = Image.open(file)

            # compute target image size in pixels based on RES_MULTIPLIER_MAP
            target_size = (int(dpi * RES_MULTIPLIER_MAP[res]),
                           int(dpi * RES_MULTIPLIER_MAP[res]))

            # resize image; need to keep the returned value
            im = im.resize(target_size, Image.ANTIALIAS)

            # generate output paths complying with the android drawable
            # directory structure; this allows copy-paste
            output_dir_path = path.join(destination,
                                        'drawable-{}'.format(res))
            output_file_path = path.join(output_dir_path,
                                         path.basename(file))
            click.echo(output_dir_path)
            click.echo(output_file_path)

            # create directory
            if not path.exists(output_dir_path):
                makedirs(output_dir_path)

            # save output file as png with 100% quality
            im.save(output_file_path, 'png', quality=100)
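
# A minimal usage sketch (directories are hypothetical, and the resolution
# keys must exist in RES_MULTIPLIER_MAP for this to work):
convert('~/icons/src', '~/icons/out', ['mdpi', 'hdpi', 'xhdpi'], dpi=48)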
def fill_tree(self, fav_list):
    if platform == 'win':
        user_path = expanduser(u'~')
        if not isdir(user_path + sep + 'Desktop'):
            user_path = dirname(user_path) + sep
        else:
            user_path += sep
    else:
        user_path = expanduser(u'~') + sep

    self._favs = self.add_node(TreeLabel(text='Favorites', is_open=True,
                                         no_selection=True))
    self.reload_favs(fav_list)

    if not is_mobile_platform():
        libs = self.add_node(TreeLabel(text='Libraries', is_open=True,
                                       no_selection=True))
        places = ('Documents', 'Music', 'Pictures', 'Videos')
        for place in places:
            if isdir(user_path + place):
                self.add_node(TreeLabel(text=place,
                                        path=user_path + place), libs)

    self._computer_node = self.add_node(TreeLabel(text='Computer',
                                                  is_open=True,
                                                  no_selection=True))
    self._computer_node.bind(on_touch_down=self._drives_touch)
    self.reload_drives()
def __init__(self, pathToMain):
    # Get path on mac
    if platform.system() == 'Darwin':
        # logging.debug('self.pathToMain')
        # logging.debug(self.pathToMain)
        # logging.debug('PWD=')
        # logging.debug(os.environ.get('PWD'))
        # logging.debug('PYTHONPATH=')
        # logging.debug(os.environ.get('PYTHONPATH'))
        # logging.debug('ENVIRON=')
        # logging.debug(os.environ)
        if os.environ.get('PYTHONPATH') != None:
            self.pathToMain = os.environ.get('PYTHONPATH')
        else:
            self.pathToMain = pathToMain
    elif platform.system() == 'Windows' or platform.system() == 'Linux':
        self.pathToMain = pathToMain

    if platform.system() == 'Linux':
        self.pathToSketchbook = \
            expanduser("~").decode('latin1') + '/Arduino'
    elif platform.system() == 'Windows' or platform.system() == 'Darwin':
        self.pathToSketchbook = \
            expanduser("~").decode('latin1') + '/Documents/Arduino'

    self.pathToSketchbook = self.pathToSketchbook.decode('latin1')

    self.pathToArduinoDir = pathToMain + '/res/arduino/'
    self.uploader = Uploader(pathToMain)
    self.compiler = Compiler(pathToMain)

    self.boardSettings = defaultdict(BoardConfig)
    self.parseBoardSettings(self.pathToMain + "/res/boards.txt")
    self.board = 'uno'
    self.port = None
def _configure(self):
    """ Configure the ssh parameters from the config file. """
    configfile = expanduser("~/.ssh/config")
    if not isfile(configfile):
        raise GerritError("ssh config file '%s' does not exist"
                          % configfile)

    config = SSHConfig()
    config.parse(open(configfile))
    data = config.lookup(self.hostname)
    if not data:
        raise GerritError("No ssh config for host %s" % self.hostname)
    if 'hostname' not in data or 'port' not in data or 'user' not in data:
        raise GerritError("Missing configuration data in %s" % configfile)

    self.hostname = data['hostname']
    self.username = data['user']
    if 'identityfile' in data:
        key_filename = abspath(expanduser(data['identityfile'][0]))
        if not isfile(key_filename):
            raise GerritError("Identity file '%s' does not exist"
                              % key_filename)
        self.key_filename = key_filename
    try:
        self.port = int(data['port'])
    except ValueError:
        raise GerritError("Invalid port: %s" % data['port'])
    if 'proxycommand' in data:
        self.proxy = ProxyCommand(data['proxycommand'])
def find_executable(executable, include_others=True):
    # backwards compatibility
    global dir_paths
    if include_others:
        if sys.platform == 'win32':
            dir_paths = [join(sys.prefix, 'Scripts'), 'C:\\cygwin\\bin']
        else:
            dir_paths = [join(sys.prefix, 'bin')]
    else:
        dir_paths = []

    dir_paths.extend(os.environ['PATH'].split(os.pathsep))

    for dir_path in dir_paths:
        if sys.platform == 'win32':
            for ext in ('.exe', '.bat', ''):
                path = join(dir_path, executable + ext)
                if isfile(path):
                    return path
        else:
            path = join(dir_path, executable)
            if isfile(expanduser(path)):
                return expanduser(path)
    return None
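
# A minimal usage sketch: look up a tool by name (the name is illustrative)
# and fail loudly when nothing on the search paths matches.
git = find_executable('git')
if git is None:
    raise RuntimeError('git not found on any search path')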
def _config_files(dointeractive=False):
    from os.path import exists, expanduser, expandvars, dirname, join
    from glob import iglob
    from os import environ

    # pattern to distinguish files to run only in interactive mode.
    # these files are loaded by the pylada-ipython extension itself.
    pattern = "*.py" if not dointeractive else "ipy_*.py"
    # dictionary with stuff we want defined when reading config files.
    global_dict = {"pyladamodules": __all__}
    local_dict = {}

    # first configuration files installed with pylada.
    for filename in iglob(join(join(dirname(__file__), "config"), pattern)):
        if dointeractive == False and filename[:4] == 'ipy_':
            continue
        execfile(filename, global_dict, local_dict)

    # then configuration files installed in a global config directory.
    if "PYLADA_CONFIG_DIR" in environ:
        for directory in environ["PYLADA_CONFIG_DIR"].split(':'):
            for filename in iglob(join(directory, pattern)):
                if dointeractive == False and filename[:4] == 'ipy_':
                    continue
                execfile(filename, global_dict, local_dict)

    # then user configuration file.
    if exists(expandvars(expanduser('~/.pylada'))):
        execfile(expandvars(expanduser('~/.pylada')),
                 global_dict, local_dict)
    return local_dict
def next(self, value, index, region):
    file = self.options.get("file", None)
    folder = self.options.get("folder", None)
    script = self.options.get("script", None)
    sugar = self.options.get("sugar", True)

    if file:
        folder = folder if folder else expanduser("~")
        file = normpath(join(folder, file))
        if isfile(file):
            with open(file, "r") as f:
                script = f.read()
    elif script and sugar:
        if not 'return ' in script and not ';' in script:
            script = "value = " + script

    if not script:
        print('No script found, canceling')
        return None

    script = ('var result=(function(value, index, begin, end)'
              '{{{SCRIPT};return value;}}({VALUE}, {INDEX}, {BEGIN}, {END}));'
              "process.stdout.write('' + result);").format(
        SCRIPT=script,
        VALUE=json.dumps(value),
        INDEX=index,
        BEGIN=region.a,
        END=region.b
    )

    cmd = "/usr/local/bin/node"
    cwd = expanduser("~")
    print('Running nodejs script:', script)
    proc = subprocess.Popen([cmd, '-e', script], cwd=cwd,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    result = proc.communicate()[0]
    if proc.returncode == 0:
        print('script result:', result.decode('UTF-8'))
        return result.decode('UTF-8')
    else:
        print('error while processing script:', result.decode('UTF-8'))
        return None
def __make_icon_osx():
    lisa_shortcut = op.expanduser("~/Desktop/lisa")
    if not os.path.exists(lisa_shortcut):
        with open(lisa_shortcut, 'w') as outfile:
            outfile.write(
                "#!/bin/bash\n"
                "export PATH=$HOME/miniconda2/bin:$HOME/anaconda2/bin:"
                "$HOME/miniconda/bin:$HOME/anaconda/bin:$PATH\n"
                "lisa"
            )
        os.chmod(lisa_shortcut,
                 stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH |
                 stat.S_IRUSR | stat.S_IRGRP | stat.S_IXOTH |
                 stat.S_IWUSR | stat.S_IWGRP)

    lisa_icon_path = op.expanduser("~/lisa_data/.lisa/LISA256.icns")
    if not os.path.exists(lisa_icon_path):
        try:
            download_file(
                # wget.download(
                "https://raw.githubusercontent.com/mjirik/lisa/master/applications/LISA256.icns",
                filename=lisa_icon_path
                # out=lisa_icon_path
            )
        except:
            logger.warning('logo download failed')
            pass
def complete_files(self, text, state):
    str_delim = text[0]
    path = text[1:]
    if path.startswith("~/"):
        path = expanduser("~/") + path[2:]
    elif path.startswith("~"):
        i = path.find(pathsep)
        if i > 0:
            path = expanduser(path[:i]) + path[i:]
        else:
            return [
                str_delim + "~" + i[0] + pathsep
                for i in getpwall()
                if i[0].startswith(path[1:])
            ][state]

    dir, fname = splitpath(path)
    if not dir:
        dir = os.curdir
    return [
        str_delim + joinpath(dir, i)
        for i in os.listdir(dir)
        if i.startswith(fname)
    ][state]
def parse_config_file(text):
    config = configparser.RawConfigParser()

    # default options
    config.add_section("Save")
    config.set("Save", "magnets", "false")
    config.set("Save", "torrents", "false")
    config.set("Save", "directory", os.getcwd())

    config.add_section("LocalDB")
    config.set("LocalDB", "enabled", "false")
    config.set("LocalDB", "path", expanduser("~/downloads/pirate-get/db"))

    config.add_section("Misc")
    # TODO: try to use
    # https://docs.python.org/3/library/configparser.html#configparser.BasicInterpolation
    # for interpolating in the command
    config.set("Misc", "openCommand", "")
    config.set("Misc", "transmission", "false")
    config.set("Misc", "colors", "true")

    config.read_string(text)

    # expand env variables
    directory = expanduser(expandvars(config.get("Save", "Directory")))
    path = expanduser(expandvars(config.get("LocalDB", "path")))

    config.set("Save", "Directory", directory)
    config.set("LocalDB", "path", path)

    return config
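
# A minimal usage sketch: defaults are registered before read_string(), so
# user text only overrides the keys it names (the ini text is hypothetical).
cfg = parse_config_file("[Save]\ndirectory = $HOME/torrents\n")
print(cfg.get("Save", "directory"))  # env vars and '~' expanded
print(cfg.get("Misc", "colors"))     # untouched default: "true"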
def restore_skillset(self, sess):
    for skill in self.skillset:
        skill.restore_skill(path=osp.expanduser(skill.restore_path),
                            sess=sess)
search_flag = False
last_flag = False
number = 1
point_count1 = 0
count = 0
count1 = 0
count2 = 0
count3 = 0
stat2 = 0
stat3 = 0
find_flag = False
patrol_targetFind_flag = False
tic_flag = False
point = PoseStamped()
point_count = 0
home = expanduser("~")
way_last = len(os.walk("%s/owayeol/map23/path1" % (home)).next()[2])
baglist = [0, ]
robot2_waynum = 0
robot3_waynum = 0
total_list1 = [3, 2, 1, 7, 8, 9]
total_list2 = [4, 5, 6, 13, 12, 11, 10]
stop_flag3 = 1
xangle = 0
pub11 = rospy.Publisher('/robot3/cmd_vel', Twist, queue_size=10)
twist = Twist()
twist.linear.x = 0.0
twist.linear.y = 0.0
twist.linear.z = 0.0
def __init__(self):
    self.youtube_manager = Youtube_manager()
    self.gui_manager = Gui_manager()
    self.FOLDER_SAVE = expanduser("~/{name}-{key}.mp3")
    self.TEMP_FILE = expanduser("~/{name}.m4a")
    self.run()
def handle(self, app_or_project, name, target=None, **options):
    self.app_or_project = app_or_project
    self.paths_to_remove = []
    self.verbosity = options['verbosity']

    self.validate_name(name, app_or_project)

    # if some directory is given, make sure it's nicely expanded
    if target is None:
        top_dir = path.join(os.getcwd(), name)
        try:
            os.makedirs(top_dir)
        except FileExistsError:
            raise CommandError("'%s' already exists" % top_dir)
        except OSError as e:
            raise CommandError(e)
    else:
        top_dir = os.path.abspath(path.expanduser(target))
        if not os.path.exists(top_dir):
            raise CommandError("Destination directory '%s' does not "
                               "exist, please create it first." % top_dir)

    extensions = tuple(handle_extensions(options['extensions']))
    extra_files = []
    for file in options['files']:
        extra_files.extend(map(lambda x: x.strip(), file.split(',')))
    if self.verbosity >= 2:
        self.stdout.write("Rendering %s template files with "
                          "extensions: %s\n"
                          % (app_or_project, ', '.join(extensions)))
        self.stdout.write("Rendering %s template files with "
                          "filenames: %s\n"
                          % (app_or_project, ', '.join(extra_files)))

    base_name = '%s_name' % app_or_project
    base_subdir = '%s_template' % app_or_project
    base_directory = '%s_directory' % app_or_project
    camel_case_name = 'camel_case_%s_name' % app_or_project
    camel_case_value = ''.join(x for x in name.title() if x != '_')

    context = Context(
        {
            **options,
            base_name: name,
            base_directory: top_dir,
            camel_case_name: camel_case_value,
            'docs_version': get_docs_version(),
            'server_version': server.__version__,
        },
        autoescape=False)

    # Setup a stub settings environment for template rendering
    if not settings.configured:
        settings.configure()
        server.setup()

    template_dir = self.handle_template(options['template'], base_subdir)
    prefix_length = len(template_dir) + 1

    for root, dirs, files in os.walk(template_dir):
        path_rest = root[prefix_length:]
        relative_dir = path_rest.replace(base_name, name)
        if relative_dir:
            target_dir = path.join(top_dir, relative_dir)
            if not path.exists(target_dir):
                os.mkdir(target_dir)

        for dirname in dirs[:]:
            if dirname.startswith('.') or dirname == '__pycache__':
                dirs.remove(dirname)

        for filename in files:
            if filename.endswith(('.pyo', '.pyc', '.py.class')):
                # Ignore some files as they cause various breakages.
                continue
            old_path = path.join(root, filename)
            new_path = path.join(top_dir, relative_dir,
                                 filename.replace(base_name, name))
            for old_suffix, new_suffix in self.rewrite_template_suffixes:
                if new_path.endswith(old_suffix):
                    new_path = new_path[:-len(old_suffix)] + new_suffix
                    break  # Only rewrite once

            if path.exists(new_path):
                raise CommandError("%s already exists, overlaying a "
                                   "project or app into an existing "
                                   "directory won't replace conflicting "
                                   "files" % new_path)

            # Only render the Python files, as we don't want to
            # accidentally render Server templates files
            if new_path.endswith(extensions) or filename in extra_files:
                with open(old_path, 'r', encoding='utf-8') as template_file:
                    content = template_file.read()
                template = Engine().from_string(content)
                content = template.render(context)
                with open(new_path, 'w', encoding='utf-8') as new_file:
                    new_file.write(content)
            else:
                shutil.copyfile(old_path, new_path)

            if self.verbosity >= 2:
                self.stdout.write("Creating %s\n" % new_path)
            try:
                shutil.copymode(old_path, new_path)
                self.make_writeable(new_path)
            except OSError:
                self.stderr.write(
                    "Notice: Couldn't set permission bits on %s. You're "
                    "probably using an uncommon filesystem setup. No "
                    "problem." % new_path, self.style.NOTICE)

    if self.paths_to_remove:
        if self.verbosity >= 2:
            self.stdout.write("Cleaning up temporary files.\n")
        for path_to_remove in self.paths_to_remove:
            if path.isfile(path_to_remove):
                os.remove(path_to_remove)
            else:
                shutil.rmtree(path_to_remove)
def __init__(self, parent=None):
    QWidget.__init__(self, parent)
    self.mainWindow = parent
    self.resize(500, 150)
    self.setWindowTitle('Load new data')
    self.home = osp.expanduser("~")

    self.outerLayout = QVBoxLayout()

    # Image
    self.imageLayout = QHBoxLayout()
    self.form_layout_image = QFormLayout()
    self.ImagePath = QLineEdit(self)
    self.form_layout_image.addRow('Image:', self.ImagePath)
    self.openButtonImage = QPushButton('Browse', self)
    self.openButtonImage.clicked.connect(self.clickedOpenImage)
    self.imageLayout.addLayout(self.form_layout_image)
    self.imageLayout.addWidget(self.openButtonImage)

    # Localisations
    self.localizationLayout = QHBoxLayout()
    self.form_layout_loc = QFormLayout()
    self.LocalisationPath = QLineEdit(self)
    self.form_layout_loc.addRow('Localizations:', self.LocalisationPath)
    self.openButtonLoc = QPushButton('Browse', self)
    self.openButtonLoc.clicked.connect(self.clickedOpenLocalizations)
    self.localizationLayout.addLayout(self.form_layout_loc)
    self.localizationLayout.addWidget(self.openButtonLoc)

    self.outerLayout.addLayout(self.imageLayout)
    self.outerLayout.addLayout(self.localizationLayout)

    # Make the check boxes for the file types
    self.groupBoxLocalisations = QGroupBox("Localisations")
    self.radioLoc1 = QRadioButton("&RapidSTORM")
    # self.radioLoc2 = QRadioButton("&Peakselector")
    self.radioLoc4 = QRadioButton("&XYT")
    self.radioLoc1.setChecked(True)

    self.vboxLoc = QVBoxLayout()
    self.vboxLoc.addWidget(self.radioLoc1)
    # self.vboxLoc.addWidget(self.radioLoc2)
    self.vboxLoc.addWidget(self.radioLoc4)
    self.vboxLoc.addStretch(1)

    # Add the field for the pixel size in nm
    self.pixelSizeLayout = QFormLayout()
    self.pixelSize = QLineEdit(self)
    self.CountsPerPhoton = QLineEdit(self)
    self.pixelSize.setPlaceholderText("100")
    self.CountsPerPhoton.setPlaceholderText("1")
    self.pixelSizeLayout.addRow('Pixel size (in nm):', self.pixelSize)
    self.pixelSizeLayout.addRow('Counts per photon:', self.CountsPerPhoton)
    self.vboxLoc.addLayout(self.pixelSizeLayout)

    self.groupBoxLocalisations.setLayout(self.vboxLoc)

    self.vboxButtons = QHBoxLayout()
    self.vboxButtons.addWidget(self.groupBoxLocalisations)
    self.vboxButtons.addStretch(1)
    self.outerLayout.addLayout(self.vboxButtons)
    self.outerLayout.addStretch(1)

    # Create and add the label to show the close and open buttons
    self.buttonLayout = QHBoxLayout()
    self.buttonLayout.addStretch(1)
    self.openButton = QPushButton('&Open', self)
    self.openButton.clicked.connect(self.clickedOpen)
    self.closeButton = QPushButton('&Cancel', self)
    self.closeButton.clicked.connect(self.reject)
    self.buttonLayout.addWidget(self.openButton)
    self.buttonLayout.addWidget(self.closeButton)
    self.outerLayout.addLayout(self.buttonLayout)

    self.setLayout(self.outerLayout)
#!/usr/bin/env python
# coding=utf8
from itchat.content import *
import json
import os
from os.path import expanduser
import time

MSG_STORAGE_HOME = '%s/weixinmsg' % (expanduser("~"))


def handle_friend_msg(nick_name, msg):
    msg_type = msg['Type']
    if msg_type == TEXT:
        save_text_msg(nick_name, msg)
    elif (msg_type == PICTURE or msg_type == RECORDING
          or msg_type == VIDEO or msg_type == ATTACHMENT):
        msg = download_multi_media_msg(nick_name, msg)
        save_text_msg(nick_name, msg)
    else:
        print "NOT INTERESTED IN MESSAGE OF TYPE: %s" % msg_type


def handle_mp_msg(nick_name, msg):
    return


def handle_chatroom_msg(nick_name, msg):
    return


def save_text_msg(fromUser, msg):
    home = expanduser("~")
    text_msg_home = '%s/%s' % (MSG_STORAGE_HOME, fromUser)
    if not os.path.isdir(text_msg_home):
        # body truncated in the source; creating the directory is the
        # likely intent here (an assumption, not taken from the original)
        os.makedirs(text_msg_home)
print("namespace gem5") print("{") print() print("std::set<std::string> version_tags = {") for tag in Upgrader.tag_set: print(" \"{}\",".format(tag)) print("};") print() print("} // namespace gem5") exit(0) elif not args.checkpoint: parser.error("You must specify a checkpoint file to modify or a " "directory of checkpoints to recursively update") # Deal with shell variables and ~ path = osp.expandvars(osp.expanduser(args.checkpoint)) # Process a single file if we have it if osp.isfile(path): process_file(path, **vars(args)) # Process an entire directory elif osp.isdir(path): cpt_file = osp.join(path, 'm5.cpt') if args.recurse: # Visit very file and see if it matches for root,dirs,files in os.walk(path): for name in files: if name == 'm5.cpt': process_file(osp.join(root,name), **vars(args)) for dir in dirs: pass
# Don't go further if we generate documentation
if any(name in sys.argv[0] for name in ('sphinx-build', 'autobuild.py')):
    environ['KIVY_DOC'] = '1'
if 'sphinx-build' in sys.argv[0]:
    environ['KIVY_DOC_INCLUDE'] = '1'
if any('nosetests' in arg for arg in sys.argv):
    environ['KIVY_UNITTEST'] = '1'
if any('pyinstaller' in arg.lower() for arg in sys.argv):
    environ['KIVY_PACKAGING'] = '1'

if not environ.get('KIVY_DOC_INCLUDE'):
    # Configuration management
    if 'KIVY_HOME' in environ:
        kivy_home_dir = expanduser(environ['KIVY_HOME'])
    else:
        user_home_dir = expanduser('~')
        if platform == 'android':
            user_home_dir = environ['ANDROID_APP_PATH']
        elif platform == 'ios':
            user_home_dir = join(expanduser('~'), 'Documents')
        kivy_home_dir = join(user_home_dir, '.kivy')

    kivy_config_fn = join(kivy_home_dir, 'config.ini')
    kivy_usermodules_dir = join(kivy_home_dir, 'mods')
    kivy_userexts_dir = join(kivy_home_dir, 'extensions')
    icon_dir = join(kivy_home_dir, 'icon')

    if 'KIVY_NO_CONFIG' not in environ:
        if not exists(kivy_home_dir):
            mkdir(kivy_home_dir)
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument('-g', '--gpu', type=int, required=True,
                        help='gpu id')
    parser.add_argument('--resume', help='checkpoint path')
    # configurations (same configuration as original work)
    # https://github.com/shelhamer/fcn.berkeleyvision.org
    parser.add_argument('--max-iteration', type=int, default=100000,
                        help='max iteration')
    parser.add_argument(
        '--lr', type=float, default=1.0e-12, help='learning rate',
    )
    parser.add_argument(
        '--weight-decay', type=float, default=0.0005, help='weight decay',
    )
    parser.add_argument(
        '--momentum', type=float, default=0.99, help='momentum',
    )
    parser.add_argument(
        '--pretrained-model',
        default=torchfcn.models.FCN32s.download(),
        help='pretrained model of FCN32s',
    )
    args = parser.parse_args()

    args.model = 'FCN16s'
    args.git_hash = git_hash()

    now = datetime.datetime.now()
    args.out = osp.join(here, 'logs', now.strftime('%Y%m%d_%H%M%S.%f'))

    os.makedirs(args.out)
    with open(osp.join(args.out, 'config.yaml'), 'w') as f:
        yaml.safe_dump(args.__dict__, f, default_flow_style=False)

    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)
    cuda = torch.cuda.is_available()

    torch.manual_seed(1337)
    if cuda:
        torch.cuda.manual_seed(1337)

    # 1. dataset

    root = osp.expanduser('~/data/datasets')
    kwargs = {'num_workers': 4, 'pin_memory': True} if cuda else {}
    train_loader = torch.utils.data.DataLoader(
        torchfcn.datasets.SBDClassSeg(root, split='train', transform=True),
        batch_size=1, shuffle=True, **kwargs)
    val_loader = torch.utils.data.DataLoader(
        torchfcn.datasets.VOC2011ClassSeg(root, split='seg11valid',
                                          transform=True),
        batch_size=1, shuffle=False, **kwargs)

    # 2. model

    model = torchfcn.models.FCN16s(n_class=21)
    start_epoch = 0
    start_iteration = 0
    if args.resume:
        checkpoint = torch.load(args.resume)
        model.load_state_dict(checkpoint['model_state_dict'])
        start_epoch = checkpoint['epoch']
        start_iteration = checkpoint['iteration']
    else:
        fcn32s = torchfcn.models.FCN32s()
        state_dict = torch.load(args.pretrained_model)
        try:
            fcn32s.load_state_dict(state_dict)
        except RuntimeError:
            fcn32s.load_state_dict(state_dict['model_state_dict'])
        model.copy_params_from_fcn32s(fcn32s)
    if cuda:
        model = model.cuda()

    # 3. optimizer

    optim = torch.optim.SGD(
        [
            {'params': get_parameters(model, bias=False)},
            {'params': get_parameters(model, bias=True),
             'lr': args.lr * 2, 'weight_decay': 0},
        ],
        lr=args.lr,
        momentum=args.momentum,
        weight_decay=args.weight_decay)
    if args.resume:
        optim.load_state_dict(checkpoint['optim_state_dict'])

    trainer = torchfcn.Trainer(
        cuda=cuda,
        model=model,
        optimizer=optim,
        train_loader=train_loader,
        val_loader=val_loader,
        out=args.out,
        max_iter=args.max_iteration,
        interval_validate=4000,
    )
    trainer.epoch = start_epoch
    trainer.iteration = start_iteration
    trainer.train()
from os.path import expanduser

SETTINGS_DIR = expanduser("~") + '/.config/futgui/'
SETTINGS_FILE = SETTINGS_DIR + 'settings.json'
LOGIN_FILE = SETTINGS_DIR + 'login.json'
PLAYERS_FILE = SETTINGS_DIR + 'players.json'
        if self._convert is convert_str:
            return "String"
        if self._convert is convert_bool:
            return "Bool"
        if self._convert is convert_logging:
            return "Log Level"
        if self._convert is convert_str_seq:
            return "List[String]"
        if self._convert is convert_validation:
            return "Validation Level"
        if self._convert is convert_ico_path:
            return "Ico Path"
        raise RuntimeError("unreachable")


_config_user_locations: Sequence[str] = (
    join(expanduser("~"), ".bokeh", "bokeh.yaml"),
)


class Settings:
    '''

    '''

    _config_override: Dict[str, Any]
    _config_user: Dict[str, Any]
    _config_system: Dict[str, Any]

    def __init__(self) -> None:
        self._config_override = {}
        self._config_user = self._try_load_config(_config_user_locations)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('model_file', help='Model path')
    parser.add_argument('-g', '--gpu', type=int, default=0)
    args = parser.parse_args()

    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)
    model_file = args.model_file

    root = osp.expanduser('~/data/datasets')
    val_loader = torch.utils.data.DataLoader(
        SBDClassSeg(root, split='new_val1', transform=True),
        batch_size=1, shuffle=False,
        num_workers=4, pin_memory=True)

    n_class = len(val_loader.dataset.class_names)

    print(osp.basename(model_file))
    model = FCN8s(n_class=2)
    if torch.cuda.is_available():
        model = model.cuda()
    print('==> Loading %s model file: %s' %
          (model.__class__.__name__, model_file))
    model_data = torch.load(model_file)
    try:
        model.load_state_dict(model_data)
    except Exception:
        model.load_state_dict(model_data['model_state_dict'])
    model.eval()

    print('==> Evaluating with VOC2011ClassSeg seg11valid')
    visualizations = []
    label_trues, label_preds = [], []
    roc_prob, roc_label = [], []
    img_id = 1
    for batch_idx, (data, target) in tqdm.tqdm(enumerate(val_loader),
                                               total=len(val_loader),
                                               ncols=80, leave=False):
        if torch.cuda.is_available():
            data, target = data.cuda(), target.cuda()
        data, target = Variable(data, volatile=True), Variable(target)
        score = model(data)

        imgs = data.data.cpu()
        roc_prob.extend(list(
            1 / (1 + np.exp(-score.data.cpu().numpy()[0][1].flatten()))))
        roc_label.extend(list(target.data.cpu().numpy()[0].flatten()))
        # print(target.data.cpu().numpy()[0].flatten().shape)
        lbl_pred = score.data.max(1)[1].cpu().numpy()[:, :, :]
        lbl_true = target.data.cpu()
        for img, lt, lp in zip(imgs, lbl_true, lbl_pred):
            img, lt = val_loader.dataset.untransform(img, lt)
            label_trues.append(lt)
            label_preds.append(lp)
            if img_id % 1 == 0:
                imsave('img/' + str(img_id).zfill(4) + 'orig' + '.png', img)
                imsave('img/' + str(img_id).zfill(4) + 'mask' + '.png', lp)
                # print(str(batch_idx) + ' ' + str(img_id))
            img_id = img_id + 1
            if len(visualizations) < 9:
                viz = fcnutils.visualize_segmentation(
                    lbl_pred=lp, lbl_true=lt, img=img, n_class=n_class,
                    label_names=val_loader.dataset.class_names)
                visualizations.append(viz)

    metrics = utils.label_accuracy_score(label_trues, label_preds,
                                         n_class=n_class)

    print('The length of labels is:')
    print(len(roc_label))
    print(roc_label[:10])
    print(roc_prob[:10])

    '''
    fpr, tpr, thres = roc_curve(roc_label, roc_prob, pos_label=1)
    precision, recall, thres = precision_recall_curve(roc_label, roc_prob,
                                                      pos_label=1)
    print(len(fpr))
    print(len(tpr))

    f_fpr = open('precision.txt', 'wb')
    for x in range(0, len(precision), 1000):
        f_fpr.write(str(precision[x]) + ' ')
    f_fpr.close()

    f_tpr = open('recall.txt', 'wb')
    for x in range(0, len(recall), 1000):
        f_tpr.write(str(recall[x]) + ' ')
    f_tpr.close()

    print('The auc is ')
    print(auc(fpr, tpr))
    '''

    metrics = np.array(metrics)
    metrics *= 100
    print('''\
Accuracy: {0}
Accuracy Class: {1}
Mean IU: {2}
FWAV Accuracy: {3}'''.format(*metrics))

    viz = fcnutils.get_tile_image(visualizations)
    skimage.io.imsave('viz_evaluate.png', viz)
def subprocess_cmd(command):
    process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
    proc_stdout = process.communicate()[0].strip()
    print proc_stdout


if __name__ == "__main__":
    global name
    main(sys.argv[1:])
    command = "/var/lib/libvirt/lxc/{}/".format(name)
    root = command + "root"

    # scp java jdk to lxc root home
    subprocess.call(["sudo", "cp",
                     "{}/jdk-7u79-linux-x64.gz".format(expanduser("~")),
                     "{}".format(root)])
    # untar
    subprocess.call(["chroot", command, "/bin/bash", "-c",
                     "tar xfz jdk-7u79-linux-x64.gz"])
    # append the following lines to bash_profile
    with open("{}/.bash_profile".format(root), "a") as myfile:
        myfile.write("export PATH=/root/jdk1.7.0_79/bin:$PATH \n \
export JAVA_HOME=/root/jdk1.7.0_79")
    # restart bash
    subprocess.call(["chroot", command, "/bin/bash", "-c",
                     "source {}/.bash_profile".format(expanduser("~"))])
    # install dependencies
    subprocess.call(["chroot", command, "/bin/bash", "-c",
                     "yum install -y wget git maven"])
    subprocess.call(["chroot", command, "/bin/bash", "-c",
                     "yum -y update"])
                                                  program_build_date)
    program_shortdesc = __import__('__main__').__doc__.split("\n")[1]
    program_license = '''%s

  Created by Andrés Cancer on %s.
  Copyright 2020 Personal. All rights reserved.

  Licensed under the Apache License 2.0
  http://www.apache.org/licenses/LICENSE-2.0

  Distributed on an "AS IS" basis without warranties
  or conditions of any kind, either express or implied.

USAGE
''' % (program_shortdesc, str(__date__))

    defaultCargoFile = "{}/.config/encfsgui/{}".format(expanduser("~"),
                                                       "encfsgui.yaml")
    defaultConfigDir = "/usr/share/encfsgui"

    # Setup argument parser
    parser = ArgumentParser(description=program_license,
                            formatter_class=RawDescriptionHelpFormatter,
                            epilog=program_version_message)
    parser.add_argument("-d", "--debug", default=False, dest="debug",
                        action="store_true",
                        help="Activate Debug [default: %(default)s]")
    parser.add_argument(
    @property
    @wraps(fn)
    def _lazy_property(self):
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)

    return _lazy_property


# Default proj
wgs84 = pyproj.Proj(proj='latlong', datum='WGS84')

# Path to the cache directory
cache_dir = path.join(path.expanduser('~'), '.salem_cache')
if not path.exists(cache_dir):
    makedirs(cache_dir)
download_dir = path.join(cache_dir, 'downloads')
if not path.exists(download_dir):
    makedirs(download_dir)

sample_data_gh_commit = 'b6d201fd8c228d5a1a6ea97964ef769dfef186ec'
sample_data_dir = path.join(cache_dir,
                            'salem-sample-data-' + sample_data_gh_commit)

# python version
python_version = 'py3'
if sys.version_info.major == 2:
    python_version = 'py2'
def run_docker(dockername, prompt="", dockerConfig=None, sudo=False,
               options=""):
    if not (dockerConfig is None):
        if "su" in dockerConfig:
            sudo = True
        if "options" in dockerConfig and len(options) <= 0:
            options = dockerConfig["options"]

    uid = os.getuid()
    username = getpass.getuser()
    username = username.split()[0]
    groupid = pwd.getpwnam(username).pw_gid
    groupname = grp.getgrgid(groupid).gr_name
    groupname = groupname.split()[0]
    homedir = expanduser("~")
    currentdir = os.path.abspath(os.getcwd())
    mapVolume = "-v " + homedir + ":" + homedir

    if not (dockerConfig is None) and "workdir" in dockerConfig:
        currentdir = dockerConfig["workdir"]
        if "volumes" in dockerConfig:
            for volume, mapping in dockerConfig["volumes"].iteritems():
                if "from" in mapping and "to" in mapping:
                    mapdir = os.path.abspath(mapping["from"])
                    mapVolume += " -v " + mapdir + ":" + mapping["to"]
    else:
        if not (homedir in currentdir):
            mapVolume += " -v " + currentdir + ":" + currentdir
    mapVolume += " --net=host"

    print "Running docker " + dockername + " as Userid: " + str(uid) + \
          "(" + username + "), + Group:" + str(groupid) + \
          "(" + groupname + ") at " + homedir

    dirname = tempfile.mkdtemp()
    wname = os.path.join(dirname, "run.sh")
    fw = open(wname, "w+")
    fw.write("#!/bin/bash\n")
    fw.write("if [ -f /etc/lsb-release ]; then \n")
    fw.write("addgroup --force-badname --gid " + str(groupid) + " "
             + groupname + "\n")
    fw.write("adduser --force-badname --home " + homedir
             + " --shell /bin/bash --no-create-home --uid " + str(uid)
             + " -gecos '' " + username + " --disabled-password --gid "
             + str(groupid) + "\n")
    fw.write("adduser " + username + " sudo\n")
    fw.write("adduser " + username + " docker\n")
    fw.write("fi\n")
    fw.write("if [ -f /etc/redhat-release ]; then \n")
    fw.write("groupadd --gid " + str(groupid) + " " + groupname + "\n")
    fw.write("useradd --home " + homedir
             + " --shell /bin/bash --no-create-home --uid " + str(uid)
             + " " + username + " --password '' --gid "
             + str(groupid) + "\n")
    fw.write("usermod -aG wheel " + username + "\n")
    fw.write("fi\n")
    fw.write("echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers\n")
    fw.write("chmod --recursive 0755 /root\n")
    # Please note: setting HOME environment in docker may nullify additional
    # environment variables, such as GOPATH.
    fw.write("export HOME=" + homedir + "\n")
    fw.write("cd " + currentdir + "\n")
    fw.write("dockerd > /dev/null 2>&1 &\n")
    fw.write("""echo "export PATH=\$PATH:\$GOPATH/bin" | cat >> /etc/bash.bashrc \n""")
    fw.write("""echo "export GOPATH=\$GOPATH" | cat >> /etc/bash.bashrc \n""")
    if not sudo:
        fw.write("su -m " + username + "\n")
    else:
        print "Run in super user mode..."
        fw.write("/bin/bash")
    fw.close()
    os.chmod(wname, 0755)

    if prompt == "":
        hostname = "Docker[" + dockername + "]"
    else:
        hostname = prompt

    # the command line is identical whether or not currentdir lives under
    # the home directory, since the volume mappings were prepared above
    cmd = ("docker run --privileged --hostname " + hostname + " " + options
           + " --rm -ti " + mapVolume + " -v " + dirname
           + ":/tmp/runcommand -w " + homedir + " " + dockername
           + " /tmp/runcommand/run.sh")
    print "Execute: " + cmd
    os.system(cmd)
    long_description=readme,
    author='Ernesto Mendoza Blanco',
    author_email='*****@*****.**',
    install_requires=[
        'requests',
        'responses',
        'pytest',
        'ntplib',
        'pytest-cov',
        'requests[security]'
    ],
    url='https://github.com/MentaNetwork/NetStorageKit-Python',
    download_url='https://github.com/MentaNetwork/NetStorageKit-Python',
    packages=['netstoragekit'],
    package_dir={'netstoragekit': 'netstoragekit'},
    package_data={'': ['*.json.dist']},
    include_package_data=True,
    data_files=[(expanduser('~'),
                 ['netstoragekit_test_credentials.json.dist'])],
    license='MIT License',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Programming Language :: Python']
)
class Config:
    """
    QCoDeS config system

    Start with sane defaults, which you can't change, and then customize
    your experience using files that update the configuration.
    """

    config_file_name = "qcodesrc.json"
    """Name of config file"""
    schema_file_name = "qcodesrc_schema.json"
    """Name of schema file"""

    # get abs path of packge config file
    default_file_name = pkgr.resource_filename(__name__, config_file_name)
    """Filename of default config"""
    current_config_path = default_file_name
    """Path of the last loaded config file"""

    _loaded_config_files = [default_file_name]

    # get abs path of schema file
    schema_default_file_name = pkgr.resource_filename(__name__,
                                                      schema_file_name)
    """Filename of default schema"""

    # home dir, os independent
    home_file_name = expanduser(os.path.join("~", config_file_name))
    """Filename of home config"""
    schema_home_file_name = home_file_name.replace(config_file_name,
                                                   schema_file_name)
    """Filename of home schema"""

    # this is for *nix people
    env_file_name = os.environ.get("QCODES_CONFIG", "")
    """Filename of env config"""
    schema_env_file_name = env_file_name.replace(config_file_name,
                                                 schema_file_name)
    """Filename of env schema"""

    # current working dir
    cwd_file_name = os.path.join(Path.cwd(), config_file_name)
    """Filename of cwd config"""
    schema_cwd_file_name = cwd_file_name.replace(config_file_name,
                                                 schema_file_name)
    """Filename of cwd schema"""

    current_schema: Optional[dict] = None
    """Validators and descriptions of config values"""
    current_config: Optional[dict] = None
    """Valid config values"""

    defaults: dict
    """The default configuration"""
    defaults_schema: dict
    """The default schema"""

    _diff_config: Dict[str, Any] = {}
    _diff_schema: Dict[str, Any] = {}

    def __init__(self, path: Optional[str] = None) -> None:
        """
        Args:
            path: Optional path to directory containing a `qcodesrc.json`
                config file
        """
        self.config_file_path = path
        self.defaults, self.defaults_schema = self.load_default()
        self.update_config()

    def load_default(self) -> Tuple[dict, dict]:
        defaults = self.load_config(self.default_file_name)
        defaults_schema = self.load_config(self.schema_default_file_name)
        self.validate(defaults, defaults_schema)
        return defaults, defaults_schema

    def update_config(self, path: Optional[str] = None) -> dict:
        """
        Load defaults updated with cwd, env, home and the path specified,
        and validate.
        A configuration file must be called qcodesrc.json
        A schema file must be called qcodesrc_schema.json
        Configuration files (and their schema) are loaded and updated from
        the directories in the following order:

            - default json config file from the repository
            - user json config in user home directory
            - user json config in $QCODES_CONFIG
            - user json config in current working directory
            - user json file in the path specified

        If a key/value is not specified in the user configuration the
        default is used. Key/value pairs loaded later will take preference
        over those loaded earlier.
        Configs are validated after every update.
        Validation is also performed against a user provided schema if it's
        found in the directory.

        Args:
            path: Optional path to directory containing a `qcodesrc.json`
                config file
        """
        config = copy.deepcopy(self.defaults)
        self.current_schema = copy.deepcopy(self.defaults_schema)

        self._loaded_config_files = [self.default_file_name]

        self._update_config_from_file(self.home_file_name,
                                      self.schema_home_file_name,
                                      config)
        self._update_config_from_file(self.env_file_name,
                                      self.schema_env_file_name,
                                      config)
        self._update_config_from_file(self.cwd_file_name,
                                      self.schema_cwd_file_name,
                                      config)
        if path is not None:
            self.config_file_path = path
        if self.config_file_path is not None:
            config_file = os.path.join(self.config_file_path,
                                       self.config_file_name)
            schema_file = os.path.join(self.config_file_path,
                                       self.schema_file_name)
            self._update_config_from_file(config_file, schema_file, config)
        if config is None:
            raise RuntimeError("Could not load config from any of the "
                               "expected locations.")
        self.current_config = config
        self.current_config_path = self._loaded_config_files[-1]

        return config

    def _update_config_from_file(self, file_path: str, schema: str,
                                 config: dict) -> None:
        """
        Updated ``config`` dictionary with config information from file in
        ``file_path`` that has schema specified in ``schema``

        Args:
            file_path: Path to `qcodesrc.json` config file
            schema: Path to `qcodesrc_schema.json` to be used
            config: Config dictionary to be updated.
        """
        if os.path.isfile(file_path):
            self._loaded_config_files.append(file_path)
            my_config = self.load_config(file_path)
            config = update(config, my_config)
            self.validate(config, self.current_schema, schema)

    def validate(self, json_config: Optional[dict] = None,
                 schema: Optional[dict] = None,
                 extra_schema_path: Optional[str] = None) -> None:
        """
        Validate configuration; if no arguments are passed, the default
        validators are used.

        Args:
            json_config: json file to validate
            schema: schema dictionary
            extra_schema_path: schema path that contains extra validators
                to be added to schema dictionary
        """
        if extra_schema_path is not None:
            # add custom validation
            if os.path.isfile(extra_schema_path):
                with open(extra_schema_path) as f:
                    # user schema has to be both valid in itself
                    # but then just update the user properties
                    # so that default types and values can NEVER
                    # be overwritten
                    new_user = json.load(f)["properties"]["user"]
                    if schema is None:
                        if self.current_schema is None:
                            raise RuntimeError("Cannot validate as "
                                               "current_schema is None")
                        schema = self.current_schema
                    user = schema["properties"]['user']
                    user["properties"].update(new_user["properties"])
                jsonschema.validate(json_config, schema)
            else:
                logger.warning(EMPTY_USER_SCHEMA.format(extra_schema_path))
        else:
            if json_config is None and schema is None:
                jsonschema.validate(self.current_config,
                                    self.current_schema)
            else:
                jsonschema.validate(json_config, schema)

    def add(self, key: str, value: Any, value_type: Optional[str] = None,
            description: Optional[str] = None,
            default: Optional[Any] = None) -> None:
        """Add custom config value in place

        Adds ``key``, ``value`` with optional ``value_type`` to user config
        and schema. If ``value_type`` is specified then the new value is
        validated.

        Args:
            key: key to be added under user config
            value: value to add to config
            value_type: type of value, allowed are string, boolean, integer
            description: description of key to add to schema
            default: default value, stored only in the schema

        Examples:
            >>> defaults.add("trace_color", "blue", "string", "description")

            will update the config:

            ::

                ...
                "user": { "trace_color": "blue"}
                ...

            and the schema:

            ::

                ...
                "user":{
                    "type" : "object",
                    "description": "controls user settings of qcodes"
                    "properties" : {
                        "trace_color": {
                            "description" : "description",
                            "type": "string"
                        }
                    }
                }
                ...

        Todo:
            - Add enum support for value_type
            - finish _diffing
        """
        if self.current_config is None:
            raise RuntimeError("Cannot add value to empty config")
        self.current_config["user"].update({key: value})

        if self._diff_config.get("user", True):
            self._diff_config["user"] = {}
        self._diff_config["user"].update({key: value})

        if value_type is None:
            if description is not None:
                logger.warning(MISS_DESC)
        else:
            # update schema!
            schema_entry: Dict[str, Dict[str, Union[str, Any]]]
            schema_entry = {key: {"type": value_type}}
            if description is not None:
                schema_entry = {
                    key: {
                        "type": value_type,
                        "default": default,
                        "description": description
                    }
                }
            # the schema is nested; we only update properties of the user
            # object
            if self.current_schema is None:
                raise RuntimeError("Cannot add value as no current schema "
                                   "is set")
            user = self.current_schema['properties']["user"]
            user["properties"].update(schema_entry)
            self.validate(self.current_config, self.current_schema)

            # TODO(giulioungaretti) finish diffing
            # now we update the entire schema
            # and the entire configuration
            # if it's saved then it will always
            # take precedence even if the defaults
            # values are changed upstream, and the local
            # ones were actually left to their default
            # values
            if not self._diff_schema:
                self._diff_schema = BASE_SCHEMA

            props = self._diff_schema['properties']
            if props.get("user", True):
                props["user"] = {}
            props["user"].update(schema_entry)

    @staticmethod
    def load_config(path: str) -> 'DotDict':
        """Load a config JSON file

        Args:
            path: path to the config file
        Return:
            a dot accessible dictionary config object
        Raises:
            FileNotFoundError: if config is missing
        """
        with open(path, "r") as fp:
            config = json.load(fp)
        logger.debug(f'Loading config from {path}')

        config_dot_dict = DotDict(config)
        return config_dot_dict

    def save_config(self, path: str) -> None:
        """
        Save current config to file at given path.

        Args:
            path: path of new file
        """
        with open(path, "w") as fp:
            json.dump(self.current_config, fp, indent=4)

    def save_schema(self, path: str) -> None:
        """
        Save current schema to file at given path.

        Args:
            path: path of new file
        """
        with open(path, "w") as fp:
            json.dump(self.current_schema, fp, indent=4)

    def save_to_home(self) -> None:
        """Save config and schema to files in home dir"""
        self.save_config(self.home_file_name)
        self.save_schema(self.schema_home_file_name)

    def save_to_env(self) -> None:
        """Save config and schema to files in path specified in env
        variable"""
        self.save_config(self.env_file_name)
        self.save_schema(self.schema_env_file_name)

    def save_to_cwd(self) -> None:
        """Save config and schema to files in current working dir"""
        self.save_config(self.cwd_file_name)
        self.save_schema(self.schema_cwd_file_name)

    def describe(self, name: str) -> str:
        """
        Describe a configuration entry

        Args:
            name: name of entry to describe in 'dotdict' notation,
                e.g. name="user.scriptfolder"
        """
        val = self.current_config
        if val is None:
            raise RuntimeError("Config is empty, cannot describe entry.")
        if self.current_schema is None:
            raise RuntimeError("No schema found, cannot describe entry.")
        sch = self.current_schema["properties"]
        for key in name.split('.'):
            if val is None:
                raise RuntimeError(f"Cannot describe {name}: some part of "
                                   f"it is null")
            val = val[key]
            if sch.get(key):
                sch = sch[key]
            else:
                sch = sch['properties'][key]
        description = sch.get("description", None) or "Generic value"
        _type = str(sch.get("type", None)) or "Not defined"
        default = sch.get("default", None) or "Not defined"

        # add cool description to docstring
        base_docstring = """{}.\nCurrent value: {}. Type: {}. Default: {}."""
        doc = base_docstring.format(description, val, _type, default)

        return doc

    def __getitem__(self, name: str) -> Any:
        val = self.current_config
        for key in name.split('.'):
            if val is None:
                raise KeyError(f"{name} not found in current config")
            val = val[key]
        return val

    def __getattr__(self, name: str) -> Any:
        return getattr(self.current_config, name)

    def __repr__(self) -> str:
        old = super().__repr__()
        output = (f"Current values: \n {self.current_config} \n"
                  f"Current paths: \n {self._loaded_config_files} \n"
                  f"{old}")
        return output
def link(src, target, io): (abs_src, abs_tgt) = (expanduser(src), expanduser(target)) return pipe(dirs(abs_src, io), map(lambda d: perform_link(d, abs_tgt, io)), list) # noqa yapf: disable
def create_stub(_path=None): _path = _path or path.expanduser('~/.cephdeploy.conf') logger.debug('creating new configuration file: %s' % _path) with open(_path, 'w') as cd_conf: cd_conf.write(cd_conf_template)
def get_test(self, config_data, hostname, snap_file, post_snap, action):
    """
    Analyse the test file and return an object of testop.Operator
    containing the test details. Called by the connect() function and
    other functions of the Jsnapy module.

    :param config_data: data of main config file
    :param hostname: hostname
    :param snap_file: pre snapshot file name
    :param post_snap: post snapshot file name
    :param action: action to be taken (check, snapcheck, snap)
    :return: object of testop.Operator containing test details
    """
    res = self.compare_tests(
        hostname,
        config_data,
        snap_file,
        post_snap,
        action)

    result_status = res.result
    mail_condition = 'all'
    if result_status == 'Passed':
        mail_condition = 'pass'
    elif result_status == 'Failed':
        mail_condition = 'fail'

    mail_pref = config_data.get("mail")
    # we don't want to send mail when the diff operation is run
    if mail_pref is not None and self.args.diff is False:
        mail_file_path = None
        if isinstance(mail_pref, str):
            mail_file_path = mail_pref
        elif isinstance(mail_pref, dict):
            if mail_condition in mail_pref:
                mail_file_path = mail_pref.get(mail_condition)
        else:
            self.logger.error(
                colorama.Fore.RED +
                "ERROR!! Type of mail preferences should be either dictionary or string",
                extra=self.log_detail)

        if mail_file_path is not None and mail_file_path != '':
            mfile = os.path.join(expanduser(get_path('DEFAULT', 'test_file_path')), mail_file_path)\
                if os.path.isfile(mail_file_path) is False else mail_file_path
            if os.path.isfile(mfile):
                with open(mfile, 'r') as fh:
                    mail_file = yaml.safe_load(fh)
                if "passwd" not in mail_file:
                    passwd = getpass.getpass(
                        "Please enter your email password ")
                else:
                    passwd = mail_file['passwd']
                send_mail = Notification()
                send_mail.notify(mail_file, hostname, passwd, res)
            else:
                self.logger.error(
                    colorama.Fore.RED +
                    "ERROR!! Path of file containing mail content is not correct",
                    extra=self.log_detail)

    self.q.put(res)
    return res
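# A sketch of the two `mail` preference forms get_test() accepts in the main
# config (the file names below are hypothetical examples, not from the source):
mail_pref_str = "send_mail.yml"           # str form: one file for all results
mail_pref_dict = {                        # dict form: one file per condition
    "all": "send_mail.yml",
    "pass": "mail_on_pass.yml",
    "fail": "mail_on_fail.yml",
}
# With the dict form, get_test() looks up mail_pref_dict[mail_condition],
# where mail_condition is 'pass', 'fail', or 'all' based on the test result.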
#!/usr/bin/python3
import os
import sys
from os.path import expanduser

homedir = expanduser('~')

if not os.geteuid() == 0:
    sys.exit("\nOnly root can run this script\n")

os.system('clear')
print("\n--------------------------------------------------\n"
      "Welcome To HibernateScript\n\n"
      "Job: Hibernate after a specific period of sleep\n\n"
      "Made by: Muhammad Ashraf\n"
      "-----------------------------------------------------\n\n")


def printdc():
    print(str(run1.dcval) + ' ' + str(run1.dcunit) +
          ' which is/are ' + str(run1.dctime) + ' seconds')


def printac():
    print(str(run2.acval) + ' ' + str(run2.acunit) +
          ' which is/are ' + str(run2.actime) + ' seconds')


def run1():
    # dc: raw input; dcval: numeric part of input; dcunit: unit of time;
    # dctime: the time finally used, in seconds
    prompt = ("Please enter the period after which the PC should hibernate "
              "on battery power (if present) (d,h,m,s) (default: m): ")
    run1.dc = input(prompt)
    while run1.dc == '':
        print('\nPlease input a value\n')
        run1.dc = input(prompt)

    run1.dcunit = run1.dc[-1]
    # if no unit was given (input ends in a digit), default to minutes
    if run1.dcunit.isdigit():
        run1.dc = str(run1.dc) + 'm'
        run1.dcunit = run1.dc[-1]
    run1.dcval = run1.dc.split(run1.dcunit)[0]
    try:
        run1.dcvalint = int(run1.dcval)
    except ValueError:
        run1.dcvalint = ''
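# A sketch of the unit-to-seconds conversion the script implies for dctime
# (the d/h/m/s multipliers are assumed from the prompt; the actual
# conversion code is not shown in this excerpt):
UNIT_SECONDS = {'d': 86400, 'h': 3600, 'm': 60, 's': 1}
# e.g. run1.dctime = run1.dcvalint * UNIT_SECONDS[run1.dcunit]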
#!/usr/bin/env python
import argparse
import praw
from slackclient import SlackClient
import os
from os.path import expanduser
import fileinput
import tempfile
import shutil
import subprocess
import difflib

home = expanduser('~')
config = 'config'

# Read the config file once and pull each "key=value" line apart.
with open(config, 'r') as f:
    config_lines = f.readlines()

# Search term
x = config_lines[0].split("search=")[1].strip()
# Subreddit
y = config_lines[1].split("subreddit=")[1].strip()
# Username
u = config_lines[2].split("user=")[1].strip()
# Password
pw_config = config_lines[3]
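# A sketch of the expected `config` file, inferred from the parsing above
# (values are hypothetical; one "key=value" entry per line, in this order;
# the exact key name on the fourth line is truncated in the source):
#
#   search=some search term
#   subreddit=learnpython
#   user=my_reddit_user
#   <password line>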
def glob(self, root, recursive=True):
    # os.walk yields (root, dirs, files) tuples: for each walked directory
    # we join its files (tup[2]) and subdirectories (tup[1]) onto tup[0]
    return pipe(os.walk(expanduser(root)),
                mapcat(lambda tup: map(lambda f: join(tup[0], f))
                       (concat([tup[2], tup[1]]))),
                list)  # noqa yapf: disable
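# For reference, a plain-Python equivalent of the pipe/mapcat version above
# (a sketch; assumes the same os.walk / expanduser / join imports):
def glob_plain(root):
    return [join(top, name)
            for top, dirs, files in os.walk(expanduser(root))
            for name in files + dirs]   # files first, then dirs, as above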
def login(self, output_file):
    """
    Extract device information from the main config file. Store device
    information and call the connect function; there can be one device or
    many. Instead of connecting to all devices mentioned in the yaml file,
    the user can also connect to a particular group of devices.

    :param output_file: name of snapshot file
    """
    self.host_list = []
    if self.args.hostname is None:
        host_dict = {}
        try:
            hosts_val = self.main_file['hosts']
        except KeyError as ex:
            self.logger.error(colorama.Fore.RED +
                              "\nERROR occurred !! Hostname not given properly %s" % str(ex),
                              extra=self.log_detail)
            #raise Exception(ex)
        except Exception as ex:
            self.logger.error(colorama.Fore.RED +
                              "\nERROR occurred !! %s" % str(ex),
                              extra=self.log_detail)
            #raise Exception(ex)
        else:
            # when a group of devices is given, search for the 'include'
            # keyword under hosts in the main yaml file
            first_entry = hosts_val[0]
            if 'include' in first_entry:
                devices_file_name = first_entry['include']
                if os.path.isfile(devices_file_name):
                    lfile = devices_file_name
                else:
                    lfile = os.path.join(
                        expanduser(get_path(
                            'DEFAULT',
                            'test_file_path')),
                        devices_file_name)
                with open(lfile, 'r') as login_file:
                    dev_file = yaml.safe_load(login_file)
                gp = first_entry.get('group', 'all')

                dgroup = [i.strip().lower() for i in gp.split(',')]
                for dgp in dev_file:
                    if dgroup[0].lower() == 'all' or dgp.lower() in dgroup:
                        for val in dev_file[dgp]:
                            hostname = list(val)[0]
                            self.log_detail = {'hostname': hostname}
                            if val.get(hostname) is not None and hostname not in host_dict:
                                host_dict[hostname] = deepcopy(val.get(hostname))
                                self.host_list.append(hostname)
            # login credentials are given in the main config file;
            # we can connect to multiple devices
            else:
                for host in hosts_val:
                    try:
                        hostname = host['device']
                        self.log_detail = {'hostname': hostname}
                    except KeyError as ex:
                        self.logger.error(
                            colorama.Fore.RED +
                            "ERROR!! KeyError 'device' key not found",
                            extra=self.log_detail)
                        #raise Exception(ex)
                    except Exception as ex:
                        self.logger.error(
                            colorama.Fore.RED +
                            "ERROR!! %s" % ex,
                            extra=self.log_detail)
                        #raise Exception(ex)
                    else:
                        if hostname not in host_dict:
                            self.host_list.append(hostname)
                            host_dict[hostname] = deepcopy(host)

            for (hostname, key_value) in iteritems(host_dict):
                # command-line login/passwd, if given, take precedence
                # over the values from the file config
                username = self.args.login or key_value.get('username')
                password = self.args.passwd or key_value.get('passwd')
                # if the --port arg is given on the cmd line, it takes
                # precedence as well
                port = self.args.port
                if port is not None:
                    key_value['port'] = port
                key_value = self.get_values(key_value)
                t = Thread(
                    target=self.connect,
                    args=(
                        hostname,
                        username,
                        password,
                        output_file
                    ),
                    kwargs=key_value
                )
                t.start()
                t.join()
    # login credentials are given from the command line
    else:
        hostname = self.args.hostname
        self.log_detail = {'hostname': hostname}
        username = self.args.login
        password = self.args.passwd
        # if self.args.passwd is not None else getpass.getpass("\nEnter Password: ")
        self.host_list.append(hostname)
        port = self.args.port
        key_value = {'port': port} if port is not None else {}
        self.connect(hostname, username, password, output_file, **key_value)
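# A sketch of the devices file referenced by 'include' above (YAML shown in
# comments; group names, hostnames, and credentials are hypothetical). Each
# top-level key is a group holding a list of single-key host entries, which
# matches how the loop reads list(val)[0] and val.get(hostname):
#
#   group1:
#     - router1.example.com:
#         username: user1
#         passwd: secret1
#     - router2.example.com:
#         username: user2
#         passwd: secret2
#   group2:
#     - switch1.example.com:
#         username: user3
#         passwd: secret3
#
# In the main file, 'group: all' (the default) or a comma-separated list
# such as 'group: group1, group2' selects which groups to connect to.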
from distutils.spawn import find_executable
from os import path
from subprocess import run
from sys import argv
import pandocConvert

# pandoc >= 2.11 ships citeproc built in (--citeproc); older versions need
# the external pandoc-citeproc filter instead.
pandocVersion = run(['pandoc', '--version'], encoding='utf8',
                    capture_output=True)
# the first output line is e.g. "pandoc 2.11.4"; strip the "pandoc " prefix
pandocVersionNumber = pandocVersion.stdout.split('\n')[0][7:]
pandocVersionList = pandocVersionNumber.split('.')
if int(pandocVersionList[0]) > 2 or \
        (int(pandocVersionList[0]) == 2 and int(pandocVersionList[1]) > 10):
    extraOptions = '--citeproc'
    addedFilter = []
else:
    extraOptions = ''
    addedFilter = [find_executable('pandoc-citeproc')]

toFormat = 'latex'
toExtension = '.pdf'
bookOptions = ''
articleOptions = ''

theFile = argv[1].strip('"')
pandocTempDir = path.expanduser(argv[2])

pandocConvert.convertMd(pandocTempDir, theFile, toFormat, toExtension,
                        extraOptions, bookOptions, articleOptions,
                        addedFilter)
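# A more defensive way to extract the version, sketched under the assumption
# that the version number is the first dotted integer group on the first
# line of `pandoc --version` output; tuple comparison then replaces the
# hand-written major/minor checks above:
import re

def pandoc_version_tuple(version_output):
    """Return (major, minor, ...) ints parsed from `pandoc --version`."""
    match = re.search(r'(\d+(?:\.\d+)+)', version_output.splitlines()[0])
    return tuple(int(p) for p in match.group(1).split('.')) if match else (0,)

# usage: pandoc_version_tuple(pandocVersion.stdout) >= (2, 11)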
def multiple_device_details(
        self, hosts, config_data, pre_name, action, post_name):
    """
    Called when multiple devices are given in the config file.

    :param hosts: list of devices, or an 'include' entry pointing to a devices file
    :param config_data: data of main config file
    :param pre_name: pre snapshot filename or file tag
    :param action: action to be taken, snap, snapcheck, check
    :param post_name: post snapshot filename or file tag
    :return: list of testop.Operator objects containing test details
    """
    res_obj = []
    self.host_list = []
    host_dict = {}
    first_entry = hosts[0]

    if 'include' in first_entry:
        devices_file_name = first_entry['include']
        if os.path.isfile(devices_file_name):
            lfile = devices_file_name
        else:
            lfile = os.path.join(
                expanduser(get_path(
                    'DEFAULT',
                    'test_file_path')),
                devices_file_name)
        with open(lfile, 'r') as login_file:
            dev_file = yaml.safe_load(login_file)
        gp = first_entry.get('group', 'all')

        dgroup = [i.strip().lower() for i in gp.split(',')]
        for dgp in dev_file:
            if dgroup[0].lower() == 'all' or dgp.lower() in dgroup:
                for val in dev_file[dgp]:
                    hostname = list(val)[0]
                    self.log_detail = {'hostname': hostname}
                    if val.get(hostname) is not None and hostname not in host_dict:
                        host_dict[hostname] = deepcopy(val.get(hostname))
                        self.host_list.append(hostname)
    else:
        for host in hosts:
            try:
                hostname = host['device']
                self.log_detail = {'hostname': hostname}
            except KeyError as ex:
                self.logger.error(
                    colorama.Fore.RED +
                    "ERROR!! KeyError 'device' key not found",
                    extra=self.log_detail)
            except Exception as ex:
                self.logger.error(
                    colorama.Fore.RED +
                    "ERROR!! %s" % ex,
                    extra=self.log_detail)
            else:
                if hostname not in host_dict:
                    self.host_list.append(hostname)
                    host_dict[hostname] = deepcopy(host)

    for (hostname, key_value) in iteritems(host_dict):
        username = key_value.get('username')
        password = key_value.get('passwd')
        key_value = self.get_values(key_value)
        t = Thread(
            target=self.connect,
            args=(
                hostname,
                username,
                password,
                pre_name,
                config_data,
                action,
                post_name),
            kwargs=key_value
        )
        t.start()
        t.join()
        if action == "snap":
            if not self.snap_q.empty():
                res_obj.append(self.snap_q.get())
        elif action in ["snapcheck", "check"]:
            if not self.q.empty():
                res_obj.append(self.q.get())
        else:
            res_obj.append(None)

    return res_obj
def __init__(self, observation_shape=(1, ), normalize_observations=True,
             observation_range=(-5., 5.), action_range=(-1., 1.),
             nb_actions=3, layer_norm=True, skill_name=None,
             restore_path=None, action_func=None, obs_func=None,
             num_params=None, termination=None, get_full_state_func=None,
             next_state_query_idx=None):

    # Inputs.
    self.obs0 = tf.placeholder(tf.float32,
                               shape=(None, ) + observation_shape,
                               name='obs0')

    # Parameters.
    self.skill_name = skill_name
    self.restore_path = osp.expanduser(restore_path)
    self.normalize_observations = normalize_observations
    self.action_range = action_range
    self.observation_range = observation_range
    self.actor = Actor(nb_actions=nb_actions,
                       name="%s/actor" % skill_name,
                       layer_norm=layer_norm)
    self.critic = Critic(layer_norm=layer_norm,
                         name="%s/critic" % skill_name)
    self.successor_prob_model = classifier(
        in_shape=observation_shape[0],
        out_shape=1,
        name="%s/suc_pred_model" % skill_name,
        sess=None,
        log_dir=None,
        train=False,
        in_tensor=self.obs0)
    self.num_params = num_params

    # Load the replay memory: starting state/goal columns come first,
    # ending state columns after.
    print("searching for memory in %s" % osp.join(self.restore_path, 'memory'))
    memory_filename = glob.glob(
        osp.join(self.restore_path, 'memory', '*.csv'))[0]
    self.memory = np.loadtxt(memory_filename, delimiter=',')
    self.starting_state_goal = self.memory[:, :observation_shape[0]]
    self.ending_state = self.memory[:, observation_shape[0]:]

    # Trajectories are loaded later from this directory.
    print("traj in %s" % osp.join(self.restore_path, 'traj'))
    self.traj_dir_path = osp.join(self.restore_path, 'traj')

    if next_state_query_idx is not None:
        self.next_state_query_idx = next_state_query_idx
    else:
        self.next_state_query_idx = list(range(observation_shape[0]))

    if termination:
        self.termination = termination
    else:
        self.termination = lambda x, y: False

    # Conversion functions; each defaults to the identity (`mirror`).
    self.get_action = action_func if action_func is not None else mirror
    self.get_obs = obs_func if obs_func is not None else mirror
    self.get_full_state = (get_full_state_func
                           if get_full_state_func is not None else mirror)

    # Observation normalization.
    if self.normalize_observations:
        with tf.variable_scope('%s/obs_rms' % skill_name):
            self.obs_rms = RunningMeanStd(shape=observation_shape)
    else:
        self.obs_rms = None
    normalized_obs0 = tf.clip_by_value(normalize(self.obs0, self.obs_rms),
                                       self.observation_range[0],
                                       self.observation_range[1])

    self.actor_tf = self.actor(normalized_obs0)
    self.critic_tf = self.critic(normalized_obs0, self.actor_tf)
    self.success_prob = self.successor_prob_model.prob

    # Loader and saver.
    self.loader_ddpg = tf.train.Saver(self.create_restore_var_dict())
    self.loader_successor_model = tf.train.Saver(
        self.create_restore_var_dict_successor_model())
logging.info("Leaving Vault file: %s" % clear_vault_file) else: logging.info("Moving Vault file: %s" % clear_vault_file) os.rename( clear_vault_file, backup_directory + "/" + os.path.basename(str(clear_vault_file))) logging.info("Clearing Complete") if __name__ == "__main__": # If the EC2_VAULT environ var is set then use it, otherwise default to ~/Vault/ try: os.environ['EC2_VAULT'] except KeyError: vault = expanduser("~") + '/Vault' else: vault = os.environ['EC2_VAULT'] # Exit if no vault directory is found if not os.path.isdir(vault): sys.exit("Vault directory not found.") # Create a logs directory in the vault directory if one does not exist if not os.path.exists(vault + "/logs"): os.makedirs(vault + "/logs") # Save a log to vault/logs/setup.log logging.basicConfig(filename=vault + "/logs/setup.log", format='%(asctime)s %(message)s', level=logging.INFO)
def __init__(self, bin='ndc'): self.bin = expanduser(bin) self.__configure_platform() self.__validate_bin_version()