def loadConfig():
    """Load the tgFileManager INI config, creating it interactively on first run.

    Returns:
        configparser.ConfigParser: the loaded (or newly created) configuration.
    """
    cfg = configparser.ConfigParser()
    cfg_path = os.path.expanduser("~/.config/tgFileManager.ini")
    if os.path.isfile(cfg_path):
        cfg.read(cfg_path)
    else:
        print(
            "Config file not found, user input required for first time configuration."
        )
        cfg['telegram'] = {}
        cfg['telegram']['api_id'] = ''
        cfg['telegram']['api_hash'] = ''
        cfg['telegram']['channel_id'] = 'me'
        cfg['telegram']['max_sessions'] = '4'
        cfg['paths'] = {}
        # BUG FIX: os.expanduser does not exist; the function is os.path.expanduser.
        cfg['paths']['data_path'] = os.path.expanduser("~/tgFileManager")
        cfg['paths']['tmp_path'] = os.path.expanduser("~/.tmp/tgFileManager")
        cfg['keybinds'] = {}
        cfg['keybinds']['upload'] = 'u'
        cfg['keybinds']['download'] = 'd'
        cfg['keybinds']['resume'] = 'r'
        cfg['keybinds']['cancel'] = 'c'
        cfg['telegram']['api_id'] = input("api_id: ")
        cfg['telegram']['api_hash'] = input("api_hash: ")
        with open(cfg_path, 'w') as f:
            cfg.write(f)
    return cfg
def addtocdb(smimol, sminame, smicat):
    """Add a core to the user's custom core database.

    Parameters:
        smimol: SMILES string or .mol/.xyz file path for the core.
        sminame: name under which the core is stored.
        smicat: string of connection-atom indices (1-based; space/comma/tab separated).

    Returns:
        False on success, or an error-message string on failure.
    """
    emsg = False
    globs = globalvars()
    # Make sure a writeable custom database path is configured.
    if not globs.custom_path or not os.path.exists(str(globs.custom_path)):
        print(
            'To add to database, you need to set a custom path. Please enter a writeable file path:'
        )
        # BUG FIX: eval(input(...)) executed arbitrary user input; a plain
        # input() is all that is needed (and is safe).
        new_path = input('path=')
        globs.add_custom_path(new_path)
        copy_to_custom_path()
    cpath = globs.custom_path + "/Cores/cores.dict"
    mcores = readdict(cpath)
    cores_folder = globs.custom_path + "/Cores/"
    # check if core exists
    if sminame in list(mcores.keys()):
        emsg = 'Core ' + sminame + ' already existing in core database.'
        return emsg
    # get connection atoms (1-based user input -> 0-based indices)
    ccats = [_f for _f in re.split(' |,|\t', smicat) if _f]
    cats = [int(a) - 1 for a in ccats]
    if len(cats) == 0:
        cats = [0]
    css = ' '.join(str(a) for a in cats)
    # convert to unicode
    smimol = unicodedata.normalize('NFKD', smimol).encode('ascii',
                                                          'ignore').decode()
    if '~' in smimol:
        # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
        smimol = smimol.replace('~', os.path.expanduser('~'))
    # convert ligand from smiles/file
    core, emsg = core_load(smimol, mcores)
    if emsg:
        return emsg
    core.convert2mol3D()  # convert to mol3D
    # copy/write structure file into the Cores directory and build the new
    # dictionary entry
    if '.mol' in smimol:
        shutil.copy2(smimol, cores_folder + sminame + '.mol')
        snew = sminame + ':' + sminame + '.mol,' + css + ',' + '1'
    elif '.xyz' in smimol:
        shutil.copy2(smimol, cores_folder + sminame + '.xyz')
        snew = sminame + ':' + sminame + '.xyz,' + css + ',' + '1'
    else:
        core.writexyz(cores_folder + sminame + '.xyz')  # write xyz file
        snew = sminame + ':' + sminame + '.xyz,' + css + ',' + '1'
    # update dictionary: keep the header line, store entries sorted
    with open(cpath, 'r') as f:
        ss = f.read().splitlines()
    ss.append(snew)
    ssort = sorted(ss[1:])
    with open(cpath, 'w') as f:
        f.write(ss[0] + '\n')
        for s in ssort:
            f.write(s + '\n')
    return emsg
def executeCommand(this, cmd): #Special commands split = [c.strip() for c in cmd.split(' ')] #Change working directory if(split[0] == "cd"): if(len(split) == 1): os.chdir(os.expanduser("~/")) else: if(os.path.isdir(split[1])): os.chdir(split[1]) return #Execute file if(split[0] == "execute"): this.run(split[1]) return #change update interval if(split[0] == "update"): #No number specified if(len(split) == 1): return try: this.updateTimeInterval = float(split[1]) except ValueError: print "Error, %s not an integer."%split[1] return #Change cmd file if(split[0] == "cmd"): if(len(split) != 1): this.cmdFile = os.getcwd() + "/" + split[1] return #Execute bash command os.system(cmd)
def precheck(debug, region):
    """
    Runtime Dependency Checks:
        postinstall artifacts, environment

    Returns False when no region is supplied, True otherwise.
    """
    if region == 'noregion':
        return False
    return True
    # NOTE(review): everything below is unreachable dead code because of the
    # unconditional `return True` above -- confirm whether it should be
    # removed or the early returns were a temporary stub.
    try:
        # BUG FIX (in dead code): os.expanduser -> os.path.expanduser
        home_dir = os.path.expanduser('~')
        config_file = os.path.join(home_dir, '.spotlib.json')
        if os.path.exists(config_file):
            with open(config_file, 'r') as f1:
                defaults = json.loads(f1.read())
        else:
            from spotlib.defaults import defaults
        _debug_output(home_dir, config_file)
    except OSError:
        fx = inspect.stack()[0][3]
        logger.exception(
            '{}: Problem installing user config files. Exit'.format(fx))
        return False
    return defaults
def main():
    """Dump secrets/services/replicationcontrollers from a k8s namespace."""
    parser = argparse.ArgumentParser(description='Dump a k8s namespace')
    parser.add_argument('--namespace', type=str, default='default',
                        help='k8s namespace.')
    parser.add_argument('--kubeconfig', type=str, required=True,
                        help='path to kubeconfig file.')
    parser.add_argument('--outpath', type=str,
                        help="Path to write files to. "
                             "If none is specified, print to stdout")
    args = parser.parse_args()
    output_path = args.outpath
    if output_path:
        if output_path.startswith('~'):
            # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
            output_path = os.path.expanduser(output_path)
        # (removed an unused `output = os.path.realpath(output_path)` local)
        if not os.path.exists(output_path):
            os.makedirs(output_path)
    cluster = get_server(args.kubeconfig)
    dump('secrets', cluster, args.namespace, output_path)
    dump('services', cluster, args.namespace, output_path)
    dump('replicationcontrollers', cluster, args.namespace, output_path)
def pdf_download(request, filename):
    """Serve `filename` as a PDF attachment download."""
    # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
    path = os.path.expanduser('~/media/pdf/')
    # NOTE(review): `path` is never used -- the file is opened via `filename`
    # alone, which also allows caller-controlled paths (traversal risk).
    # Confirm whether file(os.path.join(path, filename)) was intended.
    wrapper = FileWrapper(file(filename, 'rb'))
    response = HttpResponse(wrapper,
                            content_type=mimetypes.guess_type(filename)[0])
    response['Content-Disposition'] = "attachment; filename=" + filename
    return response
def get_default_path():
    """Return the user's documents folder ('My Documents' on pre-Vista Windows)."""
    import platform
    if platform.system() == 'Windows':
        # BUG FIX: platform.version() returns a string such as '5.1.2600';
        # comparing its first *character* to the int 6 raised a TypeError.
        if int(platform.version().split('.')[0]) < 6:
            # BUG FIX: os.expanduser -> os.path.expanduser
            return os.path.join(os.path.expanduser('~'), 'My Documents')  # Windows XP
    return os.path.join(os.path.expanduser('~'), 'Documents')
def __init__(self, user=None):
    """Connect to the cmus control socket.

    If `user` is given, resolve that user's home directory; otherwise fall
    back to the current process's $HOME.
    """
    if user:
        # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
        self.socket_path = os.path.join(os.path.expanduser('~' + user),
                                        '.cmus/socket')
    else:
        self.socket_path = os.path.join(os.environ['HOME'], '.cmus/socket')
    self._connect()
def determineHomeFolder(name):
    ''' Determine process's user's home directory.
        No need to run on every Configr object creation, as it is assumed to be
        static throughout one configuration state lifetime. If any of the
        environment variables have been modified by the same process, call the
        function again.
        name: application name to use
        returns: home["value"] (convenience only; shouldn't be used by user code)
        Side effect: sets module-global "home" variable
    '''
    try:
        import appdirs  # optional dependency which already solves some problems for us
        home["value"] = appdirs.user_data_dir(name, "configr")  # app/author
    except Exception:
        try:  # get user home regardless of currently set environment variables
            from win32com.shell import shell, shellcon
            home["value"] = shell.SHGetFolderPath(0, shellcon.CSIDL_PROFILE, None, 0)
        except Exception:
            try:  # unix-like native solution ignoring environment variables
                from pwd import getpwuid
                home["value"] = getpwuid(os.getuid()).pw_dir
            except Exception:  # now try standard approaches
                home["value"] = os.getenv("USERPROFILE")  # for windows only
                if home["value"] is None:
                    # BUG FIX: os.expanduser -> os.path.expanduser
                    home["value"] = os.path.expanduser("~")  # recommended cross-platform solution, but could refer to a mapped network drive on Windows
    if home["value"] is None:
        raise Exception("Cannot reliably determine user's home directory, please file a bug report at https://github.com/ArneBachmann/configr")
    debug("Determined home folder: %s" % home["value"])
    return home["value"]  # HINT this return is only for convenience and shouldn't be used by user code
def update(self, dt): if self.printing and not self.paused: self.layer_timer -= dt if self.layer_timer <= 0: self.current_layer = self.current_layer + 1 print "Next Layer: " + str(self.current_layer) if self.current_layer > len(self.layers): self.process_gcode(self.profile.find('GCodeFooter').text) self.current_layer = -1 self.printing = False os.remove(os.expanduser('~/.dppcv/tmp/')) return self.get_next_layer() self.layer_timer=float(self.profile.find('InkConfig').find('LayerTime').text) self.process_gcode(self.profile.find('GCodeLift').text) #while not self.printer.clear: #pass self.dipsurf.fill((0,0,0)) if self.printing and not self.paused: self.dipsurf.blit(self.current_layer_image, 0,0) #deal with pygame events for event in pygame.event.get(): if event.type == QUIT: pygame.quit() sys.exit() pygame.display.update()
def execute(self, command, shell="/bin/bash", verbose=False):
    """Execute the given string command and return the retcode."""
    if verbose:
        print >> sys.stderr, ">> %s" % command
    # Trap calls to `cd`
    if command.strip().startswith("cd"):
        try:
            dest = command.strip().split()[1]
            if dest == "-":
                dest = self.old_cwd
        except IndexError:
            # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
            dest = os.path.expanduser("~")  # Defaults to $HOME
        # Change directory
        self.old_cwd = os.getcwd()  # Save for `cd -`
        os.chdir(dest)
        return 0
    else:
        return call(str(command), executable=shell, shell=True,
                    env=self.env, cwd=os.getcwd())
def testbin_to_fpath(testbin): if testbin.endswith('-win') or testbin.endswith('-windows'): testbin = testbin + '.exe' tmp = os.path.join('testbins', testbin) if '~' in tmp: tmp = os.expanduser(tmp) tmp = os.path.abspath(tmp) return tmp
def pdf_download(request, filename):
    """Serve `filename` as a PDF attachment download.

    NOTE(review): the first `return` short-circuits the function, so
    everything after it is dead code -- confirm whether the early return is
    a leftover debugging stub.
    """
    return HttpResponse(filename)
    # --- unreachable below ---
    # BUG FIX (in dead code): os.expanduser -> os.path.expanduser
    path = os.path.expanduser('~/files/pdf/')
    wrapper = FileWrapper(file(filename, 'rb'))
    response = HttpResponse(wrapper,
                            content_type=mimetypes.guess_type(filename)[0])
    response['Content-Length'] = os.path.getsize(filename)
    response['Content-Disposition'] = "attachment; filename=" + filename
    return response
def test_prog_to_fpath(prog): if platform.system() == 'Windows': prog = prog + '.exe' tmp = os.path.join('testbins', prog) if '~' in tmp: tmp = os.expanduser(tmp) tmp = os.path.abspath(tmp) return tmp
def addtobdb(smimol, sminame):
    """Add a binding species to the user's custom Bind database.

    smimol: SMILES string or .mol/.xyz file path; sminame: database name.
    Returns an error-message string on failure.
    """
    globs = globalvars()
    # Make sure a writeable custom database path is configured.
    if not globs.custom_path or not os.path.exists(str(globs.custom_path)):
        print('To add to database, you need to set a custom path. Please enter a writeable file path:')
        new_path = input('path=')
        globs.add_custom_path(new_path)
        copy_to_custom_path()
    bpath = globs.custom_path + "/Bind/bind.dict"
    bindcores = readdict(bpath)
    bind_folder = globs.custom_path + "/Bind/"
    # check if binding species exists
    if sminame in bindcores.keys():
        emsg = 'Molecule '+sminame+' already existing in binding species database.'
        return emsg
    # convert to unicode
    smimol = unicodedata.normalize('NFKD', smimol).encode('ascii', 'ignore')
    sminame = unicodedata.normalize('NFKD', sminame).encode('ascii', 'ignore')
    if '~' in smimol:
        # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
        smimol = smimol.replace('~', os.path.expanduser('~'))
    # convert ligand from smiles/file
    bind, bsmi, emsg = bind_load(smimol, bindcores)
    if emsg:
        return emsg
    bind.convert2mol3D()  # convert to mol3D
    # create shortname for the dictionary entry
    if len(sminame) > 5:
        shortname = sminame[0:3] + sminame[-2:]
    else:
        shortname = sminame
    # copy/write structure file into the Bind directory, build the new entry
    if '.mol' in smimol:
        shutil.copy2(smimol, bind_folder + sminame + '.mol')
        snew = sminame+':'+sminame+'.mol,'+shortname+','
    elif '.xyz' in smimol:
        shutil.copy2(smimol, bind_folder + sminame + '.xyz')
        snew = sminame+':'+sminame+'.xyz,'+shortname+','
    elif bind.OBmol:
        # write smiles file in Bind species directory
        bind.OBmol.write('smi', bind_folder + sminame + '.smi')
        snew = sminame+':'+sminame+'.smi,'+shortname+','
    else:
        # write xyz file in Bind species directory
        bind.writexyz(bind_folder + sminame + '.xyz')  # write xyz file
        snew = sminame+':'+sminame+'.xyz,'+shortname+','
    # update dictionary: keep the header line, store entries sorted
    f = open(bpath, 'r')
    ss = f.read().splitlines()
    f.close()
    f = open(bpath, 'w')
    ss.append(snew)
    ssort = sorted(ss[1:])
    f.write(ss[0]+'\n')
    for s in ssort:
        f.write(s+'\n')
    f.close()
    return emsg
def addtocdb(smimol, sminame, smicat):
    """Add a core to the user's custom core database (Python-2 variant).

    smimol: SMILES string or .mol/.xyz file path; sminame: database name;
    smicat: 1-based connection-atom indices (space/comma/tab separated).
    Returns False on success or an error-message string on failure.
    """
    emsg = False
    globs = globalvars()
    # Make sure a writeable custom database path is configured.
    if not globs.custom_path or not os.path.exists(str(globs.custom_path)):
        print('To add to database, you need to set a custom path. Please enter a writeable file path:')
        new_path = input('path=')
        globs.add_custom_path(new_path)
        copy_to_custom_path()
    cpath = globs.custom_path + "/Cores/cores.dict"
    mcores = readdict(cpath)
    cores_folder = globs.custom_path + "/Cores/"
    # check if core exists
    if sminame in mcores.keys():
        emsg = 'Core '+sminame+' already existing in core database.'
        return emsg
    # get connection atoms (1-based user input -> 0-based indices)
    ccats = filter(None, re.split(' |,|\t', smicat))
    cats = [int(a)-1 for a in ccats]
    if len(cats) == 0:
        cats = [0]
    cs = [str(a) for a in cats]
    css = ' '.join(cs)
    # convert to unicode
    smimol = unicodedata.normalize('NFKD', smimol).encode('ascii', 'ignore')
    if '~' in smimol:
        # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
        smimol = smimol.replace('~', os.path.expanduser('~'))
    # convert ligand from smiles/file
    core, emsg = core_load(smimol, mcores)
    if emsg:
        return emsg
    core.convert2mol3D()  # convert to mol3D
    # copy/write structure file into the Cores directory, build the new entry
    if '.mol' in smimol:
        shutil.copy2(smimol, cores_folder+sminame+'.mol')
        snew = sminame+':'+sminame+'.mol,'+css+','+'1'
    elif '.xyz' in smimol:
        shutil.copy2(smimol, cores_folder + sminame+'.xyz')
        snew = sminame+':'+sminame+'.xyz,'+css+','+'1'
    else:
        core.writexyz(cores_folder + sminame+'.xyz')  # write xyz file
        snew = sminame+':'+sminame+'.xyz,'+css+','+'1'
    # update dictionary: keep the header line, store entries sorted
    f = open(cpath, 'r')
    ss = f.read().splitlines()
    f.close()
    f = open(cpath, 'w')
    ss.append(snew)
    ssort = sorted(ss[1:])
    f.write(ss[0]+'\n')
    for s in ssort:
        f.write(s+'\n')
    f.close()
    return emsg
def desktop_name():
    """Return the user's desktop directory (XDG aware), caching the result.

    Falls back to $HOME when the resolved directory is missing or unwritable.
    """
    global _desktop_name
    if not _desktop_name:
        # Ask the shell to resolve XDG_DESKTOP_DIR from user-dirs.dirs.
        _desktop_name = getoutput("""
test -f ${XDG_CONFIG_HOME:-~/.config}/user-dirs.dirs &&
  . ${XDG_CONFIG_HOME:-~/.config}/user-dirs.dirs
echo -n ${XDG_DESKTOP_DIR:-$HOME/Desktop}""")
        if (not os.path.isdir(_desktop_name)
                or not os.access(_desktop_name, os.W_OK)):
            # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
            _desktop_name = os.path.expanduser("~")
    return _desktop_name
def _set_base_dir(self):
    """Set self.base_dir to the per-user application data directory."""
    self.base_dir = ''
    if platform == "win32":
        self.base_dir = os.path.join(os.getenv('APPDATA'), APP_NAME)
    else:
        # Directory of the executed script
        # SO: https://stackoverflow.com/questions/4934806/how-can-i-find-scripts-directory-with-python
        # ~/.para/
        # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
        self.base_dir = os.path.join(os.path.expanduser("~"), '.para')
def load_stellar_population(self, complist=None):
    """Read all the stellar pops info from Magphys outputs.

    Populates self.spectrum[component] with the spectrum dict plus derived
    'eta' and 'log_Lbol' entries for each component in complist
    (default: ['tot', 'bulge', 'disk']).
    """
    # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
    mpdir = os.path.expanduser('~/WRITING/KINGFISH/Uexample/plots/eta/magphys_out/')
    if complist is None:
        complist = ['tot', 'bulge', 'disk']
    self.spectrum = {}
    for i, c in enumerate(complist):
        spec = utils.read_mp(galaxy=self.aname, component=c, mpdir=mpdir)
        lbol = observate.Lbol(spec['wavelength'], spec['f_lambda_int'],
                              wave_max=1e5)
        self.spectrum[c] = spec
        self.spectrum[c]['eta'] = self.get_eta(spec['wavelength'],
                                               spec['f_lambda_int'])
        # Reuse the already-computed bolometric luminosity instead of
        # recomputing it with identical arguments.
        self.spectrum[c]['log_Lbol'] = np.log10(lbol)
class IPlayer(): if 'HOME' in os.environ: HOME_DIR = os.environ['HOME'] elif 'USERPROFILE' in os.environ: HOME_DIR = os.environ['USERPROFILE'] else: HOME_DIR = os.expanduser("~") PROFILE_DIR = os.path.join(HOME_DIR, ".get_iplayer") TV_CACHE_FILE = os.path.join(PROFILE_DIR, "tv.cache") ITV_CACHE_FILE = os.path.join(PROFILE_DIR, "itv.cache") CH4_CACHE_FILE = os.path.join(PROFILE_DIR, "ch4.cache") FIVE_CACHE_FILE = os.path.join(PROFILE_DIR, "five.cache") OPTIONS_FILE = os.path.join(PROFILE_DIR, ".guiplayer") download_dir = "/home/pete/Movies" def __init__(self, log, progress): self.log = log self.programmes = {} progress.Pulse("Downloading BBC Programme List...") self._refresh_cache("tv", progress) self._parse_cache(self.TV_CACHE_FILE) progress.Pulse("Downloading ITV Programme List...") self._refresh_cache("itv", progress) self._parse_cache(self.ITV_CACHE_FILE) progress.Pulse("Downloading Channel 4 Programme List...") self._refresh_cache("ch4", progress) self._parse_cache(self.CH4_CACHE_FILE) progress.Pulse("Downloading Five Programme List...") self._refresh_cache("five", progress) self._parse_cache(self.FIVE_CACHE_FILE) self.ignored_programmes = set() self.downloaded_episodes = set() self.ignored_episodes = set() self.log.write("Reading options file...") try: file = open(self.OPTIONS_FILE, "r") self.ignored_programmes = pickle.load(file) self.downloaded_episodes = pickle.load(file) self.ignored_episodes = pickle.load(file) except Exception, inst: self.log.write("Error reading options file: %s" % str(inst)) pass
def main(argv=sys.argv): suspendcmd = ' '.join([ 'bash', pipes.quote(os.path.join(os.path.dirname(__file__), 'scrub.bash')), 'suspend' ]) # if on battery or explicitly suspending if is_on_battery(): print 'On battery' suspend_running_bitrot() return if len(argv) > 1: print 'Suspending per explicit request (>0 args)' suspend_running_bitrot() return running_bitrot = get_running_bitrot() if running_bitrot: pync.Notifier.notify( 'Resuming bitrot scubber. Click to suspend for 4h.', execute=suspendcmd) running_bitrot.resume() # in case it was suspended return # if scrub ran too recently (run according to schedule, e.g. every 1h): timestamp_path = os.expanduser('~/.maintenance-timestamp') with open(timestamp_path) as f: last_start_time = int(f.read()) if time.time() - last_start_time < 30 * 24 * 60 * 60: print 'Last start was less than a month ago (%s)' % \ dt.datetime.fromtimestamp(last_start_time) return # run scrub pync.Notifier.notify('Started bitrot scrubber. Click to suspend for 4h.', execute=suspendcmd) p = psutil.Process(os.getpid()) p.set_nice(20) p.set_ionice(psutil.IOPRIO_CLASS_IDLE, 7) with open(timestamp_path, 'w') as f: f.write(time.time()) execl('bitrot')
def __init__(self, user=None, passwd=None, conf=None, write=False):
    # Initialize Mint.com credentials from explicit args or ~/.mintconfig.ini.
    # NOTE(review): this block appears to have been mangled by a credential
    # scrubber -- the '******' runs below are not valid Python and several
    # statements are truncated.  Recover the original from version control.
    self.config = ConfigParser()
    if conf is None and user is None and passwd is None:
        # NOTE(review): os.join and os.expanduser do not exist; the intended
        # calls are os.path.join and os.path.expanduser.
        self.conf_file = os.join(os.expanduser("~"), ".mintconfig.ini")
        self.user = input("Please enter you Mint.com username: "******"Please enter you Mint.com username: "******"{} not found".format(conf))
        self.user = input("Please enter you Mint.com username: "******"password" not in keys:
        self.password = self.confirm_pass()
        self.config['DEFAULT']['password'] = self.password
    self.password = self.config['DEFAULT']['password']
    self.user = self.config['DEFAULT']['user']
def get_file(source, target): print "Getting %s from %s" % (source, target) source = os.expanduser(source) if source.startswith('~') else source if source.startswith('http'): res = requests.get(source) if res.status_code == 200: with open(target, 'wb') as target: for chunk in res: target.write(chunk) return errmsg = "Cannot download %s from %s" % (os.path.basename(source), source) raise pyduin.arduino.ArduinoConfigError(errmsg) else: if not os.path.isfile(source): errmsg = "Source file %s does not exist" raise pyduin.arduino.ArduinoConfigError(errmsg) elif not os.path.isdir(os.path.dirname(target)): errmsg = "Target dir %s does not exist" raise pyduin.arduino.ArduinoConfigError(errmsg) copyfile(source, target)
def main(argv=sys.argv): suspendcmd = ' '.join(['bash', pipes.quote(os.path.join(os.path.dirname(__file__), 'scrub.bash')), 'suspend']) # if on battery or explicitly suspending if is_on_battery(): print 'On battery' suspend_running_bitrot() return if len(argv) > 1: print 'Suspending per explicit request (>0 args)' suspend_running_bitrot() return running_bitrot = get_running_bitrot() if running_bitrot: pync.Notifier.notify('Resuming bitrot scubber. Click to suspend for 4h.', execute=suspendcmd) running_bitrot.resume() # in case it was suspended return # if scrub ran too recently (run according to schedule, e.g. every 1h): timestamp_path = os.expanduser('~/.maintenance-timestamp') with open(timestamp_path) as f: last_start_time = int(f.read()) if time.time() - last_start_time < 30 * 24 * 60 * 60: print 'Last start was less than a month ago (%s)' % \ dt.datetime.fromtimestamp(last_start_time) return # run scrub pync.Notifier.notify('Started bitrot scrubber. Click to suspend for 4h.', execute=suspendcmd) p = psutil.Process(os.getpid()) p.set_nice(20) p.set_ionice(psutil.IOPRIO_CLASS_IDLE, 7) with open(timestamp_path, 'w') as f: f.write(time.time()) execl('bitrot')
def execute(self, command, shell="/bin/bash", verbose=False):
    """Execute the given string command and return the retcode."""
    if verbose:
        print >>sys.stderr, ">> %s" % command
    # Trap calls to `cd`
    if command.strip().startswith("cd"):
        try:
            dest = command.strip().split()[1]
            if dest == "-":
                dest = self.old_cwd
        except IndexError:
            # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
            dest = os.path.expanduser("~")  # Defaults to $HOME
        # Change directory
        self.old_cwd = os.getcwd()  # Save for `cd -`
        os.chdir(dest)
        return 0
    else:
        return call(str(command), executable=shell, shell=True,
                    env=self.env, cwd=os.getcwd())
def __record(self, mode, name=None, path=None, timeout=0, **kwargs):
    """Build and run a raspistill/raspivid command line.

    kwargs become extra CLI flags: booleans toggle a bare flag; other values
    are passed as '--key value' ('-key value' when self.shortargs is set).
    """
    if mode not in [PiCam.PHOTOCMD, PiCam.VIDEOCMD]:
        raise Exception("unknown mode: '%s'" % mode)
    if not name:
        name = "img_%s.jpg" % random.randint(1, 1000)
    target = name
    if path:
        # BUG FIX: os.expandvars / os.expanduser do not exist; they live in
        # os.path.
        path = os.path.expandvars(os.path.expanduser(path))
        target = os.path.join(path, name)
    parts = [mode, "-o %s -t %s" % (target, timeout)]
    indicator = "-" if self.shortargs else "--"
    for key, value in kwargs.items():
        if isinstance(value, bool):
            if value:
                parts.append("%s%s" % (indicator, key))
        else:
            parts.append("%s%s %s" % (indicator, key, value))
    cmd = " ".join(parts)
    # NOTE(review): os.system on a string built from caller-supplied
    # name/path/kwargs is shell-injection prone; consider subprocess.run
    # with an argument list.
    os.system(cmd)
def get_config_or_prompt(repo, section, name, prompt, save=None):
    """Fetch a git config value, prompting for it (and optionally saving) if unset.

    save=None asks the user whether to save; True/False skips that question.
    Returns the (possibly newly entered) value as bytes.
    """
    config = repo.repo.get_config_stack()
    try:
        value = config.get(section, name)
    except KeyError:
        value = input(prompt).encode()
        if save is None:  # BUG FIX: identity comparison for None, not ==
            reply = input('Save this setting? [y/n]')
            save = reply == 'y'
        if save:
            reply = input('Save globally (~/.gitconfig) for all repos? [y/n]')
            saveglobal = reply == 'y'
            if saveglobal:
                backends = config.default_backends()
                if not backends:
                    # BUG FIX: the parentheses were misplaced -- expanduser
                    # was being given ('~/.gitconfig', 'w') and open() got no
                    # mode argument.
                    open(os.path.expanduser('~/.gitconfig'), 'w').close()  # create file
                    backends = config.default_backends()
                # BUG FIX: default_backends() returns a list; use its first
                # element in both branches (the original called .set on the
                # list itself when a backend already existed).
                globalcfg = backends[0]
                globalcfg.set(section, name, value)
                globalcfg.write_to_path()
            else:
                config.set(section, name, value)
                config.writable.write_to_path()
    return value
def __init__(self, root_directory=None, project_name=None):
    """Initialize the settings feature, resolving the project root directory."""
    super(SettingsFeature, self).__init__()
    self.selectable = True
    self.keylist = [
        KeyDefinition('<cr>', SettingsFeature.SETTINGS_SELECT, False,
                      self.handleSelectItem, "Select an Item."),
        KeyDefinition('u', SettingsFeature.SETTINGS_MOVE_TO_USER, False,
                      self.handleMoveToUser, "Move config item to user config."),
        KeyDefinition('p', SettingsFeature.SETTINGS_MOVE_TO_PROJECT, False,
                      self.handleMoveToProject, "Move config item to project config.")
    ]
    self.title = "Settings"
    self.project_name = project_name
    self.user_config = None
    self.project_config = None
    self.use_project_config = None
    self.using_default_user_config = True
    self.using_default_project_config = True
    if root_directory is None:
        self.root_directory = None
    elif root_directory[0] == '~':
        # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
        self.root_directory = os.path.expanduser(root_directory)
    else:
        self.root_directory = os.path.abspath(root_directory)
    self.settings = SettingsNode('general', 'SettingsFeature')
    self.menu_created = False
    self.is_new = False
# BatMan - Battery Manager for your Linux System
# @author Vivek Shah
import psutil
import time
import os
from os import expanduser

# Append one comma-separated line of battery state to
# ~/Documents/BatMan/battery_log.log.
home_path = expanduser("~")
# BUG FIX: plain string concatenation produced e.g. "/home/userDocuments/..."
# because expanduser("~") has no trailing separator; os.path.join is correct.
log_file = os.path.join(home_path, "Documents", "BatMan", "battery_log.log")
with open(log_file, "a") as logfile:
    try:
        battery_status = psutil.sensors_battery()
        percentage = battery_status.percent
        secondsleft = battery_status.secsleft
        power_plugged = battery_status.power_plugged
        timestamp = time.localtime()
        s = time.strftime("%D %H:%M:%S", timestamp) + "," + str(percentage) + "," + str(
            secondsleft) + "," + str(power_plugged) + "\n"
        logfile.write(s)
    except Exception as exp:
        # Best-effort logging: report the problem and stop.
        print(exp)
        exit()

## This code takes less than 0.1 second to run.
## So it will take less than 0.1 second of CPU time in One minute(3600 seconds)
## Stores the battery logs in battery_log.log file
def detect_credentials(config_name, extra_environ=None, filenames=None,
                       aws_profile_name=None, default_value=Ellipsis):
    '''
    detect_credentials(config_name) attempts to locate Amazon S3 Bucket
    credentials from the given configuration item config_name.

    The following optional arguments are accepted:
      * extra_environ (default: None) may specify a string or a tuple
        (key_name, secret_name) or a list of strings or tuples; strings are
        treated as an additional environment variable that should be checked
        for credentials while tuples are treated as paired variables: if both
        are defined, then they are checked as separate holders of a key/secret
        pair. Note that a list of strings is considered a pair of solo
        environment variables while a tuple of strings is considered a single
        (key_name, secret_name) pair.
      * filenames (default: None) may specify a list of filenames that are
        checked in order for credentials.
      * aws_profile_name (default: None) may specify a profile name (or a list
        of names) that appears in the ~/.aws/credentials file and will be
        checked for aws_access_key_id and aws_secret_access_key values. The
        files ~/.amazon/credentials and ~/.credentials are also checked.
      * default_value (default: Ellipsis) may specify a value to return when
        no credentials are found; if this value is None, then it is always
        returned; otherwise, the value is passed through to_credentials() and
        any errors are allowed to propagate out of detect_credentials(). If
        default_value is Ellipsis then an error is simply raised stating that
        no credentials could be found.

    Search order: (1) the config item config_name; (2) the extra_environ
    environment variables; (3) the filenames (expanded with
    os.path.expanduser and os.path.expandvars); (4) the AWS credential files;
    (5) default_value.
    '''
    # Check the config first:
    if config_name is not None and config[config_name] is not None:
        return config[config_name]
    # Okay, not found there; normalize extra_environ into a list of entries.
    if extra_environ is None:
        extra_environ = []
    elif pimms.is_str(extra_environ):
        extra_environ = [extra_environ]
    elif pimms.is_vector(extra_environ):
        if pimms.is_vector(extra_environ, str):
            if len(extra_environ) == 2 and isinstance(extra_environ, _tuple_type):
                extra_environ = [extra_environ]
    elif not pimms.is_matrix(extra_environ, str):
        raise ValueError('extra_environ must be a string, tuple of strings, or list of these')
    for ee in extra_environ:
        if pimms.is_str(ee):
            if ee in os.environ:
                try:
                    # BUG FIX: referenced an undefined name `q`; the value to
                    # convert is the variable's contents.
                    return to_credentials(os.environ[ee])
                except Exception:
                    pass
        elif pimms.is_vector(ee, str) and len(ee) == 2:
            if ee[0] in os.environ and ee[1] in os.environ:
                (k, s) = [os.environ[q] for q in ee]
                # BUG FIX: garbled control flow -- skip empty values, return
                # the pair when both are non-empty.
                if len(k) == 0 or len(s) == 0:
                    continue
                return (k, s)
        else:
            raise ValueError('cannot interpret extra_environ argument: %s' % ee)
    # Okay, next we check the filenames
    if filenames is None:
        filenames = []
    elif pimms.is_str(filenames):
        filenames = [filenames]
    for flnm in filenames:
        # BUG FIX: os.expanduser/os.expandvars do not exist; use os.path.
        flnm = os.path.expanduser(os.path.expandvars(flnm))
        if os.path.isfile(flnm):
            try:
                return to_credentials(flnm)
            except Exception:
                pass
    # okay... let's check the AWS credentials file, if it exists
    if pimms.is_str(aws_profile_name):
        aws_profile_name = [aws_profile_name]
    elif aws_profile_name is None or len(aws_profile_name) == 0:
        aws_profile_name = None
    elif not pimms.is_vector(aws_profile_name, str):
        raise ValueError('Invalid aws_profile_name value: %s' % aws_profile_name)
    if aws_profile_name is not None:
        try:
            cc = confparse.ConfigParser()
            cc.read([os.path.expanduser(os.path.join('~', '.aws', 'credentials')),
                     os.path.expanduser(os.path.join('~', '.amazon', 'credentials')),
                     os.path.expanduser(os.path.join('~', '.credentials'))])
            # BUG FIX: iterated over the undefined name `aws_profile_names`.
            for awsprof in aws_profile_name:
                try:
                    aws_access_key_id = cc.get(awsprof, 'aws_access_key_id')
                    aws_secret_access_key = cc.get(awsprof, 'aws_secret_access_key')
                    return (aws_access_key_id, aws_secret_access_key)
                except Exception:
                    pass
        except Exception:
            pass
    # no match!
    if default_value is None:
        return None
    elif default_value is Ellipsis:
        if config_name is None:
            raise ValueError('No valid credentials were detected')
        else:
            raise ValueError('No valid credentials (%s) were detected' % config_name)
    else:
        return to_credentials(default_value)
<a>${POSTID}</a> </span> <a href="#" class="postMenuBtn" title="Post menu" data-cmd="post-menu">▶</a> </div> ${IMAGE} <blockquote class="postMessage" id="m${POSTID}">${CONTENT}</blockquote> </div> </div> """.decode(
        "utf-8"
    )
)
# NOTE(review): the text above is the tail of a triple-quoted HTML template
# whose opening quote lies outside this excerpt.

# Did we specify a JSON file?
if len(sys.argv) > 1 and sys.argv[1][-4:] == "json":
    # NOTE(review): os.expanduser should be os.path.expanduser (and is a
    # no-op here anyway, since os.getcwd() is already absolute).
    JSON_LOC = os.expanduser("%s/%s" % (os.getcwd(), sys.argv[1]))
    # NOTE(review): os.exists does not exist; os.path.exists is intended.
    if not os.exists(JSON_LOC):
        print "JSON file %s not found at %s" % (sys.argv[1], JSON_LOC)
        sys.exit(1)
else:
    # Find the json file in the current directory
    files = [f for f in os.listdir(os.getcwd()) if f[-4:] == "json" and f[-11:] != "-fetch.json"]
    if len(files) == 1:
        JSON_LOC = "%s/%s" % (os.getcwd(), files[0])
    else:
        print "Multiple JSON files were found: %s" % (files)
        sys.exit(1)
print "Reading from %s" % JSON_LOC
# Output file lives next to the chosen JSON file.
HTML_FILE = codecs.open("%s/out.html" % os.path.dirname(JSON_LOC), "w", encoding="utf-8")
def addtobdb(smimol, sminame):
    """Add a binding species to the user's custom Bind database (Python-3 variant).

    smimol: SMILES string or .mol/.xyz file path; sminame: database name.
    Returns an error-message string on failure.
    """
    globs = globalvars()
    # Make sure a writeable custom database path is configured.
    if not globs.custom_path or not os.path.exists(str(globs.custom_path)):
        print(
            'To add to database, you need to set a custom path. Please enter a writeable file path:'
        )
        # BUG FIX: eval(input(...)) executed arbitrary user input; a plain
        # input() is all that is needed (and is safe).
        new_path = input('path=')
        globs.add_custom_path(new_path)
        copy_to_custom_path()
    bpath = globs.custom_path + "/Bind/bind.dict"
    bindcores = readdict(bpath)
    bind_folder = globs.custom_path + "/Bind/"
    # check if binding species exists
    if sminame in list(bindcores.keys()):
        emsg = 'Molecule ' + sminame + ' already existing in binding species database.'
        return emsg
    # convert to unicode
    smimol = unicodedata.normalize('NFKD', smimol).encode('ascii',
                                                          'ignore').decode()
    sminame = unicodedata.normalize('NFKD', sminame).encode('ascii',
                                                            'ignore').decode()
    if '~' in smimol:
        # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
        smimol = smimol.replace('~', os.path.expanduser('~'))
    # convert ligand from smiles/file
    bind, bsmi, emsg = bind_load(smimol, bindcores)
    if emsg:
        return emsg
    bind.convert2mol3D()  # convert to mol3D
    # create shortname for the dictionary entry
    if len(sminame) > 5:
        shortname = sminame[0:3] + sminame[-2:]
    else:
        shortname = sminame
    # copy/write structure file into the Bind directory, build the new entry
    if '.mol' in smimol:
        shutil.copy2(smimol, bind_folder + sminame + '.mol')
        snew = sminame + ':' + sminame + '.mol,' + shortname + ','
    elif '.xyz' in smimol:
        shutil.copy2(smimol, bind_folder + sminame + '.xyz')
        snew = sminame + ':' + sminame + '.xyz,' + shortname + ','
    elif bind.OBmol:
        # write smiles file in Bind species directory
        bind.OBmol.write('smi', bind_folder + sminame + '.smi')
        snew = sminame + ':' + sminame + '.smi,' + shortname + ','
    else:
        # write xyz file in Bind species directory
        bind.writexyz(bind_folder + sminame + '.xyz')  # write xyz file
        snew = sminame + ':' + sminame + '.xyz,' + shortname + ','
    # update dictionary: keep the header line, store entries sorted
    with open(bpath, 'r') as f:
        ss = f.read().splitlines()
    ss.append(snew)
    ssort = sorted(ss[1:])
    with open(bpath, 'w') as f:
        f.write(ss[0] + '\n')
        for s in ssort:
            f.write(s + '\n')
    return emsg
# --- top-level launch script: connect to, or spawn, a debug target ---
signal.signal(signal.SIGINT, handler_sigint)
if platform.system() == 'Windows':
    adjust_ctrl_c()

adapter = None
if not sys.argv[1:]:
    raise Exception('specify target on command line')
arg1 = sys.argv[1]

# does it look like <server>:<port> ?
if re.match(r'^.*:\d+$', arg1):
    (host, port) = arg1.split(':')
    adapter = gdblike.connect_sense(host, int(port))
# otherwise treat as a path
else:
    if '~' in arg1:
        # BUG FIX: os.expanduser does not exist; use os.path.expanduser.
        arg1 = os.path.expanduser(arg1)
    arg1 = os.path.abspath(arg1)
    if not os.path.exists(arg1):
        raise Exception('file not found: %s' % arg1)
    adapter = DebugAdapter.get_adapter_for_current_system()
    adapter.setup()
    target = arg1
    target_args = ['']
    if '--terminal' in sys.argv[2:]:
        adapter.exec(arg1, target_args, terminal=True)
    else:
        adapter.exec(arg1, target_args)

arch = adapter.target_arch()
def launch_instance(ami='ami-7341831a', instance_type='t1.micro', key_name='paws', key_extension='.pem', key_dir='~/.ssh', group_name='paws', ssh_port=22, cidr='0.0.0.0/0', tag='paws', user_data=None, cmd_shell=True, login_user='******', ssh_passwd=None): """ Launch an instance and wait for it to start running. Returns a tuple consisting of the Instance object and the CmdShell object, if request, or None. ami The ID of the Amazon Machine Image that this instance will be based on. Default is a 64-bit Amazon Linux EBS image. instance_type The type of the instance. key_name The name of the SSH Key used for logging into the instance. It will be created if it does not exist. key_extension The file extension for SSH private key files. key_dir The path to the directory containing SSH private keys. This is usually ~/.ssh. group_name The name of the security group used to control access to the instance. It will be created if it does not exist. ssh_port The port number you want to use for SSH access (default 22). cidr The CIDR block used to limit access to your instance. tag A name that will be used to tag the instance so we can easily find it later. user_data Data that will be passed to the newly started instance at launch and will be accessible via the metadata service running at http://169.254.169.254. cmd_shell If true, a boto CmdShell object will be created and returned. This allows programmatic SSH access to the new instance. login_user The user name used when SSH'ing into new instance. The default is 'ec2-user' ssh_passwd The password for your SSH key if it is encrypted with a passphrase. """ cmd = None # Create a connection to EC2 service. # You can pass credentials in to the connect_ec2 method explicitly # or you can use the default credentials in your ~/.boto config file # as we are doing here. ec2 = boto.connect_ec2() # Check to see if specified keypair already exists. 
# If we get an InvalidKeyPair.NotFound error back from EC2, # it means that it doesn't exist and we need to create it. try: key = ec2.get_all_key_pairs(keynames=[key_name])[0] except ec2.ResponseError, e: if e.code == 'InvalidKeyPair.NotFound': print 'Creating keypair: %s' % key_name # Create an SSH key to use when logging into instances. key = ec2.create_key_pair(key_name) # Make sure the specified key_dir actually exists. # If not, create it. key_dir = os.expanduser(key_dir) key_dir = os.expandvars(key_dir) if not os.path.isdir(key_dir): os.mkdir(key_dir, 0700) # AWS will store the public key but the private key is # generated and returned and needs to be stored locally. # The save method will also chmod the file to protect # your private key. key.save(key_dir) else: raise
# NOTE(review): this excerpt begins inside an if/elif chain -- the matching
# "if" header (connect-to-<host>:<port> branch) is above this view.
    adjust_ctrl_c()
    (host, port) = tok.split(':')
    adapter = gdblike.connect_sense(host, int(port))
# otherwise treat as a path
else:
    adjust_ctrl_c()
    terminal = False
    if tok=='--terminal':
        terminal = True
        tok = args.pop()
    # determine target path
    fpath = tok
    if '~' in tok:
        # NOTE(review): os.expanduser does not exist (expanduser lives in
        # os.path) -- this line raises AttributeError at runtime.  Should be
        # os.path.expanduser; flagged only, code unchanged.
        fpath = os.expanduser(fpath)
    fpath = os.path.abspath(fpath)
    if not os.path.exists(fpath):
        raise Exception('file not found: %s' % fpath)
    adapter = DebugAdapter.get_adapter_for_current_system()
    # remaining debugger args become target args
    target_args = list(reversed(args))
    print(target_args)
    if terminal:
        adapter.exec(fpath, target_args, terminal=True)
    else:
        adapter.exec(fpath, target_args)

arch = adapter.target_arch()
#!/usr/bin/python
# Mark duplicate incoming mail (matched by Message-ID) so a mail filter can
# act on the rewritten Subject header.
import time, sys, os, email

now = time.time()

# get archive of previously-seen message-ids and times
# FIX: expanduser lives in os.path, not os (os.expanduser raises
# AttributeError).
kde_dir = os.path.expanduser('~/.kde')
if not os.path.isdir(kde_dir):
    os.mkdir(kde_dir)
arfile = os.path.join(kde_dir, 'duplicate_mails')
duplicates = {}
try:
    archive = open(arfile)
except IOError:
    # no archive yet: treat every message as fresh
    pass
else:
    for line in archive:
        when, msgid = line[:-1].split(' ', 1)
        duplicates[msgid] = float(when)
    archive.close()
redo_archive = False

# suck message in from stdin and study it
msg = email.message_from_file(sys.stdin)
msgid = msg['Message-ID']
if msgid:
    if msgid in duplicates:
        # duplicate message: alter its subject
        subject = msg['Subject']
        if subject is None:
            msg['Subject'] = '**** DUP **** ' + msgid
        else:
            # headers must be deleted before re-adding to avoid duplicates
            del msg['Subject']
            msg['Subject'] = '**** DUP **** ' + subject
# NOTE(review): the script continues beyond this excerpt (emit the message,
# update the archive); only the visible portion is reproduced.
def addtoldb(smimol,sminame,smident,smicat,smigrps,smictg,ffopt):
    """Add a ligand to the user ligand database.

    smimol: SMILES string or .mol/.xyz file path of the ligand.
    sminame: database key and output file stem.
    smident: ligand denticity (used when no connection atoms are given).
    smicat: space/comma/tab separated 1-based connection-atom indices.
    smigrps: ligand group labels; smictg: ligand category.
    ffopt: force-field option ("A", "B" or "BA").
    Returns False on success or an error-message string on failure.
    """
    emsg = False
    globs = globalvars()
    if not globs.custom_path or not os.path.exists(str(globs.custom_path)):
        print('To add to database, you need to set a custom path. Please enter a writeable file path:')
        new_path = input('path=')
        globs.add_custom_path(new_path)
        copy_to_custom_path()
    lipath = globs.custom_path + "/Ligands/ligands.dict"
    licores = readdict(lipath)
    ligands_folder = globs.custom_path + "/Ligands/"
    print("ligands_folder is : " + str(ligands_folder))
    # check if ligand exists
    if sminame in licores.keys():
        emsg = 'Ligand '+sminame+' already existing in ligands database.'
        emsg += ' To replace, delete the existing entry first.'
        return emsg
    else:
        # get connection atoms
        ccats = filter(None,re.split(' |,|\t',smicat))
        # get groups
        groups = filter(None,re.split(' |,|\t',smigrps))
        grp = 'all '+' '.join(groups)
        grp += ' '+smictg
        if smicat=='':
            # no explicit connection atoms: use the first smident atoms
            cats = range(0,int(smident))
        else:
            cats = [int(a)-1 for a in ccats]  # convert to 0-based indices
        cs = [str(a) for a in cats]
        css = ' '.join(cs)
        # convert to unicode
        smimol = unicodedata.normalize('NFKD',smimol).encode('ascii','ignore')
        sminame = unicodedata.normalize('NFKD',sminame).encode('ascii','ignore')
        if '~' in smimol:
            # FIX: expanduser lives in os.path, not os (os.expanduser
            # raises AttributeError).
            smimol = smimol.replace('~',os.path.expanduser('~'))
        # convert ligand from smiles/file
        lig,emsg = lig_load(smimol,licores)
        if emsg:
            return emsg
        lig.convert2mol3D() # convert to mol3D
        shortname = sminame
        print("smimol is "+str(smimol))
        print("sminame is "+str(sminame))
        # sanitize ff options:
        # NOTE(review): the warning lists "N" as an option but the check
        # rejects it -- confirm which set is intended.
        if not ffopt in ["A","B","BA"]:
            # FIX: the warning string was broken across a raw newline in
            # the source; rejoined into one literal.
            print('warning: incompatible ffopt choice. Options are ' + str(["A","B","BA","N"]))
            sys.exit(1)
        # new entry for dictionary
        if '.mol' in smimol:
            shutil.copy2(smimol,ligands_folder + sminame+'.mol')
            snew = sminame+':'+sminame+'.mol,'+shortname+','+css+','+grp+','+ffopt
        elif '.xyz' in smimol:
            shutil.copy2(smimol,ligands_folder + sminame+'.xyz')
            snew = sminame+':'+sminame+'.xyz,'+shortname+','+css+','+grp+','+ffopt
        elif lig.OBMol:
            # write smiles file in Ligands directory
            obConversion = openbabel.OBConversion()
            obConversion.SetOutFormat("smi")
            red = obConversion.Read(lig.OBMol)
            obConversion.WriteFile(lig.OBMol,ligands_folder + sminame+'.smi')
            #lig.OBMol.write('smi',ligands_folder + sminame+'.smi')
            snew = sminame+':'+sminame+'.smi,'+shortname+','+css+','+grp+','+ffopt
        else:
            # write xyz file in Ligands directory
            lig.writexyz(ligands_folder+sminame+'.xyz') # write xyz file
            snew = sminame+':'+sminame+'.xyz,'+shortname+','+css+','+grp+','+ffopt
        # update dictionary: preserve the header line, keep entries sorted
        f = open(lipath,'r')
        ss = f.read().splitlines()
        f.close()
        f = open(lipath,'w')
        ss.append(snew)
        ssort = sorted(ss[1:])
        f.write(ss[0]+'\n')
        for s in ssort:
            f.write(s+'\n')
        f.close()
    return emsg
"""SWMM shared-object (ctypes) interface: module imports and constants.

UPDATED BY:
    Brett Buzzanga, 10.15.2016

SWMM compiled as SO (unix-like) to get infiltration, evaporation, head,
and theta (soil moisture), and to set groundwater head and moisture
content.
"""
# ------------------------- MODULES ---------------------------
from ctypes import c_double, CDLL, c_float, pointer  # Required to handle SO variables
from time import time  # Required to get computational times.
import os
from os import remove  # Required to clear info file.
import math, re  # Used to create .rpt and .out paths

# ------------------------ CONSTANTS ---------------------------
# path to SO
# FIX: expanduser lives in os.path, not os (os.expanduser raises
# AttributeError at import time).
SO_loc = os.path.join(os.path.expanduser('~'), 'Software_Thesis', 'SWMM',
                      'swmm_so', 'source5_1_011', 'swmm5')  ### identical to engine
#SO_loc = os.path.join(os.path.expanduser('~'), 'Software_Thesis', 'SWMM',
#                      'swmm_so', 'source_orig', 'swmm5')

# Types of objects
JUNCTION = 0
SUBCATCH = 1
NODE = 2
LINK = 3
STORAGE = 4
ORIFICE = 414
OUTFALL = 417
def get_dbpath(app):
    """Return the app's database directory, creating it if needed.

    Reads the 'dbpath' setting from app.conf (default ~/.local/db/tui),
    expands '~', and ensures the directory exists.
    """
    pth = app.conf.get('dbpath', '~/.local/db/tui')
    # FIX: expanduser lives in os.path, not os (os.expanduser raises
    # AttributeError).
    pth = os.path.expanduser(pth)
    if not os.path.exists(pth):
        os.makedirs(pth)
    # FIX: the function computed the path but never returned it.
    return pth
def get_default_path ():
    """Return the user's default documents directory."""
    import platform
    if platform.system() == 'Windows':
        # FIX: platform.version() is a string like "5.1.2600"; compare the
        # numeric major version, not the first character (str < int raises
        # TypeError on Python 3).  Also use os.path.expanduser --
        # os.expanduser does not exist.
        if int(platform.version().split('.')[0]) < 6:
            return os.path.join(os.path.expanduser('~'), 'My Documents')  # Windows XP
    return os.path.join(os.path.expanduser('~'), 'Documents')
# TODO NEXT
# * write EQ5 scenario
# * compile to single web site or exe

CHEAT_MODE = True

HOST = 'http://127.0.0.1'
PORT = 2125
PREFIX = 'v1/'
COOKIE_KEY = 'fb2_eq5'
DIRECTIONS = {'n': 'north', 's': 'south', 'e': 'east', 'w': 'west'}
# Matches $cmd/thing/key/arg$ template tags (trailing parts optional).
_RE_TAGS = re.compile(r'\$(?P<cmd>\w*)(/(?P<thing>\w*)(/(?P<key>\w*)(/(?P<arg>[^$]*))?)?)?\$')
# FIX: expanduser lives in os.path, not os (os.expanduser raises
# AttributeError at import time).
SAVE_DIR = os.path.expanduser('~/.false_prophet')


def _decode_context(raw):
    """Decode a base64-encoded JSON context blob; pass falsy values through."""
    if not raw:
        return raw
    return json.loads(
        base64.b64decode(raw).decode('utf-8'))


class ReplyError(Exception):
    """Raised when the server returns an unusable reply."""
    pass


def _get(session, endpoint):
    """GET an API endpoint on the game server and return the body as text."""
    url = '{}:{}/{}{}'.format(HOST, PORT, PREFIX, endpoint)
    resp = session.get(url)
    return resp.text
#!/usr/bin/env python import sys import getpass import subprocess import os def exit_error(error): print >> sys.stderr, error exit(1) iniFile = os.expanduser(path.join("~", ".get-shit-done.ini")) restartNetworkingCommand = ["/etc/init.d/networking", "restart"] hostsFile = '/etc/hosts' startToken = '## start-gsd' endToken = '## end-gsd' siteList = [ 'reddit.com', 'forums.somethingawful.com', 'somethingawful.com', 'digg.com', 'break.com', 'news.ycombinator.com', 'infoq.com', 'bebo.com', 'twitter.com', 'facebook.com', 'blip.com', 'youtube.com', 'vimeo.com', 'delicious.com', 'flickr.com', 'friendster.com', 'hi5.com', 'linkedin.com', 'livejournal.com', 'meetup.com', 'myspace.com', 'plurk.com', 'stickam.com', 'stumbleupon.com', 'yelp.com', 'slashdot.com' ] if os.path.exists(iniFile): iniF = open(iniFile) try: for line in iniF:
import os

# Root directory shared by all pipe workers' data.
# FIX: expanduser lives in os.path, not os (os.expanduser raises
# AttributeError at import time).
PIPE_DATA_DIR = os.path.expanduser("~/dev/data/pipe")
os.makedirs(PIPE_DATA_DIR, exist_ok=True)


class PipeWorker:
    """
    Base class for pipe worker, responsible for setting up data directory

    TODO
     * connecting pipelines
    """

    def __init__(self, name):
        # Each worker owns its own subdirectory under PIPE_DATA_DIR.
        self.data_dir = os.path.join(PIPE_DATA_DIR, name)
        os.makedirs(self.data_dir, exist_ok=True)
#!/usr/bin/env python import sys import getpass import subprocess import os def exit_error(error): print >> sys.stderr, error exit(1) iniFile = os.expanduser(path.join("~", ".get-shit-done.ini")) restartNetworkingCommand = ["/etc/init.d/networking", "restart"] hostsFile = '/etc/hosts' startToken = '## start-gsd' endToken = '## end-gsd' siteList = ['reddit.com', 'forums.somethingawful.com', 'somethingawful.com', 'digg.com', 'break.com', 'news.ycombinator.com', 'infoq.com', 'bebo.com', 'twitter.com', 'facebook.com', 'blip.com', 'youtube.com', 'vimeo.com', 'delicious.com', 'flickr.com', 'friendster.com', 'hi5.com', 'linkedin.com', 'livejournal.com', 'meetup.com', 'myspace.com', 'plurk.com', 'stickam.com', 'stumbleupon.com', 'yelp.com', 'slashdot.com'] if os.path.exists(iniFile): iniF = open(iniFile) try: for line in iniF: key, value = map(str.strip, line.split("=", 1)) if key == "sites": siteList = [value]
# NOTE(review): this excerpt begins mid-call; the reconstructed opening
# argument name below ("--ul") is inferred from const='ul' and the help
# text -- confirm against the original file.
parser.add_argument('--ul',
                    action='store_const', const='ul', dest='origin',
                    help="use upper left-hand corner as grid origin [default]")
parser.add_argument('--ll',
                    action='store_const', const='ll', dest='origin',
                    help="use lower left-hand corner as grid origin")
parser.add_argument('-i', '--indexes',
                    action='store_true', default=False, dest='show_indexes',
                    help='show grid cell indexes instead of coordinates')
parser.add_argument('ncols', type=int,
                    help="number of columns in the grid (i.e., the x-dimension)")
parser.add_argument('nrows', type=int,
                    help="number of rows in the grid (i.e., the y-dimension)")
args = parser.parse_args()

if args.output_file is None:
    out = sys.stdout
else:
    # FIX: os.expandpaths does not exist (os.path.expandvars was meant),
    # expanduser lives in os.path, and an output file must be opened for
    # writing -- open() defaults to read mode.
    out = open(os.path.expanduser(os.path.expandvars(args.output_file)), 'w')

out.write(grid_coords(args.ncols, args.nrows, args.origin=='ul',
                      as_indexes=args.show_indexes))
# NOTE(review): this excerpt opens mid-function -- the enclosing "def" is
# above this view.  Tokens below are unchanged; only comments are added.
    fslist = [efs2(path) for path in pathlist]
    return decode_from_filesystem(os.path.commonprefix(fslist))

def expanduser(path):
    # Expand '~' then decode the result from the filesystem encoding
    # (Python 2 script: distinguishes str vs unicode).
    user_path = os.path.expanduser(path)
    print "expanduser: user_path=", user_path
    print "str?", isinstance(user_path,str)
    print "unicode?", isinstance(user_path,unicode)
    return decode_from_filesystem(user_path)

if __name__ == "__main__":
    # unit test.
    n_tests = n_tests_passed = 0
    n_tests += 1
    # NOTE(review): os.expanduser does not exist (expanduser lives in
    # os.path) -- both uses below raise AttributeError at runtime; should
    # be os.path.expanduser.  Flagged only, code unchanged.
    if expanduser(u"~") == os.expanduser("~"):
        n_tests_passed += 1
    else:
        print "FAIL!! expanduser returned %s when expected %s" % (
            expanduser(u"~"), os.expanduser("~"))
    # NOTE(review): creates ./foo in the cwd and never removes it.
    os.mkdir("foo")
    n_tests += 1
    if exists(u"foo" ):
        n_tests_passed += 1
    else:
        print "FAIL!! exists didn't find 'foo'."
    n_tests += 1
    if isdir(u"foo"):
        n_tests_passed += 1
def get_cwd():
    """Return the directory containing the script being executed."""
    filename = sys.argv[0]
    # FIX: dirname, abspath and expanduser all live in os.path, not os --
    # the original raised AttributeError on every call.
    return os.path.dirname(os.path.abspath(os.path.expanduser(filename)))
def __init__(self, path=None, shortargs=False):
    """Optionally change into *path* and remember the settings.

    path: optional working directory; env vars and '~' are expanded and
    the process chdirs into it when given.
    shortargs: flag stored verbatim on the instance.
    """
    if path:
        # FIX: expandvars and expanduser live in os.path, not os --
        # os.expandvars / os.expanduser raise AttributeError.
        path = os.path.expandvars(os.path.expanduser(path))
        os.chdir(path)
    self.path = path
    self.shortargs = shortargs
import csv import os base_data_dir = os.expanduser("~") + "/grasp_data_processed" similar_description_dir = base_data_dir + "/grasp_similarities" joint_data_dir = base_data_dir + "/grasp_joints" contact_data_dir = base_data_dir + "/grasp_contacts" output_dir = base_data_dir + "/results" if __name__ == "__main__": print "Stat mesher online." obj_num = int(raw_input("Obj num: ")) sub_num = int(raw_input("Sub num: ")) grasp_set_id = int(raw_input("Similar Grasp Set ID: ")) similar_path = similar_description_dir + "/" + "obj" + str(obj_num) + "_sub" + str(sub_num) + "_obj5_similar_grasp_" + str(grasp_set_id) + ".csv" out_path = output_dir + "/" + "obj" + str(obj_num) + "_sub" + str(sub_num) + "_master" + str(grasp_set_id) out_file = open(out_path, "w") out_csv = csv.writer with open(similar_path, 'r') as similar_file: similar_csv = csv.reader(similar_file, delimiter=",")
<span class="dateTime" data-utc="${TIMESTAMP}" title="Timezone: UTC-5">${DATE}</span>
        <span class="postNum desktop">
            <a href="#p${POSTID}" title="Link to this post">No.</a>
            <a>${POSTID}</a>
        </span>
        <a href="#" class="postMenuBtn" title="Post menu" data-cmd="post-menu">▶</a>
    </div>
    ${IMAGE}
    <blockquote class="postMessage" id="m${POSTID}">${CONTENT}</blockquote>
    </div>
</div>
""".decode("utf-8"))

# Did we specify a JSON file?
# NOTE(review): os.expanduser and os.exists do not exist (they live in
# os.path) -- both lines below raise AttributeError at runtime.  Should be
# os.path.expanduser / os.path.exists; the expanduser call is also a no-op
# on a cwd-based path.  Flagged only; code unchanged.
if len(sys.argv) > 1 and sys.argv[1][-4:] == "json":
    JSON_LOC = os.expanduser("%s/%s" % (os.getcwd(), sys.argv[1]))
    if not os.exists(JSON_LOC):
        print "JSON file %s not found at %s" % (sys.argv[1], JSON_LOC)
        sys.exit(1)
else:
    # Find the json file in the current directory
    files = [ f for f in os.listdir(os.getcwd()) if f[-4:] == "json" and f[-11:] != "-fetch.json" ]
    if len(files) == 1:
        JSON_LOC = "%s/%s" % (os.getcwd(), files[0])
    else:
        print "Multiple JSON files were found: %s" % (files)
        sys.exit(1)

print "Reading from %s" % JSON_LOC
def addtoldb(smimol, sminame, smident, smicat, smigrps, smictg, ffopt):
    """Add a ligand to the user ligand database (Python 3 variant).

    smimol: SMILES string or .mol/.xyz file path of the ligand.
    sminame: database key and output file stem.
    smident: ligand denticity (used when no connection atoms are given).
    smicat: space/comma/tab separated 1-based connection-atom indices.
    smigrps: ligand group labels; smictg: ligand category.
    ffopt: force-field option ("A", "B" or "BA").
    Returns False on success or an error-message string on failure.
    """
    emsg = False
    globs = globalvars()
    if not globs.custom_path or not os.path.exists(str(globs.custom_path)):
        print(
            'To add to database, you need to set a custom path. Please enter a writeable file path:'
        )
        # FIX: was eval(input(...)) -- eval on user input is dangerous and
        # mangles plain filesystem paths; the raw string is what we want.
        new_path = input('path=')
        globs.add_custom_path(new_path)
        copy_to_custom_path()
    lipath = globs.custom_path + "/Ligands/ligands.dict"
    licores = readdict(lipath)
    ligands_folder = globs.custom_path + "/Ligands/"
    print(("ligands_folder is : " + str(ligands_folder)))
    # check if ligand exists
    if sminame in list(licores.keys()):
        emsg = 'Ligand ' + sminame + ' already existing in ligands database.'
        emsg += ' To replace, delete the existing entry first.'
        return emsg
    else:
        # get connection atoms
        ccats = [_f for _f in re.split(' |,|\t', smicat) if _f]
        # get groups
        groups = [_f for _f in re.split(' |,|\t', smigrps) if _f]
        grp = 'build ' + ' '.join(groups)
        grp += ' ' + smictg
        if smicat == '':
            # no explicit connection atoms: use the first smident atoms
            cats = list(range(0, int(smident)))
        else:
            cats = [int(a) - 1 for a in ccats]  # convert to 0-based indices
        cs = [str(a) for a in cats]
        css = ' '.join(cs)
        # normalize names to plain ASCII strings
        smimol = unicodedata.normalize('NFKD', str(smimol)).encode(
            'ascii', 'ignore').decode()
        sminame = unicodedata.normalize('NFKD', str(sminame)).encode(
            'ascii', 'ignore').decode()
        if '~' in smimol:
            # FIX: expanduser lives in os.path, not os (os.expanduser
            # raises AttributeError).
            smimol = smimol.replace('~', os.path.expanduser('~'))
        # convert ligand from smiles/file
        lig, emsg = lig_load(smimol, licores)
        if emsg:
            return emsg
        lig.convert2mol3D()  # convert to mol3D
        shortname = sminame
        print(("smimol is " + str(smimol)))
        print(("sminame is " + str(sminame)))
        # sanitize ff options:
        # NOTE(review): the warning lists "N" as an option but the check
        # rejects it -- confirm which set is intended.
        if not ffopt in ["A", "B", "BA"]:
            # FIX: the warning string was broken across a raw newline in
            # the source; rejoined into one literal.
            print(('warning: incompatible ffopt choice. Options are ' +
                   str(["A", "B", "BA", "N"])))
            sys.exit(1)
        # new entry for dictionary
        if '.mol' in smimol:
            shutil.copy2(smimol, ligands_folder + sminame + '.mol')
            snew = str(sminame) + ':' + str(sminame) + '.mol,' + str(
                shortname) + ',' + str(css) + ',' + str(grp) + ',' + str(
                    ffopt) + ',' + str(lig.charge)
        elif '.xyz' in smimol:
            shutil.copy2(smimol, ligands_folder + sminame + '.xyz')
            snew = str(sminame) + ':' + str(sminame) + '.xyz,' + str(
                shortname) + ',' + str(css) + ',' + str(grp) + ',' + str(
                    ffopt) + ',' + str(lig.charge)
        elif lig.OBMol:
            # write smiles file in Ligands directory
            obConversion = openbabel.OBConversion()
            obConversion.SetOutFormat("smi")
            red = obConversion.Read(lig.OBMol)
            obConversion.WriteFile(lig.OBMol, ligands_folder + sminame + '.smi')
            #lig.OBMol.write('smi',ligands_folder + sminame+'.smi')
            snew = str(sminame) + ':' + str(sminame) + '.smi,' + str(
                shortname) + ',' + str(css) + ',' + str(grp) + ',' + str(
                    ffopt) + ',' + str(lig.charge)
        else:
            # write xyz file in Ligands directory
            lig.writexyz(ligands_folder + sminame + '.xyz')  # write xyz file
            snew = str(sminame) + ':' + str(sminame) + '.xyz,' + str(
                shortname) + ',' + str(css) + ',' + str(grp) + ',' + str(
                    ffopt) + ',' + str(lig.charge)
        # update dictionary: preserve the header line, keep entries sorted
        f = open(lipath, 'r')
        ss = f.read().splitlines()
        f.close()
        f = open(lipath, 'w')
        ss.append(snew)
        ssort = sorted(ss[1:])
        f.write(ss[0] + '\n')
        for s in ssort:
            f.write(s + '\n')
        f.close()
    return emsg
import os import urllib2 import pydelicious from StringIO import StringIO DLCS_CACHE = os.expanduser('~/.dlcs-cache/') class CachedResponse(StringIO): def info(self): return self.headers def geturl(self): return self.url class CachedHandler(urllib2.BaseHandler): def __init__(self, cachedir): self.cachedir = cachedir def default_open(self, request): if request.get_method() != 'GET': return None url = quote(request.get_full_url(), '') path = os.path.join(self.cachedir, url) if os.path.exists(path): f = open(path) data = email.message_from_file(f) if data.get('x-cache-md5') is None: