def parseblocks(blocklycode): # this is where blockly code is parsed into a python file with the command list # saved in memory for transimission. global webotcommands panya = Panya() t = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") savedir = os.path.join(sdir, g.user.nickname, "sketches") if not os.path.exists(savedir): os.mkdir(savedir) filename = os.path.join(savedir, t + ".py") print "Saving python code to ", filename target = open(filename, "w") target.write(blocklycode) target.close() # We now compile the generated python strings in the blocklycode # into bytecode and execute the resultant .pyc through the exec function # in our current namespace (I can't figure out a better way to have the # webot class instance variables accessible) # Read about caveats here - http://lucumr.pocoo.org/2011/2/1/exec-in-python/ compiledcode = compile(blocklycode, "<string>", "exec") exec compiledcode print webotcommands sessionresponse = portsetup(webotcommands) webotcommands = "" return sessionresponse
def run(self):
    """Render per-command usage documentation into docs/usage/*.rst.inc."""
    print('generating usage docs')
    # allows us to build docs without the C modules fully loaded during help generation
    from borg.archiver import Archiver
    parser = Archiver().build_parser(prog='borg')
    # Collect every subcommand sub-parser, keyed by command name.
    choices = {}
    for action in parser._actions:
        if action.choices is None:
            continue
        choices.update(action.choices)
    print('found commands: %s' % list(choices.keys()))
    if not os.path.exists('docs/usage'):
        os.mkdir('docs/usage')
    for command, subparser in choices.items():
        print('generating help for %s' % command)
        with open('docs/usage/%s.rst.inc' % command, 'w') as doc:
            if command == 'help':
                # 'borg help' gets one section per stored help topic.
                for topic in Archiver.helptext:
                    params = {"topic": topic,
                              "underline": '~' * len('borg help ' + topic)}
                    doc.write(".. _borg_{topic}:\n\n".format(**params))
                    doc.write("borg help {topic}\n{underline}\n::\n\n".format(**params))
                    doc.write(Archiver.helptext[topic])
            else:
                params = {"command": command,
                          "underline": '-' * len('borg ' + command)}
                doc.write(".. _borg_{command}:\n\n".format(**params))
                doc.write("borg {command}\n{underline}\n::\n\n".format(**params))
                # Temporarily detach the epilog so format_help() emits only
                # the synopsis; the epilog becomes the Description section.
                epilog = subparser.epilog
                subparser.epilog = None
                doc.write(re.sub("^", " ", subparser.format_help(), flags=re.M))
                doc.write("\nDescription\n~~~~~~~~~~~\n")
                doc.write(epilog)
def copy_to_dist(self):
    """Scan the build script for the library-import section and copy the
    referenced library modules into the dist directory.

    The section is delimited by the line containing
    ``sys.path.append('<build_script_lib>')`` and the line containing
    ``#end <build_script_lib> imports``.  Inside the section, ``from X ...``
    lines cause ``<lib>\\X.py`` to be copied; plain ``import`` lines copy
    the build script itself.
    NOTE(review): paths are joined with a literal backslash, so this is
    Windows-only as written.
    """
    print("Copy files of "+self.build_script+" to",self.build_path+self.build_dist)
    with open(self.build_path+self.build_script) as script:
        line_count = 0
        in_lib = False
        for line in script:
            print(line_count,line)
            line_count += 1
            if(("sys.path.append('"+self.build_script_lib+"')") in line):
                # Entered the library-import section.
                in_lib = True
            elif("#end "+self.build_script_lib+" imports" in line):
                # End marker: nothing after this matters.
                in_lib = False
                break
            elif(in_lib):
                if(line.startswith("from")):
                    # 'from X import ...' -> copy lib/X.py into dist.
                    lineSplit = line.split(" ")
                    src = self.build_path+self.build_script_lib+"\\"+lineSplit[1]+".py"
                    dest = self.build_path+self.build_dist+"\\"+lineSplit[1]+".py"
                    dest_path = self.build_path+self.build_dist
                    print(src,"to",dest)
                    if(os.path.exists(dest_path)):
                        copyfile(src,dest)
                    else:
                        # First copy: create the dist directory on demand.
                        print("Making dest: ", dest_path)
                        os.mkdir(dest_path)
                        copyfile(src,dest)
                elif(line.startswith("import")):
                    # NOTE(review): this copies the build script itself, not
                    # the imported module — looks intentional but verify.
                    src = self.build_path + self.build_script
                    dest = self.build_path + self.build_dist + "\\"+ self.build_script
                    copyfile(src,dest)
def newprovision(names, session, smbconf, provdir, logger):
    """Create a new provision.

    This provision will be the reference for knowing what has changed
    since the latest upgrade of the current provision.  Any existing
    directory at *provdir* is removed first.

    :param names: List of provision parameters
    :param session: Session object
    :param smbconf: Path to the smb.conf file
    :param provdir: Directory where the provision will be stored
    :param logger: A Logger
    :return: the object returned by provision()
    """
    if os.path.isdir(provdir):
        # Start from a clean target directory.
        shutil.rmtree(provdir)
    os.mkdir(provdir)
    logger.info("Provision stored in %s", provdir)
    return provision(logger, session, smbconf=smbconf, targetdir=provdir,
                     samdb_fill=FILL_FULL, realm=names.realm,
                     domain=names.domain, domainguid=names.domainguid,
                     domainsid=names.domainsid, ntdsguid=names.ntdsguid,
                     policyguid=names.policyid, policyguid_dc=names.policyid_dc,
                     hostname=names.netbiosname.lower(), hostip=None,
                     hostip6=None, invocationid=names.invocation,
                     adminpass=names.adminpass, krbtgtpass=None,
                     machinepass=None, dnspass=None, root=None, nobody=None,
                     users=None,
                     serverrole="domain controller",
                     backend_type=None, ldapadminpass=None, ol_mmr_urls=None,
                     slapd_path=None,
                     dom_for_fun_level=names.domainlevel,
                     dns_backend=names.dns_backend, useeadb=True,
                     use_ntvfs=True)
def testMercurialWithNewShare(self):
    """Clone via mercurial() with a freshly created shareBase and verify
    that both the working copy and the share repo match the source
    repository's revisions."""
    shareBase = os.path.join(self.tmpdir, 'share')
    # The source repo's absolute path (leading '/' stripped) becomes the
    # share's sub-path under shareBase.
    sharerepo = os.path.join(shareBase, self.repodir.lstrip("/"))
    os.mkdir(shareBase)
    mercurial(self.repodir, self.wc, shareBase=shareBase)
    self.assertEquals(getRevisions(self.repodir), getRevisions(self.wc))
    self.assertEquals(getRevisions(self.repodir), getRevisions(sharerepo))
def testSavedModel(self):
    """Test PLS model saving.

    Assure that a saved PLS model gives the same predictions as before
    saving."""
    import shutil
    # Create a pls model
    pls = AZorngPLS.PLSLearner(self.train_data)
    # Baseline classification accuracy before persisting the model.
    Acc = evalUtilities.getClassificationAccuracy(self.test_data, pls)
    # Unique scratch dir (timestamped) so parallel runs do not collide.
    scratchdir = os.path.join(AZOC.SCRATCHDIR, "scratchdir"+str(time.time()))
    os.mkdir(scratchdir)
    try:
        modelPath = os.path.join(scratchdir, "PLSModel")
        pls.write(modelPath)
        # Read the model back in and re-score the same test set.
        plsM = AZorngPLS.PLSread(modelPath)
        savedAcc = evalUtilities.getClassificationAccuracy(self.test_data, plsM)
        # Test that the accuracy of the two classifiers is the exact same
        self.assertEqual(Acc, savedAcc)
    finally:
        # shutil.rmtree is portable and safer than shelling out to
        # "/bin/rm -rf"; runs even when an assertion above fails.
        shutil.rmtree(scratchdir, ignore_errors=True)
def __init__(self):
    """Load the translation (.lng) file matching the current locale,
    falling back to English, and ensure the per-user data directories
    (~/.local/share/pyalsavolume[/lang]) exist."""
    # Widget-id -> translated-text maps, filled from the .lng file.
    self.slider_dic = {"slider": ""}
    self.settings_dic = {"label1": "", "label2": "", "label3": "", "label4": "",
                         "label5": "", "label6": "", "label7": "", "label8": "",
                         "is_autorun": "", "iconpacks": "", "switchtree": "",
                         "dialogName": "", "column0": "", "column1": "",
                         "tabspos": ""}
    self.icon_dic = {"about_comments": "", "about_site_label": "",
                     "about_author": "", "restoreItem": "", "mixerItem": "",
                     "muteItem": "", "icon_tooltip_mute": "", "icon_tooltip": ""}
    self.CP = ConfigParser()
    self.loader = loadResFile()
    # locale.getlocale() may return (None, None) (e.g. under the C
    # locale); default to English instead of crashing on None[0:2].
    locale_name = locale.getlocale()[0]
    curr_locale = locale_name[0:2] if locale_name else "en"
    self.localepath = self.loader.get("pyalsavolume", "lang/"+"%s.lng"%curr_locale)
    if not os.path.exists(self.localepath):
        if os.path.exists(self.loader.get("pyalsavolume", "lang/en.lng")):
            # Fall back to the system-wide English translation.
            self.localepath = "/usr/share/pyalsavolume/lang/en.lng"
        else:
            os.sys.stderr.write("Path %s not exists\n" %self.localepath)
    homepath = os.environ["HOME"] + "/.local/share/pyalsavolume"
    if not os.path.exists(homepath):
        # makedirs also creates ~/.local/share on a pristine account,
        # where plain mkdir would raise.
        os.makedirs(homepath, 0o775)
    langpath = os.environ["HOME"] + "/.local/share/pyalsavolume/lang"
    if not os.path.exists(langpath):
        os.makedirs(langpath, 0o775)
    if self.localepath:
        self.CP.read(self.localepath)
    else:
        # No translation file at all: fall back to built-in defaults.
        self.setDefault()
    self.getLocale()
def test_upload_cleanup(self):
    """An upload must be rolled back when one of its writes fails."""
    dpath = mkdtemp()
    real_write = self.client.write

    def failing_write(hdfs_path, *args, **kwargs):
        # Fail only for paths under 'bar' so the first file succeeds.
        if 'bar' in hdfs_path:
            raise RuntimeError()
        return real_write(hdfs_path, *args, **kwargs)

    try:
        self.client.write = failing_write
        npath = osp.join(dpath, 'hi')
        os.mkdir(npath)
        with open(osp.join(npath, 'foo'), 'w') as writer:
            writer.write('hello!')
        os.mkdir(osp.join(npath, 'bar'))
        with open(osp.join(npath, 'bar', 'baz'), 'w') as writer:
            writer.write('world!')
        try:
            self.client.upload('foo', dpath)
        except RuntimeError:
            # The partially uploaded tree must have been removed.
            ok_(not self._exists('foo'))
        else:
            ok_(False)  # This shouldn't happen.
    finally:
        rmtree(dpath)
        self.client.write = real_write
def test_upload_with_progress(self):
    """Upload a small tree with a progress callback and verify both the
    uploaded contents and the per-file byte counts the callback saw."""

    def callback(path, nbytes, history=defaultdict(list)):
        # Deliberate mutable-default trick: `history` persists across
        # calls, accumulating the byte counts reported for each path.
        # Calling callback('', 0) later returns the full history.
        history[path].append(nbytes)
        return history

    dpath = mkdtemp()
    try:
        path1 = osp.join(dpath, 'foo')
        with open(path1, 'w') as writer:
            writer.write('hello!')
        os.mkdir(osp.join(dpath, 'bar'))
        path2 = osp.join(dpath, 'bar', 'baz')
        with open(path2, 'w') as writer:
            writer.write('the world!')
        self.client.upload(
            'up',
            dpath,
            chunk_size=4,
            n_threads=1,  # Callback isn't thread-safe.
            progress=callback
        )
        eq_(self._read('up/foo'), b'hello!')
        eq_(self._read('up/bar/baz'), b'the world!')
        # Each file reports cumulative bytes per chunk, then -1 on
        # completion; the probe call itself adds {'': [0]}.
        eq_(
            callback('', 0),
            {path1: [4, 6, -1], path2: [4, 8, 10, -1], '': [0]}
        )
    finally:
        rmtree(dpath)
def copy_wpt_tree(tree, dest):
    """Copy the working copy of a Tree to a destination directory.

    :param tree: The Tree to copy.
    :param dest: The destination directory"""
    # Replace any existing destination directory wholesale.
    if os.path.exists(dest):
        assert os.path.isdir(dest)
        shutil.rmtree(dest)
    os.mkdir(dest)
    for rel_path in tree.paths():
        src = os.path.join(tree.root, rel_path)
        dst = os.path.join(dest, rel_path)
        dst_dir = os.path.split(dst)[0]
        if os.path.isdir(src):
            # Directories are materialized lazily below as file parents.
            continue
        if not os.path.exists(dst_dir):
            os.makedirs(dst_dir)
        shutil.copy2(src, dst)
    # Drop in the harness runner and report shim alongside the tests.
    for source, destination in [("testharness_runner.html", ""),
                                ("testharnessreport.js", "resources/")]:
        shutil.copy2(os.path.join(here, os.pardir, source),
                     os.path.join(dest, destination, os.path.split(source)[1]))
    add_license(dest)
def test_init_manifest_packageid(self):
    """Verify that 'crosswalk-app manifest --package-id' writes the given
    package id into manifest.json, both on first generation and when
    updating to a different id."""
    comm.setUp()
    os.chdir(comm.XwalkPath)
    # Fresh project directory for this test.
    comm.clear("org.xwalk.test")
    os.mkdir("org.xwalk.test")
    # Generate the manifest with the initial package id.
    cmd = (
        comm.HOST_PREFIX + comm.PackTools +
        "crosswalk-app manifest " + comm.XwalkPath +
        "org.xwalk.test --platforms=android --package-id=org.xwalk.test"
    )
    os.system(cmd)
    with open(comm.ConstPath + "/../tools/org.xwalk.test/manifest.json") as json_file:
        data = json.load(json_file)
    # Re-run with a different id to exercise the update path.
    updatecmd = (
        comm.HOST_PREFIX + comm.PackTools +
        "crosswalk-app manifest " + comm.XwalkPath +
        "org.xwalk.test --platforms=android --package-id=org.test.foo"
    )
    os.system(updatecmd)
    with open(comm.ConstPath + "/../tools/org.xwalk.test/manifest.json") as json_file_update:
        updatedata = json.load(json_file_update)
    comm.clear("org.xwalk.test")
    self.assertEquals(data["xwalk_package_id"].strip(os.linesep), "org.xwalk.test")
    self.assertEquals(updatedata["xwalk_package_id"].strip(os.linesep), "org.test.foo")
def zip_path(src, dst, volume_size, exe7z, isIgnoreCache=False):
    '''
    zip a specify directory into several volumes, if the output directory
    already exist then the zip process will be skipped

    #>>> zip_volumes('f:/build', 'f:/7zip', exe7z='D:/Project/PersontalTools/fast-sync-by-ftp/sync-service/bin/7z.exe')
    #'f:/7zip'
    #>>> os.path.isfile('f:/7zip/archives.zip.001')
    #True
    '''
    if os.path.isdir(dst):
        if isIgnoreCache:
            # Caller wants a fresh archive: drop the cached output.
            shutil.rmtree(dst)
        else:
            # Cached output is treated as up to date; nothing to do.
            return
    os.mkdir(dst)
    # file_pattern is a module-level name — presumably the archive base
    # name (e.g. 'archives.zip'); TODO confirm at its definition site.
    archive_path = os.path.join(dst, file_pattern)
    # NOTE(review): this string is only used for the log line below; the
    # actual process is launched with the argument list given to Popen.
    cmd = '"{exe7z}" a {output} {source} -v{volume_size} '.format(exe7z=exe7z,
                                                                  output=archive_path,
                                                                  source=src,
                                                                  volume_size=volume_size)
    logging.info('Execute zip command: %s' % cmd)
    p = subprocess.Popen([exe7z, 'a', archive_path, src, '-v%s' % volume_size],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    # communicate() blocks until 7z finishes; logs (stdout, stderr).
    logging.info(p.communicate())
def testConfigurationFileOperations(self):
    """ Does the config file exist, and can we create it? """
    # Start from a clean slate: remove any leftover config dir, then
    # recreate just the directory (no config file inside yet).
    self.removeConfDir(self.confDir_)
    os.mkdir(self.c2_.getConfigurationDirectory())
    self.assert_(self.c2_.configurationFileExists() == False)
    # Creating the sample file must make configurationFileExists() true.
    self.c2_.createSampleConfigurationFile(self.c2_.getConfigurationFileFullPath())
    self.assert_(self.c2_.configurationFileExists() == True)
    os.remove(self.c2_.getConfigurationFileFullPath())
    self.removeConfDir(self.c2_.getConfigurationDirectory())
    # have it make its own config file.
    c3 = Configuration(self.confDir_, True)
    self.assert_(self.c2_.configurationFileExists() == True)
    # Now find out what happens when this is true, but the file already exists.
    c4 = Configuration(self.confDir_, True)
    # And when it's false
    c5 = Configuration(self.confDir_, False)
    os.remove(self.c2_.getConfigurationFileFullPath())
    self.removeConfDir(self.c2_.getConfigurationDirectory())
def UnzipFilenameToDir(filename, directory):
    """Unzip |filename| to |directory|.

    Creates |directory| if needed, recreates the archive's directory
    structure beneath it, and restores each member's permission bits
    (stored in the upper 16 bits of ZipInfo.external_attr).
    The process's working directory is restored even on error.
    """
    cwd = os.getcwd()
    if not os.path.isabs(filename):
        filename = os.path.join(cwd, filename)
    zf = zipfile.ZipFile(filename)
    # Make base.
    if not os.path.isdir(directory):
        os.mkdir(directory)
    os.chdir(directory)
    try:
        # Extract files.
        for info in zf.infolist():
            name = info.filename
            if name.endswith('/'):  # dir
                if not os.path.isdir(name):
                    os.makedirs(name)
            else:  # file
                parent = os.path.dirname(name)
                # Guard against '' for top-level files: makedirs('') raises.
                if parent and not os.path.isdir(parent):
                    os.makedirs(parent)
                with open(name, 'wb') as out:
                    out.write(zf.read(name))
            # Set permissions. Permission info in external_attr is shifted 16 bits.
            os.chmod(name, info.external_attr >> 16)
    finally:
        zf.close()
        # Always restore the original working directory, even on error.
        os.chdir(cwd)
def make_nodes(self):
    """Create self.numnodes internal Tahoe client nodes under
    self.testdir, writing a tahoe.cfg for each whose storage behaviour
    depends on self.mode, and register them as services."""
    self.nodes = []
    for i in range(self.numnodes):
        nodedir = os.path.join(self.testdir, "node%d" % i)
        os.mkdir(nodedir)
        f = open(os.path.join(nodedir, "tahoe.cfg"), "w")
        f.write("[client]\n"
                "introducer.furl = %s\n"
                "shares.happy = 1\n"
                "[storage]\n"
                % (self.introducer_furl,))
        # the only tests for which we want the internal nodes to actually
        # retain shares are the ones where somebody's going to download
        # them.
        if self.mode in ("download", "download-GET", "download-GET-slow"):
            # retain shares
            pass
        else:
            # for these tests, we tell the storage servers to pretend to
            # accept shares, but really just throw them out, since we're
            # only testing upload and not download.
            f.write("debug_discard = true\n")
        if self.mode in ("receive",):
            # for this mode, the client-under-test gets all the shares,
            # so our internal nodes can refuse requests
            f.write("readonly = true\n")
        f.close()
        c = self.add_service(client.Client(basedir=nodedir))
        self.nodes.append(c)
def test_download_file_to_existing_folder(self):
    """A download whose target is an existing directory must place the
    remote file inside that directory under its own name."""
    self.client.write('dl', 'hello')
    with temppath() as local_dir:
        os.mkdir(local_dir)
        self.client.download('dl', local_dir)
        with open(osp.join(local_dir, 'dl')) as reader:
            contents = reader.read()
        eq_(contents, 'hello')
def createProject(i):
    """Generate test project #i: a folder with metadata.xml and a
    data/tridas.xml, then zip it into <outputDirName>/timetest<i>.zip.

    :param i: integer index; offsets the coordinates and names the project.
    """
    # Rome
    lat = 41.893 + i*0.01
    lng = 12.483 + i*0.02
    # astronomical years (0 = 1BC)
    firstYear = -1500 + random.randint(0, 3000)
    lastYear = firstYear + 100 + random.randint(0, 50)
    projectName = "timetest" + str(i)
    projectFolderName = projectName
    # project folder
    pPath = os.path.join(outputDirName, projectFolderName)
    os.mkdir( pPath, 0755 )
    # metadata file (append mode; the file is expected not to exist yet)
    metadataFile = open (os.path.join(pPath,"metadata.xml"), 'a')
    metadataFile.write(constructMetadata())
    metadataFile.close()
    # data folder
    dPath = os.path.join(pPath, "data")
    os.mkdir( dPath, 0755 )
    # tridas file
    tridasFile = open (os.path.join(dPath,"tridas.xml"), 'a')
    tridasFile.write(constructTridas(projectName, lat, lng, firstYear, lastYear))
    tridasFile.close()
    # associated and values when needed, but not yet!
    print "Created project in folder: " + projectFolderName
    #
    # create the zip file
    zipFilename = os.path.join(outputDirName, projectName+".zip")
    make_zipfile(zipFilename, pPath)
def test_download_file_to_existing_folder_with_matching_file(self):
    """Download into a directory that already holds a file named like the
    remote one (with different contents) — exercises the collision path."""
    self.client.write('dl', 'hello')
    with temppath() as local_dir:
        os.mkdir(local_dir)
        conflicting_path = osp.join(local_dir, 'dl')
        with open(conflicting_path, 'w') as writer:
            writer.write('hey')
        self.client.download('dl', local_dir)
def testMetaDataHandleForSavingModel(self):
    """Test the handling of SaveModel for Data with Meta Atributes."""
    import shutil
    # Test the save of a model created from a train data with meta attributes
    self.assert_(len(self.WMetaTest.domain.getmetas())>=1,
                 "The dataset WMetaTest should have Meta Attributes")
    plsM = AZorngPLS.PLSLearner(self.WMetaTest)
    # Baseline accuracies before the save/load round-trip.
    AccNoMetaBefore = evalUtilities.getClassificationAccuracy(self.NoMetaTrain,plsM)
    AccWMetaBefore = evalUtilities.getClassificationAccuracy(self.WMetaTest,plsM)
    # Save the model
    scratchdir = os.path.join(AZOC.SCRATCHDIR, "scratchdir"+str(time.time()))
    os.mkdir(scratchdir)
    try:
        modelPath = os.path.join(scratchdir,"PLSModel")
        plsM.write(modelPath)
        # Read in the model
        plsR = AZorngPLS.PLSread(modelPath)
        # Loading must strip the meta attributes from the imputer domain.
        self.assert_(len(plsR.imputer.defaults.domain.getmetas())==0,
                     "There shouldn't be any Meta data now!")
        # Calculate classification accuracy after the round-trip.
        AccNoMetaAfter = evalUtilities.getClassificationAccuracy(self.NoMetaTrain, plsR)
        AccWMetaAfter = evalUtilities.getClassificationAccuracy(self.WMetaTest, plsR)
        # Test that the accuracy of the model before and after saved
        self.assertEqual(AccNoMetaBefore, AccNoMetaAfter,
                         "NoMeta: Predictions after loading saved model were different")
        self.assertEqual(AccWMetaBefore, AccWMetaAfter,
                         "WMeta: Predictions after loading saved model were different")
        self.assertEqual(round(AccWMetaAfter,9), round(0.888888888889,9),
                         "Accuracy was not the expected value!")
        self.assertEqual(round(AccNoMetaAfter,9), round(0.605769230769,9),
                         "Accuracy was not the expected value!")
    finally:
        # Portable cleanup (was os.system("/bin/rm -rf ...")); runs even
        # when an assertion above fails.
        shutil.rmtree(scratchdir, ignore_errors=True)
def download_artifact(url, dest, uid, token):
    """Download |url| into directory |dest|, optionally authenticating
    with HTTP Basic auth using |uid| and |token|.

    If |dest| is falsy the user's home directory is used.  The saved file
    is named after the last path component of the URL.
    """
    print('url is %s dest is %s uid is %s token is %s' % (url,dest,uid,token))
    # create dest if does not exist
    if dest:
        if os.path.exists(dest):
            print('dest exists: ', dest)
        else:
            print('dest does not exist, creating now : ', dest)
            # makedirs also creates missing parent directories, where a
            # plain mkdir would raise.
            os.makedirs(dest)
    else:
        dest = str(Path.home())
    splittedurl = url.rsplit('/', 1).pop()
    # os.path.join is portable, unlike manual '/' concatenation.
    dest = os.path.join(dest, splittedurl)
    # https security handler
    # NOTE: disabling certificate verification is insecure; kept only for
    # backwards compatibility with the PYTHONHTTPSVERIFY opt-out.
    if (not os.environ.get('PYTHONHTTPSVERIFY', '') and
            getattr(ssl, '_create_unverified_context', None)):
        ssl._create_default_https_context = ssl._create_unverified_context
    request = urllib.request.Request(url)
    if uid and token:
        credentials = ('%s:%s' % (uid, token))
        encoded_credentials = base64.b64encode(credentials.encode('ascii'))
        request.add_header('Authorization',
                           'Basic %s' % encoded_credentials.decode("ascii"))
    try:
        with urllib.request.urlopen(request, timeout=2) as response, \
                open(dest, 'wb') as out_file:
            data = response.read()
            out_file.write(data)
            print("Success!")
    except urllib.error.URLError:
        print("Artifactory connection timed out, please check URL, UID and Token.")
def setUp(self):
    """Create a fresh temporary directory for this test in self.tempdir."""
    # tempfile.mkdtemp is atomic and race-free, unlike the old approach
    # of deriving a name from mktemp()/random and creating it separately.
    self.tempdir = tempfile.mkdtemp(prefix='testpath_tempdir_')
def monthly_card_import(db):
    """Import monthly-card data from an uploaded Excel file into *db*,
    backing up the existing rows to ./tmp/ first so a failed import can
    be recovered."""
    data = request.files.data
    error = ''
    all_sqls = IMPORT_SQLS
    if data and data.file:
        tmp_root = './tmp/'
        if not isdir(tmp_root):  # create tmp_root if it does not exist yet
            os.mkdir(tmp_root)
        tmp_filename = os.path.join(tmp_root, current_time('tmp_monthly_card%Y%m%d%H%M%S.xls'))
        # Spool the uploaded Excel content into a timestamped .xls file.
        tmp_file = open(tmp_filename, 'w')
        rows = data.file.readlines()
        if not rows:  # empty upload -> report a data-format error
            # NOTE(review): tmp_file is left open/empty on this early return.
            error = '数据格式错误[2]'
            return template('error', error=error)
        for row in rows:
            tmp_file.write(row)
        tmp_file.close()
        # Before importing the new data, export the current DB contents to
        # the tmp directory as a backup so they can be restored if the
        # import fails.
        export_sqls = EXPORT_SQLS
        try:
            # If a backup file already exists, delete it and write a new one.
            if os.path.exists(os.path.join(tmp_root, BACK_FILE)):
                os.remove(os.path.join(tmp_root, BACK_FILE))
            excel_export(export_sqls, tmp_root, BACK_FILE, db)
        except Exception, e:
            # Backup failure is logged but does not abort the import.
            print '数据备份错误: %s' %e
        error = excel_import(all_sqls, tmp_filename, db)
        os.remove(tmp_filename)  # remove the uploaded temporary file
def mkdir( folder ):
    """Create *folder* (resolved relative to the current working
    directory), ignoring the error if it already exists.

    NOTE(review): every OSError — not only 'already exists' — is caught
    and merely printed, so e.g. permission errors are tolerated too.
    """
    try:
        folder = os.path.join(os.getcwd(),folder)
        os.mkdir( folder )
    except OSError as e:
        print "MakeDoc: Exception %s when creating folder" %repr(e), folder
def extract(self, file, dir):
    """Extract every member of zip archive *file* beneath directory *dir*.

    The directory skeleton is recreated first via self._createstructure;
    self.percent controls how often a progress step would be computed.
    """
    # Bare drive roots like 'C:' must not (and cannot) be created.
    if not dir.endswith(':') and not os.path.exists(dir):
        os.mkdir(dir)
    zf = zipfile.ZipFile(file)
    # create directory structure to house files
    self._createstructure(file, dir)
    num_files = len(zf.namelist())
    percent = self.percent
    divisions = 100 / percent
    # perc = number of files per progress step (0 for tiny archives).
    perc = int(num_files / divisions)
    # extract files to directory structure
    for i, name in enumerate(zf.namelist()):
        if self.verbose == True:
            print "Extracting %s" % name
        elif perc > 0 and (i % perc) == 0 and i > 0:
            # NOTE(review): 'complete' is computed but never used or
            # reported — the progress display looks unfinished.
            complete = int (i / perc) * percent
        if not name.endswith('/'):
            # Ensure the parent directory exists.  NOTE(review): the bare
            # except papers over 'already exists' but also hides real
            # errors such as permission failures.
            try:
                (path,namet) = os.path.split(os.path.join(dir, name))
                os.makedirs( path)
            except:
                pass
            outfile = open(os.path.join(path, namet), 'wb')
            outfile.write(zf.read(name))
            outfile.flush()
            outfile.close()
def testMercurialWithShareAndBundle(self):
    """When both a shareBase and a bundle are supplied, mercurial() must
    use the bundle (unbundle gets called) and still produce a working
    copy and share repo matching the source revisions."""
    # First create the bundle
    bundle = os.path.join(self.tmpdir, 'bundle')
    run_cmd(['hg', 'bundle', '-a', bundle], cwd=self.repodir)
    # Create a commit so the repo is ahead of the bundle's contents.
    open(os.path.join(self.repodir, 'test.txt'), 'w').write('hello!')
    run_cmd(['hg', 'add', 'test.txt'], cwd=self.repodir)
    run_cmd(['hg', 'commit', '-m', 'adding changeset'], cwd=self.repodir)
    # Wrap unbundle so we can tell if it got called
    orig_unbundle = unbundle
    try:
        called = []
        def new_unbundle(*args, **kwargs):
            called.append(True)
            return orig_unbundle(*args, **kwargs)
        hg.unbundle = new_unbundle
        shareBase = os.path.join(self.tmpdir, 'share')
        sharerepo = os.path.join(shareBase, self.repodir.lstrip("/"))
        os.mkdir(shareBase)
        mercurial(
            self.repodir, self.wc, shareBase=shareBase, bundles=[bundle])
        # unbundle must have been invoked exactly once.
        self.assertEquals(called, [True])
        self.assertEquals(
            getRevisions(self.repodir), getRevisions(self.wc))
        self.assertEquals(
            getRevisions(self.repodir), getRevisions(sharerepo))
    finally:
        # Always restore the real unbundle for other tests.
        hg.unbundle = orig_unbundle
def animate(self, event):
    """Render the roll/pitch/yaw rotation as numbered PNG frames in the
    angle_rot_anim/ directory.

    :param event: widget event that triggered the animation (unused).
    """
    import os
    import shutil
    # Start from an empty frame directory every run; shutil.rmtree is
    # portable and safer than shelling out to "rm -rf".
    if os.path.exists("angle_rot_anim"):
        shutil.rmtree("angle_rot_anim")
    os.mkdir("angle_rot_anim")

    frame_counter = [0]

    def snapshot():
        # Capture the current scene as the next numbered frame.
        fname = "angle_rot_anim/frame%05i.png" % frame_counter[0]
        self.g.screenShot(fname=fname)
        frame_counter[0] += 1

    self.start(None)
    snapshot()  # frame 0: initial pose
    # GUI supplies the roll in degrees; convert to radians, negated to
    # match the transform's sign convention.
    roll = float(self.roll_angle.GetValue())
    roll = -1*roll*pi/180
    # Sweep each axis in turn, capturing a frame per step.
    for r in linspace(0, roll, 50):
        self.show_transform([r])
        time.sleep(.1)
        snapshot()
    for r in linspace(0, self.pitch_angle, 20):
        self.show_transform([roll, r])
        time.sleep(.1)
        snapshot()
    for r in linspace(0, self.yaw_angle, 50):
        self.show_transform([roll, self.pitch_angle, r])
        time.sleep(.1)
        snapshot()
def buildComponent(component, componentDir=None):
    """ Build the component. Return a pair of paths (pathToBinary, pathToXPTFile)"""
    # NOTE(review): the visible portion ends after the mkdir loop —
    # prevDir is saved but not yet restored and the promised return pair
    # is not produced here; the function presumably continues elsewhere.
    logger = build_util.getLogger('build_components')
    # Save current working directory to set it back later
    prevDir = os.getcwd()
    # Component Directory
    if componentDir is None:
        componentDir = os.path.join(Settings.prefs.src_dir, "components", component)
    os.chdir(componentDir)
    logger.info("Making build and bin dirs for component %s"%component)
    buildDir = os.path.join(componentDir, "build")
    binDir = os.path.join(componentDir, "bin")
    for dir in [buildDir, binDir]:
        try:
            os.mkdir(dir)
        except OSError, err:
            if err.errno == errno.EEXIST:
                # Stale directory from a previous build: wipe and recreate.
                logger.warning("Couldn't make %s because it exists."%dir)
                logger.warning("Deleting %s"%dir)
                shutil.rmtree(dir)
                logger.warning("Trying to make %s again"%dir)
                os.mkdir(dir)
            else:
                raise
def build_dist(self):
    """Stage meson (via cx_Freeze) and ninja into the two staging
    directories, wiping any previous staging output first."""
    for sdir in self.staging_dirs:
        if os.path.exists(sdir):
            shutil.rmtree(sdir)
    main_stage, ninja_stage = self.staging_dirs
    # Freeze every mesonbuild module (they are imported dynamically and
    # would be missed by cx_Freeze's static analysis), plus
    # distutils.version for the same reason.
    modules = [os.path.splitext(os.path.split(x)[1])[0]
               for x in glob(os.path.join('mesonbuild/modules/*'))]
    modules = ['mesonbuild.modules.' + x for x in modules if not x.startswith('_')]
    modules += ['distutils.version']
    modulestr = ','.join(modules)
    python = shutil.which('python')
    # shutil.which returns None when the tool is absent; fail with a clear
    # message instead of a TypeError from os.path.dirname(None).
    if python is None:
        sys.exit('ERROR: could not find a python executable on PATH')
    cxfreeze = os.path.join(os.path.dirname(python), "Scripts", "cxfreeze")
    if not os.path.isfile(cxfreeze):
        print("ERROR: This script requires cx_freeze module")
        sys.exit(1)
    subprocess.check_call([python,
                           cxfreeze,
                           '--target-dir',
                           main_stage,
                           '--include-modules',
                           modulestr,
                           'meson.py'])
    if not os.path.exists(os.path.join(main_stage, 'meson.exe')):
        sys.exit('Meson exe missing from staging dir.')
    os.mkdir(ninja_stage)
    ninja = shutil.which('ninja')
    if ninja is None:
        sys.exit('ERROR: could not find ninja on PATH')
    shutil.copy(ninja, ninja_stage)
    if not os.path.exists(os.path.join(ninja_stage, 'ninja.exe')):
        sys.exit('Ninja exe missing from staging dir.')
def GetUserCfgDir(self):
    """
    Creates (if required) and returns a filesystem directory for storing
    user config files.

    Falls back to the current working directory when no usable home
    directory can be determined.
    """
    cfg_dir_name = '.idlerc'
    user_dir = os.path.expanduser('~')
    if user_dir != '~' and not os.path.exists(user_dir):
        # expanduser() found a home path, but it is missing on disk.
        sys.stderr.write('\n Warning: os.path.expanduser("~") points to\n '+
                         user_dir+',\n but the path does not exist.\n')
        user_dir = '~'
    if user_dir == '~':  # still no path to home!
        # traditionally IDLE has defaulted to os.getcwd(), is this adequate?
        user_dir = os.getcwd()
    user_dir = os.path.join(user_dir, cfg_dir_name)
    if not os.path.exists(user_dir):
        try:
            os.mkdir(user_dir)
        except (OSError, IOError):
            sys.stderr.write('\n Warning: unable to create user config directory\n'+
                             user_dir+'\n Check path and permissions.\n Exiting!\n\n')
            raise SystemExit
    return user_dir
def _download_reference_files(conn):
    """Download the Greengenes 13_8 (97%) reference tree, taxonomy and
    sequence files if missing, then register them as a Reference and
    insert the processed parameters.

    :param conn: open DB connection passed to _insert_processed_params.
    :raises IOError: when a file cannot be fetched from the FTP site.
    """
    print('Downloading reference files')
    if not exists(reference_base_dir):
        mkdir(reference_base_dir)
    # file type -> (local destination path, remote URL)
    files = {'tree': (get_reference_fp('gg_13_8-97_otus.tree'),
                      'ftp://ftp.microbio.me/greengenes_release/'
                      'gg_13_8_otus/trees/97_otus.tree'),
             'taxonomy': (get_reference_fp('gg_13_8-97_otu_taxonomy.txt'),
                          'ftp://ftp.microbio.me/greengenes_release/'
                          'gg_13_8_otus/taxonomy/97_otu_taxonomy.txt'),
             'sequence': (get_reference_fp('gg_13_8-97_otus.fasta'),
                          'ftp://ftp.microbio.me/greengenes_release/'
                          'gg_13_8_otus/rep_set/97_otus.fasta')}
    for file_type, (local_fp, url) in viewitems(files):
        # Do not download the file if it exists already
        if exists(local_fp):
            print("SKIPPING %s: file already exists at %s. To "
                  "download the file again, erase the existing file first" %
                  (file_type, local_fp))
        else:
            try:
                urlretrieve(url, local_fp)
            except Exception:
                # Narrowed from a bare except so KeyboardInterrupt and
                # SystemExit still propagate.
                raise IOError("Error: Could not fetch %s file from %s" %
                              (file_type, url))
    ref = Reference.create('Greengenes', '13_8', files['sequence'][0],
                           files['taxonomy'][0], files['tree'][0])
    _insert_processed_params(conn, ref)