def opm_uninstall(args):
    """Remove an installed package.

    args: one-element sequence holding the installed package's name.
    Raises EnvironmentError for bad arguments or an unknown package, and
    re-raises any failure encountered while deleting files (after restoring
    the package entry, minus the changes that did succeed).
    """
    print ("Uninstalling... ")
    if len(args) != 1:
        raise EnvironmentError("Invalid arguments")
    if not contxt.is_installed_package_name(args[0]):
        raise EnvironmentError("Package name missing or not found ")
    # Remove from installed list
    package_to_uninstall = contxt.installed_packages.pop(args[0])
    try:
        # Remove files (iterate a snapshot: .changes is mutated as we go)
        for change in [c for c in package_to_uninstall.changes if os.path.isfile(c)]:
            print ("Deleting " + change)
            os.remove(change)
            package_to_uninstall.changes.remove(change)
        # Remove folders, but only if they are empty
        for change in [c for c in package_to_uninstall.changes if os.path.isdir(c)]:
            print ("Deleting " + change)
            try:
                os.removedirs(change)
                package_to_uninstall.changes.remove(change)
            except OSError as ex:
                # A non-empty directory is fine to leave behind
                if ex.errno != errno.ENOTEMPTY:
                    raise
    except Exception:
        # Add back, less any successful changes.  pop() above is keyed by
        # name, so restore by key (the old code called list.append on the
        # mapping).  Re-raise: swallowing the error hid failures from callers.
        contxt.installed_packages[args[0]] = package_to_uninstall
        raise
    finally:
        # Persist whatever state we reached
        contxt.commit_installed_packages()
    print ("Done")
def tearDown(self):
    """Delete the temporary test files, then their directory."""
    candidates = (self.testOut, self.testOutGZ, self.testIn, self.testInGZ)
    for path in candidates:
        if os.path.isfile(path):
            os.remove(path)
    os.removedirs(self.dirName)
def removeTree(self, path):
    """Remove *path* (and any empty parents); True on success, False on OSError."""
    try:
        os.removedirs(path)
    except OSError as e:
        qDebug("FileSystem.removeTree - %s: '%s'" % (e, path))
        return False
    return True
def blastpSp(sp, db, evalue=0.0001):
    """Run blastp for the sequence of *sp* against *db*; return hit accessions.

    sp: identifier passed to seq() to obtain the query sequence.
    db: BLAST database name handed to NcbiblastpCommandline.
    evalue: E-value cutoff for reported alignments.

    Returns the second '|'-separated field of each alignment title
    (presumably the accession number -- TODO confirm against db format).
    """
    # Work in a throwaway directory; everything is cleaned up at the end.
    directory = tempfile.mkdtemp()
    fastaFile = '%s/seq.fasta' % directory
    fasta = '>query\n%s' % seq(sp)
    wf = open(fastaFile, 'w')
    print(fasta, file=wf, sep='', end='')
    wf.close()
    # outfmt=5 -> XML output, parseable by NCBIXML below.
    blastp = NcbiblastpCommandline(query=fastaFile, db=db, evalue=evalue, outfmt=5, out='%s/result.xml' % directory)
    stdout, stderr = blastp()
    print(stdout, end='', sep='')
    print(stderr, end='', sep='')
    result_handle = open('%s/result.xml' % directory)
    blast_record = NCBIXML.read(result_handle)
    result_handle.close()
    # Remove the temp files, then the (now empty) temp directory.
    os.remove(fastaFile)
    os.remove('%s/result.xml' % directory)
    os.removedirs(directory)
    hits = [align.title for align in blast_record.alignments]
    hits = [i.split('|')[1] for i in hits]
    return hits
def remove_empty_folders(self, folder):
    """
    Recursively removes folders but only if they are empty.
    Cleans up after libtorrent folder renames.

    NOTE: Python 2 only -- the ``except OSError as (errno, strerror)``
    tuple target is not valid Python 3 syntax.
    """
    info = self.get_status(["save_path"])
    # Regex removes leading slashes that causes join function to ignore save_path
    folder_full_path = os.path.join(info["save_path"], re.sub("^/*", "", folder))
    folder_full_path = os.path.normpath(folder_full_path)
    try:
        if not os.listdir(folder_full_path):
            # Folder itself is already empty: remove it (and empty parents).
            os.removedirs(folder_full_path)
            log.debug("Removed Empty Folder %s", folder_full_path)
        else:
            # Walk bottom-up so children are pruned before their parents.
            for root, dirs, files in os.walk(folder_full_path, topdown=False):
                for name in dirs:
                    try:
                        os.removedirs(os.path.join(root, name))
                        log.debug("Removed Empty Folder %s", os.path.join(root, name))
                    except OSError as (errno, strerror):
                        # NOTE(review): this local `errno` shadows the stdlib
                        # module name -- confirm nothing below relies on it.
                        from errno import ENOTEMPTY
                        if errno == ENOTEMPTY:
                            # Error raised if folder is not empty
                            log.debug("%s", strerror)
    except OSError as (errno, strerror):
        log.debug("Cannot Remove Folder: %s (ErrNo %s)", strerror, errno)
def parse_log(results_filename): """ Parse the log file from a results packet. ARGUMENTS results_filename (string) - name of compressed results file to test RETURNS logtext - text of log file logdata - dict of important log contents """ # Create temporary directory. import os, os.path, tempfile, shutil cwd = os.getcwd() tmpdir = tempfile.mkdtemp() # Extract source directory. [directory, filename] = os.path.split(results_filename) # Copy results to temporary directory. shutil.copyfile(results_filename, os.path.join(tmpdir, "results.tar.bz2")) # Change to temporary directory. os.chdir(tmpdir) # Extract payload and results. import commands command = "bzcat results.tar.bz2 | tar x" commands.getoutput(command) # Read log file. log_filename = "log.txt" logtext = read_file(log_filename) # Extract useful info from log file. logdata = dict() import re for line in logtext.split("\n"): m = re.match("^(.+?):(.+)", line) if m: groups = m.groups() key = groups[0].strip() value = groups[1].strip() logdata[key] = value # TODO: Add support for values that can span multiple lines, like Options and Args. # Clean up temporary directory. os.chdir(cwd) for filename in os.listdir(tmpdir): os.unlink(os.path.join(tmpdir, filename)) os.removedirs(tmpdir) return (logtext, logdata)
def test_write_new_file_handle(self):
    """Write out T1250 file to new file handle."""
    tmp_dir = tempfile.mkdtemp()
    data = ('TOLP', 'data string')
    fhs = {}
    received = self._md.write(data, fhs, dir=tmp_dir, dry=False)
    self.assertTrue(received,
                    'T1250 write-out to new file handle should return True')
    # Close the handles, remembering where each one wrote.
    files = []
    for fh in fhs.values():
        files.append(fh.name)
        fh.close()
    self.assertEqual(len(files), 1,
                     'T1250 write-out should only produce 1 file')
    fh = open(files[0])
    self.assertEqual(fh.read().rstrip(), 'data string',
                     'T1250 file content incorrect')
    # Clean up.
    remove_files([fh.name for fh in fhs.values()])
    os.removedirs(tmp_dir)
def destroy_temp_dir(dir_path):
    """Remove *dir_path* (and any empty parent directories).

    Returns True on success, False when removal failed (e.g. directory
    missing or not empty), echoing the error via click.
    """
    try:
        os.removedirs(dir_path)
        return True
    except OSError as e:  # bug fix: Python 2 `except OSError, e` syntax
        # Bug fix: the old message said "creating"; this function removes.
        click.echo('ERROR on removing temporary dir: {}'.format(e))
        return False
def _copy_static(self):
    """Mirror <self._root>/static into the destination tree.

    Copies new or updated files, creates missing directories, then deletes
    destination entries that no longer exist on the source side -- except
    generated ``.html`` files, which are kept.
    """
    static_dir = os.path.join(self._root, "static")
    for root, dirs, files in os.walk(static_dir):
        # Map the current source directory onto the destination tree.
        dst = self._dst + root.replace(static_dir, "")
        for name in dirs:
            dst_path = os.path.join(dst, name)
            if not os.path.exists(dst_path):
                os.mkdir(dst_path)
        for name in files:
            src_path = os.path.join(root, name)
            dst_path = os.path.join(dst, name)
            # Copy when missing or older than the source.
            if not os.path.exists(dst_path) or os.path.getmtime(dst_path) < os.path.getmtime(src_path):
                shutil.copy2(src_path, dst_path)
        dst_entries = os.listdir(dst)
        dst_dirs = [f for f in dst_entries if os.path.isdir(os.path.join(dst, f))]
        dst_files = [f for f in dst_entries if os.path.isfile(os.path.join(dst, f))]
        # Remove stale directories.
        # Bug fix: stale entries live on the DESTINATION side, so they must
        # be joined with `dst`, not `root` -- the old join(root, name) raised
        # OSError for every stale entry. rmtree because a stale directory
        # may still contain files.
        for name in set(dst_dirs) - set(dirs):
            shutil.rmtree(os.path.join(dst, name))
        # Remove stale files (generated .html files are kept).
        for name in set(dst_files) - set(files):
            if not name.endswith(".html"):
                os.remove(os.path.join(dst, name))
def test4(self):
    """Round-trip a 16-bit signal through wavio at sampwidth=1."""
    tmpdir = tempfile.mkdtemp()
    wavpath = os.path.join(tmpdir, "test4data.wav")
    data = np.zeros(32, dtype=np.int16)
    data[1::4] = 10000
    data[3::4] = -10000
    wavio.write(wavpath, data, 44100, sampwidth=1)
    try:
        reader = wave.open(wavpath, 'r')
        self.assertEqual(reader.getnchannels(), 1)
        self.assertEqual(reader.getsampwidth(), 1)
        self.assertEqual(reader.getframerate(), 44100)
        reader.close()
        w = wavio.read(wavpath)
        self.assertEqual(w.rate, 44100)
        self.assertEqual(w.sampwidth, 1)
        self.assertEqual(w.data.dtype, np.uint8)
        self.assertEqual(w.data.shape, (32, 1))
        # 8-bit WAV is unsigned: zero maps to 128, peaks to 255/0.
        expected = 128 * np.ones_like(data, dtype=np.uint8).reshape(-1, 1)
        expected[1::4, 0] = 255
        expected[3::4, 0] = 0
        np.testing.assert_equal(w.data, expected)
    finally:
        os.remove(wavpath)
        os.removedirs(tmpdir)
def remove_flux(self, flux_id):
    """Delete a feed: remove its downloaded episodes, their folder and
    its DB rows (items first, then the feed itself).

    flux_id: primary key of the feed in the ``flux`` table.
    """
    with sqlite3.connect(self.db_filename) as conn:
        cursor = conn.cursor()
        # Fetch the feed title plus each item's file name and status.
        # Bug fixes vs. the old query: it selected two columns but unpacked
        # three (ValueError on the first row), and filtered on item.id
        # instead of item.flux -- the FK the DELETE below uses.
        cursor.execute("""
            SELECT flux.titre, item.nom, item.status
            FROM flux, item
            WHERE item.flux = :flux_id AND flux.id = :flux_id
        """, {'flux_id': flux_id})
        titre_flux = None
        for titre_flux, nom, status in cursor.fetchall():
            # status 0 == downloaded episode on disk -- TODO confirm
            if status == 0:
                path = os.path.join(self.podcast_path, titre_flux, nom)
                # TODO Add here a confirmation
                # Robustness: skip (rather than assert) if already gone.
                if os.path.exists(path):
                    os.remove(path)
        if titre_flux:
            try:
                os.removedirs(os.path.join(self.podcast_path, titre_flux))
            except OSError:
                # Folder missing, or still holds files we did not delete.
                pass
        cursor.execute("""DELETE FROM item WHERE flux = :flux_id""",
                       {'flux_id': flux_id})
        cursor.execute("""DELETE FROM flux WHERE id = :flux_id""",
                       {'flux_id': flux_id})
def CheckEmailExist(self, email):
    """Return True when *email* is already registered at mail.ru.

    Scrapes the signup page for the hidden login field name and x_reg_id
    token, then posts the login to the checklogin endpoint.  Requires
    network access; relies on mail.ru page structure (fragile).
    """
    exist = False
    e_mail = email.split('@')[0]
    e_mail_domain = email.split('@')[1]
    temp_dir = tempfile.gettempdir()
    # httplib2 uses this path as its cache directory.
    cookie = temp_dir + "/" + RandomString() + '.cookie'
    h = httplib2.Http(cookie)
    resp, content = h.request("http://e.mail.ru/cgi-bin/signup?from=main", "GET")
    try:
        # NOTE(review): removedirs fails on a non-empty cache dir, so this
        # likely leaves the cache behind -- confirm intended best-effort.
        os.removedirs(cookie)
    except:
        tmp_error = ''
    content = content.decode('utf-8', 'ignore')
    try:
        # Scrape the dynamic form-field name and registration token.
        login_xid = content.split('<input autocomplete="off"')[1].split('value=""')[0].split('"')[1].split('"')[0]
        x_reg_id = content.split("'x_reg_id': '")[1].split("'")[0]
    except:
        # Page layout changed: fall back to empty values (request will fail).
        login_xid = ''
        x_reg_id = ''
    self.SetPostRequest("RegistrationDomain=" + e_mail_domain + "&Signup_utf8=1&" + login_xid + "=" + e_mail + "&x_reg_id=" + x_reg_id)
    result = str(self.HtmlContent('http://e.mail.ru/cgi-bin/checklogin')[1])
    if result == 'EX_USEREXIST':
        exist = True
    #UpdateID=ElementsDB()
    #UpdateID.GetEmailID(email)
    return exist
def test_pull_dir(self):
    """Pull a randomly generated directory of files from the device.

    Requires a connected device reachable through AdbWrapper; verifies
    each pulled file's MD5 against the one recorded at creation time.
    """
    adb = AdbWrapper()
    temp_files = {}
    host_dir = None
    try:
        # create temporary host directory
        host_dir = tempfile.mkdtemp()
        # create temporary dir on device
        adb.shell_nocheck("rm -r {}".format(AdbFile.DEVICE_TEMP_DIR))
        adb.shell("mkdir -p {}".format(AdbFile.DEVICE_TEMP_DIR))
        # populate device dir with random files
        temp_files = make_random_device_files(
            adb, in_dir=AdbFile.DEVICE_TEMP_DIR, num_files=32)
        adb.pull(remote=AdbFile.DEVICE_TEMP_DIR, local=host_dir)
        for device_full_path in temp_files:
            host_path = os.path.join(
                host_dir, temp_files[device_full_path].base_name)
            # Compare checksum of the pulled copy with the recorded one.
            with open(host_path, "rb") as host_file:
                host_md5 = compute_md5(host_file.read())
            self.assertEqual(host_md5, temp_files[device_full_path].md5)
    finally:
        # Clean up both sides even when the pull or a check failed.
        for dev_file in temp_files.values():
            host_path = os.path.join(host_dir, dev_file.base_name)
            os.remove(host_path)
        adb.shell_nocheck("rm -r {}".format(AdbFile.DEVICE_TEMP_DIR))
        if host_dir:
            os.removedirs(host_dir)
def rmfile(url, localfilename = None):
    """Remove the local mirror copy of *url* (or *localfilename* if given),
    pruning any directories left empty.

    NOTE(review): callers receive mixed types -- True (excluded), 'OK',
    an error string, or the OSError instance itself; confirm callers
    only truth-test / compare the result.
    """
    if localfilename:
        localname = localfilename
    else:
        # Derive the local path from the URL path, rooted at dstdir.
        localname = join(dstdir, urlsplit(url).path[1:])
    if splitext(localname)[1].lower() in exclude_exts:
        logger.info("exclude file: %s, skip", localname)
        return True
    if os.path.exists(localname):
        try:
            os.remove(localname)
            logger.info("rmfile: %s ok", localname)
            #rm empty dir, avoid empty 'holes'
            try:
                os.removedirs(dirname(localname))
            except OSError as ex:
                if ex.errno == errno.ENOTEMPTY:
                    # Parent still has content: nothing to prune.
                    pass
                else:
                    logger.error('rm dir failed: %s', dirname(localname))
                    return 'rm dir failed: %s' % dirname(localname)
            return 'OK'
        except OSError as ex:
            logger.error("rmfile: %s failed: %s", localname, ex)
            return ex
    else:
        logger.info('rmfile: %s file not exists', localname)
        return '%s file not exists' % (localname)
def execute_delta(self, client, ignore_path = None):
    """Apply a Dropbox delta to the local tree (Python 2 / dropbox v1 API).

    client: Dropbox client providing delta(cursor).
    ignore_path: path to skip (the file we just uploaded ourselves).
    Side effects: updates self.cursor, creates/downloads/removes local
    entries and mutates self.local_files / self.remote_files.
    """
    delta = client.delta(self.cursor)
    self.cursor = delta['cursor']
    for entry in delta['entries']:
        # Entry paths start with '/': strip it to get a relative path.
        path = entry[0][1:]
        meta = entry[1]
        # this skips the path if we just uploaded it
        if path != ignore_path:
            if meta != None:
                path = meta['path'][1:] # caps sensitive
                if meta['is_dir']:
                    print '\n\tMaking Directory:',path
                    self.makedir_local(path)
                elif path not in self.remote_files:
                    print '\n\tNot in local'
                    self.download(client, path)
                elif meta['rev'] != self.remote_files[path]['rev']:
                    print '\n\tOutdated revision'
                    self.download(client, path)
            # remove file or directory
            else:
                if os.path.isdir(path):
                    print '\n\tRemoving Directory:', path
                    # NOTE(review): removedirs only deletes EMPTY dirs and
                    # also prunes empty parents -- confirm this is intended
                    # rather than a recursive delete.
                    os.removedirs(path)
                elif os.path.isfile(path):
                    print '\n\tRemoving File:', path
                    os.remove(path)
                    # NOTE(review): raises KeyError if the path was never
                    # tracked -- confirm entries are always registered.
                    del self.local_files[path]
                    del self.remote_files[path]
                else:
                    pass # file already doesn't exist localy
def main():
    """Bootstrap the development environment.

    Warns (but continues) when entr(1) is absent, creates the virtualenv
    on first run, installs the project plus test/docs requirements as
    needed, and clears any stale build directory.
    """
    if not which('entr', throw=False):
        # Advisory only: the script proceeds without the file watcher.
        message = (
            '\nentr(1) is used in this app as a cross platform file watcher.'
            'You can install it via your package manager on most POSIX '
            'systems. See the site at http://entrproject.org/\n'
        )
        print(message)
    if not virtualenv_exists:
        virtualenv_bin = which('virtualenv', throw=False)
        subprocess.check_call(
            [virtualenv_bin, env_dir]
        )
        # Editable install of the project itself into the fresh env.
        subprocess.check_call(
            [pip_bin, 'install', '-e', project_dir]
        )
    if not has_module('pytest'):
        subprocess.check_call(
            [pip_bin, 'install', '-r', test_requirements_filepath]
        )
    # sphinx-quickstart's presence is the marker that docs deps are installed.
    if not os.path.isfile(os.path.join(env_dir, 'bin', 'sphinx-quickstart')):
        subprocess.check_call(
            [pip_bin, 'install', '-r', sphinx_requirements_filepath]
        )
    if os.path.exists(os.path.join(env_dir, 'build')):
        # NOTE(review): removedirs requires the dir to be empty -- confirm
        # 'build' is always empty here, else this raises OSError.
        os.removedirs(os.path.join(env_dir, 'build'))
def removeCustomModule(self, moduleName):
    """Remove a custom module by title: its .cfg/.module files plus any
    sibling folders named '<cfgname>.*'.

    Returns True on success, False when anything went wrong (all errors
    are swallowed by the broad except below).
    """
    try:
        customCfg = self._customModulesFile
        content = fileUtils.getFileContent(customCfg)
        lines = content.splitlines()
        startIdx = -1
        cfgUrl = ''
        # Find the module's title= line, then the url= line that follows it.
        for i in range(0, len(lines)):
            if lines[i].startswith("title=%s" % moduleName):
                startIdx = i
            elif startIdx > -1 and lines[i].startswith("url="):
                tmp = lines[i][4:]
                cfgUrl = os.path.join(self._customModulesFolder, tmp)
                break
        if os.path.isfile(cfgUrl):
            os.remove(cfgUrl)
            os.remove(cfgUrl.replace(".cfg", ".module"))
        # remove all folder that start with cfg name and a dot
        # NOTE(review): if the module was not found cfgUrl is '' here, so
        # baseDir/prefix degenerate -- confirm the broad except is the
        # intended handling for that case.
        baseDir = os.path.dirname(cfgUrl)
        prefix = os.path.basename(cfgUrl).replace(".cfg", ".")
        dirs = fileUtils.get_immediate_subdirectories(baseDir)
        for d in dirs:
            if d.startswith(prefix):
                # Empty the folder first: removedirs only deletes empty dirs.
                fileUtils.clearDirectory(os.path.join(baseDir, d))
                os.removedirs(os.path.join(baseDir, d))
        return True
    except:
        pass
    return False
def destroyObject(bucket, key):
    '''Destroy an object: delete its DB rows, then its file on disk.

    Raises NoSuchKeyException when the key does not exist in *bucket*.
    Python 2 only (``except OSError, e`` syntax).
    '''
    conn = Connection()
    try:
        #Validate the bucket
        _verifyBucket(conn, bucket, True)
        #Check for object and get information from database
        query = "SELECT hashfield FROM object WHERE bucket = %s AND object = %s"
        result = conn.executeStatement(query, (escape_string(str(bucket)), escape_string(str(key))))
        if len(result) == 0:
            raise NotFoundException.NoSuchKeyException(bucket, key)
        #Delete the object from the database and the filesystem
        query = "DELETE FROM object_metadata WHERE bucket = %s AND object = %s"
        conn.executeStatement(query, (escape_string(str(bucket)), escape_string(str(key))))
        query = "DELETE FROM object WHERE bucket = %s AND object = %s"
        conn.executeStatement(query, (escape_string(str(bucket)), escape_string(str(key))))
    except:
        # Roll back and close on any failure, then propagate.
        conn.cancelAndClose()
        raise
    conn.close()
    # The file lives under a 3/3/3-character fan-out of its hash.
    hashString = result[0][0]
    path = Config.get('common','filesystem_path')
    path += str(bucket)
    path += "/"+hashString[0:3]+"/"+hashString[3:6]+"/"+hashString[6:9]
    os.remove(path+"/"+hashString)
    try:
        # Prune the fan-out directories if they are now empty.
        os.removedirs(path)
    except OSError, e:
        if e.errno != errno.ENOTEMPTY:
            raise
def __del__(self):
    """Best-effort cleanup of temp files and the temp directory on GC."""
    for path in self.tmpfiles:
        if not path:
            continue
        if os.path.exists(path):
            os.remove(path)
    tmpdir = self.tmpdir
    if tmpdir is not None and os.path.exists(tmpdir):
        log.debug(u'Removing tmpdir %s', tmpdir)
        os.removedirs(tmpdir)
def teardown(self): ''' Clean up all the test files. ''' for filename in os.listdir(self.path): os.remove(os.path.join(self.path, filename)) os.removedirs(self.path)
def testcmakedirs(self):
    """cmakedirs() must create the full nested directory chain."""
    target = "test_dir_DELETE/nested/dir"
    self.m.cmakedirs(target)
    self.assert_(os.path.exists(target))
    self.assert_(os.path.isdir("test_dir_DELETE"))
    self.assert_(os.path.isdir("test_dir_DELETE/nested"))
    self.assert_(os.path.isdir("test_dir_DELETE/nested/dir"))
    # removedirs prunes the whole now-empty chain.
    os.removedirs(target)
def clean_stale(src_dir, dst_dir, ext):
    """Prune transcoded files in dst_dir whose source is gone or newer.

    Python 2 only (print statements, list-returning filter()).
    NOTE(review): the `ext` parameter is shadowed by the comprehension
    below and never used -- confirm that is intentional.
    """
    for root, dirs, files in os.walk(dst_dir):
        for file in sorted(files):
            dst = os.path.join(root, file)
            rel = os.path.splitext(os.path.relpath(dst, dst_dir))[0]
            # Candidate source files for this output, in preference order.
            srcs = [os.path.join(src_dir, rel) + ext for ext in ['.flac', '.mp3', '.ogg']]
            if dst.endswith('.wip'):
                # Half-finished transcode: always delete.
                os.unlink(dst)
            if dst.endswith('.mp3') or dst.endswith('.ogg'):
                unlink = False
                if any(map(os.path.isfile, srcs)):
                    src = filter(os.path.isfile, srcs)[0]
                    if os.path.getmtime(src) > os.path.getmtime(dst):
                        # Source changed since the transcode was made.
                        if src.endswith('.flac') and check_md5(src, dst):
                            # Audio unchanged: only tags differ, re-tag in place.
                            print "Retagging %s/%s" % (rel, file)
                            retag(src, dst)
                        else:
                            unlink = True
                else:
                    # No source left at all: output is stale.
                    unlink = True
                if unlink:
                    print "Unlinking %s/%s" % (rel, file)
                    os.unlink(dst)
                    files.remove(file)
        if files == ['.folder.jpg']:
            # Only the cover art remains: drop it so the dir can go too.
            os.unlink(os.path.join(root, files[0]))
        if not files and not dirs:
            os.removedirs(root)
def _serializeClassifiers(self, topGroup):
    """Serialize all trained classifiers into topGroup['Classifiers'].

    Bails out (leaving the group absent/partial) as soon as any
    classifier or forest is None, i.e. not yet trained.
    """
    with Tracer(traceLogger):
        deleteIfPresent(topGroup, 'Classifiers')
        self._dirtyFlags[Section.Classifiers] = False
        if not self.mainOperator.Classifiers.ready():
            return
        classifiers = self.mainOperator.Classifiers
        topGroup.require_group("Classifiers")
        for i in range(len(classifiers)):
            classifier_forests = classifiers[i].value
            # Classifier can be None if there isn't any training data yet.
            if classifier_forests is None:
                return
            for forest in classifier_forests:
                if forest is None:
                    return
            # Due to non-shared hdf5 dlls, vigra can't write directly to our open hdf5 group.
            # Instead, we'll use vigra to write the classifier to a temporary file.
            tmpDir = tempfile.mkdtemp()
            cachePath = os.path.join(tmpDir, 'tmp_classifier_cache.h5').replace('\\', '/')
            for j, forest in enumerate(classifier_forests):
                forest.writeHDF5( cachePath, 'ClassifierForests/Forest{:04d}'.format(j) )
            # Open the temp file and copy to our project group
            with h5py.File(cachePath, 'r') as cacheFile:
                grouppath = "Classifiers/Classifier%d"%i
                topGroup.copy(cacheFile['ClassifierForests'], grouppath)
            # Remove the temp file and its (now empty) directory.
            os.remove(cachePath)
            os.removedirs(tmpDir)
def delete(self):
    """Delete this mailbox: credentials, postfix maps, DB rows and the
    on-disk mail directory."""
    log = logging.getLogger('flscp')
    conf = FLSConfig.getInstance()
    # delete!
    # 1. remove credentials
    # 2. remove entry from /etc/postfix/fls/aliases
    # 3. remove entry from /etc/postfix/fls/mailboxes
    # 4. remove entry from /etc/postfix/fls/sender-access
    # 5. remove entry from mail_users
    # 7. remove complete mails in /var/mail/,... directory
    # 6. postmap all relevant entries
    self.updateCredentials()
    self.updateMailboxes()
    self.updateAliases()
    self.updateSenderAccess()
    if self.exists():
        db = MailDatabase.getInstance()
        cx = db.getCursor()
        query = ('SELECT mail_id, mail_addr FROM mail_users WHERE mail_id = %s')
        cx.execute(query, (self.id,))
        for (mail_id, mail_addr,) in cx:
            (mail, domain) = mail_addr.split('@')
            path = '%s/%s/%s/' % (conf.get('mailserver', 'basemailpath'), domain, mail)
            if os.path.exists(path):
                try:
                    # Bug fix: os.removedirs() only deletes *empty*
                    # directories, so a mailbox that still contained mail
                    # was never removed (step 7 above says "remove complete
                    # mails").  rmtree deletes the whole tree.
                    import shutil
                    shutil.rmtree(path)
                except Exception as e:
                    log.warning('Error when removing directory: %s' % (e,))
        query = ('DELETE FROM mail_users WHERE mail_id = %s')
        cx.execute(query, (self.id,))
        cx.close()
def rmdir(self, parents=False):
    """Remove this directory; with parents=True also prune empty ancestors."""
    if not self.exists():
        return
    remover = os.removedirs if parents else os.rmdir
    remover(self)
def clear_files():
    """Delete the cache file, its parent directory, and the cache dir."""
    if os.path.exists(cache_path):
        os.remove(cache_path)
    parent = os.path.dirname(cache_path)
    if os.path.exists(parent):
        os.removedirs(parent)
    if os.path.exists(cache_dir):
        os.removedirs(cache_dir)
def makeApp(self):
    """Build a standalone Windows app from the .love archive.

    Fuses love.exe with the game archive, copies the runtime DLLs next to
    it, and optionally packs everything into a .zip.  Windows-only
    (uses `copy /b` via os.system).  Always returns True.
    """
    #make a .love file
    self.makeLoveFile()
    #then pack files into a .exe (binary-concatenate love.exe + archive)
    os.system("copy /b love.exe+" + self.export + ".love " + self.export + ".exe")
    os.mkdir(self.export)
    shutil.copyfile(self.export + ".exe", self.export + "/" + self.export + ".exe")
    # The LOVE runtime DLLs must ship next to the exe.
    shutil.copyfile("SDL.dll", self.export + "/SDL.dll")
    shutil.copyfile("openal32.dll", self.export + "/openal32.dll")
    shutil.copyfile("DevIL.dll", self.export + "/DevIL.dll")
    # The loose intermediates are no longer needed.
    os.remove(self.export + ".love")
    os.remove(self.export + ".exe")
    zipbool = self.chkZip.get()
    if zipbool == 1:
        # Zip the export folder, deleting each file as it is archived,
        # then remove the emptied folder.
        z = zipfile.ZipFile(self.export + ".zip", "w", zipfile.ZIP_DEFLATED)
        rootlen = len(self.export) + 1
        for root, dirs, files in os.walk(self.export):
            for name in files:
                fn = os.path.join(root, name)
                z.write(fn, fn[rootlen:])
                os.remove(fn)
        z.close()
        os.removedirs(self.export)
    return True
def tearDown(self):
    """Called immediately after each test method has been called."""
    # Drop the temporary directory (plus any empty parent dirs).
    os.removedirs(self.tmpdir)
def clean_empty_directories(session):
    """
    Removes empty directories from pool directories.

    Walks each pool base bottom-up so emptied children are seen before
    their parents; honours the global No-Action (dry-run) flag.
    Python 2 only (print statement).
    """
    print "Cleaning out empty directories..."
    count = 0
    cursor = session.execute(
        "SELECT DISTINCT(path) FROM location WHERE type = :type",
        {'type': 'pool'},
    )
    bases = [x[0] for x in cursor.fetchall()]
    for base in bases:
        for dirpath, dirnames, filenames in os.walk(base, topdown=False):
            if not filenames and not dirnames:
                # NOTE(review): dirpath from os.walk is already absolute
                # relative to base, so the extra join looks redundant --
                # confirm it is harmless here.
                to_remove = os.path.join(base, dirpath)
                if not Options["No-Action"]:
                    Logger.log(["removing directory", to_remove])
                    os.removedirs(to_remove)
                count += 1
    if count:
        Logger.log(["total removed directories", count])
def move(date_taken, filename):
    """File a photo into <final_location>/<year>/<month>/<day>-<name>.

    Duplicate targets cause the source to be deleted instead of moved.
    Returns True only when the file was actually moved.
    Python 2 only (print statements).
    """
    moved = False
    year = date_taken.strftime("%Y")
    month = date_taken.strftime("%b").lower()
    day = date_taken.strftime("%d") + "-"
    final_location_full = final_location + year + "/" + month
    final_filename = final_location_full + "/" + day + os.path.basename(filename)
    try:
        # A file already at the target means this one is a duplicate.
        skip = os.path.isfile(final_filename)
        if skip:
            print "Removing duplicate: ", filename
            os.remove(filename)
        if not skip:
            try:
                os.makedirs(final_location_full)
            except:
                # Target directory already exists.
                pass
            print "Moving ", filename, " to ", final_filename
            os.rename(filename, final_filename)
            moved = True
            try:
                # Prune the source directory if the move emptied it.
                os.removedirs(os.path.dirname(filename))
            except:
                pass
    except OSError:
        print "[MOVE] Error for: ", filename
    return moved
# Notebook-style walkthrough of os directory functions.  Several lines
# below are *expected* to raise -- they demonstrate failure modes.
os.makedirs('foo/bar/baz')
# NOTE(review): 'foo?bar' looks like a mangled 'foo/bar' -- confirm; as
# written this chdir raises FileNotFoundError.
os.chdir('foo?bar/baz')
print(os.getcwd())
os.chdir('../../..')
print(os.getcwd())
#In [6]
os.system('echo foo > foo/foo.txt')
os.remove('foo/foo.txt')
#In [7]
os.rmdir('foo/bar/baz')  # delete the baz directory inside foo/bar
print(os.listdir('foo/bar'))  # confirm baz has been deleted
os.mkdir('foo/bar/baz')  # recreate foo/bar/baz
os.removedirs('foo/bar/baz')  # delete all three dirs: foo, bar and baz
os.chdir('foo')  # raises: the foo directory no longer exists
#In [8]
os.makedirs('foo/bar/baz')
os.system('echo foo > foo/foo.txt')
os.system('echo bar > foo/bar/bar.txt')
os.system('echo baz > foo/bar/baz/baz.txt')
os.rmdir('foo/bar/baz')  # raises: baz still contains baz.txt
#In [9]
os.removedirs('foo/bar/baz')  # raises for the same reason
#In [10]
os.remove('foo/bar/baz/baz.txt')
os.rmdir('foo/bar/baz')  # now baz is empty, so this succeeds
def __init__(self, interpreter, package, dpath=None, options=None):
    """Scan a built package tree (debian/<package>[/<dpath>]) and collect
    Python-related metadata into self.result.

    Walks every directory, classifying each as public (dist-packages style),
    private, or bin, and records egg info, shebangs, extension versions and
    whether byte-compilation is needed.  Removes unwanted files/dirs and
    prunes directories the walk leaves empty.
    """
    self.interpreter = interpreter
    self.impl = interpreter.impl
    self.package = package
    if not dpath:
        self.proot = "debian/%s" % self.package
    else:
        dpath = dpath.strip('/')
        self.proot = join('debian', self.package, dpath)
        self.dpath = dpath
        del dpath
    self.options = options
    # Accumulated scan results, keyed by artifact kind.
    self.result = {
        'requires.txt': set(),
        'egg-info': set(),
        'nsp.txt': set(),
        'shebangs': set(),
        'public_vers': set(),
        'private_dirs': {},
        'compile': False,
        'ext_vers': set(),
        'ext_no_version': set()
    }
    for root, dirs, file_names in os.walk(self.proot):
        if interpreter.should_ignore(root):
            # Prune the whole subtree from the walk.
            del dirs[:]
            continue
        self.current_private_dir = self.current_pub_version = None
        version = interpreter.parse_public_dir(root)
        if version:
            self.current_dir_is_public = True
            if version is True:
                version = None
            else:
                self.current_pub_version = version
        else:
            self.current_dir_is_public = False
        if self.current_dir_is_public:
            if root.endswith('-packages'):
                if version is not None:
                    self.result['public_vers'].add(version)
                for name in ('test', 'tests'):
                    if name in dirs:
                        log.debug('removing dist-packages/%s (too common name)', name)
                        rmtree(join(root, name))
                        dirs.remove(name)
        else:
            self.current_private_dir = self.check_private_dir(root)
            if not self.current_private_dir:
                # i.e. not a public dir and not a private dir
                is_bin_dir = self.is_bin_dir(root)
                if is_bin_dir:
                    self.handle_bin_dir(root, file_names)
                else:
                    # not a public, private or bin directory
                    # continue with a subdirectory
                    continue
        # NOTE(review): dirs.remove(name) mutates the list being iterated,
        # so a sibling following a removed dir may be skipped -- confirm.
        for name in dirs:
            dpath = join(root, name)
            if self.is_unwanted_dir(dpath):
                rmtree(dpath)
                dirs.remove(name)
                continue
        if self.is_egg_dir(root):
            self.handle_egg_dir(root, file_names)
            continue
        # check files
        for fn in sorted(file_names):
            # sorted() to make sure .so files are handled before .so.foo
            fpath = join(root, fn)
            if self.is_unwanted_file(fpath):
                log.debug('removing unwanted: %s', fpath)
                os.remove(fpath)
                continue
            if self.is_egg_file(fpath):
                self.handle_egg_file(fpath)
                continue
            if not exists(fpath):
                # possibly removed while handling .so symlinks
                if islink(fpath) and '.so.' in split(fpath)[-1]:
                    # dangling symlink to (now removed/renamed) .so file
                    # which wasn't removed yet (see test203's quux.so.0)
                    log.info('removing dangling symlink: %s', fpath)
                    os.remove(fpath)
                continue
            fext = splitext(fn)[-1][1:]
            if fext == 'so':
                if not self.options.no_ext_rename:
                    fpath = self.rename_ext(fpath)
                ver = self.handle_ext(fpath)
                ver = ver or version
                if ver:
                    self.current_result.setdefault('ext_vers', set()).add(ver)
                else:
                    self.current_result.setdefault('ext_no_version', set()).add(fpath)
            if self.current_private_dir:
                # Executable non-.so files in private dirs: record shebangs.
                if exists(fpath) and fext != 'so':
                    mode = os.stat(fpath)[ST_MODE]
                    if mode & S_IXUSR or mode & S_IXGRP or mode & S_IXOTH:
                        if (options.no_shebang_rewrite or fix_shebang(fpath, self.options.shebang)) and \
                                not self.options.ignore_shebangs:
                            try:
                                res = Interpreter.from_file(fpath)
                            except Exception as e:
                                log.debug('cannot parse shebang %s: %s', fpath, e)
                            else:
                                self.current_result.setdefault('shebangs', set()).add(res)
            if fext == 'py' and self.handle_public_module(fpath) is not False:
                self.current_result['compile'] = True
        if not dirs:
            # Prune directories the cleanup above has emptied.
            try:
                os.removedirs(root)
            except OSError:
                pass
    log.debug("package %s details = %s", package, self.result)
def tearDownClass(cls):
    r"""Remove the per-class test directory and release class fixtures."""
    os.removedirs(cls.test_dir)
    for attr in ('test_dir', 'experiment'):
        delattr(cls, attr)
    gc.collect()
spike_mon_2, name='stdp_net') if standalone == 1: device.insert_code('main', 'std::clock_t start = std::clock();') net.run(5 * second, report='text') if standalone == 1: device.insert_code( 'main', ''' std::ofstream myfile ("speed.txt"); if (myfile.is_open()) { double value = (double) (std::clock() - start)/(%d * CLOCKS_PER_SEC); myfile << value << std::endl; myfile.close(); } ''' % (max(1, n_threads))) try: os.removedirs(path) except Exception: pass if standalone == 1: device.build(project_dir=path, compile_project=True, run_project=True, debug=False)
def deldir(pth, force=True):
    """Delete directory *pth*: rmtree when force, else removedirs (empty only).

    Silently does nothing when *pth* is not a directory.
    """
    if not os.path.isdir(pth):
        return
    if force:
        shutil.rmtree(pth)
    else:
        os.removedirs(pth)
def remove_cache_dirs(self):
    """Best-effort removal of the cache directory (and empty parents)."""
    try:
        os.removedirs(self.cache_path())
    # Bug fix: the bare `except:` also swallowed programming errors
    # (AttributeError, etc.) and KeyboardInterrupt; only filesystem
    # failures (missing / non-empty dir) are expected here.
    except OSError:
        pass
def myrmdir():
    """Prompt for a directory name and remove it (plus empty parents).

    Python 2 only (raw_input).  Prints a message when the path is absent.
    """
    dirname = raw_input("ENTER DIRECTORY: ")
    if os.path.exists(dirname):
        # NOTE(review): removedirs requires the directory to be empty and
        # also prunes empty parents -- confirm that is the intent.
        os.removedirs(dirname)
    else:
        print("DIRECTORY " + dirname + " NOT FOUND")
def removedirs(self):
    """Delete this directory and any empty ancestors (os.removedirs)."""
    os.removedirs(self)
def remove_folder(folder):
    """Remove *folder* with all its files and subdirectories."""
    for root, dirs, files in os.walk(folder, topdown=False):
        for name in files:
            os.remove(os.path.join(root, name))
        # Bug fix: the old code deleted only files, so any subdirectory
        # (now empty, but still present) made the final removedirs(folder)
        # fail with OSError.  Bottom-up walk guarantees children are
        # already empty when their parent is visited.
        for name in dirs:
            os.rmdir(os.path.join(root, name))
    # Explicit os.removedirs (the old bare `removedirs` relied on a
    # module-level alias); also prunes empty parent directories.
    os.removedirs(folder)
def removedirs(dir):
    """Thin wrapper: remove *dir* and any empty parents via os.removedirs."""
    os.removedirs(dir)
def test_interpret_task_results_without_sorting(self):
    """Exercise interpret_task_results with sort_res=False for the three
    result types: file lists, compressed data blobs, and an unsupported
    type code (58).

    Convention exercised: files[2] (*.log) becomes stdout, files[3]
    (*err.log) becomes stderr, the rest land in task.results.
    """
    task = self._get_core_task()
    subtask_id = "xxyyzz"
    files_dir = os.path.join(task.tmp_dir, subtask_id)
    files = self.additional_dir_content([5], sub_dir=files_dir)
    # Turn two of the files into the expected log/err-log names.
    shutil.move(files[2], files[2] + ".log")
    files[2] += ".log"
    shutil.move(files[3], files[3] + "err.log")
    files[3] += "err.log"
    files_copy = copy(files)
    task.interpret_task_results(subtask_id, files, result_types["files"], False)
    # Non-log results are relocated one directory up.
    files[0] = outer_dir_path(files[0])
    files[1] = outer_dir_path(files[1])
    files[4] = outer_dir_path(files[4])
    self.assertEqual(task.results[subtask_id], [files[0], files[1], files[4]])
    self.assertEqual(task.stderr[subtask_id], files[3])
    self.assertEqual(task.stdout[subtask_id], files[2])
    # Re-create the originals and interpret again: results are unchanged.
    for f in files_copy:
        with open(f, 'w'):
            pass
    task.interpret_task_results(subtask_id, files_copy, result_types["files"], False)
    self.assertEqual(task.results[subtask_id], [files[0], files[1], files[4]])
    for f in files_copy:
        with open(f, 'w'):
            pass
    # Replace the first target with a directory: the clash is logged and
    # that entry is dropped from the results.
    os.remove(files[0])
    os.makedirs(files[0])
    with self.assertLogs(logger, level="WARNING"):
        task.interpret_task_results(subtask_id, files_copy, result_types["files"], False)
    assert task.results[subtask_id] == [files[1], files[4]]
    os.removedirs(files[0])
    for f in files + files_copy:
        if os.path.isfile(f):
            os.remove(f)
        assert not os.path.isfile(f)
    # Second scenario: results delivered as compressed data blobs.
    subtask_id = "aabbcc"
    files_dir = os.path.join(task.tmp_dir, subtask_id)
    files = self.additional_dir_content([5], sub_dir=files_dir)
    shutil.move(files[2], files[2] + ".log")
    files[2] += ".log"
    shutil.move(files[3], files[3] + "err.log")
    files[3] += "err.log"
    res = [
        self.__compress_and_dump_file(files[0], "abc" * 1000),
        self.__compress_and_dump_file(files[1], "def" * 100),
        self.__compress_and_dump_file(files[2], "outputlog"),
        self.__compress_and_dump_file(files[3], "errlog"),
        self.__compress_and_dump_file(files[4], "ghi")
    ]
    task.interpret_task_results(subtask_id, res, result_types["data"], False)
    files[0] = outer_dir_path(files[0])
    files[1] = outer_dir_path(files[1])
    files[4] = outer_dir_path(files[4])
    self.assertEqual(task.results[subtask_id], [files[0], files[1], files[4]])
    self.assertEqual(task.stderr[subtask_id], files[3])
    self.assertEqual(task.stdout[subtask_id], files[2])
    # Decompressed results land in tmp_dir; logs stay under the subtask dir.
    for f in [files[0], files[1], files[4]]:
        self.assertTrue(
            os.path.isfile(os.path.join(task.tmp_dir, os.path.basename(f))))
    for f in [files[2], files[3]]:
        self.assertTrue(
            os.path.isfile(
                os.path.join(task.tmp_dir, subtask_id, os.path.basename(f))))
    # Third scenario: an unsupported result-type code.
    subtask_id = "112233"
    task.interpret_task_results(subtask_id, res, 58, False)
    self.assertEqual(task.results[subtask_id], [])
    self.assertEqual(task.stderr[subtask_id],
                     "[GOLEM] Task result 58 not supported")
    self.assertEqual(task.stdout[subtask_id], "")
with open(filename.strip(".png").strip(".jpg")+ ".txt", 'rb') as f: s2 = f.read() os.system("del *.txt") f2 = open('../text.txt','ab+') f2.write(s2) f2.close() print("识别成功!") print("文本导出成功!") print() if __name__ == "__main__": outfile = 'text.txt' outdir = 'tmp' if path.exists(outfile): os.remove(outfile) if not path.exists(outdir): os.mkdir(outdir) print("压缩过大的图片...") # 首先对过大的图片进行压缩,以提高识别速度,将压缩的图片保存与临时文件夹中 for picfile in glob.glob("pictures/*"): convertimg(picfile, outdir) print("图片识别...") for picfile in glob.glob("tmp/*"): myOCR(picfile) os.chdir("D:\\python编程\\SWork\\Include") os.remove(picfile) print('图片文本提取结束!文本输出结果位于 %s 文件中。' % outfile) os.removedirs(outdir)
def removedirs(self):
    """Remove this directory and any empty ancestors, returning self.

    .. seealso:: :func:`os.removedirs`
    """
    os.removedirs(self)
    return self
def del_seg_video():
    """Delete every downloaded segment, then the (now empty) video folder."""
    time.sleep(2)
    for seg in all_filename:
        os.remove(os.path.join(target_path, sel_res.getVideoTitle(), seg))
    os.removedirs(os.path.join(target_path, sel_res.getVideoTitle()))
# Auto-generated notebook export: each `if __name__ == "__main__"` guard
# below corresponds to one executed notebook cell.
# NOTE(review): this assert belongs to the loop of the preceding cell
# (outcome/Runner come from earlier in the file) -- confirm context.
assert outcome == Runner.PASS

if __name__ == "__main__":
    random_fuzzer.run(cat)

if __name__ == "__main__":
    random_fuzzer.runs(cat, 10)

# ## Lessons Learned

if __name__ == "__main__":
    print('\n## Lessons Learned')

if __name__ == "__main__":
    # Clean up the temporary input file and its directory.
    os.remove(FILE)
    os.removedirs(tempdir)

# ## Next Steps

if __name__ == "__main__":
    print('\n## Next Steps')

# ## Background

if __name__ == "__main__":
    print('\n## Background')

# ## Exercises

if __name__ == "__main__":
    print('\n## Exercises')
"auv", "/home/uwsim/uwsim_ws/install_isolated/share/RL/launch/basic.launch") launch.start() rospy.loginfo("auv started!") rospy.sleep(10) rank = MPI.COMM_WORLD.Get_rank() sess = U.single_threaded_session() sess.__enter__() task_name = "{}.{}.{}.{}".format(policy_name, args.env_id, args.taskname, args.seed) tensorboard_dir = osp.join(args.log_dir, task_name) if os.path.exists(tensorboard_dir): os.removedirs(tensorboard_dir) ckpt_dir = osp.join(args.checkpoint_dir, task_name) if rank == 0: logger.configure() else: logger.configure(format_strs=[]) workerseed = args.seed + 10000 * MPI.COMM_WORLD.Get_rank( ) if args.seed is not None else None set_global_seeds(workerseed) args.no_cnn = True if args.policy_type == 'dense' else False env = make_env(args.env_id, seed=args.seed,
# Fetch each git submodule: public repos are downloaded as GitHub zip
# archives (faster than a clone); private repos, or any archive that
# fails, fall back to `git submodule update`.  Python 2 only.
for sub in submodules:
    print "Getting submodule %s" % sub.path
    use_archive = sub.path not in private_repos
    if use_archive:
        url = git_url(sub.url)
        # Pin the archive to the exact commit recorded for the submodule.
        archive_url = "%s/archive/%s.zip" % (url, sub.hexsha)
        request = urllib2.Request(archive_url)
        with open(ZIP_PATH, 'wb') as f:
            f.write(urllib2.urlopen(request).read())
        try:
            with zipfile.ZipFile(ZIP_PATH) as zf:
                zf.extractall(DL_DIR)
        except zipfile.BadZipfile:
            # fall back to standard download
            # (a failed download is usually an HTML error page, so show it)
            use_archive = False
            with open(ZIP_PATH) as f:
                print ("Getting archive failed with error %s. Falling back to "
                       "git clone." % f.read())
            os.remove(ZIP_PATH)
        except Exception as e:
            use_archive = False
            print ("Getting archive failed with error %s. Falling back to "
                   "git clone." % e.message)
        else:
            # Success: replace the (empty) submodule dir with the archive's
            # single top-level directory.
            os.remove(ZIP_PATH)
            os.removedirs(sub.path)
            submodule_dir = os.listdir(DL_DIR)[0]
            shutil.move(os.path.join(DL_DIR, submodule_dir), sub.path)
    if not use_archive:
        os.system('git submodule update %s' % sub.path)
def create_custom_qss(
    name,
    path,
    color_background_light,
    color_background_normal,
    color_background_dark,
    color_foreground_light,
    color_foreground_normal,
    color_foreground_dark,
    color_selection_light,
    color_selection_normal,
    color_selection_dark,
    border_radius,
):
    """
    Create a custom palette based on the parameters defined.

    The `name` must be a valid Python identifier and will be stored
    as a lowercased folder (even if the identifier had uppercase letters).

    This function returns the custom stylesheet pointing to resources
    stored at .../path/name/.
    """
    stylesheet = ''

    # Check if name is valid
    if is_identifier(name):
        name = name if name[0].isupper() else name.capitalize()
    else:
        raise Exception('The custom palette name must be a valid Python '
                        'identifier!')

    # Copy resources folder; wipe any previous theme of the same name first.
    rc_loc = os.path.basename(RC_PATH)
    qss_loc = os.path.basename(QSS_PATH)
    theme_root_path = os.path.join(path, name.lower())
    theme_rc_path = os.path.join(theme_root_path, rc_loc)

    if os.path.isdir(theme_root_path):
        shutil.rmtree(theme_root_path)

    shutil.copytree(RC_PATH, theme_rc_path)

    # Copy QSS folder and contents
    theme_qss_path = os.path.join(theme_root_path, qss_loc)
    if os.path.isdir(theme_qss_path):
        # BUG FIX: was os.removedirs(), which raises on a non-empty folder
        # and prunes empty ancestors; use shutil.rmtree for consistency with
        # the theme_root_path branch above.
        shutil.rmtree(theme_qss_path)

    shutil.copytree(QSS_PATH, theme_qss_path)

    # Create custom palette subclass with the requested colors.
    custom_palette = type(name, (DarkPalette, ), {})
    custom_palette.COLOR_BACKGROUND_LIGHT = color_background_light
    custom_palette.COLOR_BACKGROUND_NORMAL = color_background_normal
    custom_palette.COLOR_BACKGROUND_DARK = color_background_dark
    custom_palette.COLOR_FOREGROUND_LIGHT = color_foreground_light
    custom_palette.COLOR_FOREGROUND_NORMAL = color_foreground_normal
    custom_palette.COLOR_FOREGROUND_DARK = color_foreground_dark
    custom_palette.COLOR_SELECTION_LIGHT = color_selection_light
    custom_palette.COLOR_SELECTION_NORMAL = color_selection_normal
    custom_palette.COLOR_SELECTION_DARK = color_selection_dark
    custom_palette.SIZE_BORDER_RADIUS = border_radius
    # Quoted so the value drops straight into the SCSS/QSS text.
    custom_palette.PATH_RESOURCES = "'{}'".format(theme_root_path)

    # Process images and save them to the custom palette rc folder
    create_images(rc_path=theme_rc_path, palette=custom_palette)
    create_palette_image(path=theme_root_path, palette=custom_palette)

    # Compile SCSS
    variables_scss_filepath = os.path.join(theme_qss_path, VARIABLES_SCSS_FILE)
    theme_main_scss_filepath = os.path.join(theme_qss_path, MAIN_SCSS_FILE)
    theme_qss_filepath = os.path.join(theme_root_path, QSS_FILE)
    stylesheet = create_qss(
        qss_filepath=theme_qss_filepath,
        main_scss_filepath=theme_main_scss_filepath,
        variables_scss_filepath=variables_scss_filepath,
        palette=custom_palette,
    )

    # Update colors in text: replace every default DarkPalette color with the
    # corresponding custom color in both the SCSS source and the stylesheet.
    with open(theme_main_scss_filepath, 'r') as fh:
        data = fh.read()

    for key, color in DarkPalette.color_palette().items():
        custom_color = custom_palette.color_palette()[key].upper()
        data = data.replace(color, custom_color)
        stylesheet = stylesheet.replace(color, custom_color)

    with open(theme_main_scss_filepath, 'w') as fh:
        fh.write(data)

    with open(theme_qss_filepath, 'w') as fh:
        fh.write(stylesheet)

    return stylesheet
        # Apply the patch, then verify the result against the expected hash.
        subprocess.Popen(patch_list)
        new_file_sha1 = sha1OfFile(new_file_path)
        if patched_file_sha1 != new_file_sha1:
            #print('sha1 mismatch: ' + new_file_path)
            print('sha1 mismatch: ' + new_file_path, file=errorlog)
        print(command, file=performedlog)
    except FileNotFoundError:
        print('file not found: ' + original_file)
        print('file not found: ' + original_file, file=errorlog)
elif command.startswith('delete_recursive'):
    # Must precede the plain 'delete' branch, or the startswith() prefix
    # check would match 'delete_recursive' commands too.
    command = command[17:-2]
    dirs = command.strip().split(',')
    for adir in dirs:
        # Strip quotes/whitespace; [1:] drops the leading path separator.
        # NOTE(review): assumes each entry begins with a separator — confirm
        # against the command file format.
        to_delete = adir.strip().strip('"').strip()[1:]
        try:
            os.removedirs(os.path.join(args.base, to_delete))
        except FileNotFoundError:
            print('warning - directory may or may not have been deleted: ' + to_delete)
            print('warning - directory may or may not have been deleted: ' + to_delete, file=errorlog)
    print(command, file=performedlog)
elif command.startswith('delete'):
    # Must follow the 'delete_recursive' branch, or the prefix checks
    # above would be wrong.
    command = command[7:-2]
    files = command.strip().split(',')
    for afile in files:
        to_delete = afile.strip().strip('"').strip()[1:]
        try:
            os.remove(os.path.join(args.base, to_delete))
        except FileNotFoundError:
            print('warning - file may or may not have been deleted: ' + to_delete)
            print('warning - file may or may not have been deleted: ' + to_delete, file=errorlog)
        except IsADirectoryError:
import os

os.listdir('.')       # List the files in the current directory.
os.makedirs('xx')     # Create a directory named 'xx' under the current directory.
os.removedirs('xxxx') # Remove the (empty) 'xxxx' directory.
os.chdir('')          # Change the current working directory to the given path.

# Linux commands can be executed directly via os.system or os.popen.
# Example: ls
os.system('ls')
os.popen('ls').read()
# Difference: os.system returns the exit status (a boolean-like code),
# while os.popen returns a readable stream of the command's output.
def test_create_scenario_02():
    """A Scenario built with parameters materializes its data folder."""
    cfg = Config()
    cfg['DATA_FOLDER'] = '/tmp/%s' % uuid.uuid4()

    created = Scenario(config=cfg, parameters={'k': 'v'})

    data_dir = created.data_folder
    assert os.path.exists(data_dir)
    # Clean up: the freshly created folder is empty, so removedirs suffices.
    os.removedirs(data_dir)
def delete(self, sdid: str) -> None:
    """Delete the object stored under *sdid* and prune its parent directory
    if (and only if) that leaves it empty.

    Raises FileNotFoundError if the object does not exist.
    """
    path = self.path_to(sdid)
    os.remove(path)
    try:
        # BUG FIX: os.removedirs raises OSError whenever the parent still
        # holds other entries — the common case, not an error. Treat a
        # non-empty parent as "nothing to prune".
        os.removedirs(path.parent)
    except OSError:
        pass
def deleteDirectory(name):
    """Remove directory *name*, then prune any now-empty ancestor dirs
    (delegates to os.removedirs)."""
    return removedirs(name)
                       scale='bytes'))
# Ask for confirmation before deleting anything; any keystroke other than
# a bare [enter] aborts the run.
if len(del_obj_queue):
    ri = raw_input("\n Press [enter] to delete segments marked above.")
    if ri != "":
        message = "Aborting..."
        raise Exception, message
else:
    print "\n Nothing to delete. Exiting...\n"
    sys.exit()

for obj in del_obj_queue:
    # The bracket sequences are ANSI color escapes (red "Deleting:" label).
    print " [31mDeleting:[m %s" % (obj.target)
    os.remove(obj.target)
    # CLEAN UP: attempt to remove the project directory
    # and the year* directory.
    # * remove the .DS_Store file in the year directory
    #   leftover from mac browsing
    project_dir = os.path.split(obj.target)[0]
    year_dir = os.path.split(project_dir)[0]
    for _dir in [project_dir, year_dir]:
        ds = "%s/.DS_Store" % _dir
        try:
            os.remove(ds)
        except:
            pass
        try:
            # Best effort: removedirs only succeeds on empty directories,
            # so non-empty dirs are silently left in place.
            os.removedirs(_dir)
        except:
            pass
import os
import shutil

# Exercise script: create, rename, delete directories and files on the
# desktop. Paths are named once up front and reused below.
newpath = r'/Users/zhipengyan/Desktop/Python'
renamed_path = '/Users/zhipengyan/Desktop/Pycharm'

# a. Create a new directory (skipped when it already exists).
if not os.path.exists(newpath):
    os.mkdir(newpath)

# b. Rename the above directory.
os.rename(newpath, renamed_path)

# c. Delete the above (empty) directory.
os.removedirs(renamed_path)

# d. Create another directory and create two text files in this directory.
os.mkdir(newpath)
for fresh_file in (newpath + '/1.xlsx', newpath + '/2.xlsx'):
    open(fresh_file, 'a').close()

# e. Delete one of the text files; ignore the error if it is missing.
try:
    os.remove(newpath + '/1.xlsx')
except OSError:
    pass

# f. Rename the remaining text file.
os.rename(newpath + '/2.xlsx', newpath + '/stock_price.xlsx')

# g. Create a subdirectory within the above created directory.
os.makedirs(newpath + '/level1')
def minimize_binary_lp(A, b, c, use_pulp=False):
    """Solve the 0/1 integer program: minimize c.x subject to A.x <= b,
    with each x_i binary.

    A may be a dense np.ndarray or a sparse mapping row -> {col: coeff}.
    When use_pulp is True the model is built and solved through the pulp
    API; otherwise an .lp file is written and the solver binary is invoked
    directly, and its solution file is parsed back.

    Returns an np.ndarray of N variable values, or None if the parsed
    solution contains a value outside [0 - EPS, 1 + EPS].
    """
    import pulp
    M = len(b)  # number of constraints
    N = len(c)  # number of variables
    if use_pulp:
        # Create problem
        prob = pulp.LpProblem("Problem", pulp.LpMinimize)
        # Create variables (binary via bounds 0..1 + integer category)
        names = [str(i) for i in range(N)]
        x = pulp.LpVariable.dicts("x", names, 0, 1, pulp.LpInteger)
        # Objective function
        prob += pulp.lpSum([c[i] * x[str(i)] for i in range(N)]), ""
        # Constraints — dense rows skip zero coefficients; sparse rows
        # iterate only their stored columns.
        if isinstance(A, np.ndarray):
            for i in range(M):
                prob.constraints["C%d" % i] = \
                    pulp.LpAffineExpression([
                        (x[str(j)], A[i][j])
                        for j in range(N) if A[i][j] != 0]) <= b[i]
        else:
            for i in range(M):
                prob.constraints["C%d" % i] = pulp.LpAffineExpression(
                    [(x[str(j)], A[i][j]) for j in A[i].keys()]) <= b[i]
        # Solution
        prob.solve()
        return np.array([pulp.value(x[names[i]]) for i in range(N)])
    else:
        # Unique scratch dir per call (timestamp + random suffix).
        temp_path = os.path.join(os.getcwd(), "temp", str(
            int(10 * time.time())) + str(np.random.uniform()))
        os.makedirs(temp_path)
        lp_path = os.path.join(temp_path, "input.lp")
        sol_path = os.path.join(temp_path, "sol.txt")
        # Reuse pulp's configured solver binary (CBC-style CLI assumed).
        exe_path = pulp.LpSolverDefault.path
        # Write the model in LP file format; lin_fun renders a linear
        # expression (helper defined elsewhere in this file).
        with open(lp_path, "w") as f:
            f.write("\\* Problem *\\\n")
            f.write("Minimize\n")
            f.write("OBJ: %s\n" % lin_fun(c))
            f.write("Subject To\n")
            for i in range(M):
                f.write("C%d: %s <= %f\n" % (i, lin_fun(A[i]), b[i]))
            f.write("Binaries\n")
            for i in range(N):
                f.write("x_%d\n" % i)
            f.write("End\n")
        # "solve solu <file>" is CBC's CLI syntax for solve-and-save.
        os.popen("%s %s solve solu %s" %
                 (exe_path, lp_path, sol_path)).read()
        ans = np.zeros(N)
        # Parse the solution file: each variable line contains a token
        # "x_<idx>" followed by its value.
        for line in open(sol_path, "r"):
            tokens = line.split()
            for i in range(len(tokens)):
                if(tokens[i][0] == "x"):
                    value = float(tokens[i + 1])
                    # Reject out-of-range values (EPS is a module constant).
                    if (value < -EPS or value > 1 + EPS):
                        return None
                    # tokens[i][2:] strips the "x_" prefix to get the index.
                    ans[int(tokens[i][2:])] = value
                    break
        # Clean up scratch files, then the (now empty) scratch directory.
        os.remove(lp_path)
        os.remove(sol_path)
        os.removedirs(temp_path)
        return ans
def rmdir(s):
    """Remove directory *s* (pruning empty ancestors), printing the path
    first; on failure print a warning instead of raising."""
    print(s)
    try:
        os.removedirs(s)
    except OSError:
        # BUG FIX: os.removedirs raises OSError (missing path, non-empty
        # dir, permissions). Catching IOError only worked because it is an
        # alias of OSError on Python 3; name the documented type.
        print("there was an error navigating to " + s)
        pass
    # Fallback width when the terminal size cannot be determined.
    return 80

def makedir(path, notindexed):
    # POSIX: the notindexed flag is a no-op here (Windows-only concept).
    os.mkdir(path)

def unlinkpath(f, ignoremissing=False):
    '''unlink and remove the directory if it is empty'''
    try:
        os.unlink(f)
    except OSError, e:
        # With ignoremissing, a missing file is fine; re-raise anything else.
        if not (ignoremissing and e.errno == errno.ENOENT):
            raise
    # try removing directories that might now be empty
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass

def lookupreg(key, name=None, scope=None):
    # Windows-registry lookup: nothing to do on this platform.
    return None

def hidewindow():
    """Hide current shell window.

    Used to hide the window opened when starting asynchronous child
    process under Windows, unneeded on other systems.
    """
    pass

class cachestat(object):
def cleanup(dir='test'):
    """Delete every file and subdirectory under *dir*, leaving *dir* itself
    as an empty directory.

    BUG FIX: the original walked top-down and called os.removedirs on each
    child directory while it still contained files, which raises
    OSError(ENOTEMPTY) for any non-trivial tree — and os.removedirs also
    accidentally prunes empty *ancestor* directories above the tree.
    Walking bottom-up guarantees every directory is empty when os.rmdir
    reaches it.
    """
    for path, dirs, files in os.walk(dir, topdown=False):
        for fn in files:
            os.remove(os.path.join(path, fn))
        for d in dirs:
            os.rmdir(os.path.join(path, d))
def buildPredixSDKs(config):
    """Build the predix-sdks submodule tree with maven.

    Optionally refreshes the git submodules, then runs `mvn clean package`
    with/without a custom settings file and local repo depending on
    config.fastinstall / config.mvnsettings / config.mavenRepo. On any
    exception the whole step is retried once (via recursion), then
    re-raised.
    """
    try:
        config.current = 'buildPredixSDKs'
        print("Fast install = " + config.fastinstall)
        if config.pullsubmodules == 'y':
            print("CurrentDir " + os.getcwd())
            statementStatus = subprocess.call(
                'git submodule update --init --remote predix-sdks',
                shell=True)
            print("CurrentDir " + os.getcwd())
            print("ChangeDir = " + config.predixSDKs)
            os.chdir(config.predixSDKs)
            try:
                updateGitModules(config)
                checkoutSubmodules()
            finally:
                # Always restore .gitmodules even if checkout fails.
                restoreGitModules(config)
            print("ChangeDir = ..")
            os.chdir("..")
        print("Build using maven setting : " + config.mvnsettings +
              " Maven Repo : " + config.mavenRepo)
        if config.fastinstall != 'y':
            print("Compiling code...")
            if config.mavenRepo != "":
                # NOTE(review): os.removedirs only removes *empty* dirs, so
                # wiping a populated maven repo will raise OSError here; the
                # commented-out "rm -rf" suggests shutil.rmtree was the
                # intent — confirm before relying on this branch.
                os.removedirs(config.mavenRepo)
                #statementStatus = subprocess.call("rm -rf "+config.mavenRepo, shell=True)
            if config.mvnsettings == "":
                os.chdir(config.predixSDKs)
                statementStatus = subprocess.call(
                    "mvn clean package -Dmaven.repo.local=" + config.mavenRepo,
                    shell=True)
                os.chdir("..")
            else:
                os.chdir(config.predixSDKs)
                statementStatus = subprocess.call(
                    "mvn clean package -s " + config.mvnsettings +
                    " -Dmaven.repo.local=" + config.mavenRepo,
                    shell=True)
                os.chdir("..")
        else:
            print("mvnSettings=" + config.mvnsettings)
            if config.mvnsettings == "":
                os.chdir(config.predixSDKs)
                statementStatus = subprocess.call("mvn clean package",
                                                  shell=True)
                os.chdir("..")
            else:
                os.chdir(config.predixSDKs)
                statementStatus = subprocess.call("mvn clean package -s " +
                                                  config.mvnsettings,
                                                  shell=True)
                os.chdir("..")
        if statementStatus != 0:
            print("Maven build failed.")
            sys.exit(1)
        # Successful pass resets the retry budget for later steps.
        config.retryCount = 0
    except:
        print traceback.print_exc()
        print()
        print('Exception when running ' + config.current + '. Retrying')
        config.retryCount = config.retryCount + 1
        if config.retryCount <= 1:
            # Retry the whole step once before giving up.
            buildPredixSDKs(config)
        else:
            raise