def remove(self, ignore_errors=False):
    """Remove this installed package: run control hooks, delete its files, clean cache.

    :param ignore_errors: accepted for API compatibility; per-file removal
        errors are always skipped (deletion is best-effort).
        TODO: honour it for the hook/cleanup steps as well.
    """
    stream_logger.info(" - %s" % self.name)
    # If the archive is not already extracted, unpack it so files.lst and
    # the control scripts are available.
    if not os.path.exists("%s/%s" % (conf.get("settings", "cache"), self.name)):
        self.unarchive()
    self.import_control()
    # Pre Remove
    stream_logger.info(" | Pre Remove")
    self.control.pre_remove()
    # Remove
    stream_logger.info(" | Remove")
    # Close the list file deterministically (the original leaked the handle).
    with open(os.path.join(conf.get("settings", "cache"), self.name, "files.lst")) as lst:
        files_list = lst.readlines()
    for _file in files_list:
        try:
            os.remove(os.path.join(conf.get("settings", "packages"), _file.replace("\n", "")))
        except OSError:
            # Narrowed from a bare 'except': file already gone or not
            # removable -- removal stays best-effort.
            pass
    # Post Remove
    stream_logger.info(" | Post Remove")
    self.control.post_remove()
    stream_logger.info(" | Clean")
    shutil.rmtree(os.path.join(conf.get("settings", "cache"), self.name))
def rrmdir(directory):
    """Recursively delete *directory* and everything beneath it.

    Walks the tree bottom-up so each child is removed before its parent,
    then removes the (now empty) top-level directory itself.
    """
    for parent, child_dirs, child_files in os.walk(directory, topdown=False):
        for entry in child_files:
            os.remove(os.path.join(parent, entry))
        for entry in child_dirs:
            os.rmdir(os.path.join(parent, entry))
    os.rmdir(directory)
def __del__(self):
    """Destructor. Removes the pidfile, if it was created by ourselfes."""
    # Only clean up a pidfile this object actually created.
    if not self.created:
        return
    if not os.path.exists(self.filename):
        # Already gone (e.g. removed externally) -- nothing to do.
        if self.verbose > 3:
            log.debug(
                _("Pidfile '%s' doesn't exists, not removing."),
                self.filename)
        return
    if not self.auto_remove:
        # Caller disabled automatic cleanup; leave the file in place.
        if self.verbose > 3:
            log.debug(
                _("Auto removing disabled, don't deleting '%s'."),
                self.filename)
        return
    if self.verbose > 1:
        log.debug(_("Removing pidfile '%s' ..."), self.filename)
    if self.simulate:
        # Dry-run mode: report, but do not actually delete.
        if self.verbose > 1:
            log.debug(_("Just kidding .."))
        return
    try:
        os.remove(self.filename)
    except OSError as e:
        # Removal failed (permissions, race, ...): log and carry on --
        # an exception escaping __del__ would be swallowed anyway.
        log.err(
            _("Could not delete pidfile %(file)r: %(err)s"),
            self.filename, str(e))
    except Exception as e:
        self.handle_error(str(e), e.__class__.__name__, True)
def _setup_region_dict(self, name, reg, outsubdir, systematic, ntuple, basedir):
    """Build the config dict for one region.

    Returns None when the output file already exists and reruns are off;
    raises IOError(99) when the output directory is missing and may not be
    created.
    """
    config = reg.get_config_dict()
    config['name'] = name
    mode_subdir = config['hists'].lower()
    # Output subdirectory: explicit override wins, else derive it from
    # the systematic name.
    if outsubdir:
        syst_subdir = outsubdir
    elif systematic == 'NONE':
        syst_subdir = 'baseline'
    else:
        syst_subdir = systematic.lower()
    out_dir = join(basedir, mode_subdir, syst_subdir)
    if not isdir(out_dir):
        if not self.make_dirs:
            raise IOError(99, "no dir", out_dir)
        make_dir_if_none(out_dir)
    hist_file = '{}.h5'.format(basename(splitext(ntuple)[0]))
    out_path = join(out_dir, hist_file)
    if isfile(out_path):
        if not self.rerun:
            # Output already produced and reruns disabled: signal "skip".
            return None
        os.remove(out_path)
    config['output_name'] = out_path
    config['systematic'] = systematic
    return config
def _delete(self):
    """Delete the written file (if any), then prune now-empty parent folders."""
    # Remove the file itself, if it was ever written.
    if self.written:
        os.remove(self.filename)
        self.written = False
    # Walk the recorded folder chain from the deepest level upwards,
    # deleting each directory until one that still has content appears.
    while self.roots:
        deepest = ''
        for part in self.roots:
            deepest += '%s/' % part
        # Stop as soon as a directory holds other files or folders.
        if os.listdir(deepest):
            break
        os.rmdir(deepest)
        del self.roots[-1]
def __update_hotkey(self, command, hotkey):
    """Update the hotkey for 'command' to 'hotkey'.

    If 'command' is not found, add it with the new 'hotkey'.
    Return 'True' on success, 'False' otherwise.
    """
    self.__touch_config_file()
    oldfile = open(XBINDKEYS_CONFIG_FILE, "r")
    newfile = open(XBINDKEYS_CONFIG_FILE + ".new", "w")
    # Search for command; the config format is a quoted command line
    # followed by an indented hotkey line.
    commandfound = False
    skipnextline = False
    for line in oldfile:
        if not skipnextline:
            newfile.write(line)
        else:
            skipnextline = False
        if line == '"' + command + '"\n':
            newfile.write(" " + hotkey + "\n")  # update hotkey
            commandfound = True
            skipnextline = True
    if not commandfound:
        # command not found, add it
        newfile.write('"' + command + '"\n')
        newfile.write(" " + hotkey + "\n")
    oldfile.close()
    newfile.close()
    try:
        os.remove(XBINDKEYS_CONFIG_FILE)
    except OSError:
        # Narrowed from a bare 'except': only a failed removal aborts the
        # swap; clean up the staged file before reporting failure.
        sessionlog.write("ERROR: 'Hotkeys.__update_hotkey()' - Cannot replace '" + XBINDKEYS_CONFIG_FILE + "'.")
        os.remove(XBINDKEYS_CONFIG_FILE + ".new")
        return False
    shutil.move(XBINDKEYS_CONFIG_FILE + ".new", XBINDKEYS_CONFIG_FILE)
    return True
def docss():
    """ Compresses the CSS files """
    # Collect the theme's CSS file list (one filename per line in css.cfg);
    # paths are relative to the build directory this script runs from.
    listCSS = []
    theme = settings.get_theme()
    print "Using theme %s" % theme
    css_cfg = os.path.join("..", "..", "..", "private", "templates", theme, "css.cfg")
    f = open(css_cfg, "r")
    files = f.readlines()
    f.close()
    # The last line of css.cfg is skipped (files[:-1]); strip any
    # line-break/whitespace control characters from each entry.
    for file in files[:-1]:
        p = re.compile("(\n|\r|\t|\f|\v)+")
        file = p.sub("", file)
        listCSS.append("../../styles/%s" % file)
    outputFilenameCSS = "eden.min.css"
    # Merge CSS files
    print "Merging Core styles."
    mergedCSS = mergeCSS(listCSS, outputFilenameCSS)
    # Compress CSS files
    print "Writing to %s." % outputFilenameCSS
    compressCSS(mergedCSS, outputFilenameCSS)
    # Move files to correct locations
    print "Deleting %s." % outputFilenameCSS
    try:
        os.remove("../../themes/%s/%s" % (theme, outputFilenameCSS))
    except:
        # Best effort: the target may not exist on a first build.
        pass
    print "Moving new %s." % outputFilenameCSS
    shutil.move(outputFilenameCSS, "../../themes/%s" % theme)
def _maybe_extract(fpath, dirname, descend=True):
    """Extract the zip at *fpath* into a sibling directory *dirname* if needed.

    :param fpath: path to the zip archive.
    :param dirname: name of the extraction directory, created next to fpath.
    :param descend: if True, require exactly one subdirectory inside the
        extracted tree and return its path; otherwise return the tree root.
    :returns: path to the extracted directory (see *descend*).
    """
    path = os.path.dirname(fpath)
    untar_fpath = os.path.join(path, dirname)
    if not os.path.exists(untar_fpath):
        print('Extracting contents of "{}"...'.format(dirname))
        # Context manager guarantees the archive handle is closed even when
        # extraction fails (the original leaked it on the error path).
        try:
            with zipfile.ZipFile(fpath, 'r') as tfile:
                tfile.extractall(untar_fpath)
        except (Exception, KeyboardInterrupt):
            # Remove a half-written extraction so a retry starts clean.
            if os.path.exists(untar_fpath):
                if os.path.isfile(untar_fpath):
                    os.remove(untar_fpath)
                else:
                    shutil.rmtree(untar_fpath)
            raise
    if descend:
        dirs = [os.path.join(untar_fpath, o)
                for o in os.listdir(untar_fpath)
                if os.path.isdir(os.path.join(untar_fpath, o))]
        if len(dirs) != 1:
            print("Error, found not exactly one dir: {}".format(dirs))
            sys.exit(-1)
        return dirs[0]
    else:
        return untar_fpath
def remove_logfile(self):
    """Delete this instance's logfile from the module's directory, if present."""
    from os import remove, path
    log_dir = path.split(__file__)[0]
    try:
        remove(path.join(log_dir, self.logfile_name))
    except OSError:
        # nothing to do if file does not exist
        pass
def test_bom(self):
    """Viewer should decode a UTF-16 file (with BOM) transparently."""
    dest = os.path.join(settings.TMP_PATH, 'test_bom')
    # Close the handle deterministically before the viewer reads the file
    # (the original 'open(...).write(...)' leaked it).
    with open(dest, 'w') as f:
        f.write('foo'.encode('utf-16'))
    self.viewer.select('foo')
    self.viewer.selected = {'full': dest, 'size': 1}
    eq_(self.viewer.read_file(), u'foo')
    os.remove(dest)
def test_delete_mid_read(self):
    """Deleting a selected file before reading yields '' plus an explanatory message."""
    self.viewer.extract()
    self.viewer.select('install.js')
    # Pull the file out from under the viewer before it is read.
    target = os.path.join(self.viewer.dest, 'install.js')
    os.remove(target)
    contents = self.viewer.read_file()
    eq_(contents, '')
    assert self.viewer.selected['msg'].startswith('That file no')
def open_tempfile_with_atomic_write_to(path, **kwargs):
    """
    Open a temporary file object that atomically moves to the specified
    path upon exiting the context manager.

    Supports the same function signature as `open`.

    The parent directory exist and be user-writable.

    WARNING: This is just like 'mv', it will clobber files!
    """
    parent_directory = os.path.dirname(path)
    # Reserve a unique name in the destination directory so the final
    # rename stays on one filesystem (and is therefore atomic).
    handle = tempfile.NamedTemporaryFile(delete=False, dir=parent_directory)
    handle.close()
    scratch_path = handle.name
    try:
        with open(scratch_path, **kwargs) as stream:
            yield stream
            # Push data to disk before the rename makes it visible.
            stream.flush()
            os.fsync(stream.fileno())
        os.rename(scratch_path, path)
    finally:
        try:
            os.remove(scratch_path)
        except OSError as err:
            # ENOENT is the normal case: the rename consumed the temp file.
            if err.errno != errno.ENOENT:
                raise
def save(self, force_insert=False, force_update=False):
    """Save the document; on file change, purge the old file and spawn
    background management commands.

    Side effects (all keyed on whether the stored file changed):
    - sets pdf_images_generated before delegating to Model.save()
    - deletes the previously stored file from disk
    - spawns 'createpages' / 'generatepdfimages' as subprocesses.
    """
    # Images still need generating only when IE-compatible mode is on
    # and the underlying file actually changed.
    if getattr(settings, 'DPP_IE_COMPATIBLE_PDF_VIEWER', True) and self.old_document != self.document:
        self.pdf_images_generated = False
    else:
        self.pdf_images_generated = True
    super(Document, self).save(force_insert, force_update)
    #print "pdf_images_generated set to: " + str(self.pdf_images_generated)

    # Delete old document
    if self.old_document and self.old_document != self.document:
        if os.path.exists(self.old_document.path):
            os.remove(self.old_document.path)
            #print "Old document deleted from path: " + self.old_document.path

    if self.old_document != self.document:
        # NOTE(review): shell=True is tolerable only because id is an
        # integer primary key -- confirm before reusing this pattern.
        cmd = u"python manage.py createpages " + str(self.id) + " --settings=" + settings.SETTINGS_MODULE
        subprocess.Popen(cmd, shell=True)
        #print "New page creation process started..."

    # Creating images when DPP_IE_COMPATIBLE_PDF_VIEWER=True in settings.py
    if getattr(settings, 'DPP_IE_COMPATIBLE_PDF_VIEWER', True) and self.old_document != self.document:
        cmd = u"python manage.py generatepdfimages " + str(self.id) + " --settings=" + settings.SETTINGS_MODULE
        subprocess.Popen(cmd, shell=True)
        #print "Image generation process started..."

    # Remember the stored file so the next save() can detect changes.
    self.old_document = self.document
def scrape_rollcall(self, vote, vurl):
    """Parse the roll-call PDF at *vurl* and file each legislator's vote."""
    (path, resp) = self.urlretrieve(vurl)
    pdflines = convert_pdf(path, 'text')
    os.remove(path)
    # Which bucket (vote.yes / vote.no / vote.other) names currently go into.
    record = None
    for raw in pdflines.split('\n'):
        entry = raw.strip()
        # Section headers switch the active bucket.
        if entry.startswith('YEAS') or entry.startswith('AYES'):
            record = vote.yes
        elif entry.startswith('NAYS'):
            record = vote.no
        elif entry.startswith(('EXCUSED', 'NOT VOTING', 'ABSTAIN')):
            record = vote.other
        elif not entry or entry.startswith('Page '):
            # Blank lines and page footers carry no votes.
            continue
        elif record:
            # Names are separated by runs of three or more spaces.
            for name in re.split('\s{3,}', entry):
                if name:
                    record(name.strip())
def save_tmp_file(fileobj, filename, ext):
    """Persist an uploaded file under /tmp/<filename><ext> and post-process it.

    - images (IMAGES_EXT) are resized and normalised to .jpg
    - .txt is stored as-is
    - .dcm/.dicom gets a thumbnail rendered (placeholder image on failure)
    - anything else is stored verbatim

    :returns: the extension the file was finally stored with.
    """
    if ext in IMAGES_EXT:
        with open("/tmp/" + filename + ext, 'wb') as f:
            shutil.copyfileobj(fileobj, f)
        helpers.resize_image(filename, ext)
        # resize_image produces a .jpg; drop the original when it differed.
        if ext != '.jpg':
            os.remove("/tmp/" + filename + ext)
        return '.jpg'
    if ext in ['.txt']:
        with open("/tmp/" + filename + ext, 'w') as f:
            shutil.copyfileobj(fileobj, f)
        return ext
    if ext in ['.dcm', '.dicom']:
        with open("/tmp/" + filename + ext, 'w') as f:
            shutil.copyfileobj(fileobj, f)
        try:
            main_helpers.dicom.saveDicomAsImage(
                "/tmp/" + filename + ext,
                "/tmp/" + filename + ext + ".thumbnail.jpg")
        except Exception:
            # Narrowed from a bare 'except'; thumbnailing is best-effort and
            # falls back to the generic DICOM placeholder image.
            shutil.copy(
                os.path.join(settings.BASE_DIR, 'static/img/files/dicom.png'),
                "/tmp/" + filename + ext + ".thumbnail.png"
            )
        return ext
    # Fallback: store any other extension verbatim as binary.
    with open("/tmp/" + filename + ext, 'wb') as f:
        shutil.copyfileobj(fileobj, f)
    return ext
def write(self, cr, uid, ids, vals, context=None):
    """Write product image records while keeping filesystem files in sync.

    Renames the stored image file when filename/extension change, and
    removes the old file when new binary data ('file') is uploaded.
    """
    if not isinstance(ids, list):
        ids = [ids]
    # A bare "filename" may still carry the extension; split it off.
    if vals.get("filename") and not vals.get("extension"):
        vals["filename"], vals["extension"] = os.path.splitext(vals["filename"])
    upd_ids = ids[:]
    if vals.get("filename") or vals.get("extension"):
        images = self.browse(cr, uid, upd_ids, context=context)
        for image in images:
            old_full_path = self._image_path(cr, uid, image, context=context)
            if not old_full_path:
                continue
            # all the stuff below is there to manage the files on the filesystem
            # NOTE(review): relies on 'and' binding tighter than 'or' --
            # (filename set and changed) or (extension set and changed).
            if (
                vals.get("filename")
                and (image.name != vals["filename"])
                or vals.get("extension")
                and (image.extension != vals["extension"])
            ):
                # Write this record individually, then exclude it from the
                # batched write performed at the end.
                super(product_images, self).write(cr, uid, image.id, vals, context=context)
                upd_ids.remove(image.id)
                if "file" in vals:
                    # a new image have been loaded we should remove the old image
                    # TODO it's look like there is something wrong with function
                    # field in openerp indeed the preview is always added in the write :(
                    if os.path.isfile(old_full_path):
                        os.remove(old_full_path)
                else:
                    new_image = self.browse(cr, uid, image.id, context=context)
                    new_full_path = self._image_path(cr, uid, new_image, context=context)
                    # we have to rename the image on the file system
                    if os.path.isfile(old_full_path):
                        os.rename(old_full_path, new_full_path)
    return super(product_images, self).write(cr, uid, upd_ids, vals, context=context)
def send_data(save_path): """ Sends all the data files that are present in the specified path to the Qbike server. :param save_path: Requires the path in which the trips are saved. :return: Nothing. The data is sent to the Server and the txt files are removed from the path's directory. """ end = False Trip_nb = 100 while end == False: if not os.path.isfile('C:\Users\Joren\Documents\Ir 1B\P&O\P&O 3\Tryouts\Trips\Trip1.txt'): end = True else: for nb in reversed(range(0, 100)): Trip = os.path.join(save_path, "Trip" + str(nb) + ".txt") Trip_nb = str(nb) if os.path.isfile(Trip): break Trip_path = os.path.join(save_path, r"Trip" + Trip_nb + r".txt") with open(Trip_path, "r") as Trip: batch = json.load(Trip) info = {'purpose': 'batch-sender', 'groupID': "cwa2", 'userID': ID} socketIO = SocketIO('dali.cs.kuleuven.be', 8080) socketIO.on('server_message', on_response) socketIO.emit('start', json.dumps(info), on_response) socketIO.wait(2) socketIO.emit('batch-tripdata', json.dumps(batch), on_response) socketIO.wait(5) os.remove(Trip_path) print "Sent Data"
def runThread(self, conn, addr):
    """Serve every file under the module-global fpath to the client, forever.

    Protocol (lock-step text handshakes over the same socket):
    1. announce "Sending file: <name>"; wait for client "ok"
    2. stream the file in bsize chunks, waiting for "ok" after each chunk
    3. send "eof", close, then delete the local file.
    """
    while True:
        #print ('Connected from', addr)
        #data = conn.recv(bsize)
        #print ('Data received from client', repr(data.decode()))
        dirs = os.listdir(fpath)
        # Poll interval between directory scans.
        time.sleep(10)
        for fl in dirs:
            msg = '{0}{1}'.format("Sending file: ", fl)
            conn.send(msg.encode())
            if "ok" in conn.recv(bsize).decode():  # client ready to receive
                selfl = '{0}{1}'.format(fpath, fl)
                f = open(selfl, 'rb')
                payload = f.read(bsize)
                while (payload):
                    conn.send(payload)
                    print('........')
                    # Wait for the client's ack before reading the next chunk.
                    # NOTE(review): a non-"ok" reply re-sends the same chunk
                    # forever -- confirm that is the intended retry behavior.
                    if "ok" in conn.recv(bsize).decode():
                        payload = f.read(bsize)
                conn.send("eof".encode())
                f.close()
                # once the file is sent, it must be removed
                os.remove(selfl)
def removeOld(self, path, dontDelete = [], newSize = 0):
    """Delete old media files under *path* when the new release is larger.

    :param path: directory tree to scan.
    :param dontDelete: full paths that must survive the purge.
    :param newSize: size in bytes of the incoming release to compare against.
    :returns: True when the old files were removed, False when kept.
    """
    # NOTE(review): mutable default for dontDelete is shared across calls --
    # safe only while callers never mutate it.
    files = []
    oldSize = 0
    for root, subfiles, filenames in os.walk(path):
        log.debug(subfiles)
        for filename in filenames:
            ext = os.path.splitext(filename)[1].lower()[1:]
            fullPath = os.path.join(root, filename)
            oldSize += os.path.getsize(fullPath)
            # iso's are huge, but the same size as 720p, so remove some filesize for better comparison
            if ext == 'iso':
                oldSize -= (os.path.getsize(fullPath) / 1.6)
            if not fullPath in dontDelete:
                # Only delete media files and subtitles
                if ('*.' + ext in self.extensions['movie'] or '*.' + ext in self.extensions['subtitle']) and not '-trailer' in filename:
                    files.append(fullPath)
    log.info('Quality Old: %d, New %d.' % (long(oldSize) / 1024 / 1024, long(newSize) / 1024 / 1024))
    # Only replace when the incoming release is strictly bigger.
    if long(oldSize) < long(newSize):
        for file in files:
            try:
                os.remove(file)
                log.info('Removed old file: %s' % file)
            except OSError:
                log.info('Couldn\'t delete file: %s' % file)
        return True
    else:
        log.info('New file(s) are smaller then old ones, don\'t overwrite')
        return False
def testMakeDirs(self): d = path(self.tempdir) # Placeholder file so that when removedirs() is called, # it doesn't remove the temporary directory itself. tempf = d / 'temp.txt' tempf.touch() try: foo = d / 'foo' boz = foo / 'bar' / 'baz' / 'boz' boz.makedirs() try: self.assert_(boz.isdir()) finally: boz.removedirs() self.failIf(foo.exists()) self.assert_(d.exists()) foo.mkdir(o750) boz.makedirs(o700) try: self.assert_(boz.isdir()) finally: boz.removedirs() self.failIf(foo.exists()) self.assert_(d.exists()) finally: os.remove(tempf)
def sync(self, args):
    """ Synchronize rtc/repository.yaml file and each rtc repository version hash. """
    options, argv = self.parse_args(args[:], self._print_alternative_rtcs)
    verbose = options.verbose_flag
    sys.stdout.write('# Writing repository.yaml for package distribution\n')
    sys.stdout.write('## Parsing RTC directory\n')
    package = admin.package.get_package_from_path(os.getcwd())
    # Collect one repository record per RTC found in the package.
    repos = []
    for rtc in admin.rtc.get_rtcs_from_package(package, verbose=verbose):
        sys.stdout.write('### RTC %s\n' % rtc.rtcprofile.basicInfo.name)
        repo = admin.repository.get_repository_from_path(rtc.path, description=rtc.rtcprofile.basicInfo.description)
        repos.append(repo)
    repo_file = os.path.join(package.get_rtcpath(), 'repository.yaml')
    # Back up the current file under a timestamped name before rewriting.
    bak_file = repo_file + wasanbon.timestampstr()
    if os.path.isfile(bak_file):
        os.remove(bak_file)
    import shutil, yaml
    shutil.copy(repo_file, bak_file)
    # Load from the backup; close the handle deterministically (the original
    # leaked it) and use safe_load -- the file is plain data, no python tags.
    with open(bak_file, 'r') as f:
        dic = yaml.safe_load(f)
    if not dic:
        dic = {}
    for repo in repos:
        # identity comparison fix: 'is not None' instead of '!= None'.
        if getattr(repo, 'url') is not None:
            url = repo.url.strip()
        else:
            url = ''
        dic[repo.name] = {'repo_name': repo.name, 'git': url, 'description': repo.description, 'hash': repo.hash}
    with open(repo_file, 'w') as f:
        yaml.dump(dic, f, encoding='utf8', allow_unicode=True, default_flow_style=False)
def make_new_version_message(self, path):
    """Make a new version message for the repo at the given path."""
    # Capture the working directory *before* the try block: the original
    # bound 'cwd' inside it, so a failure in os.getcwd()/chdir could raise
    # NameError from the finally clause.
    cwd = os.getcwd()
    try:
        os.chdir(path)
        version = self.get_current_tag()
        # No tag yet -> nothing to write a message for.
        if version[0] is None:
            return
        messages_path = os.path.join(path, 'messages.json')
        message_path = self.rewrite_messages_json(messages_path, version)
        # Start from a clean file for this version's notes.
        if os.path.exists(message_path):
            os.remove(message_path)
        with open(message_path, mode='w', encoding='utf-8') as f:
            header = '{} {}'.format(
                os.path.basename(path),
                os.path.splitext(os.path.basename(message_path))[0])
            f.write('{}\n{}\n'.format(header, '-' * (len(header) + 1)))
            f.write(self.get_commit_messages_since(version))
        self.window.run_command('open_file', args={'file': message_path})
    except Exception:
        import traceback
        traceback.print_exc()
    finally:
        # Always restore the original working directory.
        os.chdir(cwd)
def sqlite_new(path):
    """Close any open handle, drop a stale database file, and re-create it."""
    global SQLite
    # Tear down the previous connection before touching the file.
    if SQLite is not None:
        sqlite_close()
    # Start from a clean slate: remove any existing database file.
    if os.path.isfile(path):
        os.remove(path)
    SQLite = sqlite_init(path)
def test_is_same_output(self):
    """_is_same_output: duplicated fds are 'same'; distinct files are not."""
    # stderr vs a dup of stderr -> same underlying output.
    fd1 = sys.stderr.fileno()
    fd2 = os.dup(fd1)
    try:
        self.assertTrue(ConfigureOutputHandler._is_same_output(fd1, fd2))
    finally:
        os.close(fd2)

    # stderr vs a fresh temp file -> different outputs.
    fd2, path = tempfile.mkstemp()
    try:
        self.assertFalse(ConfigureOutputHandler._is_same_output(fd1, fd2))

        # A dup of the temp-file fd matches the temp-file fd.
        fd3 = os.dup(fd2)
        try:
            self.assertTrue(ConfigureOutputHandler._is_same_output(fd2, fd3))
        finally:
            os.close(fd3)

        # A separately opened handle on the same file also matches.
        with open(path, 'a') as fh:
            fd3 = fh.fileno()
            self.assertTrue(
                ConfigureOutputHandler._is_same_output(fd2, fd3))
    finally:
        os.close(fd2)
        os.remove(path)
def page_extract(start, end, SUBSECTION): PDF_IN = PdfFileReader(open(PDF_DIR, 'rb')) # for i in xrange(PDF_IN.numPages): # for all pages for i in range(int(start) - 1, int(end)): output = PdfFileWriter() output.addPage(PDF_IN.getPage(i)) base, name_ext = os.path.split(PDF_DIR) name, ext = os.path.splitext(name_ext) PDF_OUT = '{}{}'.format(TMP_DIR, '{}-{}{}'.format(name, str(i).zfill(6), ext)) with open(PDF_OUT, 'wb') as outputStream: output.write(outputStream) gs_pdf_to_png(PDF_OUT) os.remove(PDF_OUT) png_list = group(os.listdir(TMP_DIR), 2) for tup in png_list: print tup card_front = os.path.join(TMP_DIR, tup[0]) card_back = os.path.join(TMP_DIR, tup[1]) make_cards(card_front, card_back, SUBSECTION)
def test_io(self): import h5py # Cleanup directories fname = 'testdset.hdf5' if mpiutil.rank0 and os.path.exists(fname): os.remove(fname) mpiutil.barrier() gshape = (19, 17) ds = mpiarray.MPIArray(gshape, dtype=np.int64) ga = np.arange(np.prod(gshape)).reshape(gshape) l0, s0, e0 = mpiutil.split_local(gshape[0]) ds[:] = ga[s0:e0] ds.redistribute(axis=1).to_hdf5(fname, 'testds', create=True) if mpiutil.rank0: with h5py.File(fname, 'r') as f: h5ds = f['testds'][:] assert (h5ds == ga).all() ds2 = mpiarray.MPIArray.from_hdf5(fname, 'testds') assert (ds2 == ds).all()
def __WritePickled(self, obj, filename):
    """Pickles the object and writes it to the given file.

    Writes to a temp file in the target directory first, then renames it
    into place so readers never observe a partially written pickle.
    """
    # Nothing to persist for empty targets or a null sink.
    if not filename or filename == '/dev/null' or not obj:
        return
    descriptor, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename))
    tmpfile = os.fdopen(descriptor, 'wb')
    # protocol=1 retained for compatibility with existing readers.
    pickler = pickle.Pickler(tmpfile, protocol=1)
    pickler.fast = True
    pickler.dump(obj)
    tmpfile.close()
    self.__file_lock.acquire()
    try:
        try:
            os.rename(tmp_filename, filename)
        except OSError:
            # Windows cannot rename over an existing file: remove the
            # destination and retry. Both excepts narrowed from bare
            # 'except' clauses so real errors (KeyboardInterrupt, bugs)
            # are no longer swallowed.
            try:
                os.remove(filename)
            except OSError:
                pass
            os.rename(tmp_filename, filename)
    finally:
        self.__file_lock.release()
def symlink(env, target, source):
    """SCons action: (re)create target[0] as a symlink to source[0]'s basename."""
    link_path = str(target[0])
    source_path = str(source[0])
    # Drop whatever currently occupies the link path (dangling links included:
    # islink() catches those even when exists() is False).
    if os.path.islink(link_path) or os.path.exists(link_path):
        os.remove(link_path)
    # Link by basename so the result resolves relative to the link's directory.
    os.symlink(os.path.basename(source_path), link_path)
def stop(self): """ Stop the daemon """ # Get the pid from the pidfile try: pf = file(self.pidfile, 'r') pid = int(pf.read().strip()) pf.close() except IOError: pid = None if not pid: message = "pidfile %s does not exist. Daemon not running?\n" sys.stderr.write(message % self.pidfile) return # not an error in a restart # Try killing the daemon process try: while 1: os.kill(pid, SIGTERM) time.sleep(0.1) except OSError, err: err = str(err) if err.find("No such process") > 0: if os.path.exists(self.pidfile): os.remove(self.pidfile) else: print str(err) sys.exit(1)
def parse(parseFileName):
    """Parse a DBLP-style XML file and emit one record per publication.

    Recreates Conf-Nodes.txt on every run; records are appended to it via
    the module-global resultFile handle that processRecord() writes to.
    """
    global resultFile
    resultFileName = "Conf-Nodes.txt"
    # Start from a fresh output file; a missing previous run is fine.
    try:
        os.remove(resultFileName)
    except OSError:
        pass
    tree = ET.parse(parseFileName)
    root = tree.getroot()
    #print(root);
    resultFile = open(resultFileName, 'a')
    for child in root:
        allAuthors = child.findall('author')
        pubYear = child.find('year')
        journal = child.find('journal')
        booktitle = child.find('booktitle')
        # Journal name wins; otherwise fall back to the conference booktitle.
        pubName = journal if journal is not None else booktitle
        if pubName is not None:
            processRecord(allAuthors, pubYear, pubName)
    resultFile.close()