def delete_images(art_images):
    """Cycle through the objects and delete their related image files."""
    MEDIA_ROOT = '/www/sites/crazyforus.com/media'
    for image in art_images:
        filename = MEDIA_ROOT + "/" + image.image_link + "/" + image.image_name
        if os.path.exists(filename):
            os.remove(filename)  # os has no delete(); use os.remove()

def readRawImage(update, context):
    # Fetch the image file handle from the Telegram server
    file = context.bot.getFile(update.message.photo[-1].file_id)
    path = file.file_id + '.jpeg'
    file.download(path)  # download the image under a unique name
    try:
        # Try to receive two lists with the bot names by analyzing the downloaded image
        result = bot_counter(path, update.message.photo[-1].width)
    except RuntimeError:
        texto = ("That definitely isn't a CODM match screenshot😫\n"
                 "If you are confused about the usage of this application type /tutorial")
        delete(path)  # delete the image
    else:
        delete(path)  # delete the image if no error was raised
        if len(result[0]) + len(result[1]) > 0:
            # Build the reply text from the two lists
            texto = 'There are at least ' + str(len(result[0]) + len(result[1])) + ' bots in this match: \n'
            for i in result[0]:
                texto += '🔵 ' + i + '\n'
            for i in result[1]:
                texto += '🔴 ' + i + '\n'
        else:
            # Text for when no bot was found
            texto = "I couldn't find any bot in this match 🤔🤔"
    context.bot.send_message(chat_id=update.effective_chat.id, text=texto,
                             reply_to_message_id=update.message.message_id)

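# Several snippets in this section (readRawImage above, restore, filechoose, the test
# setUp/tearDown methods, ...) call a bare delete(path) helper that is not defined here.
# A minimal sketch of what such a helper might do, assuming it simply removes a file if it
# exists (an assumption, not the original implementation):
import os


def delete(path):
    """Remove the file at `path`, ignoring the case where it does not exist."""
    if os.path.exists(path):
        os.remove(path)
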
def Gutmann(fname):
    """Gutmann method (35 passes): 4 random passes, 27 fixed-pattern passes, 4 random passes."""
    for i in range(0, 4):
        randpass(fname)
    fixedpass(fname, "\x55\x55\x55")
    fixedpass(fname, "\xAA\xAA\xAA")
    fixedpass(fname, "\x92\x49\x24")
    fixedpass(fname, "\x49\x24\x92")
    fixedpass(fname, "\x24\x92\x49")
    fixedpass(fname, "\x00\x00\x00")
    fixedpass(fname, "\x11\x11\x11")
    fixedpass(fname, "\x22\x22\x22")
    fixedpass(fname, "\x33\x33\x33")
    fixedpass(fname, "\x44\x44\x44")
    fixedpass(fname, "\x55\x55\x55")
    fixedpass(fname, "\x66\x66\x66")
    fixedpass(fname, "\x77\x77\x77")
    fixedpass(fname, "\x88\x88\x88")
    fixedpass(fname, "\x99\x99\x99")
    fixedpass(fname, "\xAA\xAA\xAA")
    fixedpass(fname, "\xBB\xBB\xBB")
    fixedpass(fname, "\xCC\xCC\xCC")
    fixedpass(fname, "\xDD\xDD\xDD")
    fixedpass(fname, "\xEE\xEE\xEE")
    fixedpass(fname, "\xFF\xFF\xFF")
    # Passes 26-31 of the standard Gutmann sequence; the original repeated "\x24\x92\x49"
    # and omitted "\xDB\x6D\xB6", which left it one pattern short of 35 passes.
    fixedpass(fname, "\x92\x49\x24")
    fixedpass(fname, "\x49\x24\x92")
    fixedpass(fname, "\x24\x92\x49")
    fixedpass(fname, "\x6D\xB6\xDB")
    fixedpass(fname, "\xB6\xDB\x6D")
    fixedpass(fname, "\xDB\x6D\xB6")
    for i in range(0, 4):
        randpass(fname)
    os.remove(fname)  # os has no delete(); use os.remove()

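# The wipe routines in this section (Gutmann above, Bruce_Schneier, the DoD, HMG IS5, GOST,
# VSITR and RCMP variants below) call fixedpass(), randpass() and verify() helpers that are
# not defined here. The following is only a rough sketch of what such helpers might look like
# (an assumption, not the original implementation): each pass overwrites the whole file in
# place and returns the data it wrote, so that verify() can re-read the file and compare.
import os


def fixedpass(fname, pattern):
    """Overwrite the file with a repeating fixed byte pattern and return that pattern."""
    size = os.path.getsize(fname)
    data = (pattern.encode('latin-1') * (size // len(pattern) + 1))[:size]
    with open(fname, 'r+b') as fh:
        fh.write(data)
        fh.flush()
        os.fsync(fh.fileno())
    return pattern


def randpass(fname):
    """Overwrite the file with pseudo-random bytes and return them for verification."""
    size = os.path.getsize(fname)
    data = os.urandom(size)
    with open(fname, 'r+b') as fh:
        fh.write(data)
        fh.flush()
        os.fsync(fh.fileno())
    return data


def verify(expected, fname):
    """Return True if the file now contains exactly the data written by the last pass."""
    if isinstance(expected, str):
        # fixedpass() returns its short pattern string; expand it to the file size
        size = os.path.getsize(fname)
        expected = (expected.encode('latin-1') * (size // len(expected) + 1))[:size]
    with open(fname, 'rb') as fh:
        return fh.read() == expected
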
def statQue(delete, num):
    dir = "stat_que"
    ques = {}
    files = os.listdir(dir)
    for file in files:
        with open(dir + "/" + file) as f:
            data = json.load(f)
        ques.update({data["Que"]: data["Count"]})
    list_d = list(ques.items())
    list_d.sort(key=lambda i: i[1], reverse=True)
    if num > 0:
        list_d = list_d[0:num]
    print()
    print()
    print("Топ из {0} запросов: ".format(num))  # "Top {0} queries:"
    print()
    for que in list_d:
        print(que[0] + " - " + str(que[1]))
    print()
    if delete:
        # os has no delete(); remove the files first, then the directory itself
        for file in files:
            os.remove(dir + "/" + file)
        os.rmdir(dir)

def restore():
    # Dialog (Chinese): "Restore" - Yes = reset to the just-imported state,
    # No = clear all data, Cancel = abort
    deleteOrReset = messagebox.askyesnocancel(
        title='还原',
        message='重置到为刚导入状态请选Yes,清除所有数据请选No,取消请选Cancel')
    if deleteOrReset is None:
        return
    global status
    if status == False:
        # "You have not imported a name list yet"
        messagebox.showerror(title='Fatal Error', message='你还没导入姓名表')
        return
    if not deleteOrReset:
        try:
            delete('nameData')
        except OSError:
            pass
        status = False
        statusLabel.config(text='未导入姓名表', fg='red')  # "Name list not imported"
    else:
        with open('nameData', 'r') as f:
            nameLst = json.load(fp=f)
        try:
            delete('nameData')
        except OSError:
            pass
        with open('nameData', 'w') as f:
            nameLst['modified'] = []
            nameLst['except'] = []
            for i in nameLst['orig']:
                nameLst['modified'].append(i)
            json.dump(nameLst, fp=f)

def delete_file(untracked_file):
    print("Are you sure you want to delete the file? (Y or N)")
    answer = str.lower(input("> "))
    if answer == "y" or answer == "yes":
        os.remove(untracked_file)  # os has no delete(); use os.remove()
    else:
        print("Ok, let's skip this file")

def end(self):
    self.con.close()
    try:
        os.remove(self.tempname)  # os has no delete(); use os.remove()
    except OSError:
        pass

def clear_folder(folder):
    try:
        files = os.listdir(folder)
        for f in files:
            # os has no delete(); join the folder and name so it works without a trailing slash
            os.remove(os.path.join(folder, f))
    except OSError:
        pass

def filechoose():
    global chosefilename
    # File picker (Chinese label): "Excel spreadsheet files"
    chosefilename = filedialog.askopenfilename(filetypes=[('Excel表格文件', '*.xlsx *.xls')])
    global status
    if chosefilename == '':
        return
    try:
        delete('nameData')
    except OSError:
        pass
    try:
        names = excel_reader.open_workbook(r'%s' % chosefilename)
        names = names.sheet_by_name('Sheet1')
    except Exception:
        status = False
        statusLabel.config(text='未导入姓名表', fg='red')  # "Name list not imported"
        messagebox.showerror('Fatal Error', '请检查文件是否存在!')  # "Please check that the file exists!"
        return
    lst = {'orig': [], 'modified': [], 'except': []}
    n = 0
    while True:
        try:
            lst['orig'].append(names.cell(n + 1, 0).value)
            n += 1
        except Exception:
            # Stop at the first row that does not exist
            break
    lst['modified'] = lst['orig']
    with open('nameData', 'w') as f:
        json.dump(lst, fp=f)
    status = True
    statusLabel.config(text='已导入姓名表', fg='black')  # "Name list imported"
    messagebox.showinfo(title='Info', message='导入成功!')  # "Import successful!"

def main():
    # Continuously update the tweets.csv file
    while True:
        # Dump the database into the csv file
        database = dataset.connect("sqlite:///tweets.db")
        result = database["tweets"].all()
        # tweets.db can not be empty at this point!
        freeze(result, format='csv', filename="tweets.csv")
        # Analyze each line in the csv file
        csvReader()
        avg_city_scores = {}
        for key, value in city_scores.items():
            avg_city_scores[key] = sum(value) / float(len(value))
        print("###########################################")
        print(avg_city_scores)
        city_scores.clear()
        # Erase the database tweets table
        table = database["tweets"]
        table.delete()
        try:
            os.remove("tweets.csv")  # os has no delete(); use os.remove()
        except OSError:
            pass
        # Wait thirty seconds, then redo the above
        time.sleep(30)

def filename(formats):
    tmp_file = tempfile.NamedTemporaryFile(delete=False, suffix=f".{formats}")
    yield tmp_file.name
    try:
        os.remove(tmp_file.name)  # os has no delete(); use os.remove()
    except OSError:
        pass

def tearDown(self):
    """ Method to reset test and remove file.json """
    try:
        delete("file.json")
    except:
        pass

def destroy(self):
    """Destroy the session"""
    if not self.started:
        raise NotStarted("Session must be started")
    os.remove(self.path)  # os has no delete(); use os.remove()
    if self._flock:
        self._flock.release()
    self.started = False

def setUp(self):
    """ Method to init test and remove file.json """
    try:
        delete("file.json")
    except:
        pass

def del_data(self):
    if self.show_dis():
        try:
            delete("data.json")
        except Exception:
            pass
        self.start()
    return

def dedup():
    texts = set()
    for filename in os.listdir(data_path):
        with open(data_path + filename) as f:
            text = f.read()
        if text not in texts:
            texts.add(text)
        else:
            os.remove(data_path + filename)  # os has no delete(); use os.remove()

def process_xml_file(file_name, voice, speedup, play_program):
    if speedup == 1:
        speedup = None
    coding = VOICE_CODINGS.get(voice) or 'iso-8859-1'
    _, xml_temp_file = tempfile.mkstemp('.xml')
    try:
        # Recode the XML file: in Python 3 the re-encoding is handled by the file objects,
        # reading as UTF-8 and writing the temp file with the voice's own encoding.
        with open(file_name, encoding='utf-8') as infile, \
                open(xml_temp_file, 'w', encoding=coding) as outfile:
            while True:
                data = infile.read()
                if not data:
                    break
                outfile.write(data)
        # Synthesize
        wav_file = file_name[:-3] + 'wav'
        if speedup:
            _, wav_temp_file = tempfile.mkstemp('.wav')
        else:
            wav_temp_file = wav_file
        try:
            cmd = "text2wave -eval '(%s)' -mode singing '%s' -o '%s'" % (voice, xml_temp_file, wav_temp_file)
            print(cmd)
            result = os.system(cmd)
            if result:
                sys.stdout.write("Festival processing failed.\n")
                return
            if speedup:
                result = os.system("sox '%s' '%s' speed '%f'" % (wav_temp_file, wav_file, speedup))
                if result:
                    sys.stdout.write("Festival processing failed.\n")
                    return
        finally:
            if speedup:
                try:
                    os.remove(wav_temp_file)  # os has no delete(); use os.remove()
                except OSError:
                    pass
        sys.stdout.write("%s created.\n" % (wav_file,))
        # Play the result
        if play_program:
            os.system("%s '%s' >/dev/null" % (play_program, wav_file))
    finally:
        try:
            os.remove(xml_temp_file)  # os has no delete(); use os.remove()
        except OSError:
            pass

def quit():
    global sockfile
    try:
        print('stopping memory scanner')
        client.sendall('QUIT')
        client.close()
    except Exception:
        pass
    notify('Flarelyzer', 'Closed!')
    delete(sockfile)
    print('--Notification agent closed--')
    exit(0)

def dumper():
    # Dump the database into the csv file
    database = dataset.connect("sqlite:///tweets.db")
    result = database["tweets"].all()
    # tweets.db can not be empty at this point!
    freeze(result, format='csv', filename="tweets.csv")
    # Erase the database tweets table
    table = database["tweets"]
    table.delete()
    # Analyze each line in the csv file
    csvReader()
    avg_city_scores = {}
    for key, value in city_scores.items():
        avg_city_scores[key] = sum(value) / float(len(value))
    print("###########################################")
    print(avg_city_scores)
    scores = ""
    for city in cities:
        if city in avg_city_scores:
            if avg_city_scores[city] < -0.5:
                s = 0
            elif -0.5 < avg_city_scores[city] < 0:
                s = 1
            elif avg_city_scores[city] == 0:
                s = 2
            elif 0 < avg_city_scores[city] < 0.5:
                s = 3
            else:
                s = 4
            scores += str(s)
        else:
            scores += '2'
    print("updated scores.txt")
    with open('scores.txt', 'w') as file:
        file.write(scores)
    city_scores.clear()
    # Delete the csv dump; the original called os.remove() and then a second, redundant
    # os.delete() on the same path, which does not exist in the os module.
    try:
        os.remove("tweets.csv")
    except OSError:
        pass

# Python 2 variant of process_xml_file above; kept in Python 2 because its codecs-based
# recode operates on byte strings.
def process_xml_file(file_name, voice, speedup, play_program):
    if speedup == 1:
        speedup = None
    coding = VOICE_CODINGS.get(voice) or 'iso-8859-1'
    _, xml_temp_file = tempfile.mkstemp('.xml')
    try:
        # Recode the XML file from UTF-8 to the voice's encoding
        recodep = (coding != 'utf-8')
        if recodep:
            decode = codecs.getdecoder('utf-8')
            encode = codecs.getencoder(coding)
        input = open(file_name)
        output = open(xml_temp_file, 'w')
        while True:
            data = input.read()
            if not data:
                break
            if recodep:
                data = encode(decode(data)[0])[0]
            output.write(data)
        input.close()
        output.close()
        # Synthesize
        wav_file = file_name[:-3] + 'wav'
        if speedup:
            _, wav_temp_file = tempfile.mkstemp('.wav')
        else:
            wav_temp_file = wav_file
        try:
            print "text2wave -eval '(%s)' -mode singing '%s' -o '%s'" % (voice, xml_temp_file, wav_temp_file)
            result = os.system("text2wave -eval '(%s)' -mode singing '%s' -o '%s'" % (voice, xml_temp_file, wav_temp_file))
            if result:
                sys.stdout.write("Festival processing failed.\n")
                return
            if speedup:
                result = os.system("sox '%s' '%s' speed '%f'" % (wav_temp_file, wav_file, speedup))
                if result:
                    sys.stdout.write("Festival processing failed.\n")
                    return
        finally:
            if speedup:
                try:
                    os.remove(wav_temp_file)  # os has no delete(); use os.remove()
                except OSError:
                    pass
        sys.stdout.write("%s created.\n" % (wav_file,))
        # Play the result
        if play_program:
            os.system("%s '%s' >/dev/null" % (play_program, wav_file))
    finally:
        try:
            os.remove(xml_temp_file)  # os has no delete(); use os.remove()
        except OSError:
            pass

def Bruce_Schneier(fname):
    """Bruce Schneier's Algorithm (7 passes)

    This method offers a seven pass overwriting algorithm: the first pass
    with all ones, the second pass with all zeroes, and then five more passes
    with a cryptographically secure pseudo-random sequence (Schneier, 1996)."""
    fixedpass(fname, "\x01")
    fixedpass(fname, "\x00")
    for i in range(0, 5):
        randpass(fname)
    os.remove(fname)  # os has no delete(); use os.remove()

def delete_all(self):
    """Delete all files in the CaptureMain.FILE_ROOT directory."""
    num_deleted = 0
    for filename in os.listdir(CaptureMain.FILE_ROOT):
        # os.listdir() returns bare names, so join them with the root directory;
        # os has no delete(), so os.remove() is used
        os.remove(os.path.join(CaptureMain.FILE_ROOT, filename))
        num_deleted += 1
    if Tk1Main.DEBUG:
        print("Deleted %d files" % num_deleted)

def B_HMG_IS5_B(fname):
    """British HMG IS5 - Baseline (1 pass + 1 verification pass)

    This baseline scheme overwrites the data sectors in the storage device
    with zeroes. This wiping method also does a verification pass to ensure
    that the data written is correct."""
    verpass = fixedpass(fname, "\x00")
    if not verify(verpass, fname):
        raise ValueError("Expected values of the file were not found when opening!")
    os.remove(fname)  # os has no delete(); use os.remove()

def R_GOST_P50739_35(fname):
    """Russian GOST P50739-95 (2 passes)

    This Russian standard overwrites the data sectors in the storage device
    with a single pass of zeroes (0x00), followed by another pass of
    pseudo-random numbers."""
    fixedpass(fname, "\x00")
    verpass = randpass(fname)
    if not verify(verpass, fname):
        raise ValueError("Expected values of the file were not found when opening!")
    os.remove(fname)  # os has no delete(); use os.remove()

def stop_redis_server(proc, conf):
    try:
        if proc.poll() is None:
            proc.terminate()
            proc.wait(5)
    except Exception:
        pass
    try:
        os.remove(conf)  # os has no delete(); use os.remove()
    except OSError:
        pass

def ssh(options):
    if os.path.exists('ssh-testfile.txt'):
        os.remove('ssh-testfile.txt')  # os has no delete(); use os.remove()
    try:
        qsh("scp brandon@%s:testfile.txt testfile.txt 2>&1 | tee %s/ssh-results.txt"
            % (options.testing.traceHost, options.testing.traceDir))
    except Exception:
        pass
    if os.path.exists('ssh-testfile.txt'):
        print('SSH SUCCESS')
        os.remove('ssh-testfile.txt')
    else:
        print('SSH FAILURE')

def download_install_java(version, cqs_home=None):
    if cqs_home is None:
        cqs_home = os.path.join(os.path.expanduser("~"), '.cassandra-quickstart')
    jre_root = os.path.join(cqs_home, 'java')
    sys_platform = platform.system() + "_" + platform.architecture()[0]
    try:
        java_pkg = PLATFORM_JAVA[version][sys_platform]
    except KeyError:
        raise AssertionError("Could not find a suitable java download for platform: "
                             "{sys_platform}".format(sys_platform=sys_platform))
    makedirs(jre_root)
    java_home = os.path.join(jre_root, version)
    download_path = java_home + '.zip'
    # Delete any previous installs:
    try:
        shutil.rmtree(java_home)
        os.remove(download_path)  # os has no delete(); use os.remove()
    except OSError:
        pass
    # Download:
    download(java_pkg['url'], download_path, show_progress=True)
    # Check sha256sum of the downloaded java package (hash the raw bytes):
    with open(download_path, 'rb') as f:
        if hashlib.sha256(f.read()).hexdigest() != java_pkg['sha256']:
            raise AssertionError(
                'Java zip file from {download_url} did not have '
                'expected SHA256: {sha}. Not installing.'.format(
                    download_url=java_pkg['url'], sha=java_pkg['sha256']))
    # Extract the java zip file to the home dir:
    with ZipFile(download_path) as z:
        z.extractall(jre_root)
        try:
            java_pkg_dir = [d for d in z.namelist()
                            if d.startswith(version) and '/' in d][0].strip('/')
        except IndexError:
            raise AssertionError('Could not find java directory name inside package: '
                                 '{download_path}'.format(**locals()))
    extracted_java_dir = os.path.join(jre_root, java_pkg_dir)
    os.rename(extracted_java_dir, java_home)
    # Set executable bits:
    for path in os.listdir(os.path.join(java_home, 'bin')):
        os.chmod(os.path.join(java_home, 'bin', path), 0o755)
    for path in os.listdir(os.path.join(java_home, 'jre', 'bin')):
        os.chmod(os.path.join(java_home, 'jre', 'bin', path), 0o755)
    # Write the sha of the original package we installed:
    with open(os.path.join(java_home, 'SHA.txt'), 'w') as f:
        f.write(java_pkg['sha256'])
    meta = {'java_home': java_home, 'version': version}
    meta.update(java_pkg)
    return meta

def add_files(fnames, uid, ptype, delete=False, sticky=False):
    user = models.User.objects.get(pk=uid)
    for fname in fnames:
        title, tag_val, body = parse(fname)
        print("*** adding %s" % title)
        post = models.Post(title=title, author=user, type=ptype,
                           tag_val=tag_val, content=body, sticky=sticky)
        post.save()
        post.set_tags()
        if delete:
            os.remove(fname)  # os has no delete(); use os.remove()

def main():
    text = putinlist()
    name = extractname(text)
    emails = getemails(text)
    tels = gettels(text)
    writeit(name, tels, emails)
    writeresponse(name)
    os.remove(cardname)  # os has no delete(); use os.remove()
    os.remove('../response.txt')

def cleanup():
    print("INFO: Begin cleanup: (please wait)")
    # OS records: the loop variable is renamed so it no longer shadows the os module;
    # .delete() here is the Django model delete, not a file operation.
    for os_obj in OS.objects.all():
        if os_obj.host_set.all().count() == 0:
            print("INFO: delete os %s" % os_obj)
            os_obj.delete()
    # Packages
    for p in Package.objects.all():
        if p.hosts.count() == 0:
            print("INFO: delete package %s version %s" % (p, p.version))
            p.delete()
    print("INFO: End cleanup")

def US_DoD_5220_22_M(fname):
    """U.S. Standard, DoD 5220.22-M (3 passes)

    The National Industrial Security Program Operating Manual, issued to the
    US Department of Defense, Department of Energy, and other US government
    agencies, specifies standards for the clearing and sanitising of data
    classified as confidential or secret. The U.S. Department of Defense
    specifies a three pass extended character rotation overwrite algorithm in
    the DoD 5220.22-M specification. This Total Privacy shredding method
    conforms to these overwriting standards as well as method 'd' of the
    Cleaning and Sanitation Matrix (DoD, 2006)."""
    randpass(fname)
    randpass(fname)
    randpass(fname)
    os.remove(fname)  # os has no delete(); use os.remove()

def checkout_and_compare(svn_url, svn_rev, git_dir, git_sha, tmp_dir):
    print()
    if not isdir(tmp_dir):
        os.makedirs(tmp_dir)
    svn_tmp = join(tmp_dir, "svn")
    git_tmp = join(tmp_dir, "git")
    # os has no delete(); the checkouts are directories, so shutil.rmtree() is needed
    # (assumes shutil is imported alongside os)
    if isdir(svn_tmp):
        shutil.rmtree(svn_tmp)
    if isdir(git_tmp):
        shutil.rmtree(git_tmp)
    os.system("svn checkout -r %s file://%s %s" % (svn_rev, svn_url, svn_tmp))
    os.system("git clone %s %s" % (git_dir, git_tmp))
    os.system("git --git-dir=%s checkout %s" % (git_tmp, git_sha))
    try:
        print("Comparing %s with %s..." % (svn_tmp, git_tmp))
        if os.system("diff -r -U3 -x .svn -x .git %s %s" % (svn_tmp, git_tmp)) != 0:
            print("Comparison failed for args:", (svn_url, svn_rev, git_dir, git_sha, tmp_dir))
            return False
        else:
            print("Comparison of SVN rev %s with Git sha %s successful" % (svn_rev, git_sha))
            return True
    finally:
        shutil.rmtree(svn_tmp)
        shutil.rmtree(git_tmp)

def G_VSITR(fname):
    """German VSITR (7 passes)

    Similar to the previous method, this German standard overwrites each data
    sector in the storage device with three alternating patterns of zeroes
    and ones, followed by a seventh pass with random characters. However, no
    verification of the overwritten data is needed."""
    fixedpass(fname, "\x00")
    fixedpass(fname, "\x01")
    fixedpass(fname, "\x00")
    fixedpass(fname, "\x01")
    fixedpass(fname, "\x00")
    fixedpass(fname, "\x01")
    randpass(fname)  # no verification pass is required, so the return value is unused
    os.remove(fname)  # os has no delete(); use os.remove()

def delete_same_rows(file_name):
    uniques = []
    with open(file_name, 'r') as f, open(f'tmp_{file_name}', 'a') as f2:
        for row in f:
            splited = row.split()
            number1, sign, number2, repeat = splited
            unique = f'{number1} {sign} {number2}'
            if unique not in uniques:
                uniques.append(unique)
                # Fixed typo (`inique`) and re-added the newline stripped by split()
                f2.write(f'{unique} {repeat}\n')
    os.remove(file_name)  # os has no delete(); use os.remove()
    os.rename(f'tmp_{file_name}', file_name)

async def deleteMap(self, ctx, *args):
    global mapStatus
    global maps
    mapID = int(args[0])
    await self.bot.say("Removing map ID {0}: {1}.".format(mapID, maps[mapID]['name']))
    os.remove("maps/combat/" + maps[mapID]['name'])  # os has no delete(); use os.remove()
    if mapStatus['currentCombat'] == mapID:
        mapStatus['currentCombat'] = -1
        mapStatus['pogLocations'] = []
    elif mapStatus['currentCombat'] > mapID:
        mapStatus['currentCombat'] -= 1
    maps.pop(mapID)
    saveMaps()

def encrypt(filename, recipient):
    gpgfname = filename + '.gpg'
    # --yes allows overwriting an existing output file
    cmd = ('gpg --batch --encrypt="' + filename + '" --recipient="' + recipient +
           '" --output="' + gpgfname + '" --yes')
    print cmd
    p = Popen(split(cmd), stdin=PIPE, stdout=PIPE)
    p.stdin.write(open(filename).read())
    p.stdin.close()
    if p.wait() == 0:
        os.remove(filename)  # os has no delete(); use os.remove()
        return True, gpgfname
    else:
        stdout = p.communicate()[0]
        print stdout
        return False, '$ ' + cmd + '\n' + stdout

def print_file(events):
    # Print the events as JSON
    try:
        os.remove(output_file)  # os has no delete(); use os.remove()
    except OSError:
        pass
    try:
        with open(output_file, "w") as out_file:
            out_file.write("[\n")
            for e in events[0:-1]:
                out_file.write(e.str_json() + ",\n")
            out_file.write(events[-1].str_json() + "\n")
            out_file.write("]\n")
    except Exception:
        print("ERROR: Error writing file.")

def safe_copy(src, dst, filename):
    while True:
        md5_orig = gen_md5(src)
        copy2(src, os.path.join(dst, filename))
        md5_after = gen_md5(os.path.join(dst, filename))
        print(md5_orig == md5_after)
        if md5_orig == md5_after:
            with open(os.path.join(dst, filename + '_checksum.md5'), 'w') as writer:
                writer.write(md5_after)
            break
        else:
            os.remove(os.path.join(dst, filename))  # os has no delete(); use os.remove()
            print('Checksums did not match, trying again...')

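# safe_copy() above relies on a gen_md5() helper that is not shown in this section.
# A minimal sketch of such a helper (an assumption, not the original implementation):
import hashlib


def gen_md5(path, chunk_size=1 << 20):
    """Return the hex MD5 digest of the file at `path`, reading it in chunks."""
    digest = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()
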
def B_HMG_IS5_E(fname):
    """British HMG IS5 - Enhanced (3 passes + 1 verification pass)

    This enhanced scheme is a three pass overwriting algorithm. The first pass
    overwrites all the data sectors in the storage device with 0x00, the
    second pass overwrites them again with 0xFF, and the last pass overwrites
    them with pseudo-random numbers. This wiping method also does a
    verification pass after the third pass to ensure that the data
    overwritten are correct."""
    fixedpass(fname, "\x00")
    fixedpass(fname, "\xFF")
    verpass = randpass(fname)
    if not verify(verpass, fname):
        raise ValueError("Expected values of the file were not found when opening!")
    os.remove(fname)  # os has no delete(); use os.remove()

def __call__(self, options):
    """Make .tex file for JOB"""
    ret, jlist, base, jobspecfile, order, jjspec = e4t.cli.select_jobs_from_jobfile(options.jobfile)
    cache = options.cache
    offset = _get_offset(options, 1)
    for section, jobs in jlist.items():
        for j, k in jobs:
            datareq = _cachejobs(j, **k)
            if datareq is None:
                continue
            f = cache.filename(*datareq, offset=offset)
            if f and exists(f):
                print("deleting", j, f, exists(f))
                os.remove(f)  # os has no delete(); use os.remove()

def compare_and_output_image(filename, selected_benchmarks, plotLabels=(None, None), plotSpeedup=False):
    try:
        import pylab as P
    except ImportError:
        print("Pylab not found")
        return 0
    import os
    processor = ResultsProcessor()
    for selected in selected_benchmarks:
        processor.add_dataset(selected[0], selected[1])
    if plotSpeedup:
        if processor.plot_speedup(plotLabels[0], plotLabels[1]) != 0:
            speedup_filename = benchmark_path() + '/' + filename + '.speedup.png'
            try:
                os.remove(speedup_filename)  # os has no delete(); use os.remove()
            except OSError:
                pass
            P.savefig(speedup_filename, format='png')
            P.close()
            return speedup_filename
    if processor.plot_data(plotLabels[0], plotLabels[1]) != 0:
        image_filename = benchmark_path() + '/' + filename + '.png'
        try:
            os.remove(image_filename)  # os has no delete(); use os.remove()
        except OSError:
            pass
        P.savefig(image_filename, format='png')
        P.close()
        return image_filename
    return None

def C_RCMP_TSSIT_OPS_11(fname):
    """Canadian RCMP TSSIT OPS-II (7 passes + 1 verification pass)

    This method is a seven pass overwriting algorithm with three alternating
    patterns of zeroes and ones, followed by a last pass with random
    characters. This wiping method also does a verification pass to ensure
    that the data overwritten in the final pass is correct."""
    fixedpass(fname, "\x00")
    fixedpass(fname, "\x01")
    fixedpass(fname, "\x00")
    fixedpass(fname, "\x01")
    fixedpass(fname, "\x00")
    fixedpass(fname, "\x01")
    verpass = randpass(fname)
    if not verify(verpass, fname):
        raise ValueError("Expected values of the file were not found when opening!")
    os.remove(fname)  # os has no delete(); use os.remove()

def C_RCMP_DSX(fname):
    """Canadian RCMP DSX Method (3 passes + 3 verification passes)

    The DSX method is a three pass overwriting algorithm. The first pass
    overwrites all the data sectors in the storage device with zeroes, the
    second pass overwrites them again with ones, and the third pass
    overwrites them with pseudo-random numbers. After each pass, the values
    overwritten in the data sectors are verified to ensure integrity
    (RCMP, 2003)."""
    verpass = fixedpass(fname, "\x00")
    if not verify(verpass, fname):
        raise ValueError("Pass 1/3 - Expected values of the file were not found when opening!")
    verpass = fixedpass(fname, "\x01")
    if not verify(verpass, fname):
        raise ValueError("Pass 2/3 - Expected values of the file were not found when opening!")
    verpass = randpass(fname)
    if not verify(verpass, fname):
        raise ValueError("Pass 3/3 - Expected values of the file were not found when opening!")
    os.remove(fname)  # os has no delete(); use os.remove()

#!/usr/bin/python3
import os

os.rename("test1.txt", "test2.txt")   # rename a file in the current directory
os.remove("test2.txt")                # os has no delete(); remove it before changing directory
os.mkdir("/tmp/test")                 # create a scratch directory
os.chdir("/tmp/test")                 # move into it
print(os.getcwd())                    # show the current working directory
os.chdir("/")                         # leave the directory before removing it
os.rmdir("/tmp/test")                 # remove the now-empty directory

def tearDown(self):
    """ Clean up after tests """
    if 0:
        # Disabled cleanup: delete intermediate wavefront files and test output files.
        # os has no delete(), and os.remove() does not expand wildcards, so glob is used
        # (assumes glob is imported).
        for path in glob.glob('wavefront_plane*fits') + glob.glob('test*fits'):
            os.remove(path)

fileout.close()

# 4. Create a new directory below the current one and place a file in there
# Note, pay attention to what file was created and where
# Note, some bugs may be os-specific.
newfolder = "tempfolder:2"
try:
    os.mkdir(newfolder)  # mk means "make"
except OSError:
    print("didn't happen")
fileout = open(newfolder + "newfile.txt", 'w')
print(os.getcwd())
fileout.write("am I in the right folder?\n")
fileout.close()
fileout = open(newfolder + os.sep + "newfile.txt", 'r')
print(fileout.read())

# 5. Now delete the directory and the file
# Note, now you have to search for a command...
filestodelete = glob.glob(newfolder + os.sep + "*")
for i in filestodelete:
    os.remove(i)  # os has no delete(); use os.remove()
os.rmdir(newfolder)  # rm means remove