def clean_folders():
    """ Delete all generated files. """
    delete_folder(GENERATED_PATH)
    delete_folder(PATH.format(""))
    print("All clear here")
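# All of the snippets in this collection call a `delete_folder` helper imported from a
# local utils/const module whose implementation is not shown. A minimal sketch of such a
# helper, assuming it should remove a directory tree quietly when it is already missing;
# the `delete_file` flag mirrors one call site below and is an assumption, not the
# original implementation.
import os
import shutil


def delete_folder(path, delete_file=False):
    """Remove a directory tree; optionally accept a single file path as well."""
    if delete_file and os.path.isfile(path):
        os.remove(path)
        return
    # ignore_errors avoids raising when the folder has already been cleaned up
    shutil.rmtree(path, ignore_errors=True)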
def __init__(self, inputDataFileName):
    utl.globalTrace("{0}\t Init Large Scale DataSet\n".format(
        datetime.datetime.now()))
    utl.delete_folder(const.KHC_TemporaryDir)
    self.__cleanWorkingData()
    self.inputDataFileName = inputDataFileName
    utl.globalTrace("{0}\t End Init Large Scale DataSet\n".format(
        datetime.datetime.now()))
def __cleanWorkingDirectory(self):
    """ aim: clean working directory """
    if not const.KHC_KeepWorkingDir:
        utl.globalTrace("{0}\t Clean working directory: \t {1}\n".format(
            datetime.datetime.now(), const.KHC_TemporaryDir))
        utl.delete_folder(const.KHC_TemporaryDir)
        utl.globalTrace("{0}\t End Clean working directory\n".format(
            datetime.datetime.now()))
folder = os.path.dirname(target_file)
hash_folder = os.path.dirname(hash_file)
os.makedirs(folder, exist_ok=True)
os.makedirs(hash_folder, exist_ok=True)
try:
    start_time = time.time()
    ret = derypt_file(data_file, target_file, get_password())
    end_time = time.time()
    if ret.exit == 0:
        print("Decrypt %s ok.... %ss" % (data_file, end_time - start_time))
        # os.remove(data_file)
        sha256sum = get_file_sha256sum(target_file)
        with open(hash_file, "w") as f:
            f.write(str(sha256sum))
    else:
        failed_file.append(data_file)
        print("Decrypt %s failed, output is: %s" % (data_file, ret.output))
except Exception as e:
    print("Decrypt %s failed, exception: %s" % (data_file, e))
    failed_file.append(data_file)

if failed_file:
    print("Failed decrypt file:")
    for f in failed_file:
        print(f)
else:
    delete_folder(data_path)
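# Both the decrypt snippet above and the encrypt snippet further down compare file
# contents via get_file_sha256sum. A minimal sketch of such a helper, assuming it
# returns the hex digest of the file contents; the chunked read is an assumption,
# the original implementation is not shown.
import hashlib


def get_file_sha256sum(path, chunk_size=65536):
    """Return the SHA-256 hex digest of a file, reading it in chunks."""
    digest = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()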
def build_corr_dict(filePath, freqDict, freqCutoff=0.0007, bufferSize=40000,
                    corrNum=50, outPath=None):
    """
    Builds dict mapping tokens to the ranked list of corrNum tokens with the
    highest normalized co-occurrence in filePath.
    Args:
        -filePath: Path to the csv file in which the wikipedia articles are stored
        -freqDict: Dictionary of freq tuples for observed tokens
        -freqCutoff: Upper frequency that a token can have and still be analyzed.
        -bufferSize: Number of texts to analyze in RAM at one time. At bufferSize,
            the current tokenDict is saved under TEMP_FOLDER_PATH and deleted from RAM.
        -corrNum: Max number of tokens to include in the ranked corrList of each token.
        -outPath: Path to which to save the final corrDict. All temporary files
            created during the run will be deleted.
    Returns:
        Dictionary mapping each qualifying token to a scored and ranked list of
        corrNum relatedTokens with scores as floats in range (0, 1].
    """
    TEMP_FOLDER_PATH = 'corrDictTablets'

    def corrable(token, freqTuple):
        """ Helper determines if token corr should be taken """
        return False if (freqTuple[0] > freqCutoff) or (token.isdigit()) else True

    # dict mapping tokens with frequency below freqCutoff to empty counters
    emptyTokenDict = {token: Counter()
                      for token, freqTuple in freqDict.items()
                      if corrable(token, freqTuple)}
    print(f'{len(emptyTokenDict)} valid tokens found.')

    # build knowledgeProcessor from just the tokens that will receive corr scores
    knowledgeProcessor = build_knowledgeProcessor(emptyTokenDict.keys())

    def norm_pageTokens(pageTokens, numWords):
        """
        Helper normalizes pageToken Counter() by dividing by token frequency
        and cuts those that are below freqCutoff
        """
        return {token: ((rawCount / numWords) / freqDict[token][0])
                for token, rawCount in pageTokens.items()}

    # create temp folder to hold tablets of tokenDict
    safe_make_folder(TEMP_FOLDER_PATH)

    # iterate over each article in filePath
    curTokenDict = {}
    with open(filePath, 'r') as wikiFile:
        for i, page in enumerate(tqdm(wikiFile)):
            # build counter of token nums on page and norm counts by frequency
            pageTokens = Counter(knowledgeProcessor.extract_keywords(page))
            numWords = len(page.split())
            normedTokens = norm_pageTokens(pageTokens, numWords)
            # update the related tokens of each token on the page with the others
            for token in normedTokens.keys():
                curTokenCounter = normedTokens.copy()
                curTokenVal = curTokenCounter.pop(token)
                curTokenCounter = Counter({otherToken: round(otherVal)
                                           for otherToken, otherVal
                                           in curTokenCounter.items()})
                if token in curTokenDict:
                    curTokenDict[token].update(curTokenCounter)
                else:
                    curTokenDict.update({token: curTokenCounter})
            # save to temp folder if i is at buffer size
            if (i % bufferSize == 0) and (i > 0):
                # clean empty tokens from curTokenDict
                cleanTokenDict = {token: counts
                                  for token, counts in curTokenDict.items()
                                  if counts}
                # save cleaned token dict in temp folder and delete from RAM
                save(cleanTokenDict, f'{TEMP_FOLDER_PATH}/tokenDict{i}.sav')
                del cleanTokenDict
                # reinitialize curTokenDict
                curTokenDict = {}

    # delete some big objects we won't need, to conserve RAM
    del knowledgeProcessor
    del freqDict

    # use last, unsaved curTokenDict as accumulator to fold saved tokenDicts
    print('Folding tokenDict')
    for file in tqdm(os.listdir(TEMP_FOLDER_PATH)):
        try:
            loadedTokenDict = load(f'{TEMP_FOLDER_PATH}/{file}')
            for token, tokenCounter in loadedTokenDict.items():
                if token in curTokenDict:
                    curTokenDict[token].update(tokenCounter)
                else:
                    curTokenDict.update({token: tokenCounter})
            del loadedTokenDict
        except Exception as e:
            print(f'ERROR: {e}')

    def score_to_fraction(tokenTuple, scoreSum):
        """
        Helper converts tokenTuples with rawScores to tokenTuples with scores
        as fractions of topTokens
        """
        return (round((tokenTuple[0] / scoreSum), ndigits=3), tokenTuple[1])

    # build corrDict of top corrNum tokens for each token in tokenDict
    print('Building topTokens')
    corrDict = {}
    for token, counter in tqdm(curTokenDict.items()):
        corrList = [(score, otherToken) for otherToken, score in counter.items()]
        if corrList != []:
            corrList.sort(reverse=True)
            topTokens = corrList[:corrNum]
            # take sum of scores across top 50 tokens to normalize top scores
            scoreSum = sum([tokenTuple[0] for tokenTuple in corrList[:50]])
            fraction_lambda = lambda tokenTuple: score_to_fraction(tokenTuple, scoreSum)
            topTokens = list(map(fraction_lambda, topTokens))
            corrDict.update({token: topTokens})

    # delete the temporary folder and emptyTokenDict
    delete_folder(TEMP_FOLDER_PATH)
    del emptyTokenDict

    # save corrDict if prompted
    if outPath:
        save(corrDict, outPath)
    return corrDict
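# A hypothetical invocation of build_corr_dict, assuming freqDict maps each token to a
# tuple whose first element is its relative corpus frequency (the only element the
# helpers above read); the file names and tokens below are placeholders, not taken
# from the original code.
freqDict = {'piano': (0.00002, 12), 'sonata': (0.00001, 7), 'the': (0.05, 40000)}
corrDict = build_corr_dict('wiki_articles.csv', freqDict,
                           freqCutoff=0.0007, bufferSize=40000,
                           corrNum=50, outPath='corrDict.sav')
# 'the' is filtered out (frequency above freqCutoff); each remaining token maps to a
# ranked list of (score, relatedToken) tuples.
print(corrDict.get('piano', [])[:5])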
def create_bkp_files(self, databases, config):
    msg = "Pulling databases"
    self.pk_log_row = self.db.insert(
        self.config['db_name_log_record'],
        {
            'backup_id': self.pk_row,
            'log': msg,
            'status': 1,
            'log_datetime': 'now()'
        }
    )
    bkp_context_success = []
    bkp_context_error = []
    db_to_pass = '******'.join(databases).replace(' ', '').replace('\n', '')
    query = u"UPDATE {0} SET databases_to_pass='******' WHERE id={2}".format(
        self.config['db_name_record'],
        db_to_pass,
        self.pk_row
    )
    self.db.query(query)
    for database in databases:
        db_name = clear_name(database)
        if db_name is not None and db_name not in config['DB_IGNORED']:
            self.create_folder(config['local_destiny_folder'])
            file_name = \
                db_name + "_bkp_" + time.strftime('%d_%m_%Y') + '.sql'
            path = os.path.join(self.bkp_folder_path, file_name)
            bkp = subprocess.call(
                self.commands['bkp_error'].format(
                    config['pg_user'], db_name, path
                ),
                shell=True
            )
            if bkp != 0:
                bkp = subprocess.call(
                    self.commands['bkp'].format(
                        config['host_machine'],
                        config['port'],
                        config['pg_user'],
                        path,
                        db_name
                    ),
                    shell=True
                )
                if bkp != 0:
                    bkp_context_error.append(db_name)
                else:
                    bkp_context_success.append(db_name)
            else:
                bkp_context_success.append(db_name)
    try:
        zip_folder(self.bkp_folder_path)
        delete_folder(self.bkp_folder_path)
    except Exception as err:
        self.treat_exception(err)
    self.zip_folder_path = self.bkp_folder_path + '.zip'
    msg = "Databases backup: {0}".format(','.join(bkp_context_success))
    query = u"UPDATE {0} SET databases_passed='{1}' WHERE id={2}".format(
        self.config['db_name_record'],
        ','.join(bkp_context_success),
        self.pk_row
    )
    self.db.query(query)
    self.steps_done.append(True)
    self.db.update(
        self.config['db_name_log_record'],
        {
            'id': self.pk_log_row,
            'status': 2,
            'log': msg
        }
    )
    self.db.update(
        self.config['db_name_record'],
        {
            'id': self.pk_row,
            'status': 2,
            'percents_completed': self.count_percentage(),
            'finish_backup_datetime': 'NULL'
        }
    )
    self.email_context_success = self.email_context_success \
        + "- {0}\n".format(msg)
    if bkp_context_error != []:
        msg = "No databases backup: {0}".format(','.join(bkp_context_error))
        self.db.update(
            self.config['db_name_log_record'],
            {
                'id': self.pk_log_row,
                'status': 3,
                'log': msg
            }
        )
        self.email_context_error = "- {0}\n".format(msg)
def main():
    parser = argparse.ArgumentParser(description='testing for recurrent neural networks')
    parser.add_argument('--model', dest='modelName',
                        choices=['mnist', 'fashion_mnist', 'sentiment', 'ucf101'],
                        default='fashion_mnist')
    parser.add_argument('--TestCaseNum', dest='TestCaseNum', default='10000')
    parser.add_argument('--Mutation', dest='Mutation',
                        choices=['random', 'genetic'], default='random')
    parser.add_argument('--CoverageStop', dest='CoverageStop', default='0.9')
    parser.add_argument('--threshold_SC', dest='threshold_SC', default='0.6')
    parser.add_argument('--threshold_BC', dest='threshold_BC', default='0.7')
    parser.add_argument('--symbols_TC', dest='symbols_TC', default='3')
    parser.add_argument('--seq', dest='seq', default='[400,499]')
    parser.add_argument('--mode', dest='mode', choices=['train', 'test'], default='test')
    parser.add_argument('--outputs', '--outdir', '-o', dest='outdir',
                        default='testRNN_output/', help='')
    parser.add_argument('--dataset',
                        help='Test dataset file (in numpy persistent data format---for UCF101 only)',
                        metavar='NP(Z)')
    args = parser.parse_args()

    # seq defaults per model:
    #   mnist     [4,24]
    #   sentiment [400,499]
    #   lipo      [60,79]
    #   ucf101    [0,10]
    modelName = args.modelName
    mode = args.mode
    outdir = args.outdir
    dataset = args.dataset
    threshold_SC = args.threshold_SC
    threshold_BC = args.threshold_BC
    symbols_TC = args.symbols_TC
    seq = re.findall(r"\d+\.?\d*", args.seq)
    Mutation = args.Mutation
    CoverageStop = args.CoverageStop
    TestCaseNum = args.TestCaseNum

    if dataset is not None and \
            (not os.path.exists(dataset) or not os.access(dataset, os.R_OK)):
        sys.exit(f'Unreadable dataset file `{dataset}\'')

    r = None
    if mode != 'train':
        # record time
        r = record(os.path.join(outdir, "record.txt"), time.time())
        # reset output folder:
        delete_folder(r.subdir('adv_output'))

    if modelName == 'sentiment':
        if mode == 'train':
            sentimentTrainModel()
        else:
            sentimentGenerateTestSuite(r, threshold_SC, threshold_BC, symbols_TC,
                                       seq, TestCaseNum, Mutation, CoverageStop)
    elif modelName in ('mnist', 'fashion_mnist'):
        if mode == 'train':
            mnist_lstm_train(modelName)
        elif mode == 'backdoor':
            mnist_lstm_backdoor_test(r, threshold_SC, threshold_BC, symbols_TC,
                                     seq, TestCaseNum, Mutation, modelName)
        else:
            mnist_lstm_adv_test(r, threshold_SC, threshold_BC, symbols_TC,
                                seq, TestCaseNum, Mutation, modelName)
    elif modelName == 'ucf101':
        if mode == 'train':
            vgg16_lstm_train()
        else:
            vgg16_lstm_test(r, dataset, threshold_SC, threshold_BC, symbols_TC,
                            seq, TestCaseNum, Mutation, CoverageStop)
    else:
        print("Please specify a model from {sentiment, mnist, fashion_mnist, ucf101}")

    if r is not None:
        r.close()
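# Example command line for the argument parser above; the script name is a placeholder,
# the flags and defaults come from the add_argument calls in main():
#
#   python main.py --model fashion_mnist --mode test --TestCaseNum 10000 \
#       --Mutation genetic --threshold_SC 0.6 --threshold_BC 0.7 \
#       --symbols_TC 3 --seq "[4,24]" --outputs testRNN_output/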
import os

from utils import delete_folder

targets = ["taichung_taiwan", "taoyuan_taiwan", "kaohsiung_taiwan"]

for target_app_id in targets:
    delete_folder('working/%s' % target_app_id)
    os.system('python3 make_app.py %s' % target_app_id)
def main():
    parser = argparse.ArgumentParser(description='testing for recurrent neural networks')
    parser.add_argument('--model', dest='modelName', default='lipo', help='')
    parser.add_argument('--TestCaseNum', dest='TestCaseNum', default='2000', help='')
    parser.add_argument('--TargMetri', dest='TargMetri', default='None', help='')
    parser.add_argument('--CoverageStop', dest='CoverageStop', default='0.9', help='')
    parser.add_argument('--threshold_CC', dest='threshold_CC', default='6', help='')
    parser.add_argument('--threshold_GC', dest='threshold_GC', default='0.78', help='')
    parser.add_argument('--symbols_SQ', dest='symbols_SQ', default='2', help='')
    parser.add_argument('--seq', dest='seq', default='[70,74]', help='')
    parser.add_argument('--mode', dest='mode', default='test', help='')
    parser.add_argument('--minimalTest', dest='minimalTest', default='0', help='')
    parser.add_argument('--output', dest='filename', default='./log_folder/record.txt', help='')
    args = parser.parse_args()

    modelName = args.modelName
    mode = args.mode
    filename = args.filename
    threshold_CC = args.threshold_CC
    threshold_MC = args.threshold_GC
    symbols_SQ = args.symbols_SQ
    seq = re.findall(r"\d+\.?\d*", args.seq)
    TargMetri = args.TargMetri
    CoverageStop = args.CoverageStop
    TestCaseNum = args.TestCaseNum
    minimalTest = args.minimalTest

    # reset output folders
    if minimalTest == '0':
        delete_folder("minimal_nc")
        delete_folder("minimal_cc")
        delete_folder("minimal_mc")
        delete_folder("minimal_sqp")
        delete_folder("minimal_sqn")
        mkdir("adv_output")
        mkdir("output")
    else:
        delete_folder("adv_output")
        delete_folder("output")
        mkdir("minimal_nc")
        mkdir("minimal_cc")
        mkdir("minimal_mc")
        mkdir("minimal_sqp")
        mkdir("minimal_sqn")

    # record time
    r = record(filename, time.time())

    if modelName == 'sentiment':
        if mode == 'train':
            sentimentTrainModel()
        else:
            sentimentGenerateTestSuite(r, threshold_CC, threshold_MC, symbols_SQ,
                                       seq, TestCaseNum, minimalTest, TargMetri,
                                       CoverageStop)
    elif modelName == 'mnist':
        if mode == 'train':
            mnist_lstm_train()
        else:
            mnist_lstm_test(r, threshold_CC, threshold_MC, symbols_SQ, seq,
                            TestCaseNum, minimalTest, TargMetri, CoverageStop)
    elif modelName == 'lipo':
        if mode == 'train':
            lipo_lstm_train()
        else:
            lipo_lstm_test(r, threshold_CC, threshold_MC, symbols_SQ, seq,
                           TestCaseNum, minimalTest, TargMetri, CoverageStop)
    elif modelName == 'ts':
        if mode == 'train':
            ts_lstm_train()
        else:
            ts_lstm_test(r, threshold_CC, threshold_MC, symbols_SQ, seq,
                         TestCaseNum, minimalTest, TargMetri, CoverageStop)
    else:
        print("Please specify a model from {sentiment, mnist, lipo, ts}")

    r.close()
def delete_cache(self):
    utils.delete_folder(utils.user_file_cache)
def delete_folder(self, item):
    utils.delete_folder('%s/%s/' % (dir_name, item.folder))
    wx.CallAfter(self.when_deleted, item)
def update_item(self, item, dispose_dia=True):
    post_ga_event('update_item', 'artifact_%s' % item.name)
    try:
        if item.create_delta:
            for deltaitem in item.deltas:
                if not utils.check_if_already_updated_with_delta(dir_name, item.name, deltaitem.version):
                    log_dia_info('Updating file %s' % deltaitem.file)
                    retries = 0
                    nof_retries = 3
                    while retries < nof_retries:
                        utils.get_file(utils.resolve_delta_dir(item), utils.get_storage_location(deltaitem.file), deltaitem.file, item.name)
                        dia_tick()
                        if was_dia_cancelled():
                            log_dia_info('Cancelling...')
                            break
                        if item.dynamic_import:
                            utils.delete_folder(utils.user_file_cache_dyn)
                            utils.create_dir(utils.user_file_cache_dyn)
                            try:
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break
                                utils.extract_file(utils.get_storage_location(deltaitem.file), utils.user_file_cache_dyn, item, True)
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break
                                dynamic_import.move_in_place(utils.user_file_cache + 'dyn/%s/' % item.folder, '%s/%s/' % (dir_name, item.folder))
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    update_tree_view(self)
                                    break
                                utils.update_db(dir_name, item.name, deltaitem.version)
                                utils.delete_folder(utils.user_file_cache + 'dyn/%s/' % item.folder)
                                item.needs_update = False
                                update_tree_view(self)
                                dia_tick()
                                break
                            except FtpOverloadedException:
                                post_ga_event('update_item_ftp_overload', 'artifact_%s' % deltaitem.file)
                                raise
                            except InvalidZipFileException as e:
                                post_ga_event('update_item_invalid_zip', 'artifact_%s' % deltaitem.file)
                                utils.delete_file(utils.get_storage_location(deltaitem.file))
                                if retries == nof_retries - 1:
                                    raise
                        else:
                            log_info('Unpacking %s into %s' % (item.name, dir_name))
                            try:
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break
                                utils.extract_file(utils.get_storage_location(deltaitem.file), dir_name, item, False)
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    update_tree_view(self)
                                    break
                                utils.update_db(dir_name, item.name, deltaitem.version)
                                target_folder = dir_name + '/' + item.folder
                                log_dia_info('Updated %s with deltafile %s at location %s' % (item.name, deltaitem.file, target_folder))
                                item.needs_update = False
                                update_tree_view(self)
                                dia_tick()
                                if utils.get_boolean_user_setting(delete_files_after_install):
                                    utils.delete_file(utils.get_storage_location(deltaitem.file))
                                break
                            except FtpOverloadedException:
                                post_ga_event('update_item_ftp_overload', 'artifact_%s' % deltaitem.file)
                                raise
                            except InvalidZipFileException:
                                post_ga_event('update_item_invalid_zip', 'artifact_%s' % deltaitem.file)
                                log_dia_info('Invalid delta zipfile, delete and retry')
                                utils.delete_file(utils.get_storage_location(deltaitem.file))
                                if retries == nof_retries - 1:
                                    raise
                        retries += 1
        if dispose_dia:
            wx.CallAfter(dispose_dialog)
    except FtpOverloadedException:
        if dispose_dia:
            wx.CallAfter(dispose_dialog_fail, 'Too many users right now, please try again later')
    except InvalidZipFileException as e:
        if dispose_dia:
            wx.CallAfter(dispose_dialog_fail, e.message)
    except:
        if dispose_dia:
            wx.CallAfter(dispose_dialog_fail, 'Unknown error %s:' % sys.exc_info()[0])
    finally:
        update_tree_view(self)
def install_item(self, current_item, dispose_dia=True):
    post_ga_event('install_item', 'artifact_%s' % current_item.name)
    folder = '%s/%s/' % (dir_name, current_item.folder)
    if not utils.check_if_already_updated_with_delta(dir_name, current_item.name, current_item.version) or not os.path.exists(folder):
        try:
            log_dia_info('Getting full entry %s' % current_item.name)
            retries = 0
            nof_retries = 3
            while retries < nof_retries:
                try:
                    utils.get_file(current_item.basedir, utils.get_storage_location(current_item.file), current_item.file, current_item.name)
                    if os.path.exists(folder):
                        log_dia_info('Deleting current folder %s (this may take a while, please be patient)' % folder)
                        utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))
                    if was_dia_cancelled():
                        log_dia_info('Download for %s was cancelled' % current_item.name)
                        if dispose_dia:
                            wx.CallAfter(dispose_dialog)
                        return
                    log_dia_info('Downloaded %s' % current_item.name)
                    log_dia_info('Extracting files %s' % dir_name)
                    dia_tick()
                    utils.extract_file(utils.get_storage_location(current_item.file), dir_name, current_item, True)
                    dia_tick()
                    break
                except InvalidZipFileException:
                    post_ga_event('install_item_invalid_zip', 'artifact_%s' % current_item.name)
                    log_info('Invalid zipfile, delete and retry')
                    utils.delete_file(utils.get_storage_location(current_item.file))
                    if retries == nof_retries - 1:
                        raise
                retries += 1
            if utils.get_boolean_user_setting(delete_files_after_install):
                utils.delete_file(utils.get_storage_location(current_item.file))
            if was_dia_cancelled():
                if dispose_dia:
                    wx.CallAfter(dispose_dialog)
                return
            log_dia_info('Update db')
            utils.update_db(dir_name, current_item.name, current_item.version)
            current_item.not_installed = False
            log_dia_info('Done extracting full entry %s at location %s' % (current_item.name, dir_name))
            log_dia_info('Install done')
            if dispose_dia:
                wx.CallAfter(dispose_dialog)
        except InvalidZipFileException as e:
            utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))
            log_dia_info('Install failed due to error during fetch or unzip')
            if dispose_dia:
                wx.CallAfter(dispose_dialog_fail, e.message)
        except FtpOverloadedException as e:
            log_dia_info('Too many users, please try again in a while')
            if dispose_dia:
                wx.CallAfter(dispose_dialog_fail, 'Too many users, please try later')
        except:
            utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))
            log_dia_info('Install failed due to unknown error')
            if dispose_dia:
                wx.CallAfter(dispose_dialog_fail, 'Unknown error %s:' % sys.exc_info()[0])
        finally:
            update_tree_view(self)
    else:
        if dispose_dia:
            wx.CallAfter(dispose_dialog)
    old_sha256sum = f.read()
    if str(old_sha256sum) != str(sha256sum):
        start_time = time.time()
        ret = enrypt_file(data_file, target_file, get_password())
        end_time = time.time()
        if ret.exit == 0:
            print("Encrypt %s ok.... %ss" % (data_file, end_time - start_time))
            os.remove(data_file)
        else:
            failed_file.append(data_file)
            print("Encrypt %s failed, output is: %s" % (data_file, ret.output))
    else:
        print("File %s not changed, skip..." % data_file)
        os.remove(data_file)
except Exception as e:
    print("Encrypt %s failed, exception: %s" % (data_file, e))
    failed_file.append(data_file)

if failed_file:
    print("Failed encrypt file:")
    for f in failed_file:
        print(f)
else:
    delete_folder(local_path)
    delete_folder(hash_sum_path, delete_file=True)

"""
Specific files can be given for encryption and decryption.
"""
elif ".c" in code2_file: utils.delete_file(code2_fio + ".c") if clean_binary: if ".cpp" in code2_file or ".c" in code2_file: utils.delete_file(code2_bin) utils.delete_file(tc2_out) utils.delete_file(ttc_out) utils.delete_file(tresult) if clean_compare: if grp_type == 2: print(created_folders) utils.delete_folder(code2_out) utils.delete_folder(result) elif grp_type == 1: fend = (str(i).zfill(idx_len) if idx_out else "") + ".txt" c2sidx = code2_out.rfind('/') + 1 ressidx = code2_out.rfind('/') + 1 c2fname = code2_out[c2sidx:] resfname = result[ressidx:] for fld in created_folders: utils.delete_file(interm + fld + "/" + c2fname + fend) utils.delete_file(interm + fld + "/" + resfname + fend) if gen_zip: utils.delete_file(interm + interm[:-1] + ".zip") utils.make_zip(interm[:-1])