def _write_pickle(self, filename_stem, results, also_save_txt=True):
    self._progress("Pickling results dictionary.")
    # Use a context manager so the file is closed even if dump() raises.
    with open("%s.pkl" % filename_stem, 'wb') as f:
        cPickle.dump(results, f, cPickle.HIGHEST_PROTOCOL)
    if also_save_txt:
        util.write_file("%s_dict.txt" % filename_stem, str(results))

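# Companion sketch (not from the original source): the .pkl written by
# _write_pickle() above round-trips with cPickle.load.
import cPickle

def _read_pickle(filename_stem):
    with open("%s.pkl" % filename_stem, 'rb') as f:
        return cPickle.load(f)
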
def clean_census():
    in_filename = "../data/census.csv"
    out_filename = "../data/census_clean.csv"
    headers, data = read_file(in_filename)
    # Drop Puerto Rico rows, then swap the State and County columns.
    data = data[data[:, 1] != 'Puerto Rico']
    data[:, [1, 2]] = data[:, [2, 1]]
    write_file(out_filename, headers, data)

def set_new_file_location(self):
    ans = ask_user(
        self,
        "This will let 'B Z M A N' set a new path to your existing company database. "
        + "This means you will be able to open files that you have moved to a new location after saving them.\n\n"
        + "If you want to continue, click 'Ok'. If you want to exit, click 'Cancel'.")
    if ans == QMessageBox.Ok:
        filename = QFileDialog.getOpenFileName(
            self, "Open", self.BZMAN_settings['path'],
            filter="Database Files (*.json)")[0]
        if filename:
            new_path, new_filename = os.path.split(filename)
            new_company_name = ",".join(
                os.path.split(os.path.splitext(filename)[0])[1].split('_')[:-2])
            self.BZMAN_settings['path'] = new_path
            self.BZMAN_settings['database_name'] = new_filename
            self.BZMAN_settings['company'] = new_company_name
            write_file(self.BZMAN_settings, self.ctx.get_settings_file)
            inform_user(
                self,
                "New path to the company database has been set. "
                "You can now open the database using 'Open'")
        else:
            inform_user(self, "Select a valid database file")

def translate(ast, sig, child, device, outfile, translate_only, v):
    """ Translate the AST to the target system. """
    vmsg(v, 'Translating AST...')
    buf = io.StringIO()
    ext = None

    # Create a translator AST walker
    if device.system == SYSTEM_TYPE_XS1:
        walker = TranslateXS1(sig, child, buf)
    elif device.system == SYSTEM_TYPE_MPI:
        walker = TranslateMPI(sig, child, buf)

    walker.walk_program(ast)

    if translate_only:
        outfile = (outfile if outfile != defs.DEFAULT_OUT_FILE
                   else outfile + '.' + device.source_file_ext())
        util.write_file(outfile, buf.getvalue())
        vmsg(v, 'Produced file: ' + outfile)
        raise SystemExit()

    return buf

def smrf(afa_file, pdb_file, options, pmrf_path):
    edge_file, mrf_file = options.edge_file, options.mrf_file
    pmrf_exec = '%s/pmrf' % pmrf_path
    if not os.path.exists(pmrf_exec):
        print 'Cannot find the PMRF executable in the directory %s.' % (pmrf_path)
        sys.exit(1)

    ## Determine graph structure
    edge_list = build_edge(afa_file, pdb_file)
    write_file('\n'.join(['%s\t%s' % (i, j) for i, j in edge_list]), edge_file)
    message('MRF edge is determined.')

    ## Build MRF model
    cmd = '%s build %s --edge %s -o %s' % (pmrf_exec, afa_file, edge_file, mrf_file)
    subprocess.check_call(cmd.split())
    message('MRF model is parameterized.')

    ## Estimate positional coevolution
    cmd = '%s stat %s --mode pos' % (pmrf_exec, mrf_file)
    fp = sys.stdout if options.score_file1 == "stdout" else open(options.score_file1, 'w')
    p = subprocess.Popen(shlex.split(cmd), stdout=fp)
    p.wait()
    message('Positional coevolution scores are estimated.')

    ## Estimate pairwise coevolution
    if options.score_file2:
        cmd = '%s stat %s --mode pair' % (pmrf_exec, mrf_file)
        p = subprocess.Popen(shlex.split(cmd), stdout=open(options.score_file2, 'w'))
        p.wait()
        message('Pairwise coevolution scores are estimated.')

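# Hypothetical invocation of smrf() above; Options stands in for the parsed
# command-line options object the original passes around, and all paths are
# illustrative.
class Options(object):
    edge_file = 'out.edge'
    mrf_file = 'out.mrf'
    score_file1 = 'stdout'      # positional scores to stdout
    score_file2 = 'pair.score'  # pairwise scores; empty/None skips this step

# smrf('alignment.afa', 'structure.pdb', Options(), '/opt/pmrf')
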
def save_user_topics(path):
    output = []
    for u in db.get_user_list():
        person_topics = similarity.get_user_topics(u)
        output.append(person_topics)
        print u
    util.write_file(output, path)

def _write_attributes(self, key, attrdict):
    for attr, new_value in sorted(attrdict.items()):
        attrfile = self._attr_file(key, attr)
        is_new_attribute = not os.path.exists(attrfile)
        util.write_file(attrfile, new_value)
        if is_new_attribute:
            self._hg(["add", attrfile])

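# Sketch (an assumption, not the original class) of the one-file-per-attribute
# layout that _write_attributes() above relies on via self._attr_file():
import os

def _attr_file_sketch(base_dir, key, attr):
    # e.g. <base_dir>/<key>/<attr> holds that attribute's current value
    return os.path.join(base_dir, key, attr)
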
def create_profile(auth_info, **kwargs):
    if auth_info['code'] == 1:
        return json.dumps(auth_info)
    username = auth_info['username']
    try:
        data = request.get_json()['params']
        # NOTE: eval() on request data is unsafe; kept as in the original.
        para = eval(str(data["partion"]))
        name = data['profile']
        filename = str(name)
        util.copy_file(filename)
        util.write_file(filename, para)
        util.replace_url(filename, str(data['url']))
        ret = profile_create(app.config['cobbler_url'],
                             app.config['cobbler_user'],
                             app.config['cobbler_password'],
                             filename, str(data['distro']),
                             '/var/lib/cobbler/kickstarts/%s' % filename)
        print ret
        if str(ret['result']) == "True":
            data = {"distro": str(data['distro']), "os": filename,
                    "ks": '/var/lib/cobbler/kickstarts/%s' % filename}
            app.config['cursor'].execute_insert_sql('profile', data)
            util.write_log('api').info(username, "create cobbler profile %s success" % filename)
        else:
            util.write_log('api').info(username, "create cobbler profile %s failed" % data['ip'])
        return json.dumps({'code': 0, 'result': 'create %s success' % filename})
    except:
        util.write_log('api').error('create cobbler error:%s' % traceback.format_exc())
        return json.dumps({'code': 1, 'errmsg': 'create cobbler failed'})

def update_database(self):
    self._collect_widget_data()
    data_pkl = self._replace_data()
    if data_pkl is not None:
        write_file(data_pkl, self.database_filename)
        self.statusBar.showMessage("Edited & Saved!", 2000)
        self.reload()

def save_user_tags(path, top_tag=50):
    output = {}
    for u in db.get_user_list():
        # Use the top_tag parameter rather than a hard-coded 50.
        person_tags = tags.generate_tag(u).user_tags(top_tag)
        output[u] = person_tags
        print u
    util.write_file(output, path)

def test():
    action = util.parse_action(sys.argv[1])
    if not action == ALLOW:
        quit(1)
    util.install_trap()
    f = SyscallFilter(TRAP)
    # NOTE: additional syscalls required for python
    f.add_rule(ALLOW, "stat")
    f.add_rule(ALLOW, "fstat")
    f.add_rule(ALLOW, "open")
    f.add_rule(ALLOW, "openat")
    f.add_rule(ALLOW, "mmap")
    f.add_rule(ALLOW, "munmap")
    f.add_rule(ALLOW, "read")
    f.add_rule(ALLOW, "write")
    f.add_rule(ALLOW, "close")
    f.add_rule(ALLOW, "rt_sigaction")
    f.add_rule(ALLOW, "rt_sigreturn")
    f.add_rule(ALLOW, "sigreturn")
    f.add_rule(ALLOW, "sigaltstack")
    f.add_rule(ALLOW, "brk")
    f.add_rule(ALLOW, "exit_group")
    f.load()
    try:
        util.write_file("/dev/null")
    except OSError as ex:
        quit(ex.errno)
    quit(160)

def execute(logdir, outputdir):
    dags = parse_logs(logdir)
    for ahash in dags:
        i = 0
        for appTime, app in dags[ahash]:
            summ = 0
            stages_data = ""
            for idx, stage in enumerate(app):
                sum_s = {}
                data = ""
                for task_id in stage:
                    if not sum_s.has_key(stage[task_id]["method"]):
                        sum_s[stage[task_id]["method"]] = 0
                    sum_s[stage[task_id]["method"]] += 1
                    duration = int(sub_str_datetimes(stage[task_id]["end"],
                                                     stage[task_id]["start"]))
                    data += "%d\n" % duration
                outputFilename = outputdir + "/%d-%d.txt" % (i, idx)
                write_file(outputFilename, data)
                stages_data += '{"name": "S%d", "methods": "%s", "path": "%s"}\n' % (
                    idx, json.dumps(sum_s), outputFilename)
            write_file(outputdir + "/%d-stages.txt" % i, stages_data)
            i += 1

def new_file(self, called_from_tutorial=False):
    self.BZMAN_settings = read_file(self.ctx.get_settings_file)
    if self.BZMAN_settings["path"] == "":
        inform_user(
            self,
            "Welcome to BZMAN! \n\n"
            + "Please select a folder to save your company database.")
        self.folder_path = QFileDialog.getExistingDirectory(
            self, 'Select a folder to save your database')
        if self.folder_path:
            self.BZMAN_settings["path"] = self.folder_path
            self._new_file_logic()
        else:
            inform_user(self, "No folder location selected.")
    else:
        ans = ask_user(
            self,
            "Company database already exists.\n\n"
            + "Do you still want to create a new database?\n\n")
        if ans == QMessageBox.Ok:
            self.BZMAN_settings["path"] = ""
            write_file(self.BZMAN_settings, self.ctx.get_settings_file)
            inform_user(
                self,
                "Your old database will now need to be manually opened using 'Open'\n\n"
                + "You may now create a new database")
            self.new_file()

def test():
    action = util.parse_action(sys.argv[1])
    if not action == ALLOW:
        quit(1)
    util.install_trap()
    f = SyscallFilter(TRAP)
    # NOTE: additional syscalls required for python
    f.add_rule(ALLOW, "stat")
    f.add_rule(ALLOW, "fstat")
    f.add_rule(ALLOW, "open")
    f.add_rule(ALLOW, "openat")
    f.add_rule(ALLOW, "mmap")
    f.add_rule(ALLOW, "munmap")
    f.add_rule(ALLOW, "read")
    f.add_rule(ALLOW, "write")
    f.add_rule(ALLOW, "close")
    f.add_rule(ALLOW, "rt_sigaction")
    f.add_rule(ALLOW, "rt_sigreturn")
    f.add_rule(ALLOW, "exit_group")
    f.load()
    try:
        util.write_file("/dev/null")
    except OSError as ex:
        quit(ex.errno)
    quit(160)

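# The seccomp tests above assume a util.write_file(path) helper whose body
# exercises the whitelisted syscalls. A plausible sketch (an assumption, not
# necessarily the project's actual helper):
def write_file_sketch(path):
    fd = open(path, "w")   # open/openat
    fd.write("testing")    # write
    fd.close()             # close
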
def download(course, item):
    """
    Download announcement JSON.

    :param course: A Course object.
    :param item: {
        "close_time": 2147483647,
        "user_id": 1069689,
        "open_time": 1411654451,
        "title": "Coursera",
        "deleted": 0,
        "email_announcements": "email_sent",
        "section_id": "14",
        "order": "6",
        "item_type": "announcement",
        "__type": "announcement",
        "published": 1,
        "item_id": "39",
        "message": "Hello, everyone.",
        "uid": "announcement39",
        "id": 39,
        "icon": ""
    }
    :return: None.
    """
    path = '{}/announcement/{}.json'
    path = path.format(course.get_folder(), item['item_id'])
    util.make_folder(path, True)
    util.write_json(path, item)

    content = util.read_file(path)
    content = util.remove_coursera_bad_formats(content)
    util.write_file(path, content)

def generate_test_source_files(spec_directory, test_helper_filenames,
                               spec_json, target):
    test_expansion_schema = spec_json['test_expansion_schema']
    specification = spec_json['specification']

    spec_json_js_template = util.get_template('spec_json.js.template')
    util.write_file(
        os.path.join(spec_directory, "generic", "spec_json.js"),
        spec_json_js_template % {'spec_json': json.dumps(spec_json)})
    util.write_file(
        os.path.join(spec_directory, "generic", "debug-output.spec.src.json"),
        json.dumps(spec_json, indent=2, separators=(',', ': ')))

    # Choose a debug/release template depending on the target.
    html_template = "test.%s.html.template" % target

    artifact_order = test_expansion_schema.keys() + ['name']
    artifact_order.remove('expansion')

    # Create list of excluded tests.
    exclusion_dict = {}
    for excluded_pattern in spec_json['excluded_tests']:
        excluded_expansion = expand_pattern(excluded_pattern,
                                            test_expansion_schema)
        for excluded_selection in permute_expansion(excluded_expansion,
                                                    artifact_order):
            excluded_selection['delivery_key'] = spec_json['delivery_key']
            exclusion_dict[dump_test_parameters(excluded_selection)] = True

    for spec in specification:
        # Used to make entries with expansion="override" override preceding
        # entries with the same |selection_path|.
        output_dict = {}
        for expansion_pattern in spec['test_expansion']:
            expansion = expand_pattern(expansion_pattern,
                                       test_expansion_schema)
            for selection in permute_expansion(expansion, artifact_order):
                selection['delivery_key'] = spec_json['delivery_key']
                selection_path = spec_json['selection_pattern'] % selection
                if selection_path in output_dict:
                    if expansion_pattern['expansion'] != 'override':
                        print("Error: %s's expansion is default but overrides %s" %
                              (selection['name'],
                               output_dict[selection_path]['name']))
                        sys.exit(1)
                output_dict[selection_path] = copy.deepcopy(selection)

        for selection_path in output_dict:
            selection = output_dict[selection_path]
            if dump_test_parameters(selection) in exclusion_dict:
                print('Excluding selection:', selection_path)
                continue
            try:
                generate_selection(spec_directory, test_helper_filenames,
                                   spec_json, selection, spec, html_template)
            except util.ShouldSkip:
                continue

def do_emf_carving(volume, carveokdir, carvenokdir):
    deletedFiles, filekeys = carveEMFVolumeJournal(volume)
    print "Journal carving done, trying to extract deleted files"
    n = 0
    for name, vv in deletedFiles:
        for filekey in filekeys.get(vv.data.fileID, []):
            ff = EMFFile(volume, vv.data.dataFork, vv.data.fileID, filekey,
                         deleted=True)
            data = ff.readAllBuffer()
            if isDecryptedCorrectly(data):
                write_file(carveokdir + "%s_%s" % (filekey.encode("hex")[:8],
                                                   name.replace("/", "_")), data)
                n += 1
            else:
                write_file(carvenokdir + "%s_%s" % (filekey.encode("hex")[:8],
                                                    name.replace("/", "_")), data)
        if not filekeys.has_key(vv.data.fileID):
            print "Missing file key for", name
        else:
            del filekeys[vv.data.fileID]
    print "Done, extracted %d files" % n
    if False:
        fks = set(reduce(lambda x, y: x + y, filekeys.values()))
        print "%d file keys left, try carving empty space (slow) ? CTRL-C to exit" % len(fks)
        raw_input()
        carveEMFemptySpace(volume, fks)

def bf_system():
    client = RamdiskToolClient()
    di = client.getDeviceInfos()

    devicedir = di["udid"]
    if os.getcwd().find(devicedir) == -1:
        try:
            os.mkdir(devicedir)
        except:
            pass
        os.chdir(devicedir)

    key835 = di.get("key835").decode("hex")

    systembag = client.getSystemKeyBag()
    kbkeys = systembag["KeyBagKeys"].data
    kb = Keybag.createWithDataSignBlob(kbkeys, key835)

    keybags = di.setdefault("keybags", {})
    kbuuid = kb.uuid.encode("hex")
    print "Keybag UUID :", kbuuid
    if keybags.has_key(kbuuid) and keybags[kbuuid].has_key("passcodeKey"):
        print "We've already seen this keybag"
        passcodeKey = keybags[kbuuid].get("passcodeKey").decode("hex")
        print kb.unlockWithPasscodeKey(passcodeKey)
        kb.printClassKeys()
    else:
        keybags[kbuuid] = {"KeyBagKeys": systembag["KeyBagKeys"]}
        di["KeyBagKeys"] = systembag["KeyBagKeys"]
        di.save()
        print "Enter passcode or leave blank for bruteforce:"
        z = raw_input()
        res = client.getPasscodeKey(systembag["KeyBagKeys"].data, z)
        if kb.unlockWithPasscodeKey(res.get("passcodeKey").decode("hex")):
            print "Passcode \"%s\" OK" % z
            di.update(res)
            keybags[kbuuid].update(res)
            di.save()
            keychain_blob = client.downloadFile("/mnt2/Keychains/keychain-2.db")
            write_file("keychain-2.db", keychain_blob)
            print "Downloaded keychain database, use keychain_tool.py to decrypt secrets"
            return
        if z != "":
            print "Wrong passcode, trying to bruteforce !"
        if checkPasscodeComplexity(client) == 0:
            print "Trying all 4-digits passcodes..."
            bf = client.bruteforceKeyBag(systembag["KeyBagKeys"].data)
            if bf:
                di.update(bf)
                keybags[kbuuid].update(bf)
                print bf
                print kb.unlockWithPasscodeKey(bf.get("passcodeKey").decode("hex"))
                kb.printClassKeys()
                di["classKeys"] = kb.getClearClassKeysDict()
                di.save()
        else:
            print "Complex passcode used !"
            return

    #keychain_blob = client.downloadFile("/private/var/Keychains/keychain-2.db")
    keychain_blob = client.downloadFile("/mnt2/Keychains/keychain-2.db")
    write_file("keychain-2.db", keychain_blob)
    print "Downloaded keychain database, use keychain_tool.py to decrypt secrets"

def combine_census_returns():
    returns_name = "../data/returns_clean.csv"
    census_name = "../data/census_clean.csv"
    returns_headers, returns_data = read_file(returns_name)
    census_headers, census_data = read_file(census_name)

    first_slice = [[i] for i in range(0, 3141)]
    returns_data[first_slice, range(4, 12)] = \
        returns_data[first_slice, range(4, 12)].astype(float).astype(int)

    # Note: 'mean_commute' fixes the original's 'mean_compute' typo.
    combine_headers = ['CountyID', 'State', 'County', 'TotalPop',
                       '2004_dem', '2004_rep', '2004_dem_percent',
                       '2008_dem', '2008_rep', '2008_dem_percent',
                       '2012_dem', '2012_rep', '2012_dem_percent',
                       '2016_dem', '2016_rep', '2016_dem_percent',
                       '2016_turnout', 'male/pop', 'hispanic/pop',
                       'white/pop', 'black/pop', 'native/pop', 'asian/pop',
                       'pacific/pop', 'voting_age_citizens/pop', 'income',
                       'income_per_cap', 'poverty', 'child_poverty',
                       'professional', 'service', 'office', 'construction',
                       'production', 'drive', 'carpool', 'transit', 'walk',
                       'other_transport', 'work_at_home', 'mean_commute',
                       'employed/pop', 'private_work', 'public_work',
                       'self_employed', 'family_work', 'unemployment']
    combine_data = np.empty([len(returns_data), len(combine_headers)],
                            dtype=object)

    for i in range(len(returns_data)):
        assert len(census_data[np.where(census_data[:, 0] == returns_data[i, 1])]) == 1, \
            "County surjection"
        c_data = census_data[np.where(census_data[:, 0] == returns_data[i, 1])][0]
        votes = returns_data[i, range(4, 12)].astype(float).astype(int)
        percent_dem = [float(votes[j]) / (votes[j] + votes[j + 1])
                       for j in range(0, 8, 2)]
        turnout = float(votes[6] + votes[7]) / float(c_data[3])
        male_pop = float(c_data[4]) / float(c_data[3]) * 100
        assert 0 < turnout < 1, "In bounds"

        combine_data[i, range(4)] = census_data[i, range(4)]
        combine_data[i, (4, 5, 7, 8, 10, 11, 13, 14)] = votes
        combine_data[i, (6, 9, 12, 15)] = percent_dem
        combine_data[i, 16] = turnout
        combine_data[i, 17] = male_pop
        combine_data[i, range(18, 24)] = c_data[range(6, 12)]
        combine_data[i, 24] = float(c_data[12]) / float(c_data[3]) * 100
        combine_data[i, (25, 26)] = c_data[[13, 15]]
        combine_data[i, range(27, len(combine_headers))] = \
            c_data[range(17, len(c_data))]
        combine_data[i, 41] = float(c_data[31]) / float(c_data[3]) * 100

    write_file('../data/full_clean_data.csv', combine_headers, combine_data)

def print_header():
    """
    Check whether the output path exists; create it if it does not,
    then append the CSV header line.
    """
    util.mk_dirname(FILE_OUT)
    util.write_file(FILE_OUT, ','.join(KEYS) + '\n', 'a')

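# Hypothetical usage of print_header() above; FILE_OUT and KEYS are module
# constants in the original, shown here with assumed example values.
#
#   FILE_OUT = 'out/stats.csv'
#   KEYS = ['id', 'name', 'score']
#
# print_header()                                    # writes "id,name,score"
# util.write_file(FILE_OUT, '1,alice,0.9\n', 'a')   # append a data row
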
def save_certs_keys(self):
    certs, pkeys = self.get_certs()
    for c in certs:
        filename = c
        certs[c].save_pem(filename + ".crt")
    for k in pkeys:
        filename = k
        write_file(filename + ".key", pkeys[k])

def save_passwords(self):
    passwords = "\n".join(map(render_password, self.get_passwords()))
    inetpasswords = "\n".join(map(render_password, self.get_inet_passwords()))
    print "Writing passwords to keychain.csv"
    write_file("keychain.csv",
               "Passwords;;\n" + passwords
               + "\nInternet passwords;;\n" + inetpasswords)

def get_adjust_image(filepath, f_type='jpg', max_width=1280):
    data = read_file(filepath)
    data = get_adjust_image_data(data, max_width)
    f_uun = genenate_file_key(data) + '.' + f_type
    write_file(f_uun, data)
    return f_uun

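# Hedged sketch building on get_adjust_image() above: a hypothetical helper
# producing a smaller preview variant; the 320px width is illustrative only.
def get_preview_image(filepath):
    # Returns the content-keyed filename the resized image was written under.
    return get_adjust_image(filepath, f_type='jpg', max_width=320)
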
def load_config():
    if not os.path.exists(CONFIG_FILE):
        write_file(DEFAULT_CONFIG, CONFIG_FILE)
        print 'Configuration file is created as %s' % (CONFIG_FILE)
        raise SystemExit
    cfg = ConfigParser.ConfigParser()
    cfg.read(CONFIG_FILE)
    return {'pmrf_path': os.path.expanduser(
        cfg.get('external', 'pmrf_path').split(';')[0])}

def writeCerts(self):
    if not self.certs:
        self.extractCertificates()
    for key, cert in self.certs.items():
        cert_data = cert.as_der()
        cert_sha1 = hashlib.sha1(cert_data).hexdigest()
        write_file("%s_%s.crt" % (key, cert_sha1), cert_data)

def test():
    movie_hot_list = movie_data_profile().hot_movies(rate=4.0, users=40)
    test_list = get_user_list()
    return_list = []
    for u in test_list:
        print u
        return_list.extend(user_alanysis(u))
    util.write_file(return_list, type='csv',
                    path='user_info/whole_users_add_test.csv')

def _write_cache_file(self, file_path: str, content, mode="wb"):
    try:
        file_path = file_path.lstrip("/")
        write_file(content, self.cache_dir.joinpath(file_path), mode)
    except Exception as e:
        logger.exception(e, "Failed to write file: %s", file_path)
        raise ProviderError(e, file_path)
    return True

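# Usage sketch (hypothetical provider instance): lstrip("/") keeps absolute
# paths inside cache_dir, so both calls below write <cache_dir>/a/b.txt.
# provider._write_cache_file("/a/b.txt", b"data")
# provider._write_cache_file("a/b.txt", b"data")
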
def key(self, rowid, filename=""):
    for row in self.get_keys():
        if row["rowid"] == rowid:
            blob = RSA_KEY_DER_to_PEM(row["data"])
            if filename:
                write_file(filename, blob)
            #k = M2Crypto.RSA.load_key_string(blob)
            print blob
            return

def download_stats(self):
    url = self.url + '/data/stats'
    path = self.info_folder + '/stats.html'
    util.download(url, path, self.cookie_file)

    content = util.read_file(path)
    pattern = r'<h1.*?</table>'
    content = re.search(pattern, content, re.DOTALL).group(0)
    util.write_file(path, content)

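# Standalone sketch of the extraction step in download_stats() above: the
# non-greedy DOTALL regex keeps the first <h1> through the closing </table>.
import re

def extract_stats_table(html):
    m = re.search(r'<h1.*?</table>', html, re.DOTALL)
    # Fall back to the full page if the pattern is missing (the original
    # would raise AttributeError instead).
    return m.group(0) if m else html
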
def cert(self, rowid, filename=""):
    for row in self.get_cert():
        if row["rowid"] == rowid:
            blob = CERT_DER_to_PEM(row["data"])
            if filename:
                write_file(filename, blob)
            cert = M2Crypto.X509.load_cert_der_string(row["data"])
            print cert.as_text()
            return

def cmd_mv(self, src_pn, dst_pn):
    if not self.check_sanity():
        dbg.err("it's not a metasync repo.")
        return False
    src_pn = os.path.abspath(src_pn)
    dst_pn = os.path.abspath(dst_pn)
    #TODO: check src_pn exists
    beg = time.time()
    try:
        dirname = os.path.dirname(src_pn)
        dirblob = self.blobstore.load_dir(dirname, False, dirty=True)
        if dirblob is None:
            dbg.err("%s does not exist" % src_pn)
            return False
    except NotTrackedException as e:
        dbg.err(str(e))
        return False
    fname = os.path.basename(src_pn)
    if fname not in dirblob:
        # was "%s" % pn in the original, but pn is undefined here
        dbg.err("%s does not exist" % src_pn)
        return False
    fblob = dirblob[fname]
    dirblob.rm(fname)
    dst_dirname = os.path.dirname(dst_pn)
    if dirname != dst_dirname:
        # Load the destination directory blob (the original reloaded
        # `dirname`, which would re-add the file to the source directory).
        dirblob = self.blobstore.load_dir(dst_dirname, True, dirty=True)
        assert dirblob is not None
    dst_fname = os.path.basename(dst_pn)
    dirblob.add(dst_fname, fblob, dirty=False)
    root = self.get_root_blob()
    root.store()
    newblobs = self.blobstore.get_added_blobs()
    util.write_file(self.get_head(), root.hv)
    self.append_history(root.hv)
    end = time.time()
    dbg.time("local write: %f" % (end - beg))
    # push new blobs remotely
    self.bstore_sync(newblobs)
    self._put_all(self.get_head(), self.get_remote_path(self.get_head_name()))
    end = time.time()
    dbg.time("remote write: %f" % (end - beg))
    # move the file
    shutil.move(src_pn, dst_pn)
    self._join()
    return True

def save_certs_keys(self):
    certs, pkeys = self.get_certs()
    for c in certs:
        filename = c + ".crt"
        print "Saving certificate %s" % filename
        certs[c].save_pem(filename)
    for k in pkeys:
        filename = k + ".key"
        print "Saving key %s" % filename
        write_file(filename, pkeys[k])

def save_user_sim_matrix(topics):
    for i in range(60):
        topic_path = ('user_similarity/user_topic_sim/' + str(topics)
                      + '_topics/' + str(i) + ".pickle")
        tag_path = 'user_similarity/user_tag_sim/' + str(i) + ".pickle"
        topic_i = util.read_file(topic_path)
        tag_i = util.read_file(tag_path)
        # Combine topic and tag similarities element-wise.
        for t in topic_i:
            topic_i[t] = topic_i[t] * tag_i[t]
        path = ('user_similarity/user_sim/' + str(topics)
                + '_hybrid/' + str(i) + ".pickle")
        util.write_file(topic_i, path)

def gen_template(template_file, output_file, config):
    """
    Populates and generates a new template.

    :param template_file: the initial template file
    :param output_file: the output file where the populated template will live
    :param config: the config object of type TTGConfiguration that will be used
    """
    populator = ttg.populate.TTGPopulator(template_file, config)
    contents = populator.populate()
    util.write_file(contents + "\n", output_file)

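# Hypothetical call site for gen_template() above; the file names and the
# way a TTGConfiguration is constructed are assumptions for illustration.
# config = ttg.config.TTGConfiguration()
# gen_template('service.conf.template', 'service.conf', config)
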
def calulate_user_similarity(processes, topics):
    user_list = db_info.user_list
    user_list_list = db.split_item(user_list)
    user_topic_map = get_user_topic_map(topics)
    node = len(user_list_list) / 60
    for i in range(60):
        exe_list = user_list_list[i * node:(i + 1) * node]
        results = multiprocess(processes, exe_list, user_topic_map)
        path = ('user_similarity/user_topic_sim/' + str(topics)
                + '_topics/' + str(i) + '.pickle')
        util.write_file(results, path)

def generate_kml(self, dir, name, places):
    if 'kml' not in places[0].keys():
        return
    env = Environment(loader=PackageLoader('buildmap', 'templates'))
    template = env.get_template('kml.jinja')
    write_file(os.path.join(dir, name + '.kml'),
               template.render(places=places))
    if name in self.filesToList:
        self.filesToList[name].append('kml')
    else:
        self.filesToList[name] = ['kml']

def assemble_str(name, string, show_calls, v, cleanup=True):
    """ Assemble a buffer containing a C program. """
    srcfile = name + '.c'
    outfile = name + '.o'
    vmsg(v, 'Assembling ' + srcfile + ' -> ' + outfile)
    util.write_file(srcfile, string)
    util.call([MPICC, srcfile, '-o', outfile] + ASSEMBLE_FLAGS, show_calls)
    if cleanup:
        os.remove(srcfile)

def compile_str(name, string, show_calls, v, save_temps=True):
    """ Compile a buffer containing an XC program. """
    srcfile = name + '.xc'
    outfile = name + '.S'
    vmsg(v, 'Compiling ' + srcfile + ' -> ' + outfile)
    util.write_file(srcfile, string)
    util.call([XCC, srcfile, '-o', outfile] + COMPILE_FLAGS, v=show_calls)
    if not save_temps:
        os.remove(srcfile)

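# Hedged usage sketch for compile_str()/assemble_str() above; the source
# strings are illustrative only, and the compilers/flags (XCC, MPICC, etc.)
# come from the surrounding module.
# compile_str('prog', 'int main() { return 0; }', show_calls=True, v=True)
# assemble_str('stub', '/* generated C */', show_calls=True, v=True)
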
def main(processes=4):
    global tag_map
    tag_map = util.read_file('user_profile/preference_tag/user_tags_50.pickle')
    user_list = db_info.user_list
    user_list_list = util.split_item(user_list)
    node = len(user_list_list) / 60
    for i in range(60):
        exe_list = user_list_list[i * node:(i + 1) * node]
        results = multiprocess(processes, exe_list)
        path = 'user_similarity/user_tag_sim/genres/' + str(i) + '.pickle'
        util.write_file(results, path)

def test():
    action = util.parse_action(sys.argv[1])
    if action == TRAP:
        util.install_trap()
    f = SyscallFilter(action)
    f.add_rule(ALLOW, "rt_sigreturn")
    f.add_rule(ALLOW, "exit_group")
    f.load()
    try:
        util.write_file("/dev/null")
    except OSError as ex:
        quit(ex.errno)
    quit(160)

def cmd_rm(self, pn):
    if not self.check_sanity():
        dbg.err("this is not a metasync repo")
        return False
    #TODO: check if the file exists
    beg = time.time()
    try:
        dirname = os.path.dirname(pn)
        dirblob = self.blobstore.load_dir(dirname, False)
        if dirblob is None:
            dbg.err("%s does not exist" % pn)
            return False
    except NotTrackedException as e:
        dbg.err(str(e))
        return False
    fname = os.path.basename(pn)
    if fname not in dirblob:
        dbg.err("%s does not exist" % pn)
        return False
    dirblob.rm(fname)
    root = self.get_root_blob()
    root.store()
    newblobs = self.blobstore.get_added_blobs()
    # we may need to include pointer for previous version.
    util.write_file(self.get_head(), root.hv)
    self.append_history(root.hv)
    end = time.time()
    dbg.time("local write: %f" % (end - beg))
    # push new blobs remotely
    self.bstore_sync(newblobs)
    self._put_all(self.get_head(), self.get_remote_path(self.get_head_name()))
    end = time.time()
    dbg.time("remote write: %f" % (end - beg))
    self._join()
    # drop local copy
    # TODO: rm only tracked files if removing file.
    try:
        os.unlink(pn)
    except:
        dbg.err("failed to rm %s" % pn)
        return False
    return True
