def IPv_analysis(IPv_type, exp_n, res_directory, coll, from_d, to_d, ext):
    """Run the more-specifics analysis (step 6) for one IP version.

    Reads the step-5 split data for collector *coll* between *from_d* and
    *to_d*, classifies every (monitor, prefix) pair by specificity, and
    saves the result as step-6 output.

    Parameters: IPv_type ('IPv4'/'IPv6'), exp_n experiment name,
    res_directory results root, coll collector name, from_d/to_d date
    bounds, ext input-file extension (also the format passed to f.save_file).
    """
    input_file_path = (res_directory + exp_n + '/5.split_data_for_analysis/'
                       + IPv_type + '/' + coll + '_' + from_d + '-' + to_d + ext)
    output_file_path = (res_directory + exp_n + '/6.more_specifics_analysis/'
                        + IPv_type + '/' + coll + '_' + from_d + '-' + to_d + '.csv')
    write_flag = f.overwrite_file(output_file_path)
    if not write_flag:
        return
    print("Loading " + input_file_path + "...")
    # BUG FIX: the original passed the undefined global `file_ext`;
    # the extension is the `ext` parameter.
    df = f.read_file(ext, input_file_path)
    df_sort = df.sort_values(by=['MONITOR', 'PREFIX', 'TIME'])
    df_sort = df_sort.reset_index(drop=True)
    df_sort = df_sort.drop(['Unnamed: 0'], axis=1)
    print("Data loaded successfully")

    # 1. Prefix visibility analysis
    print('Getting visibility per prefix...')
    monitors, prefixes, visibilities_per_prefix, updates_per_prefix, ASes = \
        prefix_visibility_analysis(df_sort, exp_n)
    df_prefixes_per_monitor = pd.DataFrame({
        'MONITOR': monitors,
        'PREFIX': prefixes
    })

    # 2. Cluster prefixes into more specifics, least specifics and uniques
    # (non-specifics).
    pref_types, deeps = clustering_prefixes(df_prefixes_per_monitor)
    df_visibility_per_prefix = pd.DataFrame({
        'MONITOR': monitors,
        'PREFIX': prefixes,
        'VISIBILITY': visibilities_per_prefix,
        'UPDATES': updates_per_prefix,
        'TYPE': pref_types,
        'DEEP': deeps,
        'ORIGIN': ASes
    })

    # 3. Cluster more-specific prefixes into TOP, single level and
    # more specifics of other more specifics.
    df_more_specifics = df_visibility_per_prefix[
        df_visibility_per_prefix['TYPE'] == 'more_specific']
    # .copy() so the TYPE overwrite below does not hit a view
    # (SettingWithCopyWarning).
    df_more_specifics = df_more_specifics.reset_index(drop=True).copy()
    # BUG FIX: the original dropped 'Unnamed: 0' here, but these frames are
    # built fresh above and have no such column, so drop() raised KeyError.
    pref_types, deeps = clustering_prefixes(df_more_specifics)
    # Replace types with the more detailed classification.
    df_more_specifics['TYPE'] = pref_types
    df_others = df_visibility_per_prefix[
        df_visibility_per_prefix['TYPE'] != 'more_specific']
    df_others = df_others.reset_index(drop=True)
    # pd.concat replaces DataFrame.append (removed in pandas 2.x).
    df_visibility_per_prefix = pd.concat([df_more_specifics, df_others],
                                         ignore_index=True)
    # BUG FIX: the original rebuilt the output path using the undefined
    # global `collector`; the path computed above from `coll` is correct.
    f.save_file(df_visibility_per_prefix, ext, output_file_path)
def files_read(filename):
    """Return the contents of *filename* as JSON if the current user's BLP level permits reading it."""
    user_id = auth.get_current_user_id()
    with db_manager.session_scope() as session:
        # Verify that the requested file exists
        file = session.query(File).filter(
            File.filename == filename).one_or_none()
        if not file:
            return api_error(api_result_code=ApiErorrCode.FILE_NOT_EXISTS)
        # Get the user from DB
        user = session.query(User).get(user_id)
        if not user:
            return api_error(http_code=Unauthorized.code,
                             api_result_code=ApiErorrCode.UNAUTHORIZED)
        # Enforce the BLP simple security rule ("no read up"): the user's
        # level must dominate the file's level for the read to proceed.
        # (The original comment said "no write down", which is the *-property
        # for writes, not the rule checked here.)
        if not blp_rules.enforce_blp_read(user.level, file.level):
            return api_error(http_code=Unauthorized.code,
                             api_result_code=ApiErorrCode.UNAUTHORIZED)
        # Read the file contents from storage and return them
        content = file_manager.read_file(file.filename)
        return jsonify({'content': content})
def register():
    """Interactively register a new teacher account."""
    # Load the already-registered accounts from teacher.json.
    data = file_manager.read_file('files/teacher.json')
    # Re-prompt until the account name is 3-6 characters long.
    teacher_name = input('请输入账号(3~6位):')
    while not 3 <= len(teacher_name) <= 6:
        print('账号不符合,请重新输入!')
        teacher_name = input('请输入账号(3~6位):')
    if teacher_name in data:
        print('注册失败!该账号已经注册过!')
        return
    # Re-prompt until the password is 6-12 characters long.
    password = input('请输入密码(6~12位):')
    while not 6 <= len(password) <= 12:
        print('密码不符合,请重新输入!')
        password = input('请输入密码(6~12位):')
    # Persist the new account through the Teacher bean.
    teacher = bean.Teacher(teacher_name, password)
    data[teacher.name] = teacher.password
    file_manager.write_json('files/teacher.json', data)
def get_public_key_bytes(self, key_path):
    """Return the decrypted public-key PEM stored under *key_path*."""
    # The key lives at <key_path>/<PUBLIC_KEY_DIR_NAME>/<PUBLIC_KEY_FILE_NAME>.
    pem_file = os.path.join(key_path, PUBLIC_KEY_DIR_NAME, PUBLIC_KEY_FILE_NAME)
    # The file holds AES-CBC ciphertext; decrypt before returning.
    ciphertext = file_manager.read_file(pem_file)
    return self.aes_cbc_cipher.decrypt_text(ciphertext)
def load_processes(self) -> list:
    """Load the persisted process list, seeding from defaults on first run.

    Returns the loaded processes; also sets self.name_pad to the longest
    process name (0 when the list is empty) and, on first run, resets
    self.inactive_processes.
    """
    try:
        processes = file_manager.read_binary_file(self.PROCESSES_FILE)
    except FileNotFoundError as err:
        print(err)
        # First run: seed the store from the default process list, then
        # retry. The recursive call sets name_pad itself, so return early
        # instead of recomputing it below.
        self.inactive_processes = []
        for entry in file_manager.read_file(self.DEFAULT_PROCESSES_FILE):
            self.add_process(entry)
        return self.load_processes()
    # BUG FIX: max() on an empty sequence raises ValueError; default=0
    # keeps an empty process file from crashing the load.
    self.name_pad = max((len(p.name) for p in processes), default=0)
    return processes
def start():
    """Entry menu: loop until the user picks 3 (exit)."""
    content = file_manager.read_file("welcome.txt")
    # Map menu choices to their handlers; '3' is handled separately.
    menu = {'1': login, '2': register}
    while True:
        choice = input(content + '\n请选择(1-3):')
        if choice == '3':
            sys.exit(0)
        action = menu.get(choice)
        if action is None:
            print("输入有误")
        else:
            action()
def get_public_key_object(self, key_path):
    """Load, decrypt and deserialize the public key stored under *key_path*."""
    pem_file = os.path.join(key_path, PUBLIC_KEY_DIR_NAME, PUBLIC_KEY_FILE_NAME)
    # The on-disk PEM is AES-CBC encrypted; decrypt it before parsing.
    pem_bytes = self.aes_cbc_cipher.decrypt_text(file_manager.read_file(pem_file))
    # Build and return the cryptography public-key object.
    return serialization.load_pem_public_key(
        pem_bytes, backend=backends.default_backend())
def start():
    """Show the welcome menu until the user chooses to quit (3)."""
    content = file_manager.read_file('welcome.txt')
    while True:
        choice = input(content + '\n请输入你的操作(1-3):')
        # Guard clause: option 3 leaves the loop.
        if choice == '3':
            break
        if choice == '1':
            login()
        elif choice == '2':
            register()
        else:
            print('你输入的有误\n')
def start():
    """Main loop for the welcome menu; option 3 terminates the program."""
    banner = file_manager.read_file('files/welcome.txt')
    dispatch = {'1': login, '2': register}
    while True:
        choice = input(banner + '\n请选择(1-3):')
        if choice == '3':
            exit(0)
        handler = dispatch.get(choice)
        if handler:
            handler()
        else:
            print('输入有误')
def start():
    """Prompt for a menu number until the user exits with 3."""
    content = file_manager.read_file('welcome.txt')
    while True:
        selection = input(content + '请输入需要操作的数字(1-3):')
        if selection == '1':
            login()
            continue
        if selection == '2':
            register()
            continue
        if selection == '3':
            # sys.exit ends the whole program, not just this loop.
            sys.exit(0)
        print('请输入正确的数字')
def get_my_private_key_object(self):
    """Load, decrypt and deserialize this instance's own private key."""
    pem_file = os.path.join(self.key_path, PRIVATE_KEY_DIR_NAME,
                            PRIVATE_KEY_FILE_NAME)
    # The stored PEM is AES-CBC encrypted; recover the plaintext first.
    pem_bytes = self.aes_cbc_cipher.decrypt_text(file_manager.read_file(pem_file))
    # password=None: the decrypted PEM itself carries no passphrase.
    return serialization.load_pem_private_key(
        pem_bytes, backend=backends.default_backend(), password=None)
def init_city_dict(filename):
    """ Read from file and generates a lookup dictionary with
    key: cityid and value: city coordinates. """
    print("Reading cities from file:", filename)
    global NUM_CITIES
    for line in file_manager.read_file(filename):
        fields = line.split()
        city_id = int(fields[0])
        # BUG FIX: the original tested the raw string `fields[0]` against a
        # dict keyed by ints, so the membership check never matched and
        # duplicates were silently overwritten instead of reported.
        if city_id not in CITY_LOOKUP:
            CITY_LOOKUP[city_id] = (float(fields[1]), float(fields[2]))
            NUM_CITIES += 1
        else:
            print("Key with value", fields[0], "already in dict.")
def start():
    """Welcome menu loop; option 3 exits the whole program."""
    text = file_manager.read_file('welcome.txt')
    actions = {'1': login, '2': register}
    while True:
        choice = input(text + '\n请选择(1-3):')
        if choice in actions:
            actions[choice]()
        elif choice == '3':
            # sys.exit terminates the entire program, not just this loop.
            sys.exit(0)
        else:
            print('输入有误!')
def login():
    """Interactive teacher login; opens the student manager on success."""
    # Load registered accounts (name -> stored password) from teacher.json.
    data = file_manager.read_file('files/teacher.json')
    teacher_name = input('请输入老师账号:')
    if teacher_name not in data:
        print('登陆失败!该账号没有注册过!')
        return
    password = input('请输入密码:')
    import tools
    # Stored passwords are compared against the encrypted form of the input.
    if data[teacher_name] != tools.encrypt_password(password):
        print('密码错误')
        return
    student_manager.name = teacher_name
    student_manager.show_manager()
def start():
    """Top-level menu: log in, register, or quit."""
    content = file_manager.read_file('11 学生管理系统/files/welcome.txt')
    while True:
        choice = input(content + '\n')
        if choice == '3':
            print('退出')
            # Leaving the loop ends the program normally.
            break
        if choice == '1':
            print('登录')
            login()
        elif choice == '2':
            print('注册')
            register()
        else:
            print('无效操作')
def show_manager():
    """Student-management menu; loops until option 5 (return)."""
    # The page template carries a %s placeholder for the teacher's name.
    content = file_manager.read_file('students_page.txt') % name
    handlers = {
        '1': add_student,
        '2': show_student,
        '3': change_student,
        '4': delete_student,
    }
    while True:
        print(content)
        option = input('请输入1-5:')
        if option == '5':
            break
        handler = handlers.get(option)
        if handler is None:
            print('输入有误!')
        else:
            handler()
def show_manager():
    """Management menu loop: add/show/modify/delete students, 5 to leave."""
    # students_page.txt contains a %s slot for the signed-in teacher name.
    page = file_manager.read_file('students_page.txt') % name
    actions = {
        '1': add_student,
        '2': show_student,
        '3': modify_student,
        '4': delte_student,  # name kept exactly as defined elsewhere
    }
    while True:
        print(page)
        choice = input('请选择(1-5):')
        if choice == '5':
            break
        action = actions.get(choice)
        if action:
            action()
        else:
            print('输入有误!')
def show_manager(file_name):
    """Render the menu from *file_name* and dispatch student operations."""
    # Delegate reading to the file_manager module; the template carries a
    # %s placeholder filled with the global `name`.
    content = file_manager.read_file(file_name) % name
    while True:
        print(content)
        choice = input('请选择(1~5):')
        # Guard clause: 5 returns to the caller.
        if choice == '5':
            break
        if choice == '1':
            add_student()
        elif choice == '2':
            show_student()
        elif choice == '3':
            modify_student()
        elif choice == '4':
            delete_student()
        else:
            print('您输入的内容有误,请重新输入')
def show_manager():
    """Teacher console: loop over the management menu until 5 (return)."""
    # The page template has a %s slot for the signed-in user name.
    content = file_manager.read_file(
        '11 学生管理系统/files/students_pages.txt') % username
    while True:
        print(content)
        choice = input('请输入(1~5):')
        if choice == '5':
            print('返回')
            break
        if choice == '1':
            print('添加')
            add_student()
        elif choice == '2':
            print('查看')
            show_students_jianhua()
        elif choice == '3':
            # NOTE(review): '修改' is announced but no handler is wired up.
            print('修改')
        elif choice == '4':
            print('删除')
            delte_student()
        else:
            print('输入有误')
# Create the step-5 output directories for both IP versions.
step_dir = '/5.split_data_for_analysis/IPv4'
exp.per_step_dir(exp_name, step_dir)
step_dir = '/5.split_data_for_analysis/IPv6'
exp.per_step_dir(exp_name, step_dir)
# Step-4 output is the input here. NOTE(review): step_dir still holds the
# IPv6 directory, so the overwrite check below tests the IPv6 output path.
input_file_path = result_directory + exp_name + '/4.concatenate_RIB_data/' + collector + '_' + from_date + '-' + to_date + file_ext
output_file_path = result_directory + exp_name + step_dir + '/' + collector + '_' + from_date + '-' + to_date + file_ext
# Ask whether an existing output may be overwritten (1 means yes).
write_flag = f.overwrite_file(output_file_path)
if write_flag == 1:
    print "Loading " + input_file_path + "..."
    df_advises = f.read_file(file_ext, input_file_path)
    print "Data loaded successfully"
    print "Splitting {} advises...".format(len(df_advises))
    # Split the announcements by IP version and drop the stray CSV index
    # column that read_file carried along.
    df_IPv4_updates, df_IPv6_updates = separate_IPv_types(df_advises)
    df_IPv4_updates = df_IPv4_updates.drop(['Unnamed: 0'], axis=1)
    df_IPv6_updates = df_IPv6_updates.drop(['Unnamed: 0'], axis=1)
    # Save the IPv4 half under its own step directory.
    step_dir = '/5.split_data_for_analysis/IPv4'
    output_file_path = result_directory + exp_name + step_dir + '/' + collector + '_' + from_date + '-' + to_date + file_ext
    f.save_file(df_IPv4_updates, file_ext, output_file_path)
# Assemble the parsed RIB records into one DataFrame, one row per entry.
df_RIBS = pd.DataFrame({
    'TIME': times,
    'TYPE': types,
    'MONITOR': s_IPs,
    'AS': s_AS,
    'PREFIX': prefixes,
    'AS_PATH': AS_PATHs
})
print(' Data Frame created!')
# Load the cleaned update data produced by step 3 for this collector/range.
input_file_path = result_directory + exp_name + '/3.data_cleaning/' + collector + '_' + from_date + '-' + to_date + file_ext
output_file_path = result_directory + exp_name + step_dir + '/' + collector + '_' + from_date + '-' + to_date + file_ext
# NOTE(review): write_flag is computed but never checked before saving.
write_flag = f.overwrite_file(output_file_path)
print "Loading " + input_file_path + "..."
df_updates = f.read_file(file_ext, input_file_path)
print "Data loaded successfully"
print "Concatenating RIBs to updates"
# Stack RIB rows on top of the update rows and drop the stray CSV index.
list_complete = [df_RIBS, df_updates]
df_complete = pd.concat(list_complete)
df_complete = df_complete.drop(['Unnamed: 0'], axis=1)
f.save_file(df_complete, file_ext, output_file_path)
def show_manager():
    """Announce login and display the student-management page."""
    print("正在登录")
    # The page body lives in students_page.txt.
    page = file_manager.read_file("students_page.txt")
    print(page)
# Resolve experiment configuration: where results live and their format.
result_directory = experiment['resultDirectory']
file_ext = experiment['resultFormat']
# Make sure the step-3 output directory exists.
step_dir = '/3.data_cleaning'
exp.per_step_dir(exp_name, step_dir)
input_file_path = result_directory + exp_name + '/2.sort_data_for_cleaning/' + collector + '_' + from_date + '-' + to_date + file_ext
output_file_path = result_directory + exp_name + step_dir + '/' + collector + '_' + from_date + '-' + to_date + file_ext
# Ask whether an existing output may be overwritten before doing any work.
write_flag = f.overwrite_file(output_file_path)
if write_flag:
    print('Loading ' + input_file_path + '...')
    df = f.read_file(file_ext, input_file_path)
    print('Data loaded successfully')
    print('\nConverting timestamp to minutes...\n')
    # TIME is in seconds; floor-divide to whole minutes.
    df_time_s = df['TIME']
    df_time_mm = df_time_s // 60
    df_time_list = df_time_mm.tolist()
    df_type = df['TYPE']
    df_type_list = df_type.tolist()
    # Indexes of rows whose TYPE marks a state message — presumably session
    # resets; confirm against get_state_indexes.
    state_indexes = get_state_indexes(df_type_list)
    print(len(state_indexes))
def handle_post_thread(self, data, path):
    """Route a POST request body (*data*, a dict of form fields) by URL *path*.

    Sends a 200/JSON header unconditionally, then writes the route-specific
    JSON payload to self.wfile.
    """
    self.send_response(HTTPStatus.OK)
    self.send_header("Content-type", "application/JSON")
    self.end_headers()
    # Routes that read code always expect a code[] field; default it empty.
    if "code[]" not in data:
        data["code[]"] = [""]
    if path == "/cancel":
        self.cancellation_event.set()
    if path == "/process2":
        # Run/visualize code submitted by the editor.
        self.cancellation_event.clear(
        )  # Make sure we don't have lingering cancellation requests from before
        code = data["code[]"]
        curr_i = int(data["curr_i"][0])
        curr_f = int(data["curr_f"][0])
        global_frame_id = int(data["globalFrameID"][0])
        visualize_tail_calls = data["tailViz"][0] == "true"
        self.wfile.write(
            bytes(
                handle(code, curr_i, curr_f, global_frame_id,
                       visualize_tail_calls,
                       cancellation_event=self.cancellation_event), "utf-8"))
    elif path == "/save":
        # Optionally save, then return the comment-stripped code.
        code = data["code[]"]
        filename = data["filename"][0]
        do_save = data["do_save"][0] == "true"
        if do_save:
            save(code, filename)
        self.wfile.write(
            bytes(
                json.dumps({
                    "result": "success",
                    "stripped": strip_comments(code)
                }), "utf-8"))
    elif path == "/instant":
        code = data["code[]"]
        global_frame_id = int(data["globalFrameID"][0])
        self.wfile.write(bytes(instant(code, global_frame_id), "utf-8"))
    elif path == "/reformat":
        # Pretty-print the submitted code, optionally in "java" brace style.
        code = data["code[]"]
        javastyle = data["javastyle"][0] == "true"
        self.wfile.write(
            bytes(
                json.dumps({
                    "result": "success",
                    "formatted": prettify(code, javastyle)
                }), "utf-8"))
    elif path == "/test":
        # Run the ok test harness in a cancelable subprocess and return only
        # the part after the BEGIN_OUTPUT marker.
        self.cancellation_event.clear(
        )  # Make sure we don't have lingering cancellation requests from before
        output = cancelable_subprocess_call(
            self.cancellation_event,
            (sys.argv[0], os.path.splitext(ok_interface.__file__)[0] + ".py"),
            -1, sys.executable, subprocess.PIPE, subprocess.PIPE, None)
        self.wfile.write(output.split(ok_interface.BEGIN_OUTPUT)[1])
    elif path == "/list_files":
        self.wfile.write(bytes(json.dumps(get_scm_files()), "utf-8"))
    elif path == "/read_file":
        filename = data["filename"][0]
        self.wfile.write(bytes(json.dumps(read_file(filename)), "utf-8"))
    elif path == "/new_file":
        filename = data["filename"][0]
        self.wfile.write(
            bytes(json.dumps({"success": new_file(filename)}), "utf-8"))
    elif path == "/save_state":
        # Merge the posted client state into the module-level state dict;
        # "states" entries are deep-merged, everything else is replaced.
        global state
        for key, val in json.loads(data["state"][0]).items():
            if key == "states":
                if "states" not in state:
                    state["states"] = val
                else:
                    merge(state["states"], val)
            else:
                state[key] = val
        if "settings" in state:
            save_config("settings", state["settings"])
    elif path == "/load_state":
        if "states" not in state:
            self.wfile.write(b"fail")
        else:
            self.wfile.write(bytes(json.dumps(state), "utf-8"))
    elif path == "/load_settings":
        # Overlay persisted settings onto the in-memory state before replying.
        try:
            if "settings" not in state:
                state["settings"] = {}
            for key, val in load_config("settings").items():
                state["settings"][key] = val
        except FileNotFoundError:
            self.wfile.write(b"fail")
        else:
            self.wfile.write(bytes(json.dumps(state["settings"]), "utf-8"))
    elif path == "/documentation":
        query = data.get("query", [""])[0]
        self.wfile.write(bytes(json.dumps(search(query)), "utf-8"))
    elif path == "/kill":
        # This is (only) fine because we're in a different thread than the actual server
        self.server.shutdown()
        self.server.socket.close()
def decrypt_file(self, path):
    """Decrypt the file at *path* in place."""
    ciphertext = file_manager.read_file(path)
    plaintext = self.decrypt_text(ciphertext)
    # Overwrite the original file with its decrypted contents.
    file_manager.write_to_file(path, plaintext)
def encrypt_file(self, path):
    """Encrypt the file at *path* in place."""
    plaintext = file_manager.read_file(path)
    ciphertext = self.encrypt_text(plaintext)
    # Overwrite the original file with its encrypted contents.
    file_manager.write_to_file(path, ciphertext)
def handle_post_thread(self, data, path):
    """Route a POST request (*data*: dict of bytes form fields) by URL *path*.

    Unlike the str-keyed variant, headers are sent per-route here, after the
    route-specific work that may precede them.
    """
    # Routes that read code always expect a code[] field; default it empty.
    if (b'code[]' not in data):
        data[b'code[]'] = [b'']
    if (path == '/cancel'):
        self.cancellation_event.set()
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
    if (path == '/process2'):
        # Clear any lingering cancellation request, then run the code.
        self.cancellation_event.clear()
        code = [x.decode('utf-8') for x in data[b'code[]']]
        curr_i = int(data[b'curr_i'][0])
        curr_f = int(data[b'curr_f'][0])
        global_frame_id = int(data[b'globalFrameID'][0])
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        self.wfile.write(
            bytes(
                handle(code, curr_i, curr_f, global_frame_id,
                       cancellation_event=self.cancellation_event), 'utf-8'))
    elif (path == '/save'):
        # Optionally save, then return the comment-stripped code.
        code = [x.decode('utf-8') for x in data[b'code[]']]
        filename = data[b'filename'][0]
        do_save = (data[b'do_save'][0] == b'true')
        if do_save:
            save(code, filename)
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        self.wfile.write(
            bytes(
                json.dumps({
                    'result': 'success',
                    'stripped': strip_comments(code),
                }), 'utf-8'))
    elif (path == '/instant'):
        code = [x.decode('utf-8') for x in data[b'code[]']]
        global_frame_id = int(data[b'globalFrameID'][0])
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        self.wfile.write(bytes(instant(code, global_frame_id), 'utf-8'))
    elif (path == '/reformat'):
        # Pretty-print the submitted code, optionally in "java" brace style.
        code = [x.decode('utf-8') for x in data[b'code[]']]
        javastyle = (data[b'javastyle'][0] == b'true')
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        self.wfile.write(
            bytes(
                json.dumps({
                    'result': 'success',
                    'formatted': prettify(code, javastyle),
                }), 'utf-8'))
    elif (path == '/test'):
        # Run the ok test harness in a cancelable subprocess.
        self.cancellation_event.clear()
        output = cancelable_subprocess_call(
            self.cancellation_event,
            (sys.argv[0],
             (os.path.splitext(ok_interface.__file__)[0] + '.py')), (-1),
            sys.executable, subprocess.PIPE, subprocess.PIPE, None)
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        self.wfile.write(output)
    elif (path == '/list_files'):
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        self.wfile.write(bytes(json.dumps(get_scm_files()), 'utf-8'))
    elif (path == '/read_file'):
        filename = data[b'filename'][0]
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        self.wfile.write(bytes(json.dumps(read_file(filename)), 'utf-8'))
    elif (path == '/new_file'):
        filename = data[b'filename'][0].decode('utf-8')
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        self.wfile.write(
            bytes(json.dumps({
                'success': new_file(filename),
            }), 'utf-8'))
    elif (path == '/save_state'):
        # Merge the posted client state into the module-level state dict;
        # "states" entries are deep-merged, everything else is replaced.
        global state
        for (key, val) in json.loads(data[b'state'][0].decode('utf-8')).items():
            if (key == 'states'):
                if ('states' not in state):
                    state['states'] = val
                else:
                    merge(state['states'], val)
            else:
                state[key] = val
        if ('settings' in state):
            save_config('settings', state['settings'])
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
    elif (path == '/load_state'):
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        if ('states' not in state):
            self.wfile.write(b'fail')
        else:
            self.wfile.write(bytes(json.dumps(state), 'utf-8'))
    elif (path == '/load_settings'):
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        # Overlay persisted settings onto the in-memory state before replying.
        try:
            if ('settings' not in state):
                state['settings'] = {}
            for (key, val) in load_config('settings').items():
                state['settings'][key] = val
        except FileNotFoundError:
            self.wfile.write(b'fail')
        else:
            self.wfile.write(bytes(json.dumps(state['settings']),
                                   'utf-8'))
    elif (path == '/documentation'):
        self.send_response(HTTPStatus.OK, 'test')
        self.send_header('Content-type', 'application/JSON')
        self.end_headers()
        query = data.get(b'query', [b''])[0].decode('utf-8')
        self.wfile.write(bytes(json.dumps(search(query)), 'utf-8'))
    elif (path == '/kill'):
        # Shut down the server; safe because we're on a different thread.
        self.server.shutdown()
        self.server.socket.close()
#!usr/bin/python3.4 #-*-coding:utf-8-* import sys import os sys.path.append("lib") import file_manager as fm import data_visualizer as dv rcg = fm.read_file("logs/20140709193255-HELIOS2014_3-vs-WrightEagle_0.rcg") rcl = fm.read_file("logs/20140709193255-HELIOS2014_3-vs-WrightEagle_0.rcl") path = "20140709193255-HELIOS2014_3-vs-WrightEagle_0/" teams = fm.find_teams(rcg) #print teams ball_data = fm.get_ball_data(rcg) #print ball_data kick_data = fm.get_kick_data(rcl, ball_data) #print kick_data #passes = fm.get_passes(kick_data) #print passes #dribbles = fm.get_dribbles(kick_data) #print dribbles kick_chains = fm.get_kick_chains(kick_data) #print kick_chains