def conver():
    """Walk the input directory; convert every .py file via conver_ast and
    copy all other files unchanged into the output tree.

    Reads 'input', 'output' and 'report' from util_global; prints progress
    and appends a terminator line to the report for each converted file.
    """
    print("Begin conver, input file: " + util_global.get_value('input'))
    out_path = util_global.get_value('output')
    input_dir = util_global.get_value('input')
    # Basename of the input directory: used as the root of the output tree.
    dst_path = os.path.split(input_dir.rstrip('\\/'))[-1]
    conver_path = os.walk(input_dir)
    for path, dir_list, file_list in conver_path:
        for file_name in file_list:
            # BUG FIX: derive the relative sub-path by splitting on the full
            # input path rather than on its basename (dst_path). Splitting on
            # the basename is ambiguous whenever that name recurs deeper in
            # the tree (e.g. input ".../proj" containing ".../proj/proj/...").
            out_path_dst = abs_join(dst_path, path.split(input_dir)[1])
            if file_name.endswith(".py"):
                util_global.set_value('path', os.path.join(path, file_name))
                mkdir(os.path.join(out_path, out_path_dst))
                conver_ast(path, out_path_dst, file_name)
                if util_global.get_value('need_conver', False):
                    content = "Finish conver file: " + os.path.join(
                        path, file_name)
                    print(content)
                    write_report_terminator(content)
                else:
                    # AST pass found nothing to convert: copy the file as-is.
                    mkdir_and_copyfile(path, abs_join(out_path, out_path_dst),
                                       file_name)
            else:
                # Non-Python files are copied verbatim.
                mkdir_and_copyfile(path, abs_join(out_path, out_path_dst),
                                   file_name)
    print("Finish conver, output file: " + out_path + "; report file: " +
          util_global.get_value('report'))
def conver():
    """Entry point: walk the input tree, convert each eligible .py script,
    copy everything else verbatim, then emit the API analysis workbook.
    """
    print("Begin conver, input file: " + util_global.get_value('input') + '\n')
    output_dir = util_global.get_value('output')
    # Destination root = <input basename> + timestamp so runs never collide.
    base_name = os.path.split(
        util_global.get_value('input').rstrip('\\/'))[-1]
    dest_root = base_name + util_global.get_value('timestap')
    walker = os.walk(util_global.get_value('input'))
    report_dir = util_global.get_value('report')
    mkdir(report_dir)
    report_xlsx = os.path.join(report_dir, 'api_analysis_report.xlsx')
    # Fresh per-run accumulator for the API analysis rows.
    util_global.set_value('generate_dir_report', pd.DataFrame())
    size_limit = 10 * 1024 * 1024  # .py files above 10M are skipped
    for cur_dir, _, names in walker:
        for name in names:
            rel_part = cur_dir.split(util_global.get_value('input'))[1]
            out_path_dst = abs_join(dest_root, rel_part)
            file_path = os.path.join(cur_dir, name).replace('\\', '/')
            if not check_path_length(file_path):
                log_warning("The file:" + file_path +
                            " length is invalid, skip convert.")
                continue
            print("Begin conver file: " + file_path)
            if not name.endswith(".py"):
                # Non-Python files are copied through untouched.
                mkdir_and_copyfile(cur_dir,
                                   abs_join(output_dir, out_path_dst), name)
                continue
            if os.path.getsize(file_path) > size_limit:
                log_warning("The file:" + file_path +
                            " size is over 10M, skip convert.")
                continue
            util_global.set_value('path', file_path)
            mkdir(os.path.join(output_dir, out_path_dst))
            conver_ast(cur_dir, out_path_dst, name)
            if util_global.get_value('need_conver', False):
                msg = "Finish conver file: " + file_path + '\n'
                print(msg)
                write_report_terminator(msg)
            else:
                # Nothing needed converting: plain copy instead.
                mkdir_and_copyfile(cur_dir,
                                   abs_join(output_dir, out_path_dst), name)
    adjust_index()
    analysis_report = util_global.get_value('generate_dir_report')
    if analysis_report.empty:
        print('No api data in the report')
    else:
        analysis_report.to_excel(report_xlsx, index=True)
        get_api_statistic(analysis_report)
    print("Finish conver, output file: " + output_dir + "; report file: " +
          util_global.get_value('report'))
def _save_model(self, info):
    """Checkpoint the current session under *info*'s epoch directory and,
    once the record window overflows, evict the heap-minimum model's files.
    """
    def epoch_dir(epoch):
        # Directory layout: <store>/<models>/<epoch>/
        return NN_INFO_STORE_DIR + MODEL_DIR + str(epoch) + "/"

    ckpt_dir = epoch_dir(info.epoch)
    fp.mkdir(ckpt_dir)
    self.saver.save(self.sess, ckpt_dir + "nn_model.ckpt")
    heapq.heappush(self.model_info_heap, info)
    if len(self.model_info_heap) > MODEL_RECORD_WINDOW:
        evicted = heapq.heappop(self.model_info_heap)
        fp.remove_dir(epoch_dir(evicted.epoch))
def run(self):
    """Main training loop: set up signal handling, TF session and model
    recovery, iterate train/test groups until done or terminated, then
    tear down and persist the retained model-info heap.
    """
    # Route common termination/suspend signals to our handler.
    signal.signal(signal.SIGINT, self.handle_signal)
    signal.signal(signal.SIGTERM, self.handle_signal)
    signal.signal(signal.SIGHUP, self.handle_signal)
    signal.signal(signal.SIGTSTP, self.handle_signal)
    # This end keeps index [0] of each pipe; close the peer ends ([1]).
    for i in range(len(self.control_msg_pipes)):
        self.control_msg_pipes[i][1].close()
    assert NUM_TRAIN_TRACES > 0
    assert NUM_TEST_TRACES > 0
    self.train_trace_id_list = [i for i in range(NUM_TRAIN_TRACES)]
    self.test_trace_id_list = [i for i in range(NUM_TEST_TRACES)]
    fp.mkdir(NN_INFO_STORE_DIR)
    fp.mkdir(NN_INFO_STORE_DIR + MODEL_DIR)
    # Per-run log file, named by a 32-bit millisecond timestamp.
    pathname = NN_INFO_STORE_DIR + str(
        TimeMillis32()) + "_reward_and_entropy.txt"
    self.fp_re = open(pathname, "w")
    self._init_tensorflow()
    self.saver = tf.train.Saver(max_to_keep=2 * MODEL_RECORD_WINDOW)
    # Restore from a pretrained checkpoint if one is present.
    if os.path.exists(PRETRAIN_MODEL_DIR):
        name = "nn_model.ckpt"
        model_recover = PRETRAIN_MODEL_DIR + name
        if fp.check_filename_contain(PRETRAIN_MODEL_DIR, name):
            self.logger.debug("model recover")
            self.saver.restore(self.sess, model_recover)
        else:
            self.logger.debug("no model")
    self.group_id = self.left
    self._write_net_param()
    self.can_send_train_args = True
    self._send_train_args(self.group_id)
    # NOTE(review): nesting reconstructed from mangled whitespace — the
    # range check / break is taken as part of the train branch; confirm.
    while not self.terminate:
        self._check_control_msg_pipe()
        if self.train_mode:
            self._process_train_mode()
            # All groups in [left, right) processed: stop the agents.
            if self.group_id >= self.right:
                self._stop_agents()
                break
        else:
            self._process_test_mode()
    self.sess.close()
    self.fp_re.close()
    # Close our retained pipe ends.
    for i in range(len(self.control_msg_pipes)):
        self.control_msg_pipes[i][0].close()
    if self.terminate:
        self.logger.debug("terminate signal")
    # Dump the retained model records as epoch<TAB>reward<TAB>entropy lines.
    pathname = NN_INFO_STORE_DIR + MODEL_DIR + "model_info.txt"
    model_info_file = open(pathname, "w")
    for i in range(len(self.model_info_heap)):
        info = self.model_info_heap[i]
        model_info_file.write(
            str(info.epoch) + "\t" + str(info.reward) + "\t" +
            str(info.entropy) + "\n")
    model_info_file.close()
def conver():
    """Walk the input directory, convert each .py file and copy the rest,
    then write the API analysis workbook and statistics.
    """
    print("Begin conver, input file: " + util_global.get_value('input') + '\n')
    out_dir = util_global.get_value('output')
    # Output root is the input basename suffixed with a timestamp.
    base = os.path.split(util_global.get_value('input').rstrip('\\/'))[-1]
    stamped_root = base + util_global.get_value('timestap')
    report_dir = util_global.get_value('report')
    mkdir(report_dir)
    xlsx_path = os.path.join(report_dir, 'api_analysis_report.xlsx')
    # Start each run with an empty analysis accumulator.
    util_global.set_value('generate_dir_report', pd.DataFrame())
    for cur_dir, _, names in os.walk(util_global.get_value('input')):
        for name in names:
            rel = cur_dir.split(util_global.get_value('input'))[1]
            dst_subdir = abs_join(stamped_root, rel)
            src_file = os.path.join(cur_dir, name).replace('\\', '/')
            print("Begin conver file: " + src_file)
            if name.endswith(".py"):
                util_global.set_value('path', src_file)
                mkdir(os.path.join(out_dir, dst_subdir))
                conver_ast(cur_dir, dst_subdir, name)
                if util_global.get_value('need_conver', False):
                    msg = "Finish conver file: " + src_file + '\n'
                    print(msg)
                    write_report_terminator(msg)
                else:
                    # Nothing required conversion: copy through.
                    mkdir_and_copyfile(cur_dir,
                                       abs_join(out_dir, dst_subdir), name)
            else:
                # Non-Python files are copied verbatim.
                mkdir_and_copyfile(cur_dir,
                                   abs_join(out_dir, dst_subdir), name)
    adjust_index()
    report_frame = util_global.get_value('generate_dir_report')
    if report_frame.empty:
        print('No api data in the report')
    else:
        report_frame.to_excel(xlsx_path, index=True)
        get_api_statistic(report_frame)
    print("Finish conver, output file: " + out_dir + "; report file: " +
          util_global.get_value('report'))
def run(self):
    """Test-mode loop: restore a saved model (abort if none), then run
    test episodes until done or terminated, closing all resources on exit.
    """
    # One handler covers every termination/suspend signal we care about.
    for sig in (signal.SIGINT, signal.SIGTERM, signal.SIGHUP, signal.SIGTSTP):
        signal.signal(sig, self.handle_signal)
    # This side keeps end [0] of each pipe; drop the peer ends now.
    for pipe in self.control_msg_pipes:
        pipe[1].close()
    assert NUM_TEST_TRACES > 0
    self.test_trace_id_list = list(range(NUM_TEST_TRACES))
    fp.mkdir(NN_INFO_STORE_DIR + MODEL_DIR)
    self._init_tensorflow()
    saver = tf.train.Saver()
    restored = False
    if os.path.exists(LOAD_MODEL_DIR):
        ckpt_name = "nn_model.ckpt"
        ckpt_path = LOAD_MODEL_DIR + ckpt_name
        if fp.check_filename_contain(LOAD_MODEL_DIR, ckpt_name):
            self.logger.debug("model recover")
            saver.restore(self.sess, ckpt_path)
            restored = True
        else:
            self.logger.debug("no model")
    # No checkpoint means nothing to evaluate — shut the agents down.
    if not restored:
        self._stop_agents()
        return
    log_path = NN_INFO_STORE_DIR + str(
        TimeMillis32()) + "_test_reward_and_entropy.txt"
    self.fp_re = open(log_path, "w")
    self._write_net_param()
    while not self.terminate:
        self._check_control_msg_pipe()
        self._process_test_mode()
        if self.done:
            self._stop_agents()
            break
    self.sess.close()
    self.fp_re.close()
    # Release our retained pipe ends.
    for pipe in self.control_msg_pipes:
        pipe[0].close()
    if self.terminate:
        self.logger.debug("terminate signal")