def train_model(self):
    self.__begin()
    log_debug(self.final_model_path, token_id=self.training_token,
              prefix=self.common_prefix)
    log_debug(os.path.exists(self.final_model_path),
              token_id=self.training_token, prefix=self.common_prefix)
    should_train = not os.path.exists(self.final_model_path)
    has_error = self.__train() if should_train else self.__retrain(
        self.final_model_path)
    # Short-circuit: __attach_to_networks() runs only when training succeeded.
    if not has_error and self.__attach_to_networks():
        message = 'Dataset: {0} training successful'.format(self.dataset_name)
        log_info(message, token_id=self.training_token,
                 prefix=self.common_prefix)
    else:
        has_error = True  # a failed network attach also counts as a failure
        message = 'Dataset: {0} training failed'.format(self.dataset_name)
        log_error(message, token_id=self.training_token,
                  prefix=self.common_prefix)
    self.__tear_down()
    return has_error, message
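# Usage sketch (hypothetical caller; InceptionModelTrainer and training_config
# follow the names used elsewhere in this module). train_model() returns
# (has_error, message), so callers should branch on the flag, not the text:
#
#     trainer = InceptionModelTrainer(training_config)
#     has_error, message = trainer.train_model()
#     if has_error:
#         log_error(message)
#     else:
#         log_info(message)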
def __build_url(self, path):
    if check_url_valid(path):
        return path
    elif self.base_url:
        return self.base_url + path
    else:
        logger.log_error(sysvalidate(self, Err_Http_04))
        raise ParameterError(sysvalidate(self, Err_Http_04))
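# The resolution contract of __build_url, illustrated (session setup is
# hypothetical): absolute URLs pass through unchanged, relative paths are
# prefixed with base_url, and anything else raises ParameterError.
#
#     session.base_url = 'http://api.example.com'
#     session.request('GET', '/users')               # -> http://api.example.com/users
#     session.request('GET', 'http://other.host/x')  # used as-is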
def get_tracker(algorithm, bounding_box, frame):
    '''
    Fetches a tracker object based on the algorithm specified.
    '''
    if algorithm == 'csrt':
        return csrt_create(bounding_box, frame)
    if algorithm == 'kcf':
        return kcf_create(bounding_box, frame)
    log_error('Invalid tracking algorithm specified (options: csrt, kcf)',
              {'cat': 'TRACKER_CREATE'})
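# Usage sketch (assumes an OpenCV frame and the csrt_create/kcf_create helpers
# above; the bounding box and its (x, y, w, h) values are made up).
def _demo_get_tracker(frame):
    bounding_box = (50, 60, 120, 80)  # hypothetical detection result
    tracker = get_tracker('csrt', bounding_box, frame)
    if tracker is None:  # get_tracker logs and returns None for unknown algorithms
        raise ValueError('tracker creation failed')
    return tracker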
def run(self, test_obj):
    if isinstance(test_obj, TestCase):
        result = self.run_testcase(test_obj)
    elif isinstance(test_obj, TestSuite):
        result = self.run_testsuite(test_obj)
    elif isinstance(test_obj, TestPlan):
        result = self.run_testplan(test_obj)
    else:
        logger.log_error((type(self).__qualname__, ValidationError(
            'Cannot run {}: only a TestCase, TestSuite or TestPlan can be run!'.format(test_obj))))
        raise ValidationError('Only a TestCase, TestSuite or TestPlan can be run!')
    return result
def handle_error(ex):
    token_id = str(uuid.uuid4())
    err = {
        'ErrorTrace': traceback.format_exc(),
        'RequestData': request.data,
        'RequestHeader': request.headers,
        'RequestRoute': request.full_path
    }
    log_error(err, token_id)
    resp_message = ('An unexpected error occurred.\n'
                    'Please contact the dev team and supply this token: {} '
                    'for more information.')
    return resp_message.format(token_id)
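# Wiring sketch: handle_error reads flask.request, so it is presumably
# registered as a Flask error handler, e.g. (the app variable is assumed):
#
#     app.register_error_handler(Exception, handle_error)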
def execute_metacases(self):
    """
    Check and filter the loaded metacases, then execute them and collect
    one result record per case.
    :return:
    """
    for filename, metacases in self._metacases.items():
        logger.log_info("begin execute metacases : " + filename)
        for metacase in metacases:
            result = {}
            dirs = str(filename).split(os.sep)
            result["Application"] = dirs[-1]
            result["Environment"] = ""
            result["Id"] = metacase.caseName
            result["Test-Case"] = metacase.caseName
            result["APIType"] = metacase.apiType
            result["Error"] = []
            try:
                # Todo: deal with typed cases [skipped, ...]
                metacase.execute_verify()
                result["Result"] = "Passed"
            except Exception:
                formatted_lines = traceback.format_exc().splitlines()
                msg = 'Assert fail : {0}'.format(str(formatted_lines))
                logger.log_error("execute fail: " + msg)
                result["Result"] = "Failed"
                errorInfos = []
                infos = {}
                infos["Description"] = msg
                infos["Assert"] = "xxx"
                infos["Screenshot"] = {
                    "Name": "Screenshot-1.png",
                    "Url": "#"
                }
                errorInfos.append(infos)
                result["Error"] = errorInfos
            finally:
                self.exec_result.append(result)
def request(self, method: str, url: str, **kwargs):
    """
    :rtype: object
    """
    if not kwargs.get('headers', None):
        kwargs.setdefault('headers', header)
    self.meta_data = {}
    kwargs.setdefault('timeout', 10)
    if method.lower() not in allow_http_method:
        logger.log_error(sysvalidate(self, Err_Http_02))
        raise HttpMethodError(sysvalidate(self, Err_Http_02))
    url = self.__build_url(url)
    self.meta_data['url'] = url
    try:
        response = requests.Session.request(self, method=method, url=url, **kwargs)
        self.meta_data['total_seconds'] = response.elapsed.total_seconds()
        self.meta_data['status_code'] = response.status_code
        self.meta_data['content-length'] = response.headers.get('Content-Length', 0)
        logger.log_info('Request URL: {}, status code: {}, response time: {}s, content length: {}'.format(
            self.meta_data['url'],
            self.meta_data['status_code'],
            self.meta_data['total_seconds'],
            self.meta_data['content-length']
        ))
    except Timeout:
        logger.log_error(sysvalidate(self, Err_Http_03))
        raise
    except (MissingSchema, InvalidSchema, InvalidURL) as e:
        logger.log_error(repr(e))
        raise
    except RequestException as e:
        logger.log_error(repr(e))
        raise
    except Exception as e:
        logger.log_error(repr(e))
        raise
    return response
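# Usage sketch (the subclass name HttpSession is assumed; the snippet above
# only shows its request() override on top of requests.Session):
#
#     session = HttpSession()
#     session.base_url = 'http://api.example.com'   # consumed by __build_url
#     response = session.request('get', '/health')  # validated against allow_http_method
#     print(session.meta_data['status_code'], session.meta_data['total_seconds'])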
def __attach_to_networks(self):
    try:
        new_model_path = self.train_stats['finalModel']
        log_debug('Before Update', token_id=self.training_token,
                  prefix=self.common_prefix)
        log_debug('networks id : {0}'.format(id(self.network_dict)),
                  token_id=self.training_token, prefix=self.common_prefix)
        # Drop the stale in-memory network before attaching the new one.
        replace_network = self.dataset_name in self.network_dict
        if replace_network:
            del self.network_dict[self.dataset_name]
            gc.collect()
        self.network_dict.update({self.dataset_name: self.trained_model})
        new_model_file = os.path.basename(new_model_path)
        dest_folder = os.path.dirname(self.final_model_path)
        shutil.copy2(new_model_path, dest_folder)
        log_debug(self.network_dict, token_id=self.training_token,
                  prefix=self.common_prefix)
        if replace_network:
            # Swap the model file on disk: rename the old '.h5' to '.bak',
            # move the new file into place, then drop the backup.
            os.rename(self.final_model_path, self.final_model_path[:-3] + '.bak')
            os.rename(dest_folder + '/' + new_model_file, self.final_model_path)
            os.remove(self.final_model_path[:-3] + '.bak')
        else:
            os.rename(dest_folder + '/' + new_model_file, self.final_model_path)
        return True
    except Exception:
        log_error('Network replacement failed.', token_id=self.training_token,
                  prefix=self.common_prefix)
        log_error(traceback.format_exc(), token_id=self.training_token,
                  prefix=self.common_prefix)
        return False
def request(url, headers, postData=None, method="GET"):
    """
    :param url:
    :param headers:
    :param postData:
    :param method:
    :return:
    """
    if headers is None:
        headers = {}
    try:
        if method == 'GET':
            if postData is not None:
                url = getUrlWithParamters(url, postData)
            logger.log_info("Get request URL is : " + url)
            req = urllib.request.Request(url, None, headers)
        elif method == 'POST':
            data = json.loads(postData)
            formdata = urllib.parse.urlencode(data).encode("utf-8")
            req = urllib.request.Request(url, data=formdata, headers=headers)
        else:
            raise urllib.error.URLError("method not defined.")
        resp = urllib.request.urlopen(req)
        if resp.info().get('Content-Encoding') == 'gzip':
            # gzip.decompress() takes the raw response bytes directly.
            r = gzip.decompress(resp.read()).decode('utf-8')
        else:
            # Decode with utf-8 so responses containing Chinese render correctly.
            r = str(resp.read(), 'utf-8')
        if r.startswith("jsonpCallback"):
            r = jsonp_2_json(r)
        return r, 200
    except Exception as e:
        logger.log_error("http tools request fail: " + str(e))
        # HTTP status codes such as 404, 401, 500, 501 raise urllib.error.HTTPError.
        return None, 450
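# Usage sketch for the helper above (endpoint and payload are illustrative):
# GET appends postData as query parameters, POST form-encodes a JSON string.
def _demo_http_tools_request():
    body, status = request('http://api.example.com/login',
                           headers={'Accept': 'application/json'},
                           postData='{"user": "demo", "pwd": "demo"}',
                           method='POST')
    return status == 200, body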
def verify_response(self, verification_dict):
    """
    Test-case assertion.
    demo: a = {'assertType': 'assertIsNone',
               'assert_list': ['$.data.token'],
               'msg': 'wrong token value'}
          data = {'data': {'token': 'None'}}
          res = ResponseParse(data).verify_response(a)
    :return (bool, msg)
    """
    assert_list = verification_dict['assert_list']
    for index, data in enumerate(assert_list):
        if '$' in str(data):
            # Resolve JSONPath expressions against the response before asserting.
            assert_list[index] = jsonpath.jsonpath(self.response, data)[0]
    try:
        result = Asserts(verification_dict).result
        return result
    except TypeError:
        logger.log_error('Assertion method {} does not exist'.format(
            verification_dict['assertType']))
        return False
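# The '$'-substitution step above, illustrated: any assert_list entry that
# contains '$' is treated as a JSONPath and replaced by its first match
# before Asserts runs (values here are made up):
#
#     rp = ResponseParse({'data': {'token': 'abc'}})
#     ok = rp.verify_response({'assertType': 'assertIsNotNone',
#                              'assert_list': ['$.data.token'],
#                              'msg': 'token missing'})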
def call_api(self):
    # only http and https are supported
    if self.apiType in supportProtocols:
        url = self.apiType + "://" + self.uri
        logger.log_info("url : " + url)
        logger.log_info("method : " + self.method)
        # request(url, headers, postData=None, method="GET")
        if self.method == 'POST':
            postdata = self.body
        else:
            postdata = self.params
        logger.log_info("request data : " + str(postdata))
        self.response, self.response_code = HttpTools.request(
            url, postData=postdata, headers=self.headers, method=self.method)
        logger.log_info("API response data: {0}".format(self.response))
    else:
        logger.log_error("Unsupported API type (only http/https).")
                    action='store_true', help='run VCS without UI display')
parser.add_argument('--clposition',
                    help='position of counting line (options: top, bottom, '
                         'left, right | default: bottom)')
args = parser.parse_args()

# capture traffic scene video
video = int(args.video) if args.iscam else args.video
cap = cv2.VideoCapture(video)
# cap = cv2.VideoCapture(r'rtsp://strmr5.sha.*****.gov:****/rtplive/****************')  # real-time video signal
if not cap.isOpened():
    log_error('Error capturing video. Invalid source.',
              {'event': 'VIDEO_CAPTURE'})
ret, frame = cap.read()
f_height, f_width, _ = frame.shape

di = 10 if args.di is None else args.di
mcdf = 2 if args.mcdf is None else args.mcdf
mctf = 3 if args.mctf is None else args.mctf
detector = 'yolo' if args.detector is None else args.detector
tracker = 'csrt' if args.tracker is None else args.tracker

# create detection region of interest polygon
if args.droi is None:
    droi = [(0, 0), (f_width, 0), (f_width, f_height), (0, f_height)]
def __retrain(self, model_path):
    has_error = False
    try:
        log_info("============== Re-training {0} ==============".format(
            self.training_token), token_id=self.training_token,
            prefix=self.common_prefix)
        model = BiLSTM(params=self.params, fn_log_info=log_info,
                       fn_log_debug=log_debug,
                       training_token=self.training_token,
                       training_prefix=self.common_prefix)
        log_debug("Loading model %s" % model_path,
                  token_id=self.training_token, prefix=self.common_prefix)
        model.loadModel(model_path, "")
        log_debug("Loaded model %s" % model_path,
                  token_id=self.training_token, prefix=self.common_prefix)
    except Exception:
        log_error('Re-training: model loading failed.',
                  token_id=self.training_token, prefix=self.common_prefix)
        log_error(traceback.format_exc(), token_id=self.training_token,
                  prefix=self.common_prefix)
        has_error = True
    else:
        try:
            log_debug(model.mappings.keys())
            with DataGenerator(dataset_name=self.dataset_name,
                               mappings=model.mappings,
                               cols=self.data_columns) as generator:
                log_debug("Train Sentences: %d" % len(generator.data['trainMatrix']))
                log_debug("Dev Sentences: %d" % len(generator.data['devMatrix']))
                log_debug("Test Sentences: %d" % len(generator.data['testMatrix']))
                model.setTrainDataset(generator.data, self.label_key)
                model.modelSavePath = self.transient_model_path
                self.train_stats = model.evaluate()
                log_debug("%s" % self.train_stats,
                          token_id=self.training_token,
                          prefix=self.common_prefix)
                self.trained_model = model
        except Exception:
            log_error('Re-training: failed.', token_id=self.training_token,
                      prefix=self.common_prefix)
            log_error(traceback.format_exc(), token_id=self.training_token,
                      prefix=self.common_prefix)
            has_error = True
        finally:
            gc.collect()
    # Return outside any finally block so exceptions are never silently swallowed.
    return has_error
def __train(self):
    has_error = False
    try:
        log_debug("Dataset: %s" % self.dataset_name,
                  token_id=self.training_token, prefix=self.common_prefix)
        log_debug("Label key: %s" % self.label_key,
                  token_id=self.training_token, prefix=self.common_prefix)
        log_info("============== Training: {0} ==============".format(
            self.training_token), token_id=self.training_token,
            prefix=self.common_prefix)

        ######################################################
        #
        # The training of the network starts here
        #
        ######################################################

        model = BiLSTM(params=self.params, fn_log_info=log_info,
                       fn_log_debug=log_debug,
                       training_token=self.training_token,
                       training_prefix=self.common_prefix)
        with EmbeddingsAndDataGenerator(
                embeddings_path=self.embeddings_path,
                dataset_file=self.dataset_files,
                reuse_embedding=self.reuse_embeddings) as generator:
            log_debug(generator.data['mappings'].keys(),
                      token_id=self.training_token, prefix=self.common_prefix)
            log_info("Train Sentences: %d" % len(generator.data['trainMatrix']),
                     token_id=self.training_token, prefix=self.common_prefix)
            log_info("Dev Sentences: %d" % len(generator.data['devMatrix']),
                     token_id=self.training_token, prefix=self.common_prefix)
            log_info("Test Sentences: %d" % len(generator.data['testMatrix']),
                     token_id=self.training_token, prefix=self.common_prefix)
            model.setMappings(generator.embeddings, generator.data['mappings'])
            model.setTrainDataset(generator.data, self.label_key)
            model.verboseBuild = True
            model.modelSavePath = self.transient_model_path
            self.train_stats = model.evaluate()
            log_debug("%s" % self.train_stats, token_id=self.training_token,
                      prefix=self.common_prefix)
            self.trained_model = model
    except Exception:
        log_error('Training: failed.', token_id=self.training_token,
                  prefix=self.common_prefix)
        log_error(traceback.format_exc(), token_id=self.training_token,
                  prefix=self.common_prefix)
        has_error = True
    finally:
        gc.collect()
    return has_error
def train(self, train_request, training_config):
    self.training_token = str(uuid.uuid4())
    self.prefix = training_config['common_prefix']
    if not self._lock.acquire(False):
        return False, "Busy: a training run is already in progress."
    # Defaults in case preparation fails before training starts.
    has_error, result_msg = True, "Training failed before it could start."
    try:
        dataset_name = self.__convert_and_store_train_docs_as_IOB(
            train_request, training_config['train_document_source'])
        training_config.update({
            'token': self.training_token,
            'dataset_name': dataset_name
        })
        self.train_file_options = {
            'train_document_source':
                training_config['train_document_source'].format(dataset_name),
            'train_document_path':
                training_config['train_document_path'].format(dataset_name),
            'train_history_file':
                training_config['train_history_file'].format(dataset_name),
            'incremental_training': training_config['is_incremental'],
            'new_dataset': not os.path.exists("models/{0}.{1}".format(
                dataset_name, 'h5')),
            're_run_old_data': False,
            're_run_offset': 0,
            'training_document_threshold':
                training_config['training_document_threshold']
        }
        is_trainable, result_msg = self.__can_train()
        if is_trainable:
            # Todo: try to run the trainer in a with block; that was not
            # successful so far.
            tr = InceptionModelTrainer(training_config)
            has_error, result_msg = tr.train_model()
            if has_error:
                log_error(msg=result_msg, token_id=self.training_token,
                          prefix=self.prefix)
            else:
                log_info(msg=result_msg, token_id=self.training_token,
                         prefix=self.prefix)
        else:
            return is_trainable, result_msg
    except Exception:
        log_error(msg="!!!!! Ex: {0}".format(traceback.format_exc()),
                  token_id=self.training_token, prefix=self.prefix)
    finally:
        self._lock.release()
    return has_error, result_msg
def __convert_and_store_train_docs_as_IOB(self, train_request,
                                          train_doc_source_path):
    train_request = parse_train_request(train_request)
    type_system = load_typesystem(train_request.typeSystem)
    dataset_name = '{0}_{1}_{2}_{3}'.format(train_request.projectId,
                                            train_request.userId,
                                            train_request.layer,
                                            train_request.feature)
    doc_dir = train_doc_source_path.format(dataset_name)
    # Todo: need to use the train_doc_target_path param later.
    # Todo: at some later point some of the files should be added to the dev
    #       set, with an empty test set; for now we use constant dev and train
    #       sets. Check whether an empty test set is possible.
    if not os.path.exists(doc_dir):
        os.makedirs(doc_dir)
    for doc in train_request.documents:
        # Initialize so the except block can log these even on early failures.
        text = file_name = output = None
        try:
            text, file_name = doc.documentText, doc.documentId
            file_path = '{0}/{1}'.format(doc_dir, file_name)
            if os.path.exists(file_path):
                log_info(msg='file: {0} already exists'.format(file_path),
                         token_id=self.training_token, prefix=self.prefix)
                continue
            assert len(text) > 0
            cas = load_cas_from_xmi(
                text.encode('utf-8').decode('unicode-escape'),
                typesystem=type_system)
            output = extract_IOB_from_CAS(cas, train_request.feature,
                                          train_request.layer)
            with open(file_path, 'w') as op:
                for (word, token) in output:
                    op.write('{0} {1}\n'.format(word, token))
                op.flush()
        except Exception:
            log_error(msg="=" * 120, token_id=self.training_token,
                      prefix=self.prefix)
            log_error(msg="Exception in convert_and_store_train_docs_as_IOB() "
                          "for CAS file: {0}".format(file_name),
                      token_id=self.training_token, prefix=self.prefix)
            log_error(msg="=" * 120, token_id=self.training_token,
                      prefix=self.prefix)
            log_error(msg=text, token_id=self.training_token,
                      prefix=self.prefix)
            log_error(msg="=" * 120, token_id=self.training_token,
                      prefix=self.prefix)
            log_error(msg=output, token_id=self.training_token,
                      prefix=self.prefix)
            log_error(msg="=" * 120, token_id=self.training_token,
                      prefix=self.prefix)
            log_error(msg=traceback.format_exc(), token_id=self.training_token,
                      prefix=self.prefix)
            log_error(msg="=" * 120, token_id=self.training_token,
                      prefix=self.prefix)
    return dataset_name
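# Layout of the IOB files written above: one "token label" pair per line,
# separated by a single space (the tokens and labels shown are invented):
#
#     Barack B-PER
#     Obama I-PER
#     visited O
#     Paris B-LOC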
def run():
    '''
    Loads environment variables, initializes counter class and runs counting loop.
    '''
    # capture traffic scene video
    is_cam = ast.literal_eval(os.getenv('IS_CAM'))
    video = int(os.getenv('VIDEO')) if is_cam else os.getenv('VIDEO')
    cap = cv2.VideoCapture(video)
    if not cap.isOpened():
        log_error('Error capturing video. Invalid source.', {
            'cat': 'VIDEO_CAPTURE',
            'source': video
        })
    ret, frame = cap.read()
    f_height, f_width, _ = frame.shape

    detection_interval = int(os.getenv('DI'))
    mcdf = int(os.getenv('MCDF'))
    mctf = int(os.getenv('MCTF'))
    detector = os.getenv('DETECTOR')
    tracker = os.getenv('TRACKER')

    # create detection region of interest polygon
    use_droi = ast.literal_eval(os.getenv('USE_DROI'))
    droi = ast.literal_eval(os.getenv('DROI')) \
        if use_droi \
        else [(0, 0), (f_width, 0), (f_width, f_height), (0, f_height)]
    show_droi = ast.literal_eval(os.getenv('SHOW_DROI'))
    counting_line_position = os.getenv('COUNTING_LINE_POSITION')

    vehicle_counter = VehicleCounter(frame, detector, tracker, droi,
                                     show_droi, mcdf, mctf,
                                     detection_interval,
                                     counting_line_position)

    record = ast.literal_eval(os.getenv('RECORD'))
    headless = ast.literal_eval(os.getenv('HEADLESS'))

    if record:
        # initialize video object to record counting
        output_video = cv2.VideoWriter(os.getenv('OUTPUT_VIDEO_PATH'),
                                       cv2.VideoWriter_fourcc(*'MJPG'),
                                       30,
                                       (f_width, f_height))

    log_info('Processing started...', {
        'cat': 'COUNT_PROCESS',
        'counter_config': {
            'di': detection_interval,
            'mcdf': mcdf,
            'mctf': mctf,
            'detector': detector,
            'tracker': tracker,
            'use_droi': use_droi,
            'droi': droi,
            'show_droi': show_droi,
            'clp': counting_line_position
        }
    })

    # main loop
    while is_cam or cap.get(cv2.CAP_PROP_POS_FRAMES) + 1 < cap.get(cv2.CAP_PROP_FRAME_COUNT):
        if ret:
            vehicle_counter.count(frame)
            output_frame = vehicle_counter.visualize()

            if record:
                output_video.write(output_frame)

            if not headless:
                debug_window_size = ast.literal_eval(os.getenv('DEBUG_WINDOW_SIZE'))
                resized_frame = cv2.resize(output_frame, debug_window_size)
                cv2.imshow('Debug', resized_frame)

                k = cv2.waitKey(1) & 0xFF
                if k == ord('s'):  # save frame if 's' key is pressed
                    take_screenshot(output_frame)
                if k == ord('q'):  # end video loop if 'q' key is pressed
                    log_info('Processing stopped.', {'cat': 'COUNT_PROCESS'})
                    break

        ret, frame = cap.read()

    # end capture, close window, close log file and video object if any
    cap.release()
    if not headless:
        cv2.destroyAllWindows()
    if record:
        output_video.release()
    log_info('Processing ended.', {'cat': 'COUNT_PROCESS'})
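# Configuration sketch: the environment variables run() reads, with
# illustrative values (the paths and window size are placeholders, not project
# defaults; the numeric values mirror the CLI defaults used elsewhere).
def _demo_env_and_run():
    os.environ.update({
        'IS_CAM': 'False', 'VIDEO': 'traffic.mp4',
        'DI': '10', 'MCDF': '2', 'MCTF': '3',
        'DETECTOR': 'yolo', 'TRACKER': 'csrt',
        'USE_DROI': 'False', 'DROI': '[]', 'SHOW_DROI': 'False',
        'COUNTING_LINE_POSITION': 'bottom',
        'RECORD': 'False', 'HEADLESS': 'True',
        'DEBUG_WINDOW_SIZE': '(858, 480)',
        'OUTPUT_VIDEO_PATH': 'output.avi',
    })
    run()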
def __init__(self, jsondata):
    self.data = None  # fallback so the attribute always exists
    try:
        self.data = json_loads_byteified(jsondata)
    except JSONDecodeError as e:
        logger.log_error("JsonVerifyUtils init json data fail: " + str(e))