def start_total_test():
    """Prepare a fresh test run.

    Resets per-thread case state and the global pass/fail counters, wipes the
    artifact folders left by the previous run, records the run start time, and
    emits an initial (placeholder) HTML report.
    """
    tl = env.threadlocal
    tl.CASE_START_TIME = ""
    tl.CASE_STOP_TIME = ""
    tl.CASE_NAME = ""
    tl.CASE_PASS = True
    tl.CASE_WARNINGS = 0
    tl.MODULE_NAME = ""
    tl.BROWSER = None
    tl.TESTING_BROWSER = ""
    tl.TESTING_BROWSERS = ""
    env.TOTAL_TESTCASE_PASS = 0
    env.TOTAL_TESTCASE_FAIL = 0
    # Clear in place so other holders of this list see the reset.
    env.HTMLREPORT_TESTCASES[:] = []
    # Remove artifacts from any previous run.
    for folder in ("testcase", "screenshots"):
        common.delete_file_or_folder(os.path.join(env.RESULT_PATH, "result", folder))
    env.TOTAL_START_TIME = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print(">>>>>> [%s] => start testing...... <<<<<<" % (env.TOTAL_START_TIME,))
    # Placeholder report: only the start time is known at this point.
    htmlreport.generate_html_report(
        [env.TOTAL_START_TIME, "N/A", "N/A", "N/A", "N/A", "N/A"], [])
def finish_total_test():
    """Finalize the test run.

    Prints the overall summary and version info, writes the final HTML report
    (with and without the countdown), archives it into the report repository,
    regenerates the history page, and resets the global counters for the next
    run.
    """
    env.TOTAL_STOP_TIME = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    time_fmt = "%Y-%m-%d %H:%M:%S"
    # Parse the two timestamps once; the original re-ran strptime three times.
    duration = (datetime.datetime.strptime(env.TOTAL_STOP_TIME, time_fmt)
                - datetime.datetime.strptime(env.TOTAL_START_TIME, time_fmt))
    total_cases = env.TOTAL_TESTCASE_PASS + env.TOTAL_TESTCASE_FAIL
    print(">>>>>> [%s] => [%s], duration [%s], case [%s], pass [%s], fail [%s] <<<<<<" % (
        env.TOTAL_START_TIME,
        env.TOTAL_STOP_TIME,
        duration,
        total_cases,
        env.TOTAL_TESTCASE_PASS,
        env.TOTAL_TESTCASE_FAIL,
    ))
    print(">>>>>> [%s] => [%s]" % (env.TOTAL_START_TIME, common.get_version_info()))
    # Build the summary row once; both report generations consume the same data.
    summary = [env.TOTAL_START_TIME, env.TOTAL_STOP_TIME, duration,
               total_cases, env.TOTAL_TESTCASE_PASS, env.TOTAL_TESTCASE_FAIL]
    htmlreport.generate_html_report(summary, env.HTMLREPORT_TESTCASES, countdown=False)
    htmlreport.save_current_report_to_repository()
    htmlreport.generate_report_history()
    htmlreport.generate_html_report(summary, env.HTMLREPORT_TESTCASES, countdown=True)
    # Reset global state for the next run (clear the shared list in place).
    env.TOTAL_TESTCASE_PASS = 0
    env.TOTAL_TESTCASE_FAIL = 0
    env.HTMLREPORT_TESTCASES[:] = []
    print("\n")
def start_total_test():
    """Initialize state for a full test run: reset counters, delete old result
    artifacts, stamp the start time, and write the initial HTML report."""
    # NOTE(review): this duplicates an earlier definition of start_total_test
    # in this file; the later definition wins at import time — confirm whether
    # the duplicate is intentional.
    env.threadlocal.CASE_START_TIME = ""
    env.threadlocal.CASE_STOP_TIME = ""
    env.threadlocal.CASE_NAME = ""
    env.threadlocal.CASE_PASS = True
    env.threadlocal.CASE_WARNINGS = 0
    env.threadlocal.MODULE_NAME = ""
    env.threadlocal.BROWSER = None
    env.threadlocal.TESTING_BROWSER = ""
    env.threadlocal.TESTING_BROWSERS = ""
    env.TOTAL_TESTCASE_PASS = 0
    env.TOTAL_TESTCASE_FAIL = 0
    env.HTMLREPORT_TESTCASES[:] = []
    result_base = os.path.join(env.RESULT_PATH, "result")
    common.delete_file_or_folder(os.path.join(result_base, "testcase"))
    common.delete_file_or_folder(os.path.join(result_base, "screenshots"))
    env.TOTAL_START_TIME = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print(">>>>>> [%s] => start testing...... <<<<<<" % (env.TOTAL_START_TIME,))
    # Only the start time is known yet; the remaining columns are placeholders.
    htmlreport.generate_html_report(
        [env.TOTAL_START_TIME, "N/A", "N/A", "N/A", "N/A", "N/A"], [])
def finish_total_test():
    """Print the run summary, emit the final HTML reports and history, then
    reset global counters.

    NOTE(review): this duplicates an earlier definition of finish_total_test
    in this file; the later definition wins at import time — confirm whether
    the duplicate is intentional.
    """
    env.TOTAL_STOP_TIME = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    fmt = "%Y-%m-%d %H:%M:%S"
    # Compute the run duration once instead of re-parsing the timestamps
    # three separate times as the original did.
    run_duration = (datetime.datetime.strptime(env.TOTAL_STOP_TIME, fmt)
                    - datetime.datetime.strptime(env.TOTAL_START_TIME, fmt))
    case_total = env.TOTAL_TESTCASE_PASS + env.TOTAL_TESTCASE_FAIL
    print(">>>>>> [%s] => [%s], duration [%s], case [%s], pass [%s], fail [%s] <<<<<<" % (
        env.TOTAL_START_TIME,
        env.TOTAL_STOP_TIME,
        run_duration,
        case_total,
        env.TOTAL_TESTCASE_PASS,
        env.TOTAL_TESTCASE_FAIL,
    ))
    print(">>>>>> [%s] => [%s]" % (env.TOTAL_START_TIME, common.get_version_info()))
    # One summary row shared by both report generations.
    summary_row = [env.TOTAL_START_TIME, env.TOTAL_STOP_TIME, run_duration,
                   case_total, env.TOTAL_TESTCASE_PASS, env.TOTAL_TESTCASE_FAIL]
    htmlreport.generate_html_report(summary_row, env.HTMLREPORT_TESTCASES, countdown=False)
    htmlreport.save_current_report_to_repository()
    htmlreport.generate_report_history()
    htmlreport.generate_html_report(summary_row, env.HTMLREPORT_TESTCASES, countdown=True)
    env.TOTAL_TESTCASE_PASS = 0
    env.TOTAL_TESTCASE_FAIL = 0
    env.HTMLREPORT_TESTCASES[:] = []
    print("\n")
def stop_test():
    """Record the result of the test case that just finished.

    Under env.THREAD_LOCK: stamps the case stop time, prints a pass/fail line,
    bumps the global counters, appends a row to the HTML report data, and
    regenerates the report. Finally resets the per-thread pass/warning flags.
    """
    # Acquire BEFORE the try block: if acquire() itself fails, the finally
    # clause must not attempt to release a lock this thread never held.
    env.THREAD_LOCK.acquire()
    try:
        tl = env.threadlocal
        tl.CASE_STOP_TIME = datetime.datetime.now().replace(microsecond=0)
        env.TOTAL_STOP_TIME = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        if tl.CASE_WARNINGS > 0:
            warning_message = ", has [%s] warning(s)!" % tl.CASE_WARNINGS
        else:
            warning_message = ""
        case_duration = tl.CASE_STOP_TIME - tl.CASE_START_TIME
        # Branch only on what actually differs between pass and fail;
        # the original duplicated the whole report-row construction.
        if tl.CASE_PASS:
            verdict = "Pass"
            status_cell = '<td>Pass</td>'
            env.TOTAL_TESTCASE_PASS = env.TOTAL_TESTCASE_PASS + 1
        else:
            verdict = "Fail"
            status_cell = ('<td class="tfail"><a href="screenshots/%s">Fail</a></td>'
                           % env.HTMLREPORT_SCREENSHOT_NAME)
            env.TOTAL_TESTCASE_FAIL = env.TOTAL_TESTCASE_FAIL + 1
        message = "%s [%s] => [%s] [%s] [%s] [%s]%s" % (
            common.stamp_datetime(), verdict, case_duration,
            tl.MODULE_NAME, tl.CASE_NAME, tl.TESTING_BROWSER, warning_message)
        if not tl.CASE_PASS:
            message += " :( "  # original fail line carries this trailing marker
        print(message)
        env.HTMLREPORT_TESTCASES.append([
            "%s => %s" % (tl.CASE_START_TIME.strftime("%m-%d %H:%M:%S"),
                          tl.CASE_STOP_TIME.strftime("%m-%d %H:%M:%S")),
            '<a href="testcase/%s.log">[%s] - %s</a>' % (
                tl.CASE_NAME, tl.MODULE_NAME, tl.CASE_NAME),
            case_duration,
            tl.TESTING_BROWSER,
            status_cell,
        ])
        fmt = "%Y-%m-%d %H:%M:%S"
        run_duration = (datetime.datetime.strptime(env.TOTAL_STOP_TIME, fmt)
                        - datetime.datetime.strptime(env.TOTAL_START_TIME, fmt))
        htmlreport.generate_html_report(
            [env.TOTAL_START_TIME, env.TOTAL_STOP_TIME, run_duration,
             env.TOTAL_TESTCASE_PASS + env.TOTAL_TESTCASE_FAIL,
             env.TOTAL_TESTCASE_PASS, env.TOTAL_TESTCASE_FAIL],
            env.HTMLREPORT_TESTCASES)
        # Re-arm the per-thread flags for the next case.
        tl.CASE_PASS = True
        tl.CASE_WARNINGS = 0
    finally:
        env.THREAD_LOCK.release()
def stop_test():
    """Record the just-finished test case's verdict under env.THREAD_LOCK.

    Stamps stop times, prints the pass/fail line, updates the global counters,
    appends the case row to the HTML report data, regenerates the report, and
    resets the per-thread flags.

    NOTE(review): this duplicates an earlier definition of stop_test in this
    file; the later definition wins at import time — confirm whether the
    duplicate is intentional.
    """
    # Acquire outside the try so a failed acquire() cannot reach the
    # release() in the finally clause.
    env.THREAD_LOCK.acquire()
    try:
        env.threadlocal.CASE_STOP_TIME = datetime.datetime.now().replace(microsecond=0)
        env.TOTAL_STOP_TIME = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        if env.threadlocal.CASE_WARNINGS > 0:
            warning_message = ", has [%s] warning(s)!" % env.threadlocal.CASE_WARNINGS
        else:
            warning_message = ""
        elapsed = env.threadlocal.CASE_STOP_TIME - env.threadlocal.CASE_START_TIME
        # Compute the branch-specific pieces once; the original duplicated the
        # entire print + append in both branches.
        if env.threadlocal.CASE_PASS:
            label, tail = "Pass", ""
            result_cell = '<td>Pass</td>'
            env.TOTAL_TESTCASE_PASS = env.TOTAL_TESTCASE_PASS + 1
        else:
            label, tail = "Fail", " :( "
            result_cell = ('<td class="tfail"><a href="screenshots/%s">Fail</a></td>'
                           % env.HTMLREPORT_SCREENSHOT_NAME)
            env.TOTAL_TESTCASE_FAIL = env.TOTAL_TESTCASE_FAIL + 1
        print(u"%s [%s] => [%s] [%s] [%s] [%s]%s%s" % (
            common.stamp_datetime(), label, elapsed,
            env.threadlocal.MODULE_NAME, env.threadlocal.CASE_NAME,
            env.threadlocal.TESTING_BROWSER, warning_message, tail))
        env.HTMLREPORT_TESTCASES.append([
            "%s => %s" % (env.threadlocal.CASE_START_TIME.strftime("%m-%d %H:%M:%S"),
                          env.threadlocal.CASE_STOP_TIME.strftime("%m-%d %H:%M:%S")),
            '<a href="testcase/%s.log">[%s] - %s</a>' % (
                env.threadlocal.CASE_NAME, env.threadlocal.MODULE_NAME,
                env.threadlocal.CASE_NAME),
            elapsed,
            env.threadlocal.TESTING_BROWSER,
            result_cell,
        ])
        fmt = "%Y-%m-%d %H:%M:%S"
        total_elapsed = (datetime.datetime.strptime(env.TOTAL_STOP_TIME, fmt)
                         - datetime.datetime.strptime(env.TOTAL_START_TIME, fmt))
        htmlreport.generate_html_report(
            [env.TOTAL_START_TIME, env.TOTAL_STOP_TIME, total_elapsed,
             env.TOTAL_TESTCASE_PASS + env.TOTAL_TESTCASE_FAIL,
             env.TOTAL_TESTCASE_PASS, env.TOTAL_TESTCASE_FAIL],
            env.HTMLREPORT_TESTCASES)
        # Reset per-thread state for the next case.
        env.threadlocal.CASE_PASS = True
        env.threadlocal.CASE_WARNINGS = 0
    finally:
        env.THREAD_LOCK.release()