Example #1
def execute_iterative_parallel_testcases(system_list,
                                         testcase_list,
                                         suite_repository,
                                         data_repository,
                                         from_project,
                                         tc_parallel=True,
                                         auto_defects=False):
    """Takes a list of systems as input and executes the testcases in parallel by
    creating separate process of testcase_driver for each of these systems """

    jobs_list = []
    output_q = None

    for system in system_list:
        target_module = sequential_testcase_driver.main

        tc_args_dict = OrderedDict([("testcase_list", testcase_list),
                                    ("suite_repository", suite_repository),
                                    ("data_repository", data_repository),
                                    ("from_project", from_project),
                                    ("auto_defects", auto_defects),
                                    ("system", system),
                                    ("tc_parallel", tc_parallel),
                                    ("output_q", output_q)])

        process, jobs_list, output_q = create_and_start_process_with_queue(
            target_module, tc_args_dict, jobs_list, output_q)

    print_debug("process: {0}".format(process))
    for job in jobs_list:
        job.join()

    result_list = get_results_from_queue(output_q)

    tc_status_list = []
    tc_name_list = []
    tc_impact_list = []
    tc_duration_list = []
    # Get the junit object of each testcase, extract the information from it
    # and combine with the testsuite junit object
    tc_junit_list = []

    # Suite results
    for result in result_list:
        # Case results
        for val in range(len(result[0])):
            tc_status_list.append(result[0][val])
            tc_name_list.append(result[1])
            tc_impact_list.append(result[2][val])
            tc_duration_list.append(result[3][val])
            tc_junit_list.append(result[4][val])
    # parallel testcases generate multiple testcase junit result files;
    # each file logs the result for one testcase and is not integrated
    # update testsuite junit result file with individual testcase result files
    update_ts_junit_resultfile(suite_repository['wt_junit_object'],
                               tc_junit_list)
    testsuite_status = Utils.testcase_Utils.compute_status_using_impact(
        tc_status_list, tc_impact_list)
    return testsuite_status
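
A note on the helpers used above: create_and_start_process_with_queue and get_results_from_queue are framework utilities that fan the work out to one process per system and collect the result tuples afterwards. Their actual implementation is not shown here; a minimal sketch of the same pattern on top of Python's multiprocessing module, under that assumption, could look like this:

# Illustrative sketch only -- not the framework's actual helper implementation.
from multiprocessing import Process, Queue

def create_and_start_process_with_queue(target_module, args_dict, jobs_list, output_q):
    """Create the shared output queue on first use, start target_module in its
    own process with args_dict as keyword arguments, and track it in jobs_list."""
    if output_q is None:
        output_q = Queue()
    args_dict["output_q"] = output_q  # each worker puts its result tuple here
    process = Process(target=target_module, kwargs=args_dict)
    process.start()
    jobs_list.append(process)
    return process, jobs_list, output_q

def get_results_from_queue(output_q):
    """Drain every result tuple that the worker processes placed on the queue."""
    result_list = []
    while not output_q.empty():
        result_list.append(output_q.get())
    return result_list
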
Example #2
def execute_iterative_parallel(step_list, data_repository, tc_status,
                               system_list):
    """Takes a list of steps as input and executes them in parallel by
    creating separate process of step_driver for each of these steps """

    jobs_list = []
    output_q = None
    for system_name in system_list:
        target_module = testcase_steps_execution.main
        args_dict = OrderedDict([
            ("step_list", step_list),
            ("data_repository", data_repository),
            ("system_name", system_name),
            ("kw_parallel", True),
            ("output_q", output_q),
        ])

        process, jobs_list, output_q = create_and_start_process_with_queue(
            target_module, args_dict, jobs_list, output_q)

    print_debug("process: {0}".format(process))
    for job in jobs_list:
        job.join()

    result_list = get_results_from_queue(output_q)

    system_status_list = []
    system_resultfile_list = []
    step_impact_list = []
    tc_junit_list = []

    for result in result_list:
        step_status_list = result[0]
        kw_resultfile_list = result[1]
        system_name = result[2]
        step_impact_list = result[3]
        tc_junit_list.append(result[4])
        system_status = testcase_Utils.compute_status_using_impact(
            step_status_list, step_impact_list)
        system_resultfile = testcase_Utils.compute_system_resultfile(
            kw_resultfile_list, data_repository['wt_resultsdir'], system_name)
        system_status_list.append(system_status)
        system_resultfile_list.append(system_resultfile)

    tc_status = Utils.testcase_Utils.compute_status_without_impact(
        system_status_list)
    # parallel keywords generate multiple keyword junit result files;
    # each file logs the result for one keyword and is not integrated
    # update testcase junit result file with individual keyword result files
    data_repository['wt_junit_object'] = update_tc_junit_resultfile(
        data_repository['wt_junit_object'], tc_junit_list,
        data_repository['wt_tc_timestamp'])
    print_debug("Updating Testcase result file...")
    Utils.testcase_Utils.append_result_files(data_repository['wt_resultfile'],
                                             system_resultfile_list)

    return tc_status
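
The unpacking loop above assumes that every worker (here testcase_steps_execution.main) puts a five-element tuple on output_q in the order (step_status_list, kw_resultfile_list, system_name, step_impact_list, tc_junit_object). The real worker belongs to the framework; a hypothetical producer that satisfies this queue contract, with placeholder values, would look roughly like:

# Hypothetical worker sketch -- shows only the queue contract, not real step execution.
def parallel_steps_worker(step_list, data_repository, system_name,
                          kw_parallel=True, output_q=None):
    step_status_list = [True] * len(step_list)      # placeholder per-step statuses
    kw_resultfile_list = []                         # one result file path per keyword
    step_impact_list = ["impact"] * len(step_list)  # placeholder per-step impact
    tc_junit_object = None                          # junit object built for this system
    if output_q is not None:
        # tuple order must match the consumer's result[0] .. result[4] indexing
        output_q.put((step_status_list, kw_resultfile_list, system_name,
                      step_impact_list, tc_junit_object))
    return (step_status_list, kw_resultfile_list, system_name,
            step_impact_list, tc_junit_object)
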
Example #3
def execute_parallel_testsuites(testsuite_list,
                                project_repository,
                                data_repository,
                                auto_defects=False,
                                ts_parallel=True):
    """Takes a list of suites as input and executes them in parallel by
    creating separate process of testsuite_driver for each of these suite """

    jobs_list = []
    output_q = None
    impact_dict = {"IMPACT": "Impact", "NOIMPACT": "No Impact"}
    project_error_action = project_repository['def_on_error_action']
    project_filepath = project_repository['project_filepath']
    project_dir = os.path.dirname(project_filepath)
    wp_results_execdir = project_repository['wp_results_execdir']
    wp_logs_execdir = project_repository['wp_logs_execdir']
    jiraproj = data_repository["jiraproj"]

    for testsuite in testsuite_list:
        target_module = testsuite_driver.main
        testsuite_rel_path = testsuite_utils.get_path_from_xmlfile(testsuite)
        if testsuite_rel_path is not None:
            testsuite_path = Utils.file_Utils.getAbsPath(
                testsuite_rel_path, project_dir)
        else:
            testsuite_path = str(testsuite_rel_path)
        testsuite_impact = Utils.testcase_Utils.get_impact_from_xmlfile(
            testsuite)
        data_repository['wt_ts_impact'] = testsuite_impact
        ts_onError_action = Utils.xml_Utils.get_attributevalue_from_directchildnode(
            testsuite, 'onError', 'action')
        ts_onError_action = ts_onError_action if ts_onError_action else project_error_action

        tc_args_dict = OrderedDict([("testsuite_filepath", testsuite_path),
                                    ("data_repository", data_repository),
                                    ("from_project", True),
                                    ("auto_defects", auto_defects),
                                    ("jiraproj", jiraproj),
                                    ("res_startdir", wp_results_execdir),
                                    ("logs_startdir", wp_logs_execdir),
                                    ("ts_onError_action", ts_onError_action),
                                    ("output_q", output_q),
                                    ("ts_parallel", ts_parallel)])

        process, jobs_list, output_q = create_and_start_process_with_queue(
            target_module, tc_args_dict, jobs_list, output_q)

        print_debug("process: {0}".format(process))

    for job in jobs_list:
        job.join()

    result_list = get_results_from_queue(output_q)

    ts_status_list = []
    ts_impact_list = []
    ts_timestamp_list = []
    # Get the junit object of each suite, extract the information from it
    # and combine with project junit object
    ts_junit_list = []

    for result in result_list:
        ts_status_list.append(result[0])
        ts_impact_list.append(result[1])
        ts_timestamp_list.append(result[2])
        ts_junit_list.append(result[3])

    for i in range(len(ts_junit_list)):
        ts_junit_list[i].update_attr(
            "impact", impact_dict.get(ts_impact_list[i].upper()), "ts",
            ts_timestamp_list[i])
        # onerror is not applicable for parallel execution
        ts_junit_list[i].update_attr("onerror", "N/A", "ts",
                                     ts_timestamp_list[i])

    # parallel suites generate multiple suite junit result files;
    # each file logs the result for one suite and is not integrated
    # update project junit result file with individual suite result files
    data_repository['wt_junit_object'] = update_pj_junit_resultfile(
        data_repository['wt_junit_object'], ts_junit_list)

    project_status = Utils.testcase_Utils.compute_status_using_impact(
        ts_status_list, ts_impact_list)
    return project_status
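
compute_status_using_impact aggregates the per-suite statuses while honouring their impact values: a result tagged as "noimpact" should not change the overall project status. The framework's implementation also handles additional states (errors, exceptions, skips); a simplified sketch of just the impact-weighting idea, assuming boolean or "PASS"/"FAIL" statuses, is:

# Simplified sketch of the assumed impact-weighted status aggregation.
def compute_status_using_impact_sketch(status_list, impact_list):
    overall_status = True
    for status, impact in zip(status_list, impact_list):
        if str(impact).upper() == "IMPACT":
            passed = status is True or str(status).upper() == "PASS"
            overall_status = overall_status and passed
    return overall_status
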
Example #4
def execute_parallel_testcases(testcase_list,
                               suite_repository,
                               data_repository,
                               from_project,
                               tc_parallel=True,
                               auto_defects=False,
                               iter_ts_sys=None):
    """Takes a list of testcase as input and executes them in parallel by
    creating separate process of testcase_driver for each of these testcase """

    jobs_list = []
    output_q = None
    suite = suite_repository['suite_name']
    testsuite_filepath = suite_repository['testsuite_filepath']
    suite_error_action = suite_repository['def_on_error_action']
    jiraproj = data_repository["jiraproj"]
    testsuite_dir = os.path.dirname(testsuite_filepath)

    for testcase in testcase_list:
        target_module = testcase_driver.main
        tc_rel_path = testsuite_utils.get_path_from_xmlfile(testcase)
        if tc_rel_path is not None:
            tc_path = Utils.file_Utils.getAbsPath(tc_rel_path, testsuite_dir)
        else:
            tc_path = str(tc_rel_path)
        tc_runtype = testsuite_utils.get_runtype_from_xmlfile(testcase)
        tc_impact = Utils.testcase_Utils.get_impact_from_xmlfile(testcase)
        tc_context = Utils.testcase_Utils.get_context_from_xmlfile(testcase)
        suite_step_data_file = testsuite_utils.get_data_file_at_suite_step(
            testcase, suite_repository)
        tc_onError_action = Utils.xml_Utils.get_attributevalue_from_directchildnode(
            testcase, 'onError', 'action')
        tc_onError_action = tc_onError_action if tc_onError_action else suite_error_action
        if suite_step_data_file is not None:
            data_file = Utils.file_Utils.getAbsPath(suite_step_data_file,
                                                    testsuite_dir)
            data_repository[tc_path] = data_file

        data_repository['wt_tc_impact'] = tc_impact

        # instead of using args_list, we use an ordered dict for the tc args
        # because initially output_q will be None and must be replaced once a
        # new queue is created, so the position of the arguments has to be
        # maintained before calling the testcase driver main function.

        tc_args_dict = OrderedDict([("tc_path", tc_path),
                                    ("data_repository", data_repository),
                                    ("tc_context", tc_context),
                                    ("tc_runtype", tc_runtype),
                                    ("tc_parallel", tc_parallel),
                                    ("auto_defects", auto_defects),
                                    ("suite", suite),
                                    ("tc_onError_action", tc_onError_action),
                                    ("iter_ts_sys", iter_ts_sys),
                                    ("output_q", output_q),
                                    ("jiraproj", jiraproj)])

        process, jobs_list, output_q = create_and_start_process_with_queue(
            target_module, tc_args_dict, jobs_list, output_q)

    print_debug("process: {0}".format(process))
    for job in jobs_list:
        job.join()

    result_list = get_results_from_queue(output_q)

    tc_status_list = []
    tc_name_list = []
    tc_impact_list = []
    tc_duration_list = []
    # Get the junit object of each testcase, extract the information from it
    # and combine with the testsuite junit object
    tc_junit_list = []

    for result in result_list:
        tc_status_list.append(result[0])
        tc_name_list.append(result[1])
        tc_impact_list.append(result[2])
        tc_duration_list.append(result[3])
        tc_junit_list.append(result[4])

    # parallel testcases generate multiple testcase junit result files;
    # each file logs the result for one testcase and is not integrated
    # update testsuite junit result file with individual testcase result files
    data_repository['wt_junit_object'] = update_ts_junit_resultfile(
        data_repository['wt_junit_object'], tc_junit_list)
    testsuite_status = Utils.testcase_Utils.compute_status_using_impact(
        tc_status_list, tc_impact_list)
    return testsuite_status
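
For reference, a call to execute_parallel_testcases would normally come from a suite driver once the repositories are populated. The dictionary contents below are hypothetical placeholders covering only the keys this function actually reads (suite_name, testsuite_filepath and def_on_error_action from suite_repository; jiraproj and wt_junit_object from data_repository):

# Hypothetical call site -- placeholder values; real runs pass fully populated repositories.
testcase_list = []                                  # testcase xml elements parsed from the suite file
suite_repository = {
    "suite_name": "regression_suite",               # hypothetical suite name
    "testsuite_filepath": "/path/to/suite.xml",     # hypothetical path
    "def_on_error_action": "NEXT",                  # suite-level default onError action
}
data_repository = {
    "jiraproj": None,                               # no jira project configured
    "wt_junit_object": None,                        # real runs pass a junit results object
}
suite_status = execute_parallel_testcases(testcase_list,
                                          suite_repository,
                                          data_repository,
                                          from_project=False,
                                          tc_parallel=True,
                                          auto_defects=False)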