def prettify_json_file(file_list):
    """ prettify JSON testcase format
    """
    for json_file in set(file_list):
        if not json_file.endswith(".json"):
            logger.log_warning(
                "Only JSON file format can be prettified, skip: {}".format(
                    json_file))
            continue

        logger.color_print("Start to prettify JSON file: {}".format(json_file),
                           "GREEN")

        dir_path = os.path.dirname(json_file)
        file_name, _ = os.path.splitext(os.path.basename(json_file))
        outfile = os.path.join(dir_path, "{}.pretty.json".format(file_name))

        with io.open(json_file, 'r', encoding='utf-8') as stream:
            try:
                obj = json.load(stream)
            except ValueError as e:
                raise SystemExit(e)

        with io.open(outfile, 'w', encoding='utf-8') as out:
            json.dump(obj, out, indent=4, separators=(',', ': '))
            out.write('\n')

        print("success: {}".format(outfile))
def create_scaffold(project_name):
    """ create scaffold with specified project name.
    """
    if os.path.isdir(project_name):
        logger.log_warning(
            u"Folder {} exists, please specify a new folder name.".format(
                project_name))
        return

    logger.color_print("Start to create new project: {}".format(project_name),
                       "GREEN")
    logger.color_print("CWD: {}\n".format(os.getcwd()), "BLUE")

    def create_path(path, ptype):
        if ptype == "folder":
            os.makedirs(path)
        elif ptype == "file":
            open(path, 'w').close()

        msg = "created {}: {}".format(ptype, path)
        logger.color_print(msg, "BLUE")

    path_list = [(project_name, "folder"),
                 (os.path.join(project_name, "api"), "folder"),
                 (os.path.join(project_name, "testcases"), "folder"),
                 (os.path.join(project_name, "testsuites"), "folder"),
                 (os.path.join(project_name, "reports"), "folder"),
                 (os.path.join(project_name, "debugtalk.py"), "file"),
                 (os.path.join(project_name, ".env"), "file")]
    for path, ptype in path_list:
        create_path(path, ptype)
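
# Resulting layout for an example call create_scaffold("demo") ("demo" is an
# arbitrary name; the entries below follow path_list above):
#
#     demo/
#         api/
#         testcases/
#         testsuites/
#         reports/
#         debugtalk.py
#         .env
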
def load_api_folder(api_folder_path):
    """ load api definitions from api folder.

    Args:
        api_folder_path (str): api files folder.

            api file should be in the following format:
            [
                {
                    "api": {
                        "def": "api_login",
                        "request": {},
                        "validate": []
                    }
                },
                {
                    "api": {
                        "def": "api_logout",
                        "request": {},
                        "validate": []
                    }
                }
            ]

    Returns:
        dict: api definition mapping.

            {
                "api_login": {
                    "function_meta": {"func_name": "api_login", "args": [], "kwargs": {}}
                    "request": {}
                },
                "api_logout": {
                    "function_meta": {"func_name": "api_logout", "args": [], "kwargs": {}}
                    "request": {}
                }
            }

    """
    api_definition_mapping = {}

    api_items_mapping = load_folder_content(api_folder_path)

    for api_file_path, api_items in api_items_mapping.items():
        # TODO: add JSON schema validation
        for api_item in api_items:
            key, api_dict = api_item.popitem()

            api_def = api_dict.pop("def")
            function_meta = parser.parse_function(api_def)
            func_name = function_meta["func_name"]

            if func_name in api_definition_mapping:
                logger.log_warning(
                    "API definition duplicated: {}".format(func_name))

            api_dict["function_meta"] = function_meta
            api_definition_mapping[func_name] = api_dict

    return api_definition_mapping
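
# A minimal usage sketch (not part of the original module): shows how the mapping
# returned by load_api_folder is typically consulted. The "api_login" key is the
# example name taken from the docstring above and is an assumption.
def _example_api_lookup(api_folder_path):
    api_definitions = load_api_folder(api_folder_path)
    login_api = api_definitions["api_login"]
    # each entry carries the parsed "function_meta" plus the original api fields
    # such as "request"
    return login_api["function_meta"], login_api["request"]
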
def _load_testcase(raw_testcase, project_mapping):
    """ load testcase/testsuite with api/testcase references

    Args:
        raw_testcase (list): raw testcase content loaded from JSON/YAML file:
            [
                # config part
                {
                    "config": {
                        "name": "",
                        "def": "suite_order()",
                        "request": {}
                    }
                },
                # teststeps part
                {
                    "test": {...}
                },
                {
                    "test": {...}
                }
            ]
        project_mapping (dict): project_mapping

    Returns:
        dict: loaded testcase content
            {
                "config": {},
                "teststeps": [teststep11, teststep12]
            }

    """
    loaded_testcase = {"config": {}, "teststeps": []}

    for item in raw_testcase:
        # TODO: add json schema validation
        if not isinstance(item, dict) or len(item) != 1:
            raise exceptions.FileFormatError(
                "Testcase format error: {}".format(item))

        key, test_block = item.popitem()
        if not isinstance(test_block, dict):
            raise exceptions.FileFormatError(
                "Testcase format error: {}".format(item))

        if key == "config":
            loaded_testcase["config"].update(test_block)

        elif key == "test":
            loaded_testcase["teststeps"].extend(
                _load_teststeps(test_block, project_mapping))

        else:
            logger.log_warning(
                "unexpected block key: {}. block key should only be 'config' or 'test'."
                .format(key))

    return loaded_testcase
def _merge_extractor(def_extractors, ref_extractors):
    """ merge def_extractors with ref_extractors

    Args:
        def_extractors (list): [{"var1": "val1"}, {"var2": "val2"}]
        ref_extractors (list): [{"var1": "val111"}, {"var3": "val3"}]

    Returns:
        list: merged extractors

    Examples:
        >>> def_extractors = [{"var1": "val1"}, {"var2": "val2"}]
        >>> ref_extractors = [{"var1": "val111"}, {"var3": "val3"}]
        >>> _merge_extractor(def_extractors, ref_extractors)
            [
                {"var1": "val111"},
                {"var2": "val2"},
                {"var3": "val3"}
            ]

    """
    if not def_extractors:
        return ref_extractors

    elif not ref_extractors:
        return def_extractors

    else:
        extractor_dict = OrderedDict()
        for api_extractor in def_extractors:
            if len(api_extractor) != 1:
                logger.log_warning(
                    "incorrect extractor: {}".format(api_extractor))
                continue

            var_name = list(api_extractor.keys())[0]
            extractor_dict[var_name] = api_extractor[var_name]

        for test_extractor in ref_extractors:
            if len(test_extractor) != 1:
                logger.log_warning(
                    "incorrect extractor: {}".format(test_extractor))
                continue

            var_name = list(test_extractor.keys())[0]
            extractor_dict[var_name] = test_extractor[var_name]

        extractor_list = []
        for key, value in extractor_dict.items():
            extractor_list.append({key: value})

        return extractor_list
def load_file(file_path):
    if not os.path.isfile(file_path):
        raise exceptions.FileNotFound("{} does not exist.".format(file_path))

    file_suffix = os.path.splitext(file_path)[1].lower()
    if file_suffix == '.json':
        return load_json_file(file_path)
    elif file_suffix in ['.yaml', '.yml']:
        return load_yaml_file(file_path)
    elif file_suffix == ".csv":
        return load_csv_file(file_path)
    else:
        # '' or other suffix
        err_msg = u"Unsupported file format: {}".format(file_path)
        logger.log_warning(err_msg)
        return []
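
# Dispatch behavior of load_file (illustrative file names; the loader helpers
# load_json_file / load_yaml_file / load_csv_file are assumed to be defined
# elsewhere in this module):
#
#     load_file("testcases/login.json")   # -> load_json_file
#     load_file("testcases/login.yml")    # -> load_yaml_file
#     load_file("data/accounts.csv")      # -> load_csv_file
#     load_file("README.md")              # -> warning logged, returns []
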
    def extract_output(self, output_variables_list):
        """ extract output variables
        """
        variables_mapping = self.context.teststep_variables_mapping

        output = {}
        for variable in output_variables_list:
            if variable not in variables_mapping:
                logger.log_warning(
                    "variable '{}' cannot be found in variables mapping, "
                    "failed to output!".format(variable)
                )
                continue

            output[variable] = variables_mapping[variable]

        return output
def validate_json_file(file_list):
    """ validate JSON testcase format
    """
    for json_file in set(file_list):
        if not json_file.endswith(".json"):
            logger.log_warning(
                "Only JSON file format can be validated, skip: {}".format(
                    json_file))
            continue

        logger.color_print("Start to validate JSON file: {}".format(json_file),
                           "GREEN")

        with io.open(json_file, 'r', encoding='utf-8') as stream:
            try:
                json.load(stream)
            except ValueError as e:
                raise SystemExit(e)

        print("OK")
def load_test_folder(test_folder_path):
    """ load testcases definitions from folder.

    Args:
        test_folder_path (str): testcases files folder.

            testcase file should be in the following format:
            [
                {
                    "config": {
                        "def": "create_and_check",
                        "request": {},
                        "validate": []
                    }
                },
                {
                    "test": {
                        "api": "get_user",
                        "validate": []
                    }
                }
            ]

    Returns:
        dict: testcases definition mapping.

            {
                "create_and_check": [
                    {"config": {}},
                    {"test": {}},
                    {"test": {}}
                ],
                "tests/testcases/create_and_get.yml": [
                    {"config": {}},
                    {"test": {}},
                    {"test": {}}
                ]
            }

    """
    test_definition_mapping = {}

    test_items_mapping = load_folder_content(test_folder_path)

    for test_file_path, items in test_items_mapping.items():
        # TODO: add JSON schema validation

        testcase = {"config": {}, "teststeps": []}
        for item in items:
            key, block = item.popitem()

            if key == "config":
                testcase["config"].update(block)

                if "def" not in block:
                    test_definition_mapping[test_file_path] = testcase
                    continue

                testcase_def = block.pop("def")
                function_meta = parser.parse_function(testcase_def)
                func_name = function_meta["func_name"]

                if func_name in test_definition_mapping:
                    logger.log_warning(
                        "API definition duplicated: {}".format(func_name))

                testcase["function_meta"] = function_meta
                test_definition_mapping[func_name] = testcase
            else:
                # key == "test":
                testcase["teststeps"].append(block)

    return test_definition_mapping
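
# Illustrative note on load_test_folder keys (derived from the code above; the
# folder and file names are assumed examples): a testcase whose config block
# contains "def" is registered under the parsed function name, otherwise under
# its file path.
#
#     mapping = load_test_folder("tests/testcases")
#     mapping["create_and_check"]                     # config had "def": "create_and_check"
#     mapping["tests/testcases/create_and_get.yml"]   # config had no "def"
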