Example #1
    def is_file_has_used_id(self, file_path):
        """Check if the ID of the given file already exist in the system.

        Args:
            file_path (string): Path to the file.

        Returns:
            bool. Whether the ID of the given file already exists in the system.
        """
        is_used = False
        is_json_file = False
        if self.is_circle:
            if re.match(constants.TEST_PLAYBOOK_REGEX, file_path,
                        re.IGNORECASE):
                obj_type = self.TEST_PLAYBOOK_SECTION
                obj_id = collect_ids(file_path)
                obj_data = get_playbook_data(file_path)

            elif re.match(constants.PACKS_SCRIPT_NON_SPLIT_YML_REGEX, file_path, re.IGNORECASE) or \
                    re.match(constants.TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                obj_type = self.SCRIPTS_SECTION
                obj_id = get_script_or_integration_id(file_path)
                obj_data = get_script_data(file_path)

            elif re.match(constants.PACKS_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE) or \
                    re.match(constants.PACKS_INTEGRATION_NON_SPLIT_YML_REGEX, file_path, re.IGNORECASE):

                obj_type = self.INTEGRATION_SECTION
                obj_id = get_script_or_integration_id(file_path)
                obj_data = get_integration_data(file_path)

            elif re.match(constants.PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                obj_type = self.PLAYBOOK_SECTION
                obj_id = collect_ids(file_path)
                obj_data = get_playbook_data(file_path)

            elif re.match(constants.PACKS_SCRIPT_YML_REGEX, file_path, re.IGNORECASE) or \
                    re.match(constants.PACKS_SCRIPT_PY_REGEX, file_path, re.IGNORECASE):

                unifier = Unifier(os.path.dirname(os.path.dirname(file_path)))
                yml_path, code = unifier.get_script_or_integration_package_data()

                obj_data = get_script_data(yml_path, script_code=code)

                obj_type = self.SCRIPTS_SECTION
                obj_id = get_script_or_integration_id(yml_path)

            else:  # In case of a json file
                is_json_file = True

            if not is_json_file:
                is_used = self.is_id_duplicated(obj_id, obj_data, obj_type)

        return is_used
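
A hedged sketch of the is_id_duplicated lookup used above may help; it assumes self.id_set holds the parsed id_set.json and that each section is a list of single-key {id: data} entries, which is an illustrative assumption rather than the repository's actual implementation:

    def is_id_duplicated(self, obj_id, obj_data, obj_type):
        """Sketch only: report whether obj_id already appears in the given id_set section."""
        # obj_data matches the call site above but is unused in this simplified check.
        for existing_entry in self.id_set.get(obj_type, []):
            if obj_id in existing_entry:
                return True
        return False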
Example #2
def get_test_list(files_string,
                  branch_name,
                  two_before_ga_ver='0',
                  conf=None,
                  id_set=None):
    """Create a test list that should run"""
    (modified_files, modified_tests_list, changed_common, is_conf_json,
     sample_tests, is_reputations_json,
     is_indicator_json) = get_modified_files(files_string)

    tests = set()
    if modified_files:
        tests = find_tests_for_modified_files(modified_files, conf, id_set)

    # Add dedicated tests when specific JSON files were changed.
    if is_reputations_json:
        tests.add('FormattingPerformance - Test')
        tests.add('reputations.json Test')
        tests.add('Indicators reputation-.json Test')

    if is_indicator_json:
        tests.add('Test IP Indicator Fields')

    for file_path in modified_tests_list:
        tests.add(collect_ids(file_path))

    if is_conf_json:
        tests = tests.union(get_test_from_conf(branch_name, conf))

    if not tests:
        rand = random.Random(branch_name)
        tests = get_random_tests(tests_num=RANDOM_TESTS_NUM,
                                 rand=rand,
                                 conf=conf,
                                 id_set=id_set,
                                 server_version=two_before_ga_ver)
        if changed_common:
            print_warning('Adding {} random tests due to: {}'.format(
                RANDOM_TESTS_NUM, ','.join(changed_common)))
        elif sample_tests:  # Choosing random sample tests for infrastructure testing
            print_warning('Collecting sample tests due to: {}'.format(
                ','.join(sample_tests)))
        else:
            print_warning("Running Sanity check only")
            tests.add('DocumentationTest')  # test with integration configured
            tests.add(
                'TestCommonPython')  # test with no integration configured

    if changed_common:
        tests.add('TestCommonPython')

    return tests
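
A minimal driver sketch for this collector, assuming the run_command helper shown in Example #3 and a plain git diff against origin/master; the commands and wiring below are illustrative, not the project's actual entry point:

if __name__ == '__main__':
    # Illustrative only: gather changed files from git and print the tests to run.
    current_branch = run_command("git rev-parse --abbrev-ref HEAD").strip()
    diff_output = run_command("git diff --name-status origin/master...{}".format(current_branch))
    for test_name in sorted(get_test_list(diff_output, current_branch)):
        print(test_name)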
Example #3
def update_id_set():
    branches = run_command("git branch")
    branch_name_reg = re.search(r"\* (.*)", branches)
    branch_name = branch_name_reg.group(1)

    print("Getting added files")
    files_string = run_command("git diff --name-status HEAD")
    second_files_string = run_command("git diff --name-status origin/master...{}".format(branch_name))
    added_files, modified_files, added_scripts, modified_scripts = \
        get_changed_files(files_string + '\n' + second_files_string)

    if added_files or modified_files or added_scripts or modified_scripts:
        print("Updating id_set.json")

        with open('./Tests/id_set.json', 'r') as id_set_file:
            try:
                ids_dict = json.load(id_set_file, object_pairs_hook=OrderedDict)
            except ValueError as ex:
                if "Expecting property name" in str(ex):
                    # if we got this error it means we have corrupted id_set.json
                    # usually it will happen if we merged from master and we had a conflict in id_set.json
                    # so we checkout the id_set.json to be exact as in master and then run update_id_set
                    run_command("git checkout origin/master Tests/id_set.json")
                    with open('./Tests/id_set.json', 'r') as id_set_file_from_master:
                        ids_dict = json.load(id_set_file_from_master, object_pairs_hook=OrderedDict)
                else:
                    raise

        test_playbook_set = ids_dict['TestPlaybooks']
        integration_set = ids_dict['integrations']
        playbook_set = ids_dict['playbooks']
        script_set = ids_dict['scripts']

    if added_files:
        for file_path in added_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(get_script_or_integration_id(file_path), get_integration_data(file_path),
                                         integration_set)
                print("Adding {0} to id_set".format(get_script_or_integration_id(file_path)))
            if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(get_script_or_integration_id(file_path), get_script_data(file_path),
                                         script_set)
                print("Adding {0} to id_set".format(get_script_or_integration_id(file_path)))
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path), get_playbook_data(file_path),
                                         playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path), get_playbook_data(file_path),
                                         test_playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(get_script_or_integration_id(file_path), get_script_data(file_path),
                                         script_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))

    if modified_files:
        for file_path in modified_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                id_ = get_script_or_integration_id(file_path)
                integration_data = get_integration_data(file_path)
                update_object_in_id_set(id_, integration_data, file_path, integration_set)
                print("updated {0} in id_set".format(id_))
            if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE) or re.match(TEST_SCRIPT_REGEX,
                                                                            file_path, re.IGNORECASE):
                id_ = get_script_or_integration_id(file_path)
                script_data = get_script_data(file_path)
                update_object_in_id_set(id_, script_data, file_path, script_set)
                print("updated {0} in id_set".format(id_))
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                id_ = collect_ids(file_path)
                playbook_data = get_playbook_data(file_path)
                update_object_in_id_set(id_, playbook_data, file_path, playbook_set)
                print("updated {0} in id_set".format(id_))
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                id_ = collect_ids(file_path)
                playbook_data = get_playbook_data(file_path)
                update_object_in_id_set(id_, playbook_data, file_path, test_playbook_set)
                print("updated {0} in id_set".format(id_))

    if added_scripts:
        for added_script_package in added_scripts:
            unifier = Unifier(added_script_package)
            yml_path, code = unifier.get_script_package_data()
            add_new_object_to_id_set(get_script_or_integration_id(yml_path),
                                     get_script_data(yml_path, script_code=code), script_set)
            print("Adding {0} to id_set".format(get_script_or_integration_id(yml_path)))

    if modified_scripts:
        for modified_script_package in modified_scripts:
            unifier = Unifier(modified_script_package)
            yml_path, code = unifier.get_script_package_data()
            update_object_in_id_set(get_script_or_integration_id(yml_path),
                                    get_script_data(yml_path, script_code=code), yml_path, script_set)
            print("Adding {0} to id_set".format(get_script_or_integration_id(yml_path)))

    if added_files or modified_files or added_scripts or modified_scripts:
        new_ids_dict = OrderedDict()
        # Re-sort the whole set every time in case someone changed it manually;
        # it shouldn't take too much time.
        new_ids_dict['scripts'] = sort(script_set)
        new_ids_dict['playbooks'] = sort(playbook_set)
        new_ids_dict['integrations'] = sort(integration_set)
        new_ids_dict['TestPlaybooks'] = sort(test_playbook_set)

        with open('./Tests/id_set.json', 'w') as id_set_file:
            json.dump(new_ids_dict, id_set_file, indent=4)

    print("Finished updating id_set.json")