Example 1
    def is_file_has_used_id(self, file_path):
        """Check if the ID of the given file already exist in the system.

        Args:
            file_path (string): Path to the file.

        Returns:
            bool. Whether the ID of the given file already exists in the system.
        """
        is_used = False
        is_json_file = False
        if self.is_circle:
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                obj_type = self.TEST_PLAYBOOK_SECTION
                obj_id = collect_ids(file_path)
                obj_data = get_playbook_data(file_path)

            elif re.match(SCRIPT_REGEX, file_path, re.IGNORECASE) or \
                    re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                obj_type = self.SCRIPTS_SECTION
                obj_id = get_script_or_integration_id(file_path)
                obj_data = get_script_data(file_path)

            elif re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):

                obj_type = self.INTEGRATION_SECTION
                obj_id = get_script_or_integration_id(file_path)
                obj_data = get_integration_data(file_path)

            elif re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                obj_type = self.PLAYBOOK_SECTION
                obj_id = collect_ids(file_path)
                obj_data = get_playbook_data(file_path)

            elif re.match(SCRIPT_YML_REGEX, file_path, re.IGNORECASE) or \
                    re.match(SCRIPT_PY_REGEX, file_path, re.IGNORECASE) or \
                    re.match(SCRIPT_JS_REGEX, file_path, re.IGNORECASE):

                yml_path, code = get_script_package_data(os.path.dirname(file_path))
                obj_data = get_script_data(yml_path, script_code=code)

                obj_type = self.SCRIPTS_SECTION
                obj_id = get_script_or_integration_id(yml_path)

            else:  # In case of a json file
                is_json_file = True

            if not is_json_file:
                is_used = self.is_id_duplicated(obj_id, obj_data, obj_type)

        return is_used
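The duplicate check above delegates to is_id_duplicated, which is not shown in these examples. As a rough, hypothetical sketch only (assuming the id_set is a dict mapping section names to lists of single-key {id: data} entries, as the other examples imply), such a check could look like the following; it is an illustration, not the real implementation:

def is_id_duplicated_sketch(id_set, obj_id):
    """Hypothetical helper: return True if obj_id already appears in any section.

    Not the real is_id_duplicated; assumes id_set maps section names
    (e.g. "scripts", "playbooks") to lists of single-key {id: data} entries.
    """
    for section, entries in id_set.items():
        for entry in entries:
            if obj_id in entry:
                print("ID {0} already exists in section {1}".format(obj_id, section))
                return True
    return False

For example, is_id_duplicated_sketch(ids_dict, 'MyScript') would report a clash if 'MyScript' were already registered under any section.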
Example 2
    def validate_added_files(self, added_files):
        """Validate the added files from your branch.

        If we encounter an invalid file, we set self._is_valid to False.

        Args:
            added_files (set): A set of the added files in the current branch.
        """
        for file_path in added_files:
            print("Validating {}".format(file_path))

            structure_validator = StructureValidator(file_path, is_added_file=True)
            if not structure_validator.is_file_valid():
                self._is_valid = False

            if not self.id_set_validator.is_file_valid_in_set(file_path):
                self._is_valid = False

            if self.id_set_validator.is_file_has_used_id(file_path):
                self._is_valid = False

            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                if not self.conf_json_validator.is_test_in_conf_json(collect_ids(file_path)):
                    self._is_valid = False

            elif re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE) or \
                    re.match(IMAGE_REGEX, file_path, re.IGNORECASE):

                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid():
                    self._is_valid = False

            elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False
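The if/elif chain above could also be written as a dispatch table. The sketch below is a hypothetical refactor, reusing the regex constants and validator classes from the example; the table-driven structure is not part of the original code and is a simplified illustration rather than a behaviour-preserving drop-in:

# Hypothetical refactor: map path regexes to the validators they require.
ADDED_FILE_VALIDATORS = [
    (INTEGRATION_REGEX, [ImageValidator, DescriptionValidator]),
    (INTEGRATION_YML_REGEX, [ImageValidator, DescriptionValidator]),
    (IMAGE_REGEX, [ImageValidator]),
]


def run_path_validators(file_path):
    """Run the validators of the first matching regex; True means all passed."""
    for regex, validator_classes in ADDED_FILE_VALIDATORS:
        if re.match(regex, file_path, re.IGNORECASE):
            # Mimic the elif chain: only the first matching entry is used.
            # A list (not a generator) keeps every validator running even
            # after the first failure, as in the original code.
            results = [validator_class(file_path).is_valid()
                       for validator_class in validator_classes]
            return all(results)
    return True  # no matching regex means nothing to validate here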
Example 3
def get_test_list(files_string, branch_name):
    """Create a test list that should run"""
    modified_files, modified_tests_list, all_tests, is_conf_json, sample_tests = get_modified_files(files_string)

    tests = set([])
    if modified_files:
        tests = find_tests_for_modified_files(modified_files)

    for file_path in modified_tests_list:
        test = collect_ids(file_path)
        if test not in tests:
            tests.add(test)

    if is_conf_json:
        tests = tests.union(get_test_from_conf(branch_name))

    if all_tests:
        print_warning('Running all tests due to: {}'.format(','.join(all_tests)))
        tests.add("Run all tests")

    if sample_tests:  # Choosing 3 random tests for infrastructure testing
        print_warning('Running sample tests due to: {}'.format(','.join(sample_tests)))
        test_ids = get_test_ids(check_nightly_status=True)
        for _ in range(3):
            tests.add(random.choice(test_ids))

    if not tests:
        if modified_files or modified_tests_list or all_tests:
            print_error("There are no tests that check the changes you've done, please make sure you write one")
            sys.exit(1)
        else:
            print_warning("Running Sanity check only")
            tests.add('DocumentationTest')  # test with integration configured
            tests.add('TestCommonPython')  # test with no integration configured

    return tests
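get_modified_files is not shown here; it parses the output of git diff --name-status. As an illustration of the kind of input involved (not the real helper, which also buckets paths into tests, conf.json changes, sample files and so on), a minimal parser could look like this:

def split_name_status(files_string):
    """Split the output of git diff --name-status into added and modified paths.

    Illustrative sketch only; renames (Rxxx) and deletions are ignored.
    """
    added, modified = [], []
    for line in files_string.splitlines():
        line = line.strip()
        if not line:
            continue
        status, file_path = line.split(None, 1)
        if status.startswith('A'):
            added.append(file_path)
        elif status.startswith('M'):
            modified.append(file_path)
    return added, modified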
Example 4
def update_id_set():
    branches = run_command("git branch")
    branch_name_reg = re.search(r"\* (.*)", branches)
    branch_name = branch_name_reg.group(1)

    print("Getting added files")
    files_string = run_command("git diff --name-status HEAD")
    second_files_string = run_command(
        "git diff --name-status origin/master...{}".format(branch_name))
    added_files, modified_files, added_scripts, modified_scripts = \
        get_changed_files(files_string + '\n' + second_files_string)

    if added_files or modified_files or added_scripts or modified_scripts:
        print("Updating id_set.json")

        with open('./Tests/id_set.json', 'r') as id_set_file:
            try:
                ids_dict = json.load(id_set_file,
                                     object_pairs_hook=OrderedDict)
            except ValueError as ex:
                if "Expecting property name" in ex.message:
                    # If we got this error, id_set.json is corrupted. This usually
                    # happens after merging from master with a conflict in id_set.json,
                    # so we check out id_set.json as it is on master and then run update_id_set.
                    run_command("git checkout origin/master Tests/id_set.json")
                    with open('./Tests/id_set.json',
                              'r') as id_set_file_from_master:
                        ids_dict = json.load(id_set_file_from_master,
                                             object_pairs_hook=OrderedDict)
                else:
                    raise ex

        test_playbook_set = ids_dict['TestPlaybooks']
        integration_set = ids_dict['integrations']
        playbook_set = ids_dict['playbooks']
        script_set = ids_dict['scripts']

    if added_files:
        for file_path in added_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_integration_data(file_path), integration_set)
                print("Adding {0} to id_set".format(
                    get_script_or_integration_id(file_path)))
            if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_script_data(file_path), script_set)
                print("Adding {0} to id_set".format(
                    get_script_or_integration_id(file_path)))
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path),
                                         get_playbook_data(file_path),
                                         playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path),
                                         get_playbook_data(file_path),
                                         test_playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_script_data(file_path), script_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))

    if modified_files:
        for file_path in modified_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                obj_id = get_script_or_integration_id(file_path)
                integration_data = get_integration_data(file_path)
                update_object_in_id_set(obj_id, integration_data, file_path,
                                        integration_set)
                print("updated {0} in id_set".format(obj_id))
            if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE) or re.match(
                    TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                obj_id = get_script_or_integration_id(file_path)
                script_data = get_script_data(file_path)
                update_object_in_id_set(obj_id, script_data, file_path, script_set)
                print("updated {0} in id_set".format(obj_id))
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                obj_id = collect_ids(file_path)
                playbook_data = get_playbook_data(file_path)
                update_object_in_id_set(obj_id, playbook_data, file_path,
                                        playbook_set)
                print("updated {0} in id_set".format(obj_id))
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                obj_id = collect_ids(file_path)
                playbook_data = get_playbook_data(file_path)
                update_object_in_id_set(obj_id, playbook_data, file_path,
                                        test_playbook_set)
                print("updated {0} in id_set".format(obj_id))

    if added_scripts:
        for added_script_package in added_scripts:
            yml_path, code = get_script_package_data(added_script_package)
            add_new_object_to_id_set(
                get_script_or_integration_id(yml_path),
                get_script_data(yml_path, script_code=code), script_set)
            print("Adding {0} to id_set".format(
                get_script_or_integration_id(yml_path)))

    if modified_scripts:
        for modified_script_package in modified_scripts:
            yml_path, code = get_script_package_data(modified_script_package)
            update_object_in_id_set(
                get_script_or_integration_id(yml_path),
                get_script_data(yml_path, script_code=code), yml_path,
                script_set)
            print("Adding {0} to id_set".format(
                get_script_or_integration_id(yml_path)))

    if added_files or modified_files or added_scripts or modified_scripts:
        new_ids_dict = OrderedDict()
        # Sort the whole set every time in case someone changed it manually;
        # it shouldn't take too much time.
        new_ids_dict['scripts'] = sort(script_set)
        new_ids_dict['playbooks'] = sort(playbook_set)
        new_ids_dict['integrations'] = sort(integration_set)
        new_ids_dict['TestPlaybooks'] = sort(test_playbook_set)

        with open('./Tests/id_set.json', 'w') as id_set_file:
            json.dump(new_ids_dict, id_set_file, indent=4)

    print("Finished updating id_set.json")
Example 5
    def validate_added_files(self, added_files):
        """Validate the added files from your branch.

        If we encounter an invalid file, we set self._is_valid to False.

        Args:
            added_files (set): A set of the added files in the current branch.
        """
        for file_path in added_files:
            print('Validating {}'.format(file_path))

            structure_validator = StructureValidator(file_path,
                                                     is_added_file=True)
            if not structure_validator.is_file_valid():
                self._is_valid = False

            if not self.id_set_validator.is_file_valid_in_set(file_path):
                self._is_valid = False

            if self.id_set_validator.is_file_has_used_id(file_path):
                self._is_valid = False

            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                if not self.conf_json_validator.is_test_in_conf_json(
                        collect_ids(file_path)):
                    self._is_valid = False

            elif re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE) or \
                    re.match(IMAGE_REGEX, file_path, re.IGNORECASE):

                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid():
                    self._is_valid = False

                integration_validator = IntegrationValidator(file_path)
                if not integration_validator.is_valid_integration():
                    self._is_valid = False

            elif re.match(BETA_INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(BETA_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid_beta_description():
                    self._is_valid = False

                integration_validator = IntegrationValidator(file_path)
                if not integration_validator.is_valid_beta_integration(
                        is_new=True):
                    self._is_valid = False
            elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

            elif re.match(INCIDENT_FIELD_REGEX, file_path, re.IGNORECASE):
                incident_field_validator = IncidentFieldValidator(file_path)
                if not incident_field_validator.is_valid():
                    self._is_valid = False
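Each check above flips self._is_valid as a side effect. A hypothetical alternative, shown for the integration branch only, is to gather the individual results and fold them with all(); this illustrates the pattern and is not code from the original validator:

def validate_integration_files(file_path):
    """Run the three integration-related validators; True means all of them passed."""
    results = [
        ImageValidator(file_path).is_valid(),
        DescriptionValidator(file_path).is_valid(),
        IntegrationValidator(file_path).is_valid_integration(),
    ]
    # A list (rather than a generator) keeps the original behaviour of
    # running every validator even after the first failure.
    return all(results)

Inside validate_added_files, the integration branch would then reduce to a single check: if not validate_integration_files(file_path), set self._is_valid to False.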
Example 6
        for file_path in added_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_integration_data(file_path), integration_set)
                print("Adding {0} to id_set".format(
                    get_script_or_integration_id(file_path)))
            if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_script_data(file_path), script_set)
                print("Adding {0} to id_set".format(
                    get_script_or_integration_id(file_path)))
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path),
                                         get_playbook_data(file_path),
                                         playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path),
                                         get_playbook_data(file_path),
                                         test_playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_script_data(file_path), script_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))

    if modified_files:
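The fragment above cuts off at the modified_files branch. add_new_object_to_id_set, called throughout it, is never defined in these examples; a minimal implementation consistent with these call sites, where each section behaves as a list of single-key {id: data} entries, might be the following (an assumption, not the original helper):

def add_new_object_to_id_set(obj_id, obj_data, obj_set):
    # Assumed behaviour: register the new object as a {id: data} entry
    # in the given section list (scripts, playbooks, integrations, ...).
    obj_set.append({obj_id: obj_data})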
Example 7
def get_test_list(files_string,
                  branch_name,
                  two_before_ga_ver='0',
                  conf=None,
                  id_set=None):
    """Create a test list that should run"""
    (modified_files, modified_tests_list, changed_common, is_conf_json,
     sample_tests, is_reputations_json,
     is_indicator_json) = get_modified_files(files_string)

    tests = set([])
    if modified_files:
        tests = find_tests_for_modified_files(modified_files, conf, id_set)

    # Add dedicated tests when specific JSON files were changed.
    if is_reputations_json:
        tests.add('FormattingPerformance - Test')
        tests.add('reputations.json Test')
        tests.add('Indicators reputation-.json Test')

    if is_indicator_json:
        tests.add('Test IP Indicator Fields')

    for file_path in modified_tests_list:
        test = collect_ids(file_path)
        if test not in tests:
            tests.add(test)

    if is_conf_json:
        tests = tests.union(get_test_from_conf(branch_name, conf))

    if sample_tests:  # Choosing 3 random tests for infrastructure testing
        print_warning('Collecting sample tests due to: {}'.format(
            ','.join(sample_tests)))
        tests = tests.union(
            get_random_tests(tests_num=RANDOM_TESTS_NUM,
                             conf=conf,
                             id_set=id_set,
                             server_version=two_before_ga_ver))

    if not tests:
        if modified_files or modified_tests_list:
            print_error(
                "There is no test-playbook that checks the changes you've done, please make sure you write one."
            )
            global _FAILED
            _FAILED = True
        elif changed_common:
            print_warning('Adding 3 random tests due to: {}'.format(
                ','.join(changed_common)))
            tests = tests.union(
                get_random_tests(tests_num=RANDOM_TESTS_NUM,
                                 conf=conf,
                                 id_set=id_set,
                                 server_version=two_before_ga_ver))
        else:
            print_warning("Running Sanity check only")
            tests = get_random_tests(tests_num=RANDOM_TESTS_NUM,
                                     conf=conf,
                                     id_set=id_set,
                                     server_version=two_before_ga_ver)
            tests.add('DocumentationTest')  # test with integration configured
            tests.add(
                'TestCommonPython')  # test with no integration configured

    if changed_common:
        tests.add('TestCommonPython')

    return tests
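A hedged usage sketch of get_test_list follows. Only the function signature comes from the code above; the file paths, git commands, and server-version value are assumptions made for illustration:

import json
from subprocess import check_output

if __name__ == '__main__':
    branch_name = check_output(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode().strip()
    files_string = check_output(
        ['git', 'diff', '--name-status',
         'origin/master...{}'.format(branch_name)]).decode()

    # Hypothetical locations of the configuration files the collector needs.
    with open('./Tests/conf.json') as conf_file:
        conf = json.load(conf_file)
    with open('./Tests/id_set.json') as id_set_file:
        id_set = json.load(id_set_file)

    tests = get_test_list(files_string, branch_name,
                          two_before_ga_ver='5.0.0',  # assumed version string
                          conf=conf, id_set=id_set)
    print('\n'.join(sorted(tests)))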
Example 8
        playbook_set = ids_dict['playbooks']
        script_set = ids_dict['scripts']

    if added_files:
        for file_path in added_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(get_script_or_integration_id(file_path), get_integration_data(file_path),
                                         integration_set)
                print("Adding {0} to id_set".format(get_script_or_integration_id(file_path)))
            if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(get_script_or_integration_id(file_path), get_script_data(file_path),
                                         script_set)
                print("Adding {0} to id_set".format(get_script_or_integration_id(file_path)))
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path), get_playbook_data(file_path),
                                         playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path), get_playbook_data(file_path),
                                         test_playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(get_script_or_integration_id(file_path), get_script_data(file_path),
                                         script_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))

    if modified_files:
        for file_path in modified_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):