def is_file_has_used_id(self, file_path):
    """Check if the ID of the given file already exist in the system.

    Args:
        file_path (string): Path to the file.

    Returns:
        bool. Whether the ID of the given file already exist in the system or not.
    """
    # The duplication check only runs on CI (circle) builds.
    if not self.is_circle:
        return False

    if re.match(constants.TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
        obj_type = self.TEST_PLAYBOOK_SECTION
        obj_id = collect_ids(file_path)
        obj_data = get_playbook_data(file_path)

    elif re.match(constants.PACKS_SCRIPT_NON_SPLIT_YML_REGEX, file_path, re.IGNORECASE) or \
            re.match(constants.TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
        obj_type = self.SCRIPTS_SECTION
        obj_id = get_script_or_integration_id(file_path)
        obj_data = get_script_data(file_path)

    elif re.match(constants.PACKS_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE) or \
            re.match(constants.PACKS_INTEGRATION_NON_SPLIT_YML_REGEX, file_path, re.IGNORECASE):
        obj_type = self.INTEGRATION_SECTION
        obj_id = get_script_or_integration_id(file_path)
        obj_data = get_integration_data(file_path)

    elif re.match(constants.PLAYBOOK_REGEX, file_path, re.IGNORECASE):
        obj_type = self.PLAYBOOK_SECTION
        obj_id = collect_ids(file_path)
        obj_data = get_playbook_data(file_path)

    elif re.match(constants.PACKS_SCRIPT_YML_REGEX, file_path, re.IGNORECASE) or \
            re.match(constants.PACKS_SCRIPT_PY_REGEX, file_path, re.IGNORECASE):
        # Split (package) script: unify the package first to obtain the yml and code.
        unifier = Unifier(os.path.dirname(os.path.dirname(file_path)))
        yml_path, code = unifier.get_script_or_integration_package_data()
        obj_data = get_script_data(yml_path, script_code=code)
        obj_type = self.SCRIPTS_SECTION
        obj_id = get_script_or_integration_id(yml_path)

    else:
        # In case of a json file — no ID to check.
        return False

    return self.is_id_duplicated(obj_id, obj_data, obj_type)
def update_with_tests_sections(missing_ids, modified_files, test_ids, tests):
    """Match the tests declared by each modified file against the known test ids.

    Tests that are known (or explicitly marked as "no tests") remove the file's
    id from ``missing_ids`` and are added to ``tests``; unknown tests are
    reported and mark the global failure flag.

    Returns:
        set. The updated ``missing_ids`` set.
    """
    global _FAILED

    # "Run all tests" always counts as a recognized test id.
    test_ids.append(RUN_ALL_TESTS_FORMAT)

    # Search for tests section
    for file_path in modified_files:
        for test in get_tests(file_path):
            is_known = test in test_ids or re.match(NO_TESTS_FORMAT, test, re.IGNORECASE)
            if not is_known:
                message = "The test '{0}' does not exist in the conf.json file, please re-check your code".format(
                    test)
                print_color(message, LOG_COLORS.RED)
                _FAILED = True
                continue

            # Integrations are keyed by their id; everything else by name.
            _id = get_script_or_integration_id(file_path) \
                if checked_type(file_path, INTEGRATION_REGEXES) else get_name(file_path)
            missing_ids = missing_ids - {_id}
            tests.add(test)

    return missing_ids
def update_id_set():
    """Update ./Tests/id_set.json with entities added or modified on the current branch.

    Diffs the working tree against HEAD and against origin/master, then adds new
    integrations/scripts/playbooks/test playbooks to the id_set and refreshes the
    entries for modified ones. The four sections are re-sorted and the file is
    rewritten whenever anything changed.
    """
    branches = run_command("git branch")
    branch_name_reg = re.search(r"\* (.*)", branches)
    branch_name = branch_name_reg.group(1)

    print("Getting added files")
    files_string = run_command("git diff --name-status HEAD")
    second_files_string = run_command("git diff --name-status origin/master...{}".format(branch_name))
    added_files, modified_files, added_scripts, modified_scripts = \
        get_changed_files(files_string + '\n' + second_files_string)

    if added_files or modified_files or added_scripts or modified_scripts:
        print("Updating id_set.json")

        with open('./Tests/id_set.json', 'r') as id_set_file:
            try:
                ids_dict = json.load(id_set_file, object_pairs_hook=OrderedDict)
            except ValueError as ex:
                if "Expecting property name" in str(ex):
                    # if we got this error it means we have corrupted id_set.json
                    # usually it will happen if we merged from master and we had a conflict in id_set.json
                    # so we checkout the id_set.json to be exact as in master and then run update_id_set
                    run_command("git checkout origin/master Tests/id_set.json")
                    with open('./Tests/id_set.json', 'r') as id_set_file_from_master:
                        ids_dict = json.load(id_set_file_from_master, object_pairs_hook=OrderedDict)
                else:
                    raise

        test_playbook_set = ids_dict['TestPlaybooks']
        integration_set = ids_dict['integrations']
        playbook_set = ids_dict['playbooks']
        script_set = ids_dict['scripts']

        if added_files:
            for file_path in added_files:
                if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                        re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                    add_new_object_to_id_set(get_script_or_integration_id(file_path),
                                             get_integration_data(file_path), integration_set)
                    print("Adding {0} to id_set".format(get_script_or_integration_id(file_path)))
                if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE):
                    add_new_object_to_id_set(get_script_or_integration_id(file_path),
                                             get_script_data(file_path), script_set)
                    print("Adding {0} to id_set".format(get_script_or_integration_id(file_path)))
                if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                    add_new_object_to_id_set(collect_ids(file_path),
                                             get_playbook_data(file_path), playbook_set)
                    print("Adding {0} to id_set".format(collect_ids(file_path)))
                if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                    add_new_object_to_id_set(collect_ids(file_path),
                                             get_playbook_data(file_path), test_playbook_set)
                    print("Adding {0} to id_set".format(collect_ids(file_path)))
                if re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                    add_new_object_to_id_set(get_script_or_integration_id(file_path),
                                             get_script_data(file_path), script_set)
                    # Fixed: log the same id that was added (was collect_ids, which is
                    # the playbook id extractor, not the script id).
                    print("Adding {0} to id_set".format(get_script_or_integration_id(file_path)))

        if modified_files:
            for file_path in modified_files:
                if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                        re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                    id_ = get_script_or_integration_id(file_path)
                    integration_data = get_integration_data(file_path)
                    update_object_in_id_set(id_, integration_data, file_path, integration_set)
                    print("updated {0} in id_set".format(id_))
                if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE) or \
                        re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                    id_ = get_script_or_integration_id(file_path)
                    script_data = get_script_data(file_path)
                    update_object_in_id_set(id_, script_data, file_path, script_set)
                    print("updated {0} in id_set".format(id_))
                if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                    id_ = collect_ids(file_path)
                    playbook_data = get_playbook_data(file_path)
                    update_object_in_id_set(id_, playbook_data, file_path, playbook_set)
                    print("updated {0} in id_set".format(id_))
                if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                    id_ = collect_ids(file_path)
                    playbook_data = get_playbook_data(file_path)
                    update_object_in_id_set(id_, playbook_data, file_path, test_playbook_set)
                    print("updated {0} in id_set".format(id_))

        if added_scripts:
            for added_script_package in added_scripts:
                unifier = Unifier(added_script_package)
                yml_path, code = unifier.get_script_package_data()
                add_new_object_to_id_set(get_script_or_integration_id(yml_path),
                                         get_script_data(yml_path, script_code=code), script_set)
                print("Adding {0} to id_set".format(get_script_or_integration_id(yml_path)))

        if modified_scripts:
            # Fixed: this loop previously iterated added_scripts, so modified script
            # packages were never refreshed in the id_set.
            for modified_script_package in modified_scripts:
                unifier = Unifier(modified_script_package)
                yml_path, code = unifier.get_script_package_data()
                update_object_in_id_set(get_script_or_integration_id(yml_path),
                                        get_script_data(yml_path, script_code=code), yml_path, script_set)
                # Fixed: message now matches the other "modified" branches.
                print("updated {0} in id_set".format(get_script_or_integration_id(yml_path)))

        # Fixed: also persist when only script packages changed (previously the
        # guard checked added_files/modified_files alone, dropping those updates).
        if added_files or modified_files or added_scripts or modified_scripts:
            new_ids_dict = OrderedDict()
            # we sort each time the whole set in case someone manually changed something
            # it shouldn't take too much time
            new_ids_dict['scripts'] = sort(script_set)
            new_ids_dict['playbooks'] = sort(playbook_set)
            new_ids_dict['integrations'] = sort(integration_set)
            new_ids_dict['TestPlaybooks'] = sort(test_playbook_set)

            with open('./Tests/id_set.json', 'w') as id_set_file:
                json.dump(new_ids_dict, id_set_file, indent=4)

    print("Finished updating id_set.json")
def collect_changed_ids(integration_ids, playbook_names, script_names, modified_files, id_set):
    """Collect the ids of all entities affected by the modified files.

    Populates the caller-supplied ``integration_ids``, ``playbook_names`` and
    ``script_names`` sets in place, then walks the id_set to pull in every
    script/playbook that transitively uses the changed entities.

    Args:
        integration_ids (set): Mutated in place with changed integration ids.
        playbook_names (set): Mutated in place with changed playbook names.
        script_names (set): Mutated in place with changed script names.
        modified_files (iterable): Paths of the files changed on the branch.
        id_set (dict): Parsed id_set content; falsy triggers loading
            ./Tests/id_set.json from disk.

    Returns:
        tuple. (tests_set, catched_scripts, catched_playbooks) — the collected
        test messages/ids and the scripts/playbooks already covered by a test.
    """
    tests_set = set([])
    updated_script_names = set([])
    updated_playbook_names = set([])
    catched_scripts, catched_playbooks = set([]), set([])
    # Per-entity (fromversion, toversion) ranges, used by the enrich_* helpers
    # to filter version-compatible dependents.
    script_to_version = {}
    playbook_to_version = {}
    integration_to_version = {}

    # Classify each modified file and record its name/id and version range.
    for file_path in modified_files:
        if checked_type(file_path, YML_SCRIPT_REGEXES):
            name = get_name(file_path)
            script_names.add(name)
            script_to_version[name] = (get_from_version(file_path), get_to_version(file_path))

            # A *_test.py next to the script counts as its unittest coverage.
            package_name = os.path.dirname(file_path)
            if glob.glob(package_name + "/*_test.py"):
                catched_scripts.add(name)
                tests_set.add('Found a unittest for the script {}'.format(package_name))

        elif checked_type(file_path, YML_PLAYBOOKS_NO_TESTS_REGEXES):
            name = get_name(file_path)
            playbook_names.add(name)
            playbook_to_version[name] = (get_from_version(file_path), get_to_version(file_path))

        elif checked_type(file_path, INTEGRATION_REGEXES + YML_INTEGRATION_REGEXES):
            _id = get_script_or_integration_id(file_path)
            integration_ids.add(_id)
            integration_to_version[_id] = (get_from_version(file_path), get_to_version(file_path))

    # Lazily load the id_set from disk when the caller did not supply one.
    if not id_set:
        with open("./Tests/id_set.json", 'r') as conf_file:
            id_set = json.load(conf_file)

    script_set = id_set['scripts']
    playbook_set = id_set['playbooks']
    integration_set = id_set['integrations']

    # Drop deprecated entities from the collected sets; keeps their messages
    # (keyed by 'scripts'/'playbooks'/'integrations') for reporting below.
    deprecated_msgs = exclude_deprecated_entities(script_set, script_names,
                                                 playbook_set, playbook_names,
                                                 integration_set, integration_ids)

    # Transitive closure: dependents are accumulated into the updated_* sets
    # (not directly into script_names/playbook_names, which are being iterated).
    for script_id in script_names:
        enrich_for_script_id(script_id, script_to_version[script_id], script_names, script_set,
                             playbook_set, playbook_names, updated_script_names,
                             updated_playbook_names, catched_scripts, catched_playbooks, tests_set)

    integration_to_command, deprecated_commands_message = get_integration_commands(integration_ids,
                                                                                   integration_set)
    for integration_id, integration_commands in integration_to_command.items():
        enrich_for_integration_id(integration_id, integration_to_version[integration_id],
                                  integration_commands, script_set, playbook_set, playbook_names,
                                  script_names, updated_script_names, updated_playbook_names,
                                  catched_scripts, catched_playbooks, tests_set)

    for playbook_id in playbook_names:
        enrich_for_playbook_id(playbook_id, playbook_to_version[playbook_id], playbook_names,
                               script_set, playbook_set, updated_playbook_names,
                               catched_playbooks, tests_set)

    # Merge the newly discovered dependents back into the caller's sets.
    for new_script in updated_script_names:
        script_names.add(new_script)

    for new_playbook in updated_playbook_names:
        playbook_names.add(new_playbook)

    # Build the human-readable summary of everything affected.
    affected_ids_strings = {
        'scripts': '',
        'playbooks': '',
        'integrations': ''
    }
    if script_names:
        affected_ids_strings['scripts'] += 'Scripts:\n' + '\n'.join(script_names)
    if playbook_names:
        affected_ids_strings['playbooks'] += 'Playbooks:\n' + '\n'.join(playbook_names)
    if integration_ids:
        affected_ids_strings['integrations'] += 'Integrations:\n' + '\n'.join(integration_ids)

    print('The following ids are affected due to the changes you made:')
    for entity in ['scripts', 'playbooks', 'integrations']:
        print(affected_ids_strings[entity])
        print_color(deprecated_msgs[entity], LOG_COLORS.YELLOW)

    if deprecated_commands_message:
        print_color(deprecated_commands_message, LOG_COLORS.YELLOW)

    return tests_set, catched_scripts, catched_playbooks