def __init__(self, pack, pack_path=None, validate_dependencies=False, ignored_errors=None,
             print_as_warnings=False, should_version_raise=False, id_set_path=None):
    """Initialize the content-pack validator.

    Records the pack name and path together with the names of the files that
    are unique to a content pack: the secrets whitelist, the pack-ignore file,
    the pack metadata file and the README.

    :param pack: content package name, which is the directory name of the pack
    :param pack_path: explicit path to the pack; derived from the name when omitted
    :param validate_dependencies: whether pack dependencies should also be validated
    :param ignored_errors: error codes to ignore (forwarded to the base validator)
    :param print_as_warnings: print ignored errors as warnings (forwarded to the base validator)
    :param should_version_raise: whether the pack version is expected to be bumped
    :param id_set_path: path to an id_set.json file, if available
    """
    super().__init__(ignored_errors=ignored_errors, print_as_warnings=print_as_warnings)
    self.pack = pack
    # Fall back to resolving the path from the pack name when no explicit path is given.
    self.pack_path = pack_path if pack_path else pack_name_to_path(self.pack)
    # File names that are unique to a content pack.
    self.secrets_file = PACKS_WHITELIST_FILE_NAME
    self.pack_ignore_file = PACKS_PACK_IGNORE_FILE_NAME
    self.pack_meta_file = PACKS_PACK_META_FILE_NAME
    self.readme_file = PACKS_README_FILE_NAME
    self.validate_dependencies = validate_dependencies
    # Collected validation error messages.
    self._errors = []
    self.should_version_raise = should_version_raise
    self.id_set_path = id_set_path
def __init__(self, pack, pack_path=None, validate_dependencies=False, ignored_errors=None,
             print_as_warnings=False, should_version_raise=False, id_set_path=None,
             suppress_print=False, private_repo=False, skip_id_set_creation=False,
             prev_ver=main_branch, json_file_path=None, support=None, specific_validations=None):
    """Initialize the content-pack validator.

    Records the pack name and path together with the names of the files that
    are unique to a content pack: the secrets whitelist, the pack-ignore file,
    the pack metadata file and the README.

    :param pack: content package name, which is the directory name of the pack
    :param pack_path: explicit path to the pack; derived from the name when omitted
    :param validate_dependencies: whether pack dependencies should also be validated
    :param ignored_errors: error codes to ignore (forwarded to the base validator)
    :param print_as_warnings: print ignored errors as warnings (forwarded to the base validator)
    :param should_version_raise: whether the pack version is expected to be bumped
    :param id_set_path: path to an id_set.json file, if available
    :param suppress_print: suppress console output (forwarded to the base validator)
    :param private_repo: whether the pack lives in a private repository
    :param skip_id_set_creation: skip creating an id_set when one is missing
    :param prev_ver: branch/ref to compare against; defaults to the main branch
    :param json_file_path: output path for a JSON report (forwarded to the base validator)
    :param support: the pack's support level, if known
    :param specific_validations: restrict the run to these validations (forwarded to the base validator)
    """
    super().__init__(ignored_errors=ignored_errors, print_as_warnings=print_as_warnings,
                     suppress_print=suppress_print, json_file_path=json_file_path,
                     specific_validations=specific_validations)
    self.pack = pack
    # Fall back to resolving the path from the pack name when no explicit path is given.
    self.pack_path = pack_path if pack_path else pack_name_to_path(self.pack)
    # File names that are unique to a content pack.
    self.secrets_file = PACKS_WHITELIST_FILE_NAME
    self.pack_ignore_file = PACKS_PACK_IGNORE_FILE_NAME
    self.pack_meta_file = PACKS_PACK_META_FILE_NAME
    self.readme_file = PACKS_README_FILE_NAME
    self.validate_dependencies = validate_dependencies
    # Collected validation error messages.
    self._errors = []
    self.should_version_raise = should_version_raise
    self.id_set_path = id_set_path
    self.private_repo = private_repo
    self.skip_id_set_creation = skip_id_set_creation
    self.prev_ver = prev_ver
    self.support = support
    # Lazily-populated cache of the parsed pack_metadata.json content.
    self.metadata_content: Dict = {}
def __init__(self, pack_path: str, update_type: Union[str, None], modified_files_in_pack: set,
             added_files: set, specific_version: str = None, pre_release: bool = False,
             pack: str = None, pack_metadata_only: bool = False, text: str = '',
             existing_rn_version_path: str = ''):
    """Set up a release-notes update run for a single pack.

    :param pack_path: path to the pack directory; used to derive the pack name when
        ``pack`` is not supplied
    :param update_type: the version-bump type to apply, or None
    :param modified_files_in_pack: modified file paths; renamed files appear as
        (old path, new path) tuples
    :param added_files: paths of newly added files
    :param specific_version: an explicit version to set instead of bumping
    :param pre_release: whether the release notes are for a pre-release version
    :param pack: content package name; derived from ``pack_path`` when omitted
    :param pack_metadata_only: whether only pack_metadata.json changed
    :param text: free text to put into the release-notes file
    :param existing_rn_version_path: path of an already-existing release-notes file, if any
    """
    self.pack = pack if pack else get_pack_name(pack_path)
    self.update_type = update_type
    self.pack_meta_file = PACKS_PACK_META_FILE_NAME
    try:
        self.pack_path = pack_name_to_path(self.pack)
    except TypeError:
        # A TypeError here indicates the pack name could not be resolved to a path.
        click.secho(f'Please verify the pack path is correct: {self.pack}.', fg='red')
        sys.exit(1)
    # Renamed files show up as (old path, new path) tuples - keep only the new path,
    # then normalize every entry to a valid release-notes file path.
    self.modified_files_in_pack = {
        self.check_for_release_notes_valid_file_path(
            entry[1] if isinstance(entry, tuple) else entry)
        for entry in modified_files_in_pack
    }
    self.added_files = added_files
    self.pre_release = pre_release
    self.specific_version = specific_version
    # Tracks whether an existing release-notes file was changed during this run.
    self.existing_rn_changed = False
    self.text = text
    self.existing_rn_version_path = existing_rn_version_path
    self.should_delete_existing_rn = False
    self.pack_metadata_only = pack_metadata_only
    self.metadata_path = os.path.join(self.pack_path, 'pack_metadata.json')
    self.master_version = self.get_master_version()
def __init__(self, pack):
    """Initialize the pack validator with the pack's name, its resolved path and the
    names of the files unique to a content pack: the secrets whitelist, the
    pack-ignore file, the pack metadata file and the README.

    :param pack: content package name, which is the directory name of the pack
    """
    # Collected validation error messages.
    self._errors = []
    # File names that are unique to a content pack.
    self.secrets_file = PACKS_WHITELIST_FILE_NAME
    self.pack_ignore_file = PACKS_PACK_IGNORE_FILE_NAME
    self.pack_meta_file = PACKS_PACK_META_FILE_NAME
    self.readme_file = PACKS_README_FILE_NAME
    self.pack = pack
    self.pack_path = pack_name_to_path(self.pack)
def update_api_modules_dependents_rn(pre_release: bool, update_type: Union[str, None],
                                     added: Union[list, set], modified: Union[list, set],
                                     id_set_path: Optional[str] = None, text: str = '') -> set:
    """Update release notes for every pack that depends on a changed API module.

    :param pre_release: whether the release notes are for a pre-release version
    :param update_type: the version-bump type to apply, or None
    :param added: the added files
    :param modified: the modified files
    :param id_set_path: path to id_set.json; falls back to DEFAULT_ID_SET_PATH when omitted
    :param text: free text to add to the release-notes files
    :rtype: ``set``
    :return: the set of pack names whose release notes were updated
    """
    total_updated_packs: set = set()
    if not id_set_path:
        if not os.path.isfile(DEFAULT_ID_SET_PATH):
            # Without an id set we cannot map API modules to their dependents.
            print_error(
                "Failed to update integrations dependent on the APIModule pack - no id_set.json is "
                "available. Please run `demisto-sdk create-id-set` to generate it, and rerun this command."
            )
            return total_updated_packs
        id_set_path = DEFAULT_ID_SET_PATH

    with open(id_set_path, 'r') as id_set_file:
        id_set = json.load(id_set_file)

    # Collect every API module touched by either the added or the modified files.
    api_module_set = get_api_module_ids(added) | get_api_module_ids(modified)
    print_warning(
        f"Changes were found in the following APIModules: {api_module_set}, updating all dependent "
        f"integrations.")

    dependent_integrations = get_api_module_integrations_set(api_module_set,
                                                             id_set.get('integrations', []))
    for integration in dependent_integrations:
        pack_name = integration.get('pack')
        rn_updater = UpdateRN(pack_path=pack_name_to_path(pack_name),
                              update_type=update_type,
                              modified_files_in_pack={integration.get('file_path')},
                              pre_release=pre_release,
                              added_files=set(),
                              pack=pack_name,
                              text=text)
        if rn_updater.execute_update():
            total_updated_packs.add(pack_name)
    return total_updated_packs
def __init__(self, pack: str, update_type: str, pack_files: set, pre_release: bool = False):
    """Set up a release-notes update for a single pack's changed files.

    :param pack: content package name, which is the directory name of the pack
    :param update_type: the version-bump type to apply
    :param pack_files: paths of the pack's files that were changed
    :param pre_release: whether the release notes are for a pre-release version
    """
    self.pack_files = pack_files
    self.pre_release = pre_release
    self.update_type = update_type
    self.pack = pack
    self.pack_meta_file = PACKS_PACK_META_FILE_NAME
    # Resolve the pack directory and the metadata file inside it.
    self.pack_path = pack_name_to_path(self.pack)
    self.metadata_path = os.path.join(self.pack_path, 'pack_metadata.json')
def __init__(self, pack: str, update_type: Union[str, None], pack_files: set, added_files: set,
             specific_version: Optional[str] = None, pre_release: bool = False):
    """Set up a release-notes update for a single pack's changed and added files.

    :param pack: content package name, which is the directory name of the pack
    :param update_type: the version-bump type to apply, or None
        (exact accepted values not visible here — TODO confirm against caller)
    :param pack_files: paths of the pack's files that were modified
    :param added_files: paths of files that were newly added
    :param specific_version: an explicit version to set instead of bumping
    :param pre_release: whether the release notes are for a pre-release version
    """
    self.pack = pack
    self.update_type = update_type
    self.pack_meta_file = PACKS_PACK_META_FILE_NAME
    # Resolve the pack directory and the metadata file inside it.
    self.pack_path = pack_name_to_path(self.pack)
    self.metadata_path = os.path.join(self.pack_path, 'pack_metadata.json')
    self.pack_files = pack_files
    self.added_files = added_files
    self.pre_release = pre_release
    self.specific_version = specific_version
    # Tracks whether an existing release-notes file was changed during this run.
    self.existing_rn_changed = False
def create_pack_release_notes(self, pack: str, filtered_modified_files: set,
                              filtered_added_files: set, old_format_files: set):
    """ Creates the release notes for a given pack if was changed.

        :param
            pack: The pack to create release notes for
            filtered_modified_files: A set of filtered modified files
            filtered_added_files: A set of filtered added files
            old_format_files: A set of old formatted files

        :raises RuntimeError: if a new release-notes file already exists for the pack
            and no update_type was specified.
    """
    existing_rn_version = self.get_existing_rn(pack)
    if existing_rn_version is None:  # New release notes file already found for the pack
        raise RuntimeError(f"New release notes file already found for {pack}. "
                           f"Please update manually or run `demisto-sdk update-release-notes "
                           f"-i {pack}` without specifying the update_type.")
    # Narrow each change set down to the files belonging to this pack.
    pack_modified = filter_files_on_pack(pack, filtered_modified_files)
    pack_added = filter_files_on_pack(pack, filtered_added_files)
    pack_old = filter_files_on_pack(pack, old_format_files)

    # Checks if update is required
    if pack_modified or pack_added or pack_old or self.is_force:
        pack_path = pack_name_to_path(pack)
        update_pack_rn = UpdateRN(pack_path=pack_path, update_type=self.update_type,
                                  modified_files_in_pack=pack_modified.union(pack_old),
                                  pre_release=self.pre_release,
                                  added_files=pack_added, specific_version=self.specific_version,
                                  text=self.text, is_force=self.is_force,
                                  existing_rn_version_path=existing_rn_version,
                                  is_bc=self.is_bc)
        updated = update_pack_rn.execute_update()
        self.rn_path.append(update_pack_rn.rn_path)

        # If new release notes were created add it to the total number of packs that were updated.
        if updated:
            self.total_updated_packs.add(pack)

        # If there is an outdated previous release notes, remove it (for example: User updated his version to
        # 1.0.4 and meanwhile the master version changed to 1.0.4, so we want to remove the user's 1_0_4 file
        # and add a 1_0_5 file.)
        if update_pack_rn.should_delete_existing_rn:
            os.unlink(self.packs_existing_rn[pack])
    else:
        print_warning(f'Either no changes were found in {pack} pack '
                      f'or the changes found should not be documented in the release notes file.\n'
                      f'If relevant changes were made, please commit the changes and rerun the command.')
def __init__(self, pack_path: str, update_type: Union[str, None], modified_files_in_pack: set,
             added_files: set, specific_version: str = None, pre_release: bool = False,
             pack: str = None, pack_metadata_only: bool = False, text: str = ''):
    """Set up a release-notes update run for a single pack.

    :param pack_path: path to the pack directory; used to derive the pack name when
        ``pack`` is not supplied
    :param update_type: the version-bump type to apply, or None
    :param modified_files_in_pack: modified file paths; renamed files appear as
        (old path, new path) tuples
    :param added_files: paths of newly added files
    :param specific_version: an explicit version to set instead of bumping
    :param pre_release: whether the release notes are for a pre-release version
    :param pack: content package name; derived from ``pack_path`` when omitted
    :param pack_metadata_only: whether only pack_metadata.json changed
    :param text: free text to put into the release-notes file
    """
    self.pack = pack if pack else get_pack_name(pack_path)
    self.update_type = update_type
    self.pack_meta_file = PACKS_PACK_META_FILE_NAME
    self.pack_path = pack_name_to_path(self.pack)
    # Renamed files show up as (old path, new path) tuples - keep only the new path,
    # then normalize every entry to a valid release-notes file path.
    self.modified_files_in_pack = {
        self.check_for_release_notes_valid_file_path(
            entry[1] if isinstance(entry, tuple) else entry)
        for entry in modified_files_in_pack
    }
    self.added_files = added_files
    self.pre_release = pre_release
    self.specific_version = specific_version
    # Tracks whether an existing release-notes file was changed during this run.
    self.existing_rn_changed = False
    self.text = text
    self.pack_metadata_only = pack_metadata_only
    try:
        self.metadata_path = os.path.join(self.pack_path, 'pack_metadata.json')
    except TypeError:
        # os.path.join raises TypeError when the pack path could not be resolved (None).
        print_error(f"pack_metadata.json was not found for the {self.pack} pack. Please verify "
                    f"the pack path is correct.")
        sys.exit(1)