Example #1
    def build_custom_content_object(self, file_path: str) -> dict:
        """
        Build the custom content object that represents a custom content entity instance.
        For example: integration-HelloWorld.yml downloaded from Demisto.
        """
        file_data, file_ending = get_dict_from_file(
            file_path)  # For example: yml, for integration files
        file_type = find_type(
            path=file_path, _dict=file_data,
            file_type=file_ending)  # For example: integration
        if file_type:
            file_type = file_type.value

        file_entity = self.file_type_to_entity(
            file_data, file_type)  # For example: Integrations
        file_id: str = get_entity_id_by_entity_type(file_data, file_entity)
        file_name: str = get_entity_name_by_entity_type(file_data, file_entity)

        custom_content_object: dict = {
            'id': file_id,
            'name': file_name,
            'path': file_path,
            'entity': file_entity,
            'type': file_type,
            'file_ending': file_ending,
        }

        file_code_language = get_code_lang(file_data, file_entity)
        if file_code_language:
            custom_content_object['code_lang'] = file_code_language

        return custom_content_object
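The returned object is a plain dict. A minimal sketch of what it might look like for the integration-HelloWorld.yml mentioned in the docstring (all field values are illustrative assumptions, not taken from the source):

# Hypothetical result of build_custom_content_object('integration-HelloWorld.yml'):
custom_content_object = {
    'id': 'HelloWorld',               # entity id read from the yml
    'name': 'HelloWorld',             # entity name read from the yml
    'path': 'integration-HelloWorld.yml',
    'entity': 'Integrations',         # entity directory name
    'type': 'integration',            # FileType value
    'file_ending': 'yml',
    'code_lang': 'python',            # added only when a code language is detected
}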
Example #2
def test_integration_format_yml_with_no_test_negative(
        tmp_path: PosixPath, source_path: str, destination_path: str,
        formatter: BaseUpdateYML, yml_title: str, file_type: str):
    """
        Given
        - A yml file (integration, playbook or script) with no 'tests' configured

        When
        - Entering 'N' into the prompt message that asks the user whether to add 'No tests' to the file

        Then
        -  Ensure no exception is raised
        -  Ensure 'No tests' is not added
    """
    saved_file_path = str(tmp_path / os.path.basename(destination_path))
    runner = CliRunner()
    result = runner.invoke(
        main, [FORMAT_CMD, '-i', source_path, '-o', saved_file_path],
        input='N')
    assert not result.exception
    prompt = f'The file {source_path} has no test playbooks configured. Do you want to configure it with "No tests"'
    assert prompt in result.output
    yml_content = get_dict_from_file(saved_file_path)
    assert not yml_content[0].get('tests')
    os.remove(saved_file_path)
Example #3
def update_json(path, key, value):
    import json

    js = get_dict_from_file(path=path)[0]
    js[key] = value
    with open(path, 'w') as f:
        json.dump(js, f)
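A one-line usage sketch for update_json, assuming a hypothetical pack_metadata.json whose currentVersion should be bumped:

# Hypothetical call: reads the JSON via get_dict_from_file, updates one key and rewrites the file.
update_json(path='Packs/HelloWorld/pack_metadata.json', key='currentVersion', value='1.0.1')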
Example #4
def get_new_entity_record(entity_path: str) -> Tuple[str, str]:
    data, _ = get_dict_from_file(entity_path)

    if 'layouts' in entity_path.lower():
        layout_kind = LAYOUT_TYPE_TO_NAME.get(data.get('kind', ''))
        type_id = data.get('typeId', '')
        if not type_id:
            return f'{data.get("id")}', f'(Available from Cortex XSOAR {data.get("fromVersion")})'
        return f'{type_id} - {layout_kind}', ''

    name = data.get('name', entity_path)
    if 'integrations' in entity_path.lower() and data.get('display'):
        name = data.get('display')

    if 'classifiers' in entity_path.lower():
        name = data.get('name')
        if not name:
            name = data.get('brandName')

    if name == entity_path:
        logging.error(f'missing name for {entity_path}')

    # script entities have "comment" instead of "description"
    description = data.get('description', '') or data.get('comment', '')
    if not description:
        logging.warning(f'missing description for {entity_path}')

    return name, description
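A sketch of the two return shapes, using hypothetical file paths and values (assumptions for illustration only):

# Layout file without a typeId: falls back to the id plus a fromVersion note, e.g.
# ('my-layout-id', '(Available from Cortex XSOAR 6.0.0)')
name, description = get_new_entity_record('Packs/Hello/Layouts/layout-details.json')

# Integration file: prefers 'display' over 'name' and uses 'description', e.g.
# ('HelloWorld', 'A sample integration.')
name, description = get_new_entity_record('Packs/Hello/Integrations/HelloWorld/HelloWorld.yml')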
Example #5
    def __init__(self,
                 input: str = '',
                 output: str = '',
                 path: str = '',
                 from_version: str = '',
                 no_validate: bool = False,
                 verbose: bool = False):
        self.source_file = input
        self.output_file = self.set_output_file_path(output)
        _, self.relative_content_path = is_file_from_content_repo(
            self.output_file)
        self.old_file = self.is_old_file(
            self.relative_content_path if self.relative_content_path else self.output_file)
        self.schema_path = path
        self.from_version = from_version
        self.no_validate = no_validate
        self.verbose = verbose

        if not self.source_file:
            raise Exception(
                'Please provide <source path>, <optional - destination path>.')
        try:
            self.data, self.file_type = get_dict_from_file(self.source_file,
                                                           use_ryaml=True)
        except Exception:
            raise Exception(
                F'Provided file {self.source_file} is not a valid file.')
        self.from_version_key = self.set_from_version_key_name()
Example #6
    def update_data(file_path_to_write: str, file_path_to_read: str,
                    file_ending: str) -> None:
        """
        Collects specially chosen fields from the file_path_to_read and writes them into the file_path_to_write.
        :param file_path_to_write: The output file path to add the special fields to.
        :param file_path_to_read: The input file path to read the special fields from.
        :param file_ending: The file ending (yml or json)
        :return: None
        """

        pack_obj_data, _ = get_dict_from_file(file_path_to_read)
        fields: list = DELETED_YML_FIELDS_BY_DEMISTO if file_ending == 'yml' else DELETED_JSON_FIELDS_BY_DEMISTO
        # Creates a nested-complex dict of all fields to be deleted by Demisto.
        # We need the dict to be nested, to easily merge it later to the file data.
        preserved_data: dict = unflatten(
            {
                field: dictor(pack_obj_data, field)
                for field in fields if dictor(pack_obj_data, field)
            },
            splitter='dot')

        if file_ending == 'yml':
            with open(file_path_to_write, 'r') as yf:
                file_yaml_object = yaml.load(yf)
            if pack_obj_data:
                merge(file_yaml_object, preserved_data)
            with open(file_path_to_write, 'w') as yf:
                yaml.dump(file_yaml_object, yf)

        elif file_ending == 'json':
            file_data: dict = get_json(file_path_to_write)
            if pack_obj_data:
                merge(file_data, preserved_data)
            with open(file_path_to_write, 'w') as jf:
                json.dump(obj=file_data, fp=jf, indent=4)
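The preserved_data step above leans on dictor to read dotted paths and unflatten to rebuild the nesting. A minimal sketch of that step in isolation, with hypothetical field names:

from dictor import dictor
from flatten_dict import unflatten

pack_obj_data = {'script': {'type': 'python', 'dockerimage': 'demisto/python3:latest'}}
fields = ['script.dockerimage', 'tests']  # hypothetical fields deleted by Demisto

preserved_data = unflatten(
    {field: dictor(pack_obj_data, field) for field in fields if dictor(pack_obj_data, field)},
    splitter='dot')
# -> {'script': {'dockerimage': 'demisto/python3:latest'}}, ready to merge back into the file data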
Example #7
    def __init__(self,
                 input: str = '',
                 output: str = '',
                 path: str = '',
                 from_version: str = '',
                 no_validate: bool = False,
                 verbose: bool = False,
                 assume_yes: bool = False,
                 interactive: bool = True,
                 clear_cache: bool = False,
                 **kwargs):
        self.source_file = input
        self.output_file = self.set_output_file_path(output)
        self.verbose = verbose
        _, self.relative_content_path = is_file_from_content_repo(
            self.output_file)
        self.old_file = self.is_old_file(
            self.relative_content_path
            if self.relative_content_path else self.output_file, self.verbose)
        self.schema_path = path
        self.from_version = from_version
        self.no_validate = no_validate
        self.assume_yes = assume_yes
        self.interactive = interactive
        self.updated_ids: Dict = {}
        if not self.no_validate:
            self.validate_manager = ValidateManager(
                silence_init_prints=True,
                skip_conf_json=True,
                skip_dependencies=True,
                skip_pack_rn_validation=True,
                check_is_unskipped=False,
                validate_id_set=False)

        if not self.source_file:
            raise Exception(
                'Please provide <source path>, <optional - destination path>.')
        try:
            self.data, self.file_type = get_dict_from_file(
                self.source_file, clear_cache=clear_cache)
        except Exception:
            raise Exception(
                F'Provided file {self.source_file} is not a valid file.')
        self.from_version_key = self.set_from_version_key_name()
        self.id_set_file, _ = get_dict_from_file(
            path=kwargs.get('id_set_path'))  # type: ignore[arg-type]
Example #8
    def __init__(self,
                 configuration_file_path,
                 json_file_path=None,
                 ignored_errors=None,
                 print_as_warnings=False,
                 suppress_print=False):
        super().__init__(ignored_errors=ignored_errors,
                         print_as_warnings=print_as_warnings,
                         suppress_print=suppress_print,
                         json_file_path=json_file_path)
        self._is_valid = True
        self.configuration_file_path = configuration_file_path
        self.schema_path = os.path.normpath(
            os.path.join(__file__, '..', '..', 'schemas', 'xsoar_config.json'))
        self.configuration_json = self.load_xsoar_configuration_file()
        self.schema_json, _ = get_dict_from_file(self.schema_path)
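The schema loaded here describes xsoar_config.json. A hedged sketch of how the loaded configuration could be checked against it with the jsonschema package; this validation helper is an assumption, not part of the source:

from jsonschema import ValidationError, validate  # assumption: jsonschema is installed

def is_valid_xsoar_config(configuration_json: dict, schema_json: dict) -> bool:
    """Hypothetical helper: validate the loaded configuration against the loaded schema."""
    try:
        validate(instance=configuration_json, schema=schema_json)
        return True
    except ValidationError:
        return False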
Example #9
    def validate_json_when_breaking_changes(self) -> bool:
        """
        In case of a breaking change in the release note, ensure the existence of a proper json file.
        """
        is_valid = True
        if 'breaking change' in self.latest_release_notes.lower():
            json_path = self.release_notes_file_path[:-2] + 'json'
            error_message, error_code = Errors.release_notes_bc_json_file_missing(json_path)
            try:
                json_file_content = get_dict_from_file(path=json_path)[0]  # extract only the dictionary
                if 'breakingChanges' not in json_file_content or not json_file_content.get('breakingChanges'):
                    if self.handle_error(error_message, error_code, self.release_notes_file_path):
                        is_valid = False
            except FileNotFoundError:
                if self.handle_error(error_message, error_code, self.release_notes_file_path):
                    is_valid = False
        return is_valid
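A sketch of the companion json file this check expects next to the release note, e.g. a 1_2_0.json beside 1_2_0.md (file name and text are illustrative assumptions):

# Hypothetical 1_2_0.json content; get_dict_from_file(path=json_path)[0] would return:
json_file_content = {
    'breakingChanges': True,
    'breakingChangesNotes': 'The *helloworld-get-items* command no longer returns raw output.',
}
# With a truthy 'breakingChanges' key the check above passes without reporting an error.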
Example #10
def get_new_entity_record(entity_path: str) -> Tuple[str, str]:
    data, _ = get_dict_from_file(entity_path)

    if 'layouts' in entity_path.lower():
        layout_kind = LAYOUT_TYPE_TO_NAME.get(data.get('kind', ''))
        type_id = data.get('typeId', '')
        return f'{type_id} - {layout_kind}', ''

    name = data.get('name', '')
    if 'integrations' in entity_path.lower() and data.get('display'):
        name = data.get('display')

    if not name:
        print_error(f'missing name for {entity_path}')

    # script entities have "comment" instead of "description"
    description = data.get('description', '') or data.get('comment', '')
    if not description:
        print_warning(f'missing description for {entity_path}')

    return name, description
Example #11
def test_integration_format_yml_with_no_test_no_interactive_positive(
        tmp_path: PosixPath, source_path: str, destination_path: str,
        formatter: BaseUpdateYML, yml_title: str, file_type: str):
    """
        Given
        - A yml file (integration, playbook or script) with no 'tests' configured

        When
        - using the '-y' option

        Then
        -  Ensure no exception is raised
        -  Ensure 'No tests' is added the first time
    """
    saved_file_path = str(tmp_path / os.path.basename(destination_path))
    runner = CliRunner()
    # Running format for the first time
    result = runner.invoke(
        main, [FORMAT_CMD, '-i', source_path, '-o', saved_file_path, '-y'])
    assert not result.exception
    yml_content = get_dict_from_file(saved_file_path)
    assert yml_content[0].get('tests') == ['No tests (auto formatted)']
Example #12
    def _get_incident_fields_by_aliases(self, aliases: List[dict]):
        """Get from the id_set the actual fields for the given aliases

        Args:
            aliases (list): The alias list.

        Returns:
            A generator that yields a tuple of the incident field and its path for each alias in the given list.
        """
        alias_ids: set = {
            f'incident_{alias.get("cliName")}'
            for alias in aliases
        }
        id_set = open_id_set_file(self.id_set_path)
        incident_field_list: list = id_set.get('IncidentFields')

        for incident_field in incident_field_list:
            field_id = list(incident_field.keys())[0]
            if field_id in alias_ids:
                alias_data = incident_field[field_id]
                alias_file_path = alias_data.get('file_path')
                aliased_field, _ = get_dict_from_file(path=alias_file_path)

                yield aliased_field, alias_file_path
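A sketch of the id_set shape this generator assumes: 'IncidentFields' is a list of single-key dicts keyed by the field id (the values below are illustrative assumptions):

# Hypothetical slice of the id_set returned by open_id_set_file():
id_set = {
    'IncidentFields': [
        {'incident_originalalias': {
            'name': 'Original Alias',
            'file_path': 'Packs/Core/IncidentFields/incidentfield-originalalias.json',
        }},
    ],
}
# For aliases = [{'cliName': 'originalalias'}], the generator parses that file_path with
# get_dict_from_file and yields (aliased_field, alias_file_path).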
Example #13
    def test_get_dict_from_file(self, path, _type):
        output = get_dict_from_file(str(path))[1]
        assert output == _type, f'get_dict_from_file({path}) returns: {output} instead of {_type}'
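The test only checks the second element of the returned tuple, the detected file ending. A minimal sketch of the API shape it relies on, with a hypothetical path:

# get_dict_from_file returns a (content, file_ending) tuple; index [1] is the detected type.
data, file_ending = get_dict_from_file('Packs/HelloWorld/Integrations/HelloWorld/HelloWorld.yml')
assert isinstance(data, dict)
assert file_ending == 'yml'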