Example #1
    def get_main_file_details(content_entity: str,
                              entity_instance_path: str) -> tuple:
        """
        Returns the details of the "main" file within an entity instance.
        For example: In the HelloWorld integration under Packs/HelloWorld, the main file is the yml file.
        It contains all relevant ids and names for all the files under the HelloWorld integration dir.
        :param content_entity: The content entity, for example Integrations
        :param entity_instance_path: For example: ~/.../content/Packs/TestPack/Integrations/HelloWorld
        :return: The main file id & name
        """
        main_file_data: dict = dict()
        main_file_path: str = str()

        # Entities which contain yml files
        if content_entity in (INTEGRATIONS_DIR, SCRIPTS_DIR, PLAYBOOKS_DIR,
                              TEST_PLAYBOOKS_DIR):
            if os.path.isdir(entity_instance_path):
                _, main_file_path = get_yml_paths_in_dir(entity_instance_path)
            elif os.path.isfile(entity_instance_path):
                main_file_path = entity_instance_path

            if main_file_path:
                main_file_data = get_yaml(main_file_path)

        # Entities which are json files (md files are ignored - changelog/readme)
        else:
            if os.path.isfile(entity_instance_path) and retrieve_file_ending(
                    entity_instance_path) == 'json':
                main_file_data = get_json(entity_instance_path)

        main_id = get_entity_id_by_entity_type(main_file_data, content_entity)
        main_name = get_entity_name_by_entity_type(main_file_data,
                                                   content_entity)

        return main_id, main_name
Example #2
 def get_pack_metadata(self):
     try:
         data_dictionary = get_json(self.metadata_path)
     except FileNotFoundError:
         print_error(f"Pack {self.pack} was not found. Please verify the pack name is correct.")
         sys.exit(1)
     return data_dictionary
Example #3
    def update_data(file_path_to_write: str, file_path_to_read: str,
                    file_ending: str) -> None:
        """
        Collects specially chosen fields from file_path_to_read and writes them into file_path_to_write.
        :param file_path_to_write: The output file path to add the special fields to.
        :param file_path_to_read: The input file path to read the special fields from.
        :param file_ending: The file's ending ('yml' or 'json')
        :return: None
        """

        pack_obj_data, _ = get_dict_from_file(file_path_to_read)
        fields: list = DELETED_YML_FIELDS_BY_DEMISTO if file_ending == 'yml' else DELETED_JSON_FIELDS_BY_DEMISTO
        # Creates a nested-complex dict of all fields to be deleted by Demisto.
        # We need the dict to be nested, to easily merge it later to the file data.
        preserved_data: dict = unflatten(
            {
                field: dictor(pack_obj_data, field)
                for field in fields if dictor(pack_obj_data, field)
            },
            splitter='dot')

        if file_ending == 'yml':
            with open(file_path_to_write, 'r') as yf:
                file_yaml_object = yaml.load(yf)
            if pack_obj_data:
                merge(file_yaml_object, preserved_data)
            with open(file_path_to_write, 'w') as yf:
                yaml.dump(file_yaml_object, yf)

        elif file_ending == 'json':
            file_data: dict = get_json(file_path_to_write)
            if pack_obj_data:
                merge(file_data, preserved_data)
            with open(file_path_to_write, 'w') as jf:
                json.dump(obj=file_data, fp=jf, indent=4)
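The update_data flow above leans on three helpers (dictor, unflatten and merge) to pull dotted-path fields out of the file being read and fold them back into the file being written. Below is a self-contained sketch of that same preserve-and-merge idea in plain Python, so the pattern is visible without those libraries; the field list and sample data are hypothetical.

def collect_nested(source: dict, dotted_fields: list) -> dict:
    """Build a nested dict holding only the dotted-path fields that are present (and truthy) in source."""
    preserved: dict = {}
    for field in dotted_fields:
        value = source
        for part in field.split('.'):
            value = value.get(part) if isinstance(value, dict) else None
            if value is None:
                break
        if not value:
            continue  # mirrors the `if dictor(pack_obj_data, field)` truthiness filter above
        parts = field.split('.')
        target = preserved
        for part in parts[:-1]:
            target = target.setdefault(part, {})
        target[parts[-1]] = value
    return preserved

def deep_merge(dest: dict, src: dict) -> None:
    """Recursively merge src into dest (src wins on leaf conflicts), like merge() above."""
    for key, value in src.items():
        if isinstance(value, dict) and isinstance(dest.get(key), dict):
            deep_merge(dest[key], value)
        else:
            dest[key] = value

# Hypothetical data: preserve server-side fields that would otherwise be stripped from the local file.
FIELDS_TO_PRESERVE = ['system', 'sourceClassifier.id', 'locked']
downloaded = {'name': 'MyLayout', 'system': True, 'sourceClassifier': {'id': 'abc'}}
local_copy = {'name': 'MyLayout'}
deep_merge(local_copy, collect_nested(downloaded, FIELDS_TO_PRESERVE))
# local_copy == {'name': 'MyLayout', 'system': True, 'sourceClassifier': {'id': 'abc'}}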
Example #4
    def incident_type_uploader(self, path: str):
        file_name = os.path.basename(path)
        new_file_path = ''  # initialized up front so the cleanup in `finally` can always reference it

        try:
            # Wrap the incident object with a list to be compatible with Cortex XSOAR
            incident_types_unified_data = [get_json(path)]
            # Create a temp file object
            incidents_unified_file = NamedTemporaryFile(
                dir=f'{os.path.dirname(path)}', suffix='.json', delete=False)
            incidents_unified_file.write(
                bytes(json.dumps(incident_types_unified_data), 'utf-8'))
            new_file_path = incidents_unified_file.name
            incidents_unified_file.close()

            # Upload the file to Cortex XSOAR
            result = self.client.import_incident_types_handler(
                file=new_file_path)

            # Print results
            print_v(f'Result:\n{result.to_str()}', self.log_verbose)
            print_color(
                f'Uploaded incident type - \'{os.path.basename(path)}\': successfully',
                LOG_COLORS.GREEN)
            self.successfully_uploaded_files.append(
                (file_name, 'Incident Type'))

        except Exception as err:
            self._parse_error_response(err, 'incident type', file_name)
            self.failed_uploaded_files.append((file_name, 'Incident Type'))
            self.status_code = 1

        finally:
            if new_file_path:
                self._remove_temp_file(new_file_path)
Example #5
def get_incident_type_data(path):
    data = OrderedDict()
    json_data = get_json(path)

    id_ = json_data.get('id')
    name = json_data.get('name', '')
    fromversion = json_data.get('fromVersion')
    toversion = json_data.get('toVersion')
    playbook_id = json_data.get('playbookId')
    pre_processing_script = json_data.get('preProcessingScript')
    pack = get_pack_name(path)

    if name:
        data['name'] = name
    data['file_path'] = path
    if toversion:
        data['toversion'] = toversion
    if fromversion:
        data['fromversion'] = fromversion
    if pack:
        data['pack'] = pack
    if playbook_id and playbook_id != '':
        data['playbooks'] = playbook_id
    if pre_processing_script and pre_processing_script != '':
        data['scripts'] = pre_processing_script

    return {id_: data}
Example #6
def get_layout_data(path):
    data = OrderedDict()
    json_data = get_json(path)
    layout = json_data.get('layout')
    name = layout.get('name', '-')
    id_ = json_data.get('id', layout.get('id', '-'))
    type_ = json_data.get('typeId')
    type_name = json_data.get('TypeName')
    fromversion = json_data.get('fromVersion')
    toversion = json_data.get('toVersion')
    kind = json_data.get('kind')
    pack = get_pack_name(path)

    if type_:
        data['typeID'] = type_
    if type_name:
        data['typename'] = type_name
    data['name'] = name
    if toversion:
        data['toversion'] = toversion
    if fromversion:
        data['fromversion'] = fromversion
    if pack:
        data['pack'] = pack
    if kind:
        data['kind'] = kind
    data['path'] = path

    return {id_: data}
Example #7
def get_classifier_data(path):
    data = OrderedDict()
    json_data = get_json(path)

    id_ = json_data.get('id')
    name = json_data.get('name', '')
    fromversion = json_data.get('fromVersion')
    toversion = json_data.get('toVersion')
    pack = get_pack_name(path)
    incidents_types = set()

    default_incident_type = json_data.get('defaultIncidentType')
    if default_incident_type and default_incident_type != '':
        incidents_types.add(default_incident_type)
    key_type_map = json_data.get('keyTypeMap', {})
    for value in key_type_map.values():
        incidents_types.add(value)

    if name:
        data['name'] = name
    data['file_path'] = path
    if toversion:
        data['toversion'] = toversion
    if fromversion:
        data['fromversion'] = fromversion
    if pack:
        data['pack'] = pack
    if incidents_types:
        data['incident_types'] = list(incidents_types)

    return {id_: data}
Example #8
def get_layoutscontainer_data(path):
    json_data = get_json(path)
    layouts_container_fields = ["group", "edit", "indicatorsDetails", "indicatorsQuickView", "quickView", "close",
                                "details", "detailsV2", "mobile", "name"]
    data = OrderedDict({field: json_data[field] for field in layouts_container_fields if json_data.get(field)})

    id_ = json_data.get('id')
    pack = get_pack_name(path)
    incident_indicator_types_dependency = {id_}
    incident_indicator_fields_dependency = get_values_for_keys_recursively(json_data, ['fieldId'])

    if data.get('name'):
        incident_indicator_types_dependency.add(data['name'])
    if json_data.get('toVersion'):
        data['toversion'] = json_data['toVersion']
    if json_data.get('fromVersion'):
        data['fromversion'] = json_data['fromVersion']
    if pack:
        data['pack'] = pack
    data['file_path'] = path
    data['incident_and_indicator_types'] = list(incident_indicator_types_dependency)
    if incident_indicator_fields_dependency['fieldId']:
        data['incident_and_indicator_fields'] = incident_indicator_fields_dependency['fieldId']

    return {id_: data}
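The layouts-container example relies on a helper, get_values_for_keys_recursively, whose implementation is not shown here. As a rough sketch of how such a helper could behave (an assumption about its contract, not the actual code), it walks the nested JSON and collects every scalar value stored under the requested keys:

from typing import Any, Dict, List

def get_values_for_keys_recursively(obj: Any, keys_to_search: List[str]) -> Dict[str, list]:
    """Collect every value found under any of keys_to_search, at any nesting depth."""
    found: Dict[str, list] = {key: [] for key in keys_to_search}

    def walk(node: Any) -> None:
        if isinstance(node, dict):
            for key, value in node.items():
                if key in found and not isinstance(value, (dict, list)):
                    found[key].append(value)
                walk(value)
        elif isinstance(node, list):
            for item in node:
                walk(item)

    walk(obj)
    return found

# e.g. get_values_for_keys_recursively(json_data, ['fieldId'])['fieldId'] would list every
# fieldId referenced anywhere in the layout, which is what feeds incident_and_indicator_fields.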
Example #9
def get_file_description(path, file_type) -> str:
    """ Gets the file description.

        :param
            path: The file path
            file_type: The file type

        :rtype: ``str``
        :return
        The file description if exists otherwise returns %%UPDATE_RN%%
    """
    if not os.path.isfile(path):
        print_warning(
            f'Cannot get file description: "{path}" file does not exist')
        return ''

    elif file_type in (FileType.PLAYBOOK, FileType.INTEGRATION):
        yml_file = get_yaml(path)
        return yml_file.get('description', '')

    elif file_type == FileType.SCRIPT:
        yml_file = get_yaml(path)
        return yml_file.get('comment', '')

    elif file_type in (FileType.CLASSIFIER, FileType.REPORT, FileType.WIDGET,
                       FileType.DASHBOARD, FileType.JOB):
        json_file = get_json(path)
        return json_file.get('description', '')

    return '%%UPDATE_RN%%'
Example #10
 def is_file_structure_list(self) -> bool:
     """
     Checks whether the content of the file has a structure of a list.
     Assuming the file is a valid json file, use this to determine whether the file holds a list of values or a dictionary.
     """
     data = get_json(str(self.path))
     return isinstance(data, list)
Example #11
    def create_json_output(self) -> None:
        """Creates a JSON file output for lints"""
        if not self.json_file_path:
            return

        if os.path.exists(self.json_file_path):
            json_contents = get_json(self.json_file_path)

        else:
            json_contents = {}

        # format all linters to JSON format -
        # if any additional linters are added, please add a formatting function here
        for check in self.linters_error_list:
            if check.get('linter') == 'flake8':
                self.flake8_error_formatter(check, json_contents)
            elif check.get('linter') == 'mypy':
                self.mypy_error_formatter(check, json_contents)
            elif check.get('linter') == 'bandit':
                self.bandit_error_formatter(check, json_contents)
            elif check.get('linter') == 'vulture':
                self.vulture_error_formatter(check, json_contents)
            elif check.get('linter') == 'XSOAR_linter':
                self.xsoar_linter_error_formatter(check, json_contents)

        with open(self.json_file_path, 'w') as f:
            json.dump(json_contents, f, indent=4)
Example #12
def get_dict_from_file(path: str) -> Tuple[Dict, Union[str, None]]:
    if path:
        if path.endswith('.yml'):
            return get_yaml(path), 'yml'
        elif path.endswith('.json'):
            return get_json(path), 'json'
    return {}, None
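A brief usage sketch for get_dict_from_file; the path below is hypothetical and only illustrates how the returned file-ending marker drives the caller's branching:

data, file_ending = get_dict_from_file('Packs/MyPack/pack_metadata.json')  # hypothetical path
if file_ending == 'json':
    version = data.get('currentVersion')
elif file_ending == 'yml':
    version = data.get('fromversion')
else:
    version = None  # unsupported extension: get_dict_from_file returned ({}, None)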
Example #13
    def _is_price_changed(self) -> bool:
        # only check on private repo
        if not self.private_repo:
            return True

        metadata_file_path = self._get_pack_file_path(self.pack_meta_file)
        old_meta_file_content = self.get_master_private_repo_meta_file(
            metadata_file_path)

        # if there was no past version or running on master branch
        if not old_meta_file_content:
            return True

        current_meta_file_content = get_json(metadata_file_path)
        current_price = current_meta_file_content.get('price')
        old_price = old_meta_file_content.get('price')

        # if a price was added, removed or changed compared to the master version - return an error
        if (old_price and not current_price) or (
                current_price
                and not old_price) or (old_price != current_price):
            if self._add_error(
                    Errors.pack_metadata_price_change(old_price,
                                                      current_price),
                    self.pack_meta_file):
                return False

        return True
Example #14
 def validate_version_bump(self):
     metadata_file_path = self._get_pack_file_path(self.pack_meta_file)
     old_meta_file_content = get_remote_file(metadata_file_path, tag=self.prev_ver)
     current_meta_file_content = get_json(metadata_file_path)
     old_version = old_meta_file_content.get('currentVersion', '0.0.0')
     current_version = current_meta_file_content.get('currentVersion', '0.0.0')
     if LooseVersion(old_version) < LooseVersion(current_version):
         return True
     elif self._add_error(Errors.pack_metadata_version_should_be_raised(self.pack, old_version), metadata_file_path):
         return False
     return True
Example #15
    def json_output(self, file_path: str, error_code: str, error_message: str,
                    warning: bool) -> None:
        """Adds an error's info to the output JSON file

        Args:
            file_path (str): The file path where the error occurred.
            error_code (str): The error code
            error_message (str): The error message
            warning (bool): Whether the error is defined as a warning
        """
        if not self.json_file_path:
            return

        error_data = get_error_object(error_code)

        output = {
            'severity': 'warning' if warning else 'error',
            'errorCode': error_code,
            'message': error_message,
            'ui': error_data.get('ui_applicable'),
            'relatedField': error_data.get('related_field'),
            'linter': 'validate'
        }

        json_contents = []
        existing_json = ''
        if os.path.exists(self.json_file_path):
            try:
                existing_json = get_json(self.json_file_path)
            except ValueError:
                pass
            if isinstance(existing_json, list):
                json_contents = existing_json

        file_type = find_type(file_path)
        entity_type = file_type.value if file_type else 'pack'

        # handling unified yml image errors
        if entity_type == FileType.INTEGRATION.value and error_code.startswith(
                'IM'):
            entity_type = FileType.IMAGE.value

        formatted_error_output = {
            'filePath': file_path,
            'fileType': os.path.splitext(file_path)[1].replace('.', ''),
            'entityType': entity_type,
            'errorType': 'Settings',
            'name': get_file_displayed_name(file_path),
            'linter': 'validate',
            **output
        }
        json_contents.append(formatted_error_output)
        with open(self.json_file_path, 'w') as f:
            json.dump(json_contents, f, indent=4)
Example #16
    def get_pack_metadata(self) -> dict:
        """ Gets the pack metadata.

            :rtype: ``dict``
            :return: The pack metadata dictionary
        """
        try:
            data_dictionary = get_json(self.metadata_path, cache_clear=True)
        except FileNotFoundError as e:
            raise FileNotFoundError(
                f'Pack {self.pack} was not found. Please verify the pack name is correct.'
            ) from e
        return data_dictionary
Example #17
def xsoar_configure_and_install_all_packs(options, branch_name: str,
                                          build_number: str):
    """
    Args:
        options: script arguments.
        branch_name(str): name of the current branch.
        build_number(str): number of the current build flow
    """
    # Get the host by the ami env
    server_to_port_mapping, server_version = XSOARBuild.get_servers(
        ami_env=options.ami_env)

    logging.info('Retrieving the credentials for Cortex XSOAR server')
    secret_conf_file = get_json(file_path=options.secret)
    username: str = secret_conf_file.get('username')
    password: str = secret_conf_file.get('userPassword')

    # Configure the Servers
    for server_url, port in server_to_port_mapping.items():
        server = XSOARServer(internal_ip=server_url,
                             port=port,
                             user_name=username,
                             password=password)
        logging.info(f'Adding Marketplace configuration to {server_url}')
        error_msg: str = 'Failed to set marketplace configuration.'
        server.add_server_configuration(config_dict=MARKET_PLACE_CONFIGURATION,
                                        error_msg=error_msg)
        XSOARBuild.set_marketplace_url(servers=[server],
                                       branch_name=branch_name,
                                       ci_build_number=build_number)

        # Acquire the server's host and install all content packs (one threaded execution)
        logging.info(f'Starting to install all content packs in {server_url}')
        server_host: str = server.client.api_client.configuration.host
        success_flag = install_all_content_packs_from_build_bucket(
            client=server.client,
            host=server_host,
            server_version=server_version,
            bucket_packs_root_path=GCPConfig.
            BUILD_BUCKET_PACKS_ROOT_PATH.format(branch=branch_name,
                                                build=build_number,
                                                marketplace='xsoar'),
            service_account=options.service_account,
            extract_destination_path=options.extract_path)
        if success_flag:
            logging.success(
                f'Finished installing all content packs in {server_url}')
        else:
            logging.error('Failed to install all packs.')
            sys.exit(1)
Example #18
def get_incident_field_data(path, incidents_types_list):
    data = OrderedDict()
    json_data = get_json(path)

    id_ = json_data.get('id')
    name = json_data.get('name', '')
    fromversion = json_data.get('fromVersion')
    toversion = json_data.get('toVersion')
    pack = get_pack_name(path)
    all_associated_types = set()
    all_scripts = set()

    associated_types = json_data.get('associatedTypes')
    if associated_types:
        all_associated_types = set(associated_types)

    system_associated_types = json_data.get('systemAssociatedTypes')
    if system_associated_types:
        all_associated_types = all_associated_types.union(
            set(system_associated_types))

    if 'all' in all_associated_types:
        all_associated_types = [
            list(incident_type.keys())[0]
            for incident_type in incidents_types_list
        ]

    scripts = json_data.get('script')
    if scripts:
        all_scripts = {scripts}

    field_calculations_scripts = json_data.get('fieldCalcScript')
    if field_calculations_scripts:
        all_scripts = all_scripts.union({field_calculations_scripts})

    if name:
        data['name'] = name
    data['file_path'] = path
    if toversion:
        data['toversion'] = toversion
    if fromversion:
        data['fromversion'] = fromversion
    if pack:
        data['pack'] = pack
    if all_associated_types:
        data['incident_types'] = list(all_associated_types)
    if all_scripts:
        data['scripts'] = list(all_scripts)

    return {id_: data}
Example #19
 def bump_version_number(self, pre_release: bool = False):
     new_version = None  # This will never happen since we pre-validate the argument
     try:
         data_dictionary = get_json(self.metadata_path)
     except FileNotFoundError:
         print_error(
             f"Pack {self.pack} was not found. Please verify the pack name is correct."
         )
         sys.exit(1)
     if self.update_type is None:
         new_version = data_dictionary.get('currentVersion', '99.99.99')
         return new_version, data_dictionary
     elif self.update_type == 'major':
         version = data_dictionary.get('currentVersion', '99.99.99')
         version = version.split('.')
         version[0] = str(int(version[0]) + 1)
         if int(version[0]) > 99:
             raise ValueError(
                 f"Version number is greater than 99 for the {self.pack} pack. "
                 f"Please verify the currentVersion is correct.")
         version[1] = '0'
         version[2] = '0'
         new_version = '.'.join(version)
     elif self.update_type == 'minor':
         version = data_dictionary.get('currentVersion', '99.99.99')
         version = version.split('.')
         version[1] = str(int(version[1]) + 1)
         if int(version[1]) > 99:
             raise ValueError(
                 f"Version number is greater than 99 for the {self.pack} pack. "
                 f"Please verify the currentVersion is correct. If it is, "
                 f"then consider bumping to a new Major version.")
         version[2] = '0'
         new_version = '.'.join(version)
     # We validate the input via click
     elif self.update_type == 'revision':
         version = data_dictionary.get('currentVersion', '99.99.99')
         version = version.split('.')
         version[2] = str(int(version[2]) + 1)
         if int(version[2]) > 99:
             raise ValueError(
                 f"Version number is greater than 99 for the {self.pack} pack. "
                 f"Please verify the currentVersion is correct. If it is, "
                 f"then consider bumping to a new Minor version.")
         new_version = '.'.join(version)
     if pre_release:
         new_version = new_version + '_prerelease'
     data_dictionary['currentVersion'] = new_version
     return new_version, data_dictionary
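The bump logic above is plain string arithmetic on the three dot-separated parts of currentVersion: split, increment the chosen index, zero out the lower parts, and join. A standalone sketch of the same idea (the function name below is illustrative, not part of the SDK):

def bump(version: str, update_type: str) -> str:
    major, minor, revision = (int(part) for part in version.split('.'))
    if update_type == 'major':
        return f'{major + 1}.0.0'
    if update_type == 'minor':
        return f'{major}.{minor + 1}.0'
    return f'{major}.{minor}.{revision + 1}'  # 'revision'

assert bump('1.2.3', 'major') == '2.0.0'
assert bump('1.2.3', 'minor') == '1.3.0'
assert bump('1.2.3', 'revision') == '1.2.4'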
Example #20
    def bump_version_number(self, pre_release: bool = False):

        new_version = None  # This will never happen since we pre-validate the argument
        data_dictionary = get_json(self.metadata_path)
        if self.update_type == 'major':
            version = data_dictionary.get('currentVersion', '99.99.99')
            version = version.split('.')
            version[0] = str(int(version[0]) + 1)
            if int(version[0]) > 99:
                raise ValueError(
                    f"Version number is greater than 99 for the {self.pack} pack. "
                    f"Please verify the currentVersion is correct.")
            version[1] = '0'
            version[2] = '0'
            new_version = '.'.join(version)
        elif self.update_type == 'minor':
            version = data_dictionary.get('currentVersion', '99.99.99')
            version = version.split('.')
            version[1] = str(int(version[1]) + 1)
            if int(version[1]) > 99:
                raise ValueError(
                    f"Version number is greater than 99 for the {self.pack} pack. "
                    f"Please verify the currentVersion is correct. If it is, "
                    f"then consider bumping to a new Major version.")
            version[2] = '0'
            new_version = '.'.join(version)
        # We validate the input via click
        elif self.update_type == 'revision':
            version = data_dictionary.get('currentVersion', '99.99.99')
            version = version.split('.')
            version[2] = str(int(version[2]) + 1)
            if int(version[2]) > 99:
                raise ValueError(
                    f"Version number is greater than 99 for the {self.pack} pack. "
                    f"Please verify the currentVersion is correct. If it is, "
                    f"then consider bumping to a new Minor version.")
            new_version = '.'.join(version)
        if pre_release:
            new_version = new_version + '_prerelease'
        data_dictionary['currentVersion'] = new_version

        if self._does_pack_metadata_exist():
            with open(self.metadata_path, 'w') as fp:
                json.dump(data_dictionary, fp, indent=4)
                print_color(
                    f"Updated pack metadata version at path : {self.metadata_path}",
                    LOG_COLORS.GREEN)
        return new_version
Example #21
    def add_from_version_to_json(self, file_path):
        if self.no_fromversion:
            return {}

        json_content = tools.get_json(file_path)

        if parse_version(json_content.get(
                'toVersion', '99.99.99')) > parse_version(
                    LATEST_SUPPORTED_VERSION) > parse_version(
                        json_content.get('fromVersion', '0.0.0')):
            json_content['fromVersion'] = LATEST_SUPPORTED_VERSION

            with open(file_path, 'w') as f:
                json.dump(json_content, f, indent=4)

        return json_content
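The chained comparison in add_from_version_to_json is evaluated pairwise, so fromVersion is only rewritten when LATEST_SUPPORTED_VERSION falls strictly between the file's toVersion and fromVersion. A minimal illustration of that chaining, using packaging.version.parse as a stand-in for the imported parse_version; the concrete value of LATEST_SUPPORTED_VERSION below is an assumption for the example only:

from packaging.version import parse as parse_version

LATEST_SUPPORTED_VERSION = '6.0.0'  # assumed value, for illustration
to_version, from_version = '99.99.99', '5.5.0'

# Equivalent to: parse(to) > parse(LATEST) and parse(LATEST) > parse(from)
should_raise = parse_version(to_version) > parse_version(LATEST_SUPPORTED_VERSION) > parse_version(from_version)
assert should_raise  # the file's fromVersion would be rewritten to LATEST_SUPPORTED_VERSION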
Example #22
def get_indicator_type_data(path, all_integrations):
    data = OrderedDict()
    json_data = get_json(path)

    id_ = json_data.get('id')
    name = json_data.get('details', '')
    fromversion = json_data.get('fromVersion')
    toversion = json_data.get('toVersion')
    reputation_command = json_data.get('reputationCommand')
    pack = get_pack_name(path)
    all_scripts = set()
    associated_integrations = set()

    for field in ['reputationScriptName', 'enhancementScriptNames']:
        associated_scripts = json_data.get(field)
        if not associated_scripts or associated_scripts == 'null':
            continue

        associated_scripts = [
            associated_scripts
        ] if not isinstance(associated_scripts, list) else associated_scripts
        if associated_scripts:
            all_scripts = all_scripts.union(set(associated_scripts))

    for integration in all_integrations:
        integration_name = next(iter(integration))
        integration_commands = integration.get(integration_name).get(
            'commands')
        if integration_commands and reputation_command in integration_commands:
            associated_integrations.add(integration_name)

    if name:
        data['name'] = name
    data['file_path'] = path
    if toversion:
        data['toversion'] = toversion
    if fromversion:
        data['fromversion'] = fromversion
    if pack:
        data['pack'] = pack
    if associated_integrations:
        data['integrations'] = list(associated_integrations)
    if all_scripts:
        data['scripts'] = list(all_scripts)

    return {id_: data}
Example #23
 def test_update_data_json(self, tmp_path):
     env = Environment(tmp_path)
     downloader = Downloader(output='', input='', regex='')
     downloader.update_data(env.CUSTOM_CONTENT_LAYOUT_PATH,
                            env.LAYOUT_INSTANCE_PATH, 'json')
     file_data: dict = get_json(env.CUSTOM_CONTENT_LAYOUT_PATH)
     for field in DELETED_JSON_FIELDS_BY_DEMISTO:
         obj = file_data
         dotted_path_list = field.split('.')
         for path_part in dotted_path_list:
             if path_part != dotted_path_list[-1]:
                 obj = obj.get(path_part)
             else:
                 assert obj.get(path_part)
Example #24
    def check_from_version_not_above_6_0_0(self, file_path):
        if file_path.endswith('.yml'):
            yml_content = get_yaml(file_path)
            if parse_version(yml_content.get(
                    'fromversion', '0.0.0')) >= parse_version('6.0.0'):
                return False

        elif file_path.endswith('.json'):
            json_content = tools.get_json(file_path)
            if parse_version(json_content.get(
                    'fromVersion', '0.0.0')) >= parse_version('6.0.0'):
                return False

        elif file_path.endswith('.md'):
            return self.check_md_related_from_version(file_path)

        return True
Example #25
def get_file_description(path, file_type):
    if not os.path.isfile(path):
        print_warning(f'Cannot get file description: "{path}" file does not exist')
        return ''

    elif file_type in (FileType.PLAYBOOK, FileType.INTEGRATION):
        yml_file = get_yaml(path)
        return yml_file.get('description', '')

    elif file_type == FileType.SCRIPT:
        yml_file = get_yaml(path)
        return yml_file.get('comment', '')

    elif file_type in (FileType.CLASSIFIER, FileType.REPORT, FileType.WIDGET, FileType.DASHBOARD):
        json_file = get_json(path)
        return json_file.get('description', '')

    return '%%UPDATE_RN%%'
Example #26
    def should_process_file_to_bundle(self, file_path, bundle):
        """

        Args:
            file_path (str): the file_path being processed
            bundle (str): the bundle being created

        Returns:
            bool. True if the file should be added to the bundle under the following conditions:
             * a file exists above version 6.0.0 and in packs bundle
             * a file exists below 6.0.0 in the content or test bundles
        """
        if file_path.endswith('.yml'):
            yml_content = get_yaml(file_path)
            if self.packs_bundle in bundle:
                # in packs bundle we keep only if the to version is above 6.0.0
                if parse_version(yml_content.get(
                        'toversion', '99.99.99')) < parse_version('6.0.0'):
                    return False

            else:
                # in content and test bundles we keep only if the from version is below 6.0.0
                if parse_version(yml_content.get(
                        'fromversion', '0.0.0')) >= parse_version('6.0.0'):
                    return False

        elif file_path.endswith('.json'):
            json_content = tools.get_json(file_path)
            if self.packs_bundle in bundle:
                # in packs bundle we keep only if the to version is above 6.0.0
                if parse_version(json_content.get(
                        'toVersion', '99.99.99')) <= parse_version('6.0.0'):
                    return False

            else:
                # in content and test bundles we keep only if the from version is below 6.0.0
                if parse_version(json_content.get(
                        'fromVersion', '0.0.0')) >= parse_version('6.0.0'):
                    return False

        elif file_path.endswith('.md'):
            return self.check_md_related_from_version(file_path, bundle)

        return True
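Both of the version-gating helpers above depend on parse_version so that version strings compare numerically rather than lexicographically. A short illustration of the comparisons these checks rely on, using packaging.version.parse as a stand-in for the imported parse_version (an assumption about its backing):

from packaging.version import parse as parse_version

assert parse_version('5.5.0') < parse_version('6.0.0')
assert parse_version('6.0.0') >= parse_version('6.0.0')
assert parse_version('10.0.0') > parse_version('9.9.9')     # numeric ordering, not string ordering
assert parse_version('99.99.99') > parse_version('6.0.0')   # the "no toVersion" sentinel passes the packs-bundle check

# So a JSON file with fromVersion '6.0.0' is excluded from the content/test bundles
# (the '>= 6.0.0' branch returns False), while one with fromVersion '5.5.0' is kept.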
Example #27
def get_file_description(path, file_type):
    if not os.path.isfile(path):
        print_warning(
            f'Cannot get file description: "{path}" file does not exist')
        return ''

    elif file_type in ('Playbook', 'Integration'):
        yml_file = get_yaml(path)
        return yml_file.get('description', '')

    elif file_type == 'Script':
        yml_file = get_yaml(path)
        return yml_file.get('comment', '')

    elif file_type in ('Classifiers', 'Reports', 'Widgets', 'Dashboards'):
        json_file = get_json(path)
        return json_file.get('description', '')

    return '%%UPDATE_RN%%'
Example #28
 def test_update_data_json(self):
     env_guard = EnvironmentGuardian()
     downloader = Downloader(output='', input='')
     downloader.update_data(CUSTOM_CONTENT_LAYOUT_PATH, LAYOUT_INSTANCE_PATH, 'json')
     test_answer = True
     file_data: dict = get_json(CUSTOM_CONTENT_LAYOUT_PATH)
     for field in DELETED_JSON_FIELDS_BY_DEMISTO:
         obj = file_data
         dotted_path_list = field.split('.')
         for path_part in dotted_path_list:
             if path_part != dotted_path_list[-1]:
                 obj = obj.get(path_part)
             else:
                 test_answer = test_answer and bool(obj.get(path_part))
     env_guard.restore_environment('test_update_data_json')
     assert test_answer
Example #29
    def json_output(self, file_path: str, error_code: str, error_message: str,
                    warning: bool) -> None:
        """Adds an error's info to the output JSON file

        Args:
            file_path (str): The file path where the error occurred.
            error_code (str): The error code
            error_message (str): The error message
            warning (bool): Whether the error is defined as a warning
        """
        if not self.json_file_path:
            return

        error_data = get_error_object(error_code)

        output = {
            "severity": "warning" if warning else "error",
            "code": error_code,
            "message": error_message,
            "ui": error_data.get('ui_applicable'),
            'related-field': error_data.get('related_field')
        }

        if os.path.exists(self.json_file_path):
            json_contents = get_json(self.json_file_path)

        else:
            json_contents = {}

        file_type = find_type(file_path)
        if file_path in json_contents:
            if output in json_contents[file_path].get('outputs'):
                return
            json_contents[file_path]['outputs'].append(output)
        else:
            json_contents[file_path] = {
                "file-type": os.path.splitext(file_path)[1].replace('.', ''),
                "entity-type": file_type.value if file_type else 'pack',
                "display-name": get_file_displayed_name(file_path),
                "outputs": [output]
            }
        with open(self.json_file_path, 'w') as f:
            json.dump(json_contents, f, indent=4)
Example #30
def get_mapper_data(path):
    data = OrderedDict()
    json_data = get_json(path)

    id_ = json_data.get('id')
    name = json_data.get('name', '')
    fromversion = json_data.get('fromVersion')
    toversion = json_data.get('toVersion')
    pack = get_pack_name(path)
    incidents_types = set()
    incidents_fields = set()

    default_incident_type = json_data.get('defaultIncidentType')
    if default_incident_type and default_incident_type != '':
        incidents_types.add(default_incident_type)
    mapping = json_data.get('mapping', {})
    for key, value in mapping.items():
        incidents_types.add(key)
        incidents_fields = incidents_fields.union(
            set(value.get('internalMapping').keys()))

    incidents_fields = {
        incident_field
        for incident_field in incidents_fields
        if incident_field not in BUILT_IN_FIELDS
    }

    if name:
        data['name'] = name
    data['file_path'] = path
    if toversion:
        data['toversion'] = toversion
    if fromversion:
        data['fromversion'] = fromversion
    if pack:
        data['pack'] = pack
    if incidents_types:
        data['incident_types'] = list(incidents_types)
    if incidents_fields:
        data['incident_fields'] = list(incidents_fields)

    return {id_: data}