Example #1
    def copy_packs_content_to_packs_bundle(self, packs):
        '''
        Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
        except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
        prefixes according to how the server expects to ingest the files, e.g. 'integration-' is prepended to
        integration yml filenames and 'script-' to script yml filenames.
        '''
        for pack in packs:
            pack_name = os.path.basename(pack)
            if pack_name in self.packs_to_skip:
                continue
            pack_dst = os.path.join(self.packs_bundle, pack_name)
            os.mkdir(pack_dst)
            pack_dirs = get_child_directories(pack)
            pack_files = get_child_files(pack)
            # copy first level pack files over
            for file_path in pack_files:
                shutil.copy(
                    file_path,
                    os.path.join(pack_dst, os.path.basename(file_path)))
            # handle content directories in the pack
            for content_dir in pack_dirs:
                dir_name = os.path.basename(content_dir)
                dest_dir = os.path.join(pack_dst, dir_name)
                os.mkdir(dest_dir)
                if dir_name in DIR_TO_PREFIX:
                    packages_dirs = get_child_directories(content_dir)
                    for package_dir in packages_dirs:
                        ymls, _ = get_yml_paths_in_dir(package_dir, error_msg='')
                        if not ymls or (len(ymls) == 1 and ymls[0].endswith('_unified.yml')):
                            msg = 'Skipping package: {} -'.format(package_dir)
                            if not ymls:
                                print_warning('{} No yml files found in the package directory'.format(msg))
                            else:
                                print_warning('{} Only unified yml found in the package directory'.format(msg))
                            continue
                        package_dir_name = os.path.basename(package_dir)
                        unifier = Unifier(package_dir, dir_name, dest_dir)
                        unifier.merge_script_package_to_yml()

                        # also copy CHANGELOG markdown files over (should only be one per package)
                        package_files = get_child_files(package_dir)
                        changelog_files = [
                            file_path for file_path in package_files
                            if 'CHANGELOG.md' in file_path
                        ]
                        for md_file_path in changelog_files:
                            md_out_name = '{}-{}_CHANGELOG.md'.format(
                                DIR_TO_PREFIX.get(dir_name), package_dir_name)
                            shutil.copyfile(
                                md_file_path,
                                os.path.join(dest_dir, md_out_name))
                else:
                    self.copy_dir_files(content_dir, dest_dir)
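
A note on the renaming convention: the bundling above leans on DIR_TO_PREFIX to map a content directory name to the filename prefix the server expects. A minimal sketch of the changelog renaming, with an illustrative mapping and a hypothetical helper (the real DIR_TO_PREFIX lives in the repository's constants):

import os

# Illustrative mapping - the real DIR_TO_PREFIX is defined in the repo's constants.
DIR_TO_PREFIX = {'Integrations': 'integration', 'Scripts': 'script'}

def changelog_out_name(dir_name, package_dir):
    # Mirrors the rename above: '<prefix>-<package>_CHANGELOG.md'
    package_dir_name = os.path.basename(package_dir)
    return '{}-{}_CHANGELOG.md'.format(DIR_TO_PREFIX.get(dir_name), package_dir_name)

print(changelog_out_name('Integrations', 'Packs/HelloWorld/Integrations/HelloWorld'))
# -> integration-HelloWorld_CHANGELOG.md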
Example #2
    def update_content_version(
            content_ver: str = '',
            path: str = './Scripts/CommonServerPython/CommonServerPython.py'):
        regex = r'CONTENT_RELEASE_VERSION = .*'
        if not content_ver:
            try:
                with open('content-descriptor.json') as file_:
                    descriptor = json.load(file_)
                content_ver = descriptor['release']
            except (FileNotFoundError, json.JSONDecodeError, KeyError):
                print_error(
                    'Invalid descriptor file. Make sure the file content is valid JSON with a "release" key.'
                )
                return

        try:
            with open(path, 'r+') as file_:
                content = file_.read()
                # re.sub's fourth positional argument is count, not flags,
                # so the multiline flag must be passed by keyword.
                content = re.sub(regex,
                                 f"CONTENT_RELEASE_VERSION = '{content_ver}'",
                                 content, flags=re.M)
                file_.seek(0)
                file_.write(content)
                # drop any leftover bytes if the new content is shorter
                file_.truncate()
        except Exception as ex:
            print_warning(f'Could not open CommonServerPython file - {ex}')
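
Worth noting: re.sub's fourth positional argument is count, not flags, which is why the call above passes flags=re.M explicitly. A self-contained check of the substitution, with a hypothetical file body standing in for CommonServerPython.py:

import re

content = "DEMISTO_PARAMS = {}\nCONTENT_RELEASE_VERSION = '0.0.0'\n"
updated = re.sub(r'CONTENT_RELEASE_VERSION = .*',
                 "CONTENT_RELEASE_VERSION = '19.8.0'",
                 content, flags=re.M)
print(updated)  # the version line now reads CONTENT_RELEASE_VERSION = '19.8.0'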
Example #3
    def run(self):
        """Runs an integration command on Demisto and prints the result.
        """
        playground_id = self._get_playground_id()

        log_ids = self._run_query(playground_id)

        if self.debug:
            if not log_ids:
                print_warning('Entry with debug log not found')
            else:
                self._export_debug_log(log_ids)
Example #4
    def is_outputs_for_reputations_commands_valid(self):
        # type: () -> bool
        """Check if a reputation command (domain/email/file/ip/url)
            has the correct DBotScore outputs according to the context standard
            https://github.com/demisto/content/blob/master/docs/context_standards/README.MD

        Returns:
            bool. Whether a reputation command holds valid outputs
        """
        context_standard = "https://github.com/demisto/content/blob/master/docs/context_standards/README.MD"
        commands = self.current_file.get('script', {}).get('commands', [])
        output_for_reputation_valid = True
        for command in commands:
            command_name = command.get('name')
            # look for reputations commands
            if command_name in BANG_COMMAND_NAMES:
                context_outputs_paths = set()
                context_outputs_descriptions = set()
                for output in command.get('outputs', []):
                    context_outputs_paths.add(output.get('contextPath'))
                    context_outputs_descriptions.add(output.get('description'))

                # validate DBotScore outputs and descriptions
                missing_outputs = set()
                missing_descriptions = set()
                for dbot_score_output in DBOT_SCORES_DICT:
                    if dbot_score_output not in context_outputs_paths:
                        missing_outputs.add(dbot_score_output)
                        self.is_valid = False
                        output_for_reputation_valid = False
                    else:  # DBot Score output path is in the outputs
                        if DBOT_SCORES_DICT.get(dbot_score_output) not in context_outputs_descriptions:
                            missing_descriptions.add(dbot_score_output)
                            # self.is_valid = False - Do not fail build over wrong description

                if missing_outputs:
                    print_error(Errors.dbot_invalid_output(
                        self.file_path, command_name, missing_outputs, context_standard))
                if missing_descriptions:
                    print_warning(Errors.dbot_invalid_description(
                        self.file_path, command_name, missing_descriptions, context_standard))

                # validate the IOC output
                reputation_output = IOC_OUTPUTS_DICT.get(command_name)
                if reputation_output and not reputation_output.intersection(context_outputs_paths):
                    self.is_valid = False
                    output_for_reputation_valid = False
                    print_error(Errors.missing_reputation(
                        self.file_path, command_name, reputation_output, context_standard))

        return output_for_reputation_valid
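
For orientation, the validator above only reads DBOT_SCORES_DICT as a mapping of required context paths to their standard descriptions, and IOC_OUTPUTS_DICT as a mapping of command names to acceptable output paths. A minimal sketch with assumed, illustrative values showing how a missing path is detected:

# Assumed shapes, inferred from how the validator reads them; the real values
# live in the repository's constants.
DBOT_SCORES_DICT = {
    'DBotScore.Indicator': 'The indicator that was tested.',
    'DBotScore.Type': 'The indicator type.',
    'DBotScore.Vendor': 'The vendor used to calculate the score.',
    'DBotScore.Score': 'The actual score.',
}
IOC_OUTPUTS_DICT = {'ip': {'IP.Address'}}

command = {
    'name': 'ip',
    'outputs': [{'contextPath': 'DBotScore.Indicator',
                 'description': 'The indicator that was tested.'}],
}
context_outputs_paths = {output.get('contextPath') for output in command['outputs']}
missing_outputs = {path for path in DBOT_SCORES_DICT if path not in context_outputs_paths}
print(missing_outputs)  # three DBotScore paths missing -> the command would fail validation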
Example #5
    def create_unifieds_and_copy(self,
                                 package_dir,
                                 dest_dir='',
                                 skip_dest_dir=''):
        '''
        For directories that contain packages, i.e. a subdirectory per integration/script,
        e.g. "Integrations", "Beta_Integrations", "Scripts": creates a unified yml for each
        package and writes it to dest_dir.

        Arguments:
            package_dir: (str)
                Path to a directory that contains package subdirectories, e.g. "Integrations",
                "Beta_Integrations", "Scripts"
            dest_dir: (str)
                Path to the destination directory to which the unified yml for a package is written
            skip_dest_dir: (str)
                Path to the directory to which the unified yml is written instead when the
                package is part of the skipped list
        '''
        dest_dir = dest_dir if dest_dir else self.content_bundle
        skip_dest_dir = skip_dest_dir if skip_dest_dir else self.test_bundle

        scanned_packages = glob.glob(os.path.join(package_dir, '*/'))
        package_dir_name = os.path.basename(package_dir)
        for package in scanned_packages:
            ymls, _ = get_yml_paths_in_dir(package, error_msg='')
            if not ymls or (len(ymls) == 1 and ymls[0].endswith('_unified.yml')):
                msg = 'Skipping package: {} -'.format(package)
                if not ymls:
                    print_warning('{} No yml files found in the package directory'.format(msg))
                else:
                    print_warning('{} Only unified yml found in the package directory'.format(msg))
                continue
            unification_tool = Unifier(package, package_dir_name, dest_dir)
            if any(package_to_skip in package
                   for package_to_skip in self.packages_to_skip):
                # there are some packages that we don't want to include in the content zip
                # for example HelloWorld integration
                unification_tool = Unifier(package, package_dir_name,
                                           skip_dest_dir)
                print('skipping {}'.format(package))
            unification_tool.merge_script_package_to_yml()
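
One detail that is easy to miss: glob with a trailing '*/' matches directories only, which is how scanned_packages ends up with one entry per package. A scratch illustration (temporary layout, hypothetical package names):

import glob
import os
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'Integrations', 'HelloWorld'))
os.makedirs(os.path.join(root, 'Integrations', 'AnotherPkg'))

packages = glob.glob(os.path.join(root, 'Integrations', '*/'))  # directories only
print(sorted(os.path.basename(p.rstrip(os.sep)) for p in packages))
# ['AnotherPkg', 'HelloWorld']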
Example #6
    def update_branch(
            path: str = './Scripts/CommonServerPython/CommonServerPython.py'):

        regex = r'CONTENT_BRANCH_NAME = .*'
        branch_name = get_current_working_branch()
        try:
            with open(path, 'r+') as file_:
                content = file_.read()
                # pass the multiline flag by keyword - the fourth positional
                # argument of re.sub is count, not flags
                content = re.sub(regex,
                                 f"CONTENT_BRANCH_NAME = '{branch_name}'",
                                 content, flags=re.M)
                file_.seek(0)
                file_.write(content)
                # drop any leftover bytes if the new content is shorter
                file_.truncate()
        except Exception as ex:
            print_warning(f'Could not open CommonServerPython file - {ex}')

        return branch_name
Example #7
def has_duplicate(id_set, id_to_check):
    duplicates = [
        duplicate for duplicate in id_set if duplicate.get(id_to_check)
    ]

    if len(duplicates) < 2:
        return False

    for dup1, dup2 in itertools.combinations(duplicates, 2):
        dict1 = list(dup1.values())[0]
        dict2 = list(dup2.values())[0]
        dict1_from_version = LooseVersion(dict1.get('fromversion', '0.0.0'))
        dict2_from_version = LooseVersion(dict2.get('fromversion', '0.0.0'))
        dict1_to_version = LooseVersion(dict1.get('toversion', '99.99.99'))
        dict2_to_version = LooseVersion(dict2.get('toversion', '99.99.99'))

        if dict1['name'] != dict2['name']:
            print_warning(
                'The following objects have the same ID but different names: '
                '"{}", "{}".'.format(dict1['name'], dict2['name']))

        # A: 3.0.0 - 3.6.0
        # B: 3.5.0 - 4.5.0
        # C: 3.5.2 - 3.5.4
        # D: 4.5.0 - 99.99.99
        if any([
                dict1_from_version <= dict2_from_version < dict1_to_version,  # will catch (B, C), (A, B), (A, C)
                dict1_from_version < dict2_to_version <= dict1_to_version,  # will catch (B, C), (A, C)
                dict2_from_version <= dict1_from_version < dict2_to_version,  # will catch (C, B), (B, A), (C, A)
                dict2_from_version < dict1_to_version <= dict2_to_version,  # will catch (C, B), (C, A)
        ]):
            return True

    return False
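
The unwrapping via list(dup.values())[0] implies id_set is a list of single-key dicts mapping an ID to its metadata. A minimal sketch that trips the version-overlap check, reusing has_duplicate from above (note that distutils' LooseVersion is deprecated as of Python 3.10; packaging.version is the usual replacement):

id_set = [
    {'HelloWorld': {'name': 'HelloWorld', 'fromversion': '3.0.0', 'toversion': '3.6.0'}},  # A
    {'HelloWorld': {'name': 'HelloWorld', 'fromversion': '3.5.0', 'toversion': '4.5.0'}},  # B
]
print(has_duplicate(id_set, 'HelloWorld'))  # True - A and B overlap on 3.5.0-3.6.0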
Example #8
    def is_duplicate_description(self):
        """Check if the integration has a non-duplicate description ."""
        is_description_in_yml = False
        is_description_in_package = False
        package_path = None
        md_file_path = None
        if not re.match(INTEGRATION_REGEX, self.file_path, re.IGNORECASE) \
                and not re.match(BETA_INTEGRATION_REGEX, self.file_path, re.IGNORECASE):
            package_path = os.path.dirname(self.file_path)
            try:
                md_file_path = glob.glob(
                    os.path.join(os.path.dirname(self.file_path),
                                 '*_description.md'))[0]
            except IndexError:
                print_warning(
                    "No detailed description file was found in the package {}."
                    " Consider adding one.".format(package_path))
            if md_file_path:
                is_description_in_package = True

        data_dictionary = get_yaml(self.file_path)

        if not data_dictionary:
            return is_description_in_package

        if data_dictionary.get('detaileddescription'):
            is_description_in_yml = True

        if is_description_in_package and is_description_in_yml:
            self._is_valid = False
            print_error(
                "A description was found both in the package and in the yml, "
                "please update the package {}.".format(package_path))
            return False

        return True
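
The IndexError branch above is simply glob coming back empty. A scratch illustration of the lookup (temporary directory, hypothetical file name):

import glob
import os
import tempfile

package = tempfile.mkdtemp()
open(os.path.join(package, 'HelloWorld_description.md'), 'w').close()

matches = glob.glob(os.path.join(package, '*_description.md'))
print(os.path.basename(matches[0]))  # HelloWorld_description.md
# With no match, matches[0] raises IndexError - hence the warning above.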
Example #9
    def validate_modified_files(self, modified_files):  # noqa: C901
        """Validate the modified files from your branch.

        In case we encounter an invalid file we set the self._is_valid param to False.

        Args:
            modified_files (set): A set of the modified files in the current branch.
        """
        for file_path in modified_files:
            old_file_path = None
            if isinstance(file_path, tuple):
                old_file_path, file_path = file_path

            print('Validating {}'.format(file_path))
            if not checked_type(file_path):
                print_warning(
                    '- Skipping validation of non-content entity file.')
                continue

            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                continue

            structure_validator = StructureValidator(file_path, old_file_path)
            if not structure_validator.is_valid_file():
                self._is_valid = False

            if self.validate_id_set:
                if not self.id_set_validator.is_file_valid_in_set(file_path):
                    self._is_valid = False

            # id-set validation runs in addition to, not instead of, the
            # type-specific validation below
            if checked_type(file_path, YML_INTEGRATION_REGEXES):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid():
                    self._is_valid = False

                integration_validator = IntegrationValidator(structure_validator)
                if self.is_backward_check and not integration_validator.is_backward_compatible():
                    self._is_valid = False

                if not integration_validator.is_valid_file():
                    self._is_valid = False

            elif checked_type(file_path, YML_BETA_INTEGRATIONS_REGEXES):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid_beta_description():
                    self._is_valid = False

                integration_validator = IntegrationValidator(structure_validator)
                if not integration_validator.is_valid_beta_integration():
                    self._is_valid = False

            elif checked_type(file_path, [SCRIPT_REGEX]):
                script_validator = ScriptValidator(structure_validator)
                if self.is_backward_check and not script_validator.is_backward_compatible():
                    self._is_valid = False
                if not script_validator.is_valid_file():
                    self._is_valid = False

            elif checked_type(file_path, PLAYBOOKS_REGEXES_LIST):
                playbook_validator = PlaybookValidator(structure_validator)
                if not playbook_validator.is_valid_playbook(
                        is_new_playbook=False):
                    self._is_valid = False

            elif checked_type(file_path, PACKAGE_SCRIPTS_REGEXES):
                unifier = Unifier(os.path.dirname(file_path))
                yml_path, _ = unifier.get_script_package_data()
                # Set file path to the yml file
                structure_validator.file_path = yml_path
                script_validator = ScriptValidator(structure_validator)
                if self.is_backward_check and not script_validator.is_backward_compatible():
                    self._is_valid = False

                if not script_validator.is_valid_file():
                    self._is_valid = False

            elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

            # incident fields and indicator fields are using the same scheme.
            elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS):
                incident_field_validator = IncidentFieldValidator(structure_validator)
                if not incident_field_validator.is_valid_file():
                    self._is_valid = False
                if self.is_backward_check and not incident_field_validator.is_backward_compatible():
                    self._is_valid = False

            elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES):
                layout_validator = LayoutValidator(structure_validator)
                if not layout_validator.is_valid_layout():
                    self._is_valid = False

            elif 'CHANGELOG' in file_path:
                self.is_valid_release_notes(file_path)

            elif checked_type(file_path, [REPUTATION_REGEX]):
                print_color(
                    f'Skipping validation for file {file_path} since no validation is currently defined.',
                    LOG_COLORS.YELLOW)

            elif checked_type(file_path, CHECKED_TYPES_REGEXES):
                pass

            else:
                print_error(
                    "The file type of {} is not supported in the validate command".format(file_path))
                print_error(
                    "'validate' command supports: Integrations, Scripts, Playbooks, "
                    "Incident fields, Indicator fields, Images, Release notes, Layouts and Descriptions"
                )
                self._is_valid = False
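
checked_type itself is not shown here; from its call sites it appears to test a path against a list of regexes, with a default list when none is given. A plausible minimal equivalent, for orientation only, not the repository's implementation:

import re

YML_INTEGRATION_REGEXES = [r'Integrations/.+\.yml$']  # illustrative pattern

def checked_type(file_path, compared_regexes=None):
    # Behavior inferred from the call sites above.
    compared_regexes = compared_regexes or YML_INTEGRATION_REGEXES
    return any(re.match(regex, file_path, re.IGNORECASE) for regex in compared_regexes)

print(checked_type('Integrations/HelloWorld.yml'))  # True
print(checked_type('README.md'))                    # False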
Example #10
    def get_modified_files(files_string,
                           tag='master',
                           print_ignored_files=False):
        """Get lists of the modified files in your branch according to the files string.

        Args:
            files_string (string): String that was calculated by git using `git diff` command.
            tag (string): String of git tag used to update modified files.
            print_ignored_files (bool): should print ignored files.

        Returns:
            (modified_files_list, added_files_list, deleted_files, old_format_files). Tuple of sets.
        """
        all_files = files_string.split('\n')
        deleted_files = set()
        added_files_list = set()
        modified_files_list = set()
        old_format_files = set()
        for f in all_files:
            file_data = f.split()
            if not file_data:
                continue

            file_status = file_data[0]
            file_path = file_data[1]

            if file_status.lower().startswith('r'):
                file_status = 'r'
                file_path = file_data[2]

            if checked_type(file_path, CODE_FILES_REGEX) and file_status.lower() != 'd' \
                    and not file_path.endswith('_test.py'):
                # naming convention - code file and yml file in packages must have same name.
                file_path = os.path.splitext(file_path)[0] + '.yml'
            elif file_path.endswith('.js') or file_path.endswith('.py'):
                continue

            if file_status.lower() in ['m', 'a', 'r'] and checked_type(file_path, OLD_YML_FORMAT_FILE) and \
                    FilesValidator._is_py_script_or_integration(file_path):
                old_format_files.add(file_path)
            elif file_status.lower() == 'm' and checked_type(
                    file_path) and not file_path.startswith('.'):
                modified_files_list.add(file_path)
            elif file_status.lower() == 'a' and checked_type(
                    file_path) and not file_path.startswith('.'):
                added_files_list.add(file_path)
            elif file_status.lower() == 'd' and checked_type(
                    file_path) and not file_path.startswith('.'):
                deleted_files.add(file_path)
            elif file_status.lower().startswith('r') and checked_type(
                    file_path):
                # if a code file changed, take the associated yml file.
                if checked_type(file_data[2], CODE_FILES_REGEX):
                    modified_files_list.add(file_path)
                else:
                    modified_files_list.add((file_data[1], file_data[2]))

            elif checked_type(file_path, [SCHEMA_REGEX]):
                modified_files_list.add(file_path)

            elif file_status.lower() not in KNOWN_FILE_STATUSES:
                print_error(
                    'Unknown file status "{}" for file {}, please check.'
                    .format(file_status, file_path))

            elif print_ignored_files and not checked_type(
                    file_path, IGNORED_TYPES_REGEXES):
                print_warning('Ignoring file path: {}'.format(file_path))

        modified_files_list, added_files_list, deleted_files = filter_packagify_changes(
            modified_files_list, added_files_list, deleted_files, tag)

        return modified_files_list, added_files_list, deleted_files, old_format_files
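
files_string here is the output of a git diff --name-status run: each line is a status letter (M/A/D, or R<score> for renames) followed by the tab-separated path(s). A small sample of what the parser sees, and how the rename branch reads it:

files_string = '\n'.join([
    'M\tIntegrations/integration-HelloWorld.yml',
    'A\tScripts/script-NewScript.yml',
    'R100\tPlaybooks/old_name.yml\tPlaybooks/new_name.yml',
    'D\tScripts/script-Old.yml',
])
for line in files_string.split('\n'):
    file_data = line.split()
    file_status, file_path = file_data[0], file_data[1]
    if file_status.lower().startswith('r'):
        file_status, file_path = 'r', file_data[2]  # renames carry old and new paths
    print(file_status, file_path)

For renamed non-code files the function records an (old_path, new_path) tuple, which is what validate_modified_files unpacks at the top of its loop.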
Example #11
    def create_content(self):
        '''Creates the content artifact zip files "content_test.zip", "content_new.zip", and "content_packs.zip"'''
        print('Starting to create content artifacts...')

        try:
            print('Creating directories for bundles...')
            for bundle_dir in [
                    self.content_bundle, self.test_bundle, self.packs_bundle
            ]:
                os.mkdir(bundle_dir)

            self.add_tools_to_bundle(self.content_bundle)

            for package_dir in DIR_TO_PREFIX:
                # handles nested package directories
                self.create_unifieds_and_copy(package_dir)

            for content_dir in self.content_directories:
                print(f'Copying dir {content_dir} to bundles...')
                self.copy_dir_files(content_dir, self.content_bundle)

            self.copy_test_files()

            # handle copying packs content to bundles for zipping to content_new.zip and content_test.zip
            packs = get_child_directories(PACKS_DIR)
            self.copy_packs_content_to_old_bundles(packs)

            # handle copying packs content to packs_bundle for zipping to `content_packs.zip`
            self.copy_packs_content_to_packs_bundle(packs)

            print('Copying content descriptor to content and test bundles')
            for bundle_dir in [self.content_bundle, self.test_bundle]:
                shutil.copyfile(
                    'content-descriptor.json',
                    os.path.join(bundle_dir, 'content-descriptor.json'))

            if os.path.exists('./Documentation/doc-CommonServer.json'):
                print('Copying common server doc to content bundle')
                shutil.copyfile(
                    './Documentation/doc-CommonServer.json',
                    os.path.join(self.content_bundle, 'doc-CommonServer.json'))
            else:
                print_warning(
                    './Documentation/doc-CommonServer.json was not found and '
                    'therefore was not added to the content bundle')

            print('Compressing bundles...')
            shutil.make_archive(self.content_zip, 'zip', self.content_bundle)
            shutil.make_archive(self.test_zip, 'zip', self.test_bundle)
            shutil.make_archive(self.packs_zip, 'zip', self.packs_bundle)
            shutil.copyfile("./Tests/id_set.json",
                            os.path.join(self.artifacts_path, "id_set.json"))
            if os.path.exists('release-notes.md'):
                print('Copying release-notes.md to artifacts directory "{}"'.format(self.artifacts_path))
                shutil.copyfile(
                    'release-notes.md',
                    os.path.join(self.artifacts_path, 'release-notes.md'))
            else:
                print_warning(
                    'release-notes.md was not found in the content directory and therefore not '
                    'copied over to the artifacts directory')
            print(f'Finished creating the content artifacts at "{os.path.abspath(self.artifacts_path)}"')
        finally:
            if not self.preserve_bundles:
                if os.path.exists(self.content_bundle):
                    shutil.rmtree(self.content_bundle)
                if os.path.exists(self.test_bundle):
                    shutil.rmtree(self.test_bundle)
                if os.path.exists(self.packs_bundle):
                    shutil.rmtree(self.packs_bundle)
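
One shutil detail the compression step relies on: make_archive takes a base name without the extension and appends '.zip' itself, so self.content_zip and friends are presumably extension-less base paths. A scratch demonstration:

import os
import shutil
import tempfile

bundle = tempfile.mkdtemp()
with open(os.path.join(bundle, 'content-descriptor.json'), 'w') as f:
    f.write('{}')

base = os.path.join(tempfile.mkdtemp(), 'content_new')  # no '.zip' here
archive = shutil.make_archive(base, 'zip', bundle)
print(os.path.basename(archive))  # content_new.zip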