Example No. 1
    def validate_all_files(self):
        """Validate all files in the repo are in the right format."""
        # go over packs
        for root, dirs, _ in os.walk(PACKS_DIR):
            for dir_in_dirs in dirs:
                for directory in PACKS_DIRECTORIES:
                    for inner_root, inner_dirs, files in os.walk(os.path.join(root, dir_in_dirs, directory)):
                        for inner_dir in inner_dirs:
                            if inner_dir.startswith('.'):
                                continue

                            project_dir = os.path.join(inner_root, inner_dir)
                            _, file_path = get_yml_paths_in_dir(os.path.normpath(project_dir),
                                                                Errors.no_yml_file(project_dir))
                            if file_path:
                                print("Validating {}".format(file_path))
                                structure_validator = StructureValidator(file_path)
                                if not structure_validator.is_valid_scheme():
                                    self._is_valid = False

        # go over regular content entities
        for directory in DIR_LIST_FOR_REGULAR_ENTETIES:
            print_color('Validating {} directory:'.format(directory), LOG_COLORS.GREEN)
            for root, dirs, files in os.walk(directory):
                for file_name in files:
                    file_path = os.path.join(root, file_name)
                    # skip non-yml files
                    if not file_name.endswith('.yml'):
                        continue

                    print('Validating ' + file_name)
                    structure_validator = StructureValidator(file_path)
                    if not structure_validator.is_valid_scheme():
                        self._is_valid = False

        # go over regular PACKAGE_SUPPORTING_DIRECTORIES entities
        for directory in PACKAGE_SUPPORTING_DIRECTORIES:
            for root, dirs, files in os.walk(directory):
                for inner_dir in dirs:
                    if inner_dir.startswith('.'):
                        continue

                    project_dir = os.path.join(root, inner_dir)
                    _, file_path = get_yml_paths_in_dir(project_dir, Errors.no_yml_file(project_dir))
                    if file_path:
                        print('Validating ' + file_path)
                        structure_validator = StructureValidator(file_path)
                        if not structure_validator.is_valid_scheme():
                            self._is_valid = False
Example No. 2
def test_upload_an_integration_directory(demisto_client_configure, mocker):
    """
    Given
        - An integration directory called UploadTest

    When
        - Uploading an integration

    Then
        - Ensure integration is uploaded successfully
        - Ensure success upload message is printed as expected
    """
    mocker.patch("builtins.print")
    integration_dir_name = "UploadTest"
    integration_path = f"{git_path()}/demisto_sdk/tests/test_files/Packs/DummyPack/Integrations/{integration_dir_name}"
    uploader = Uploader(input=integration_path, insecure=False, verbose=False)
    mocker.patch.object(uploader, 'client')
    uploader.upload()
    _, integration_yml_name = get_yml_paths_in_dir(integration_path)
    uploaded_file_name = f'integration-{os.path.basename(integration_yml_name)}'
    upload_success_message = u'{}{}{}'.format(
        LOG_COLORS.GREEN,
        f"Uploaded integration - '{uploaded_file_name}': successfully",
        LOG_COLORS.NATIVE
    )

    assert print.call_args_list[3][0][0] == upload_success_message
Example No. 3
    def copy_packs_content_to_packs_bundle(self, packs):
        """
        Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
        except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
        prefixes according to how the server expects to ingest the files, e.g. 'integration-' is prepended to integration
        yml filenames and 'script-' is prepended to script yml filenames, and so on.
        """
        for pack in packs:
            pack_name = os.path.basename(pack)
            if pack_name in self.packs_to_skip:
                continue
            pack_dst = os.path.join(self.packs_bundle, pack_name)
            os.mkdir(pack_dst)
            pack_dirs = get_child_directories(pack)
            pack_files = get_child_files(pack)
            # copy first level pack files over
            for file_path in pack_files:
                shutil.copy(
                    file_path,
                    os.path.join(pack_dst, os.path.basename(file_path)))
            # handle content directories in the pack
            for content_dir in pack_dirs:
                dir_name = os.path.basename(content_dir)
                dest_dir = os.path.join(pack_dst, dir_name)
                os.mkdir(dest_dir)
                if dir_name in DIR_TO_PREFIX:
                    packages_dirs = get_child_directories(content_dir)
                    for package_dir in packages_dirs:
                        ymls, _ = get_yml_paths_in_dir(package_dir,
                                                       error_msg='')
                        if not ymls or (len(ymls) == 1
                                        and ymls[0].endswith('_unified.yml')):
                            msg = 'Skipping package: {} -'.format(package_dir)
                            if not ymls:
                                print_warning(
                                    '{} No yml files found in the package directory'
                                    .format(msg))
                            else:
                                print_warning(
                                    '{} Only unified yml found in the package directory'
                                    .format(msg))
                            continue
                        package_dir_name = os.path.basename(package_dir)
                        unifier = Unifier(package_dir, dir_name, dest_dir)
                        unifier.merge_script_package_to_yml()

                        # also copy CHANGELOG markdown files over (should only be one per package)
                        package_files = get_child_files(package_dir)
                        changelog_files = [
                            file_path for file_path in package_files
                            if 'CHANGELOG.md' in file_path
                        ]
                        for md_file_path in changelog_files:
                            md_out_name = '{}-{}_CHANGELOG.md'.format(
                                DIR_TO_PREFIX.get(dir_name), package_dir_name)
                            shutil.copyfile(
                                md_file_path,
                                os.path.join(dest_dir, md_out_name))
                else:
                    self.copy_dir_files(content_dir, dest_dir)
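
As a small illustration of the renaming the docstring describes, the sketch below builds the destination filename a package would presumably get. The DIR_TO_PREFIX mapping shown here is an assumption made for the illustration only; the real constant is defined in demisto_sdk.

import os

# Assumed shape of DIR_TO_PREFIX, shown here only for illustration.
DIR_TO_PREFIX = {'Integrations': 'integration', 'Scripts': 'script'}


def unified_yml_name(dir_name: str, package_dir: str) -> str:
    """Illustrative helper: prepend the server-expected prefix to the package name."""
    return '{}-{}.yml'.format(DIR_TO_PREFIX[dir_name], os.path.basename(package_dir))


print(unified_yml_name('Integrations', 'Packs/DummyPack/Integrations/UploadTest'))
# -> integration-UploadTest.yml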
Example No. 4
    def copy_dir_yml(self, dir_path, bundle):
        """
        Copy the yml files inside a directory to a bundle.

        :param dir_path: source directory
        :param bundle: destination bundle
        :return: None
        """
        scan_files, _ = get_yml_paths_in_dir(dir_path, error_msg='')
        content_files = 0
        dir_name = os.path.basename(dir_path)
        copy_func = self.copy_playbook_yml if dir_name in [
            'Playbooks', 'TestPlaybooks'
        ] else self.copy_content_yml
        for path in scan_files:
            if len(os.path.basename(path)) >= self.file_name_max_size:
                self.long_file_names.append(path)

            with open(path, 'r') as file_:
                yml_info = yaml.safe_load(file_)

            ver = yml_info.get('fromversion', '0')
            print(f' - processing: {ver} ({path})')
            copy_func(path, os.path.join(bundle, os.path.basename(path)),
                      yml_info)
            content_files += 1
        print(f' - total files: {content_files}')
Example No. 5
def test_upload_a_script_directory(demisto_client_configure, mocker):
    """
    Given
        - A script directory called DummyScript

    When
        - Uploading a script

    Then
        - Ensure script is uploaded successfully
        - Ensure success upload message is printed as expected
    """
    mocker.patch("builtins.print")
    script_dir_name = "DummyScript"
    scripts_path = f"{git_path()}/demisto_sdk/tests/test_files/Packs/DummyPack/Scripts/{script_dir_name}"
    uploader = Uploader(input=scripts_path, insecure=False, verbose=False)
    mocker.patch.object(uploader, 'client')
    uploader.upload()
    _, script_yml_name = get_yml_paths_in_dir(scripts_path)
    uploaded_file_name = f'script-{os.path.basename(script_yml_name)}'
    upload_success_message = u'{}{}{}'.format(
        LOG_COLORS.GREEN,
        f"Uploaded script - '{uploaded_file_name}': successfully",
        LOG_COLORS.NATIVE
    )

    assert print.call_args_list[3][0][0] == upload_success_message
Example No. 6
    def copy_dir_yml(self, dir_path, bundle):
        """
        Copy the yml files inside a directory to a bundle.

        :param dir_path: source directory
        :param bundle: destination bundle
        :return: None
        """
        scan_files, _ = get_yml_paths_in_dir(dir_path, error_msg='')
        content_files = 0
        dir_name = os.path.basename(dir_path)
        for path in scan_files:
            if len(os.path.basename(path)) >= self.file_name_max_size:
                self.long_file_names.append(path)

            ryaml = YAML()
            ryaml.allow_duplicate_keys = True
            with io.open(path, mode='r', encoding='utf-8') as file_:
                yml_info = ryaml.load(file_)
            ver = yml_info.get('fromversion', '0')
            print(f' - processing: {ver} ({path})')
            if dir_name in ['Playbooks', 'TestPlaybooks']:
                # in TestPlaybook dir we might have scripts - all should go to test_bundle
                if dir_name == 'TestPlaybooks' and os.path.basename(path).startswith('script-'):
                    self.copy_content_yml(path, os.path.join(bundle, os.path.basename(path)), yml_info)
                self.copy_playbook_yml(path, os.path.join(bundle, os.path.basename(path)))
            else:
                self.copy_content_yml(path, os.path.join(bundle, os.path.basename(path)), yml_info)
            content_files += 1
        print(f' - total files: {content_files}')
Example No. 7
    def get_main_file_details(content_entity: str,
                              entity_instance_path: str) -> tuple:
        """
        Returns the details of the "main" file within an entity instance.
        For example: In the HelloWorld integration under Packs/HelloWorld, the main file is the yml file.
        It contains all relevant ids and names for all the files under the HelloWorld integration dir.
        :param content_entity: The content entity, for example Integrations
        :param entity_instance_path: For example: ~/.../content/Packs/TestPack/Integrations/HelloWorld
        :return: The main file id & name
        """
        main_file_data: dict = dict()
        main_file_path: str = str()

        # Entities which contain yml files
        if content_entity in (INTEGRATIONS_DIR, SCRIPTS_DIR, PLAYBOOKS_DIR,
                              TEST_PLAYBOOKS_DIR):
            if os.path.isdir(entity_instance_path):
                _, main_file_path = get_yml_paths_in_dir(entity_instance_path)
            elif os.path.isfile(entity_instance_path):
                main_file_path = entity_instance_path

            if main_file_path:
                main_file_data = get_yaml(main_file_path)

        # Entities which are json files (md files are ignored - changelog/readme)
        else:
            if os.path.isfile(entity_instance_path) and retrieve_file_ending(
                    entity_instance_path) == 'json':
                main_file_data = get_json(entity_instance_path)

        main_id = get_entity_id_by_entity_type(main_file_data, content_entity)
        main_name = get_entity_name_by_entity_type(main_file_data,
                                                   content_entity)

        return main_id, main_name
Example No. 8
    def test_get_yml_paths_in_dir(self, dir_path):
        yml_paths, first_yml_path = tools.get_yml_paths_in_dir(dir_path, error_msg='')
        yml_paths_test = glob.glob(os.path.join(dir_path, '*yml'))
        assert sorted(yml_paths) == sorted(yml_paths_test)
        if yml_paths_test:
            assert first_yml_path == yml_paths_test[0]
        else:
            assert not first_yml_path
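
The test above pins down the observable behavior of get_yml_paths_in_dir: it globs the yml files directly under a directory and returns them together with the first match, or an empty string when none exist (optionally printing error_msg). A minimal sketch consistent with that test follows; the actual demisto_sdk implementation may differ in details.

import glob
import os
from typing import List, Tuple


def get_yml_paths_in_dir(project_dir: str, error_msg: str = '') -> Tuple[List[str], str]:
    """Return all yml paths directly under project_dir and the first one found ('' if none)."""
    yml_paths = glob.glob(os.path.join(project_dir, '*yml'))
    if not yml_paths:
        if error_msg:
            print(error_msg)
        return [], ''
    return yml_paths, yml_paths[0]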
Example No. 9
    def __init__(self,
                 input: str,
                 dir_name=INTEGRATIONS_DIR,
                 output: str = '',
                 image_prefix=DEFAULT_IMAGE_PREFIX,
                 force: bool = False):

        directory_name = ''
        # Changing relative path to current abspath fixed problem with default output file name.
        if input == '.':
            input = os.path.abspath(input)
        for optional_dir_name in DIR_TO_PREFIX:
            if optional_dir_name in input:
                directory_name = optional_dir_name

        if not directory_name:
            print_error(
                'You have failed to provide a legal file path, a legal file path '
                'should contain either Integrations or Scripts directories')

        self.image_prefix = image_prefix
        self.package_path = input
        self.use_force = force
        if self.package_path.endswith(os.sep):
            self.package_path = self.package_path.rstrip(os.sep)

        self.dest_path = output

        yml_paths, self.yml_path = get_yml_paths_in_dir(
            self.package_path, Errors.no_yml_file(self.package_path))
        for path in yml_paths:
            # The plugin creates a unified YML file for the package.
            # In case this script runs locally and there is a unified YML file in the package we need to ignore it.
            # Also, we don't take the unified file by default because there might be
            # packages that were not created by the plugin.
            if 'unified' not in path and os.path.basename(
                    os.path.dirname(path)) not in [
                        SCRIPTS_DIR, INTEGRATIONS_DIR
                    ]:
                self.yml_path = path
                break

        self.ryaml = YAML()
        self.ryaml.preserve_quotes = True
        self.ryaml.width = 50000  # make sure long lines will not break (relevant for code section)
        if self.yml_path:
            with io.open(self.yml_path, 'r', encoding='utf8') as yml_file:
                self.yml_data = self.ryaml.load(yml_file)
        else:
            self.yml_data = {}
            print_error(f'No yml found in path: {self.package_path}')

        # script key for scripts is a string.
        # script key for integrations is a dictionary.
        self.is_script_package = isinstance(self.yml_data.get('script'), str)
        self.dir_name = SCRIPTS_DIR if self.is_script_package else dir_name
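
To illustrate the final check: in content yml files, a script's code sits directly under the 'script' key as a string, while an integration nests its code and metadata in a dictionary under 'script'. The pair of files below is simplified and hypothetical, and assumes PyYAML is available.

import yaml  # PyYAML, assumed to be installed

script_yml = yaml.safe_load('''
commonfields:
  id: MyScript
script: |-
  demisto.results('ok')
''')

integration_yml = yaml.safe_load('''
commonfields:
  id: MyIntegration
script:
  type: python
  script: |-
    demisto.results('ok')
''')

# Mirrors the isinstance check above: str -> script package, dict -> integration package.
print(isinstance(script_yml.get('script'), str))       # True
print(isinstance(integration_yml.get('script'), str))  # False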
Example No. 10
    def __init__(
        self,
        input: str,
        output: Optional[str] = None,
        force: bool = False,
        marketplace: Optional[str] = None,
    ):
        directory_name = ''
        # Changing relative path to current abspath fixed problem with default output file name.
        input = os.path.abspath(input)
        if not os.path.isdir(input):
            print_error(UNSUPPORTED_INPUT_ERR_MSG)
            sys.exit(1)
        for optional_dir_name in DIR_TO_PREFIX:
            if optional_dir_name in input:
                directory_name = optional_dir_name

        if not directory_name:
            print_error(UNSUPPORTED_INPUT_ERR_MSG)

        self.package_path = input
        self.package_path = self.package_path.rstrip(os.sep)

        self.use_force = force
        self.dest_path = output
        self.dir_name = ''
        self.marketplace = marketplace
        if marketplace:
            MARKETPLACE_TAG_PARSER.marketplace = marketplace

        yml_paths, self.yml_path = get_yml_paths_in_dir(
            self.package_path, Errors.no_yml_file(self.package_path))
        for path in yml_paths:
            # The plugin creates a unified YML file for the package.
            # In case this script runs locally and there is a unified YML file in the package we need to ignore it.
            # Also, we don't take the unified file by default because there might be
            # packages that were not created by the plugin.
            if 'unified' not in path and os.path.basename(
                    os.path.dirname(path)) not in [
                        SCRIPTS_DIR, INTEGRATIONS_DIR
                    ]:
                self.yml_path = path
                break

        self.yaml = YAML_Handler(
            width=50000
        )  # make sure long lines will not break (relevant for code section)

        if self.yml_path:
            with io.open(self.yml_path, 'r', encoding='utf8') as yml_file:
                self.yml_data = self.yaml.load(yml_file)
        else:
            self.yml_data = {}
            print_error(f'No yml found in path: {self.package_path}')
Example No. 11
    def create_unifieds_and_copy(self,
                                 package_dir,
                                 dest_dir='',
                                 skip_dest_dir=''):
        """
        For directories that contain packages, i.e. a subdirectory per integration/script
        (e.g. "Integrations", "Scripts"), creates a unified yml for each package and writes
        it to dest_dir.

        Arguments:
            package_dir: (str)
                Path to directory in which there are package subdirectories. e.g. "Integrations", "Scripts"
            dest_dir: (str)
                Path to destination directory to which the unified yml for a package should be written
            skip_dest_dir: (str)
                Path to the directory to which the unified yml for a package should be written in the
                case the package is part of the skipped list
        """
        dest_dir = dest_dir if dest_dir else self.content_bundle
        skip_dest_dir = skip_dest_dir if skip_dest_dir else self.test_bundle

        scanned_packages = glob.glob(os.path.join(package_dir, '*/'))
        package_dir_name = os.path.basename(package_dir)
        for package in scanned_packages:
            ymls, _ = get_yml_paths_in_dir(package, error_msg='')
            if not ymls or (len(ymls) == 1
                            and ymls[0].endswith('_unified.yml')):
                msg = 'Skipping package: {} -'.format(package)
                if not ymls:
                    print_warning(
                        f'{msg} No yml files found in the package directory')
                else:
                    print_warning(
                        f'{msg} Only unified yml found in the package directory'
                    )
                continue
            unification_tool = Unifier(package, package_dir_name, dest_dir)
            if any(package_to_skip in package
                   for package_to_skip in self.packages_to_skip):
                # there are some packages that we don't want to include in the content zip
                # for example HelloWorld integration
                unification_tool = Unifier(package, package_dir_name,
                                           skip_dest_dir)
                print('skipping {}'.format(package))

            if parse_version(
                    unification_tool.yml_data.get(
                        'fromversion', '0.0.0')) <= parse_version('6.0.0'):
                unified_yml_paths = unification_tool.merge_script_package_to_yml(
                    file_name_suffix=self.file_name_suffix)
                for unified_yml_path in unified_yml_paths:
                    self.add_from_version_to_yml(unified_yml_path)
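
For context on the fromversion gate above, parse_version (presumably imported from packaging or pkg_resources) compares versions numerically rather than lexicographically, which matters once version components reach two digits:

from packaging.version import parse as parse_version  # assumed equivalent of the import used above

print(parse_version('5.5.0') <= parse_version('6.0.0'))    # True
print(parse_version('10.0.0') <= parse_version('6.0.0'))   # False
print('10.0.0' <= '6.0.0')                                 # True - plain string comparison gets this wrong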
Example No. 12
    def copy_dir_yml(self, dir_path, bundle):
        """
        Copy the yml files inside a directory to a bundle.

        :param dir_path: source directory
        :param bundle: destination bundle
        :return: None
        """
        scan_files, _ = get_yml_paths_in_dir(dir_path, error_msg='')
        content_files = 0
        dir_name = os.path.basename(dir_path)
        if scan_files:
            print(f"\nStarting process for {dir_path}")
        for path in scan_files:
            if not self.should_process_file_to_bundle(path, bundle):
                continue

            new_file_path = self.add_suffix_to_file_path(
                os.path.join(bundle, os.path.basename(path)))
            if len(os.path.basename(path)) >= self.file_name_max_size:
                self.long_file_names.append(path)

            ryaml = YAML()
            ryaml.allow_duplicate_keys = True
            with io.open(path, mode='r', encoding='utf-8') as file_:
                yml_info = ryaml.load(file_)
            ver = yml_info.get('fromversion', '0')
            updated_yml_info = self.add_from_version_to_yml(
                yml_content=yml_info, save_yml=False)
            if updated_yml_info:
                yml_info = updated_yml_info

            process_message = f' - processing: {path}'
            if ver != '0' and ver != '':
                process_message += f' - current fromversion: {ver}'
            print(process_message)
            if dir_name in ['Playbooks', 'TestPlaybooks']:
                # in TestPlaybook dir we might have scripts - all should go to test_bundle
                if dir_name == 'TestPlaybooks' and os.path.basename(
                        path).startswith('script-'):
                    self.copy_content_yml(path, new_file_path, yml_info)
                self.copy_playbook_yml(path, new_file_path)
            else:
                self.copy_content_yml(path, new_file_path, yml_info)

            content_files += 1

        if content_files > 0:
            print(f'Finished process - total files: {content_files}\n')
Example No. 13
    def get_script_package_data(self):
        # should be static method
        _, yml_path = get_yml_paths_in_dir(self.package_path, error_msg='')
        if not yml_path:
            raise Exception(
                f'No yml files found in package path: {self.package_path}. '
                'Is this really a package dir?')

        code_type = get_yaml(yml_path).get('type')
        unifier = Unifier(self.package_path)
        code_path = unifier.get_code_file(TYPE_TO_EXTENSION[code_type])
        with open(code_path, 'r') as code_file:
            code = code_file.read()

        return yml_path, code
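
The TYPE_TO_EXTENSION lookup above maps the yml's declared code type to the code file's extension. The table below is an assumed shape, shown for illustration only; the real constant is defined in demisto_sdk.

# Assumed shape of TYPE_TO_EXTENSION, for illustration only.
TYPE_TO_EXTENSION = {
    'python': '.py',
    'javascript': '.js',
    'powershell': '.ps1',
}

# A package whose yml declares type: python is expected to ship a .py code file next to it.
print(TYPE_TO_EXTENSION['python'])  # -> .py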
Example No. 14
    def get_script_or_integration_package_data(self):
        # should be static method
        _, yml_path = get_yml_paths_in_dir(self.package_path, error_msg='')
        if not yml_path:
            raise Exception(
                f'No yml files found in package path: {self.package_path}. '
                'Is this really a package dir?')

        if find_type(yml_path) in (FileType.SCRIPT, FileType.TEST_SCRIPT):
            code_type = get_yaml(yml_path).get('type')
        else:
            code_type = get_yaml(yml_path).get('script', {}).get('type')
        unifier = Unifier(self.package_path)
        code_path = unifier.get_code_file(TYPE_TO_EXTENSION[code_type])
        with io.open(code_path, 'r', encoding='utf-8') as code_file:
            code = code_file.read()

        return yml_path, code
Example No. 15
def test_upload_a_script_directory(demisto_client_configure, mocker):
    """
    Given
        - A script directory called DummyScript

    When
        - Uploading a script

    Then
        - Ensure script is uploaded successfully
        - Ensure the uploaded file is recorded in the uploader's successfully uploaded files
    """
    mocker.patch.object(demisto_client, 'configure', return_value="object")
    script_dir_name = "DummyScript"
    scripts_path = f"{git_path()}/demisto_sdk/tests/test_files/Packs/DummyPack/Scripts/{script_dir_name}"
    uploader = Uploader(input=scripts_path, insecure=False, verbose=False)
    mocker.patch.object(uploader, 'client')
    uploader.upload()
    _, script_yml_name = get_yml_paths_in_dir(scripts_path)
    uploaded_file_name = script_yml_name.split('/')[-1]

    assert [(uploaded_file_name, FileType.SCRIPT.value)] == uploader.successfully_uploaded_files
Example No. 16
def test_upload_an_integration_directory(demisto_client_configure, mocker):
    """
    Given
        - An integration directory called UploadTest

    When
        - Uploading an integration

    Then
        - Ensure integration is uploaded successfully
        - Ensure the uploaded file is recorded in the uploader's successfully uploaded files
    """
    mocker.patch.object(demisto_client, 'configure', return_value="object")
    integration_dir_name = "UploadTest"
    integration_path = f"{git_path()}/demisto_sdk/tests/test_files/Packs/DummyPack/Integrations/{integration_dir_name}"
    uploader = Uploader(input=integration_path, insecure=False, verbose=False)
    mocker.patch.object(uploader, 'client')
    uploader.upload()
    _, integration_yml_name = get_yml_paths_in_dir(integration_path)
    integration_yml_name = integration_yml_name.split('/')[-1]

    assert [(integration_yml_name, FileType.INTEGRATION.value)] == uploader.successfully_uploaded_files
Example No. 17
    def __init__(self,
                 project_dir: str,
                 no_test: bool = False,
                 no_pylint: bool = False,
                 no_flake8: bool = False,
                 no_mypy: bool = False,
                 root: bool = False,
                 keep_container: bool = False,
                 cpu_num: int = 0,
                 configuration: Configuration = Configuration(),
                 lock: threading.Lock = threading.Lock(),
                 no_bandit: bool = False,
                 no_pslint: bool = False,
                 requirements_3: str = '',
                 requirements_2: str = '',
                 no_vulture: bool = False):

        if no_test and no_pylint and no_flake8 and no_mypy and no_bandit and no_vulture:
            raise ValueError("Nothing to run as all --no-* options specified.")

        self.configuration = configuration
        dev_scripts_dir = os.path.join(self.configuration.sdk_env_dir,
                                       'common', 'dev_sh_scripts')
        self.run_dev_tasks_script_name = 'run_dev_tasks.sh'
        self.run_dev_tasks_script_pwsh_name = 'run_dev_tasks_pwsh.sh'
        self.run_mypy_script_name = 'run_mypy.sh'
        self.container_setup_script_name = 'pkg_dev_container_setup.sh'
        self.container_setup_script_pwsh_name = 'pkg_dev_container_setup_pwsh.sh'
        self.cert_file = os.path.join(dev_scripts_dir, 'panw-cert.crt')
        self.run_dev_tasks_script = os.path.join(
            dev_scripts_dir, self.run_dev_tasks_script_name)
        self.run_dev_tasks_script_pwsh = os.path.join(
            dev_scripts_dir, self.run_dev_tasks_script_pwsh_name)
        self.container_setup_script = os.path.join(
            dev_scripts_dir, self.container_setup_script_name)
        self.container_setup_script_pwsh = os.path.join(
            dev_scripts_dir, self.container_setup_script_pwsh_name)
        self.run_mypy_script = os.path.join(dev_scripts_dir,
                                            self.run_mypy_script_name)
        self.docker_login_completed = False
        self.project_dir = os.path.abspath(
            os.path.join(self.configuration.env_dir, project_dir))
        if self.project_dir[-1] != os.sep:
            self.project_dir = os.path.join(self.project_dir, '')

        self.root = root
        self.keep_container = keep_container
        self.cpu_num = cpu_num
        self.common_server_created = False
        self.run_args = {
            'pylint': not no_pylint,
            'flake8': not no_flake8,
            'mypy': not no_mypy,
            'bandit': not no_bandit,
            'tests': not no_test,
            'pslint': not no_pslint,
            'vulture': not no_vulture,
        }
        self.lock = lock
        self.requirements_3 = requirements_3
        self.requirements_2 = requirements_2
        # load yaml
        _, yml_path = get_yml_paths_in_dir(
            self.project_dir, Errors.no_yml_file(self.project_dir))
        if not yml_path:
            raise ValueError(f'yml path failed for: {self.project_dir}')
        print_v('Using yaml file: {}'.format(yml_path))
        with open(yml_path, 'r') as yml_file:
            yml_data = yaml.safe_load(yml_file)
        self.script_obj = yml_data
        if isinstance(self.script_obj.get('script'), dict):
            self.script_obj = self.script_obj.get('script')
        self.script_type = self.script_obj.get('type')
Example No. 18
    def run_dev_packages(self) -> int:
        return_code = 0
        # load yaml
        _, yml_path = get_yml_paths_in_dir(
            self.project_dir, Errors.no_yml_file(self.project_dir))
        if not yml_path:
            return 1
        print_v('Using yaml file: {}'.format(yml_path))
        with open(yml_path, 'r') as yml_file:
            yml_data = yaml.safe_load(yml_file)
        script_obj = yml_data
        if isinstance(script_obj.get('script'), dict):
            script_obj = script_obj.get('script')
        script_type = script_obj.get('type')
        if script_type != 'python':
            if script_type == 'powershell':
                # TODO powershell linting
                return 0

            print(
                'Script is not of type "python". Found type: {}. Nothing to do.'
                .format(script_type))
            return 0

        dockers = get_all_docker_images(script_obj)
        py_num = get_python_version(dockers[0], self.log_verbose)
        self.lock.acquire()
        print_color(
            "============ Starting process for: {} ============\n".format(
                self.project_dir), LOG_COLORS.YELLOW)
        if self.lock.locked():
            self.lock.release()
        self._setup_dev_files(py_num)
        if self.run_args['flake8']:
            result_val = self.run_flake8(py_num)
            if result_val:
                return_code = result_val

        if self.run_args['mypy']:
            result_val = self.run_mypy(py_num)
            if result_val:
                return_code = result_val

        if self.run_args['bandit']:
            result_val = self.run_bandit(py_num)
            if result_val:
                return_code = result_val

        for docker in dockers:
            for try_num in (1, 2):
                print_v("Using docker image: {}".format(docker))
                py_num = get_python_version(docker, self.log_verbose)
                try:
                    if self.run_args['tests'] or self.run_args['pylint']:
                        if py_num == 2.7:
                            requirements = self.requirements_2
                        else:
                            requirements = self.requirements_3

                        docker_image_created = self._docker_image_create(
                            docker, requirements)
                        output, status_code = self._docker_run(
                            docker_image_created)

                        self.lock.acquire()
                        print_color(
                            "\n========== Running tests/pylint for: {} ========="
                            .format(self.project_dir), LOG_COLORS.YELLOW)
                        if status_code == 1:
                            raise subprocess.CalledProcessError(*output)

                        else:
                            print(output)
                            print_color(
                                "============ Finished process for: {}  "
                                "with docker: {} ============\n".format(
                                    self.project_dir, docker),
                                LOG_COLORS.GREEN)

                        if self.lock.locked():
                            self.lock.release()

                    break  # all is good no need to retry
                except subprocess.CalledProcessError as ex:
                    if ex.output:
                        print_color(
                            "=========================== ERROR IN {}==========================="
                            "\n{}\n".format(self.project_dir, ex.output),
                            LOG_COLORS.RED)
                    else:
                        print_color(
                            "========= Test Failed on {}, Look at the error/s above ========\n"
                            .format(self.project_dir), LOG_COLORS.RED)
                        return_code = 1

                    if not self.log_verbose:
                        sys.stderr.write(
                            "Need a more detailed log? try running with the -v options as so: \n{} -v\n\n"
                            .format(" ".join(sys.argv[:])))

                    if self.lock.locked():
                        self.lock.release()

                    # circle ci docker setup sometimes fails on 'connection reset by peer' - retry once in that case
                    if try_num > 1 or not ex.output or 'read: connection reset by peer' not in ex.output:
                        return 2
                    else:
                        sys.stderr.write(
                            "Retrying as failure seems to be docker communication related...\n"
                        )

                finally:
                    sys.stdout.flush()
                    sys.stderr.flush()

        return return_code
Example No. 19
    def copy_packs_content_to_packs_bundle(self, packs):
        """
        Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
        except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
        prefixes according to how the server expects to ingest the files, e.g. 'integration-' is prepended to integration
        yml filenames and 'script-' is prepended to script yml filenames, and so on.
        """
        for pack in packs:
            pack_name = os.path.basename(pack)
            if pack_name in self.packs_to_skip:
                continue
            pack_dst = os.path.join(self.packs_bundle, pack_name)
            os.mkdir(pack_dst)
            pack_dirs = get_child_directories(pack)
            pack_files = get_child_files(pack)
            # copy first level pack files over
            for file_path in pack_files:
                shutil.copy(
                    file_path,
                    os.path.join(pack_dst, os.path.basename(file_path)))

            # handle content directories in the pack
            for content_dir in pack_dirs:
                dir_name = os.path.basename(content_dir)
                dest_dir = os.path.join(pack_dst, dir_name)
                os.mkdir(dest_dir)
                if dir_name in DIR_TO_PREFIX:
                    packages_dirs = get_child_directories(content_dir)

                    if packages_dirs:  # split yml files directories
                        for package_dir in packages_dirs:
                            ymls, _ = get_yml_paths_in_dir(package_dir,
                                                           error_msg='')
                            if not ymls or (len(ymls) == 1 and
                                            ymls[0].endswith('_unified.yml')):
                                msg = f'Skipping package: {package_dir} -'
                                if not ymls:
                                    print_warning(
                                        '{} No yml files found in the package directory'
                                        .format(msg))
                                else:
                                    print_warning(
                                        '{} Only unified yml found in the package directory'
                                        .format(msg))
                                continue
                            unifier = Unifier(package_dir, dir_name, dest_dir)

                            if parse_version(
                                    unifier.yml_data.get(
                                        'toversion',
                                        '99.99.99')) >= parse_version('6.0.0'):
                                new_file_paths = unifier.merge_script_package_to_yml(
                                    file_name_suffix=self.file_name_suffix)
                                for new_file_path in new_file_paths:
                                    self.add_from_version_to_yml(new_file_path)

                    non_split_yml_files = [
                        f for f in os.listdir(content_dir)
                        if os.path.isfile(os.path.join(content_dir, f)) and (
                            fnmatch.fnmatch(f, 'integration-*.yml')
                            or fnmatch.fnmatch(f, 'script-*.yml'))
                    ]

                    if non_split_yml_files:  # old format non split yml files
                        for yml_file in non_split_yml_files:
                            new_file_path = self.add_suffix_to_file_path(
                                os.path.join(dest_dir, yml_file))
                            shutil.copyfile(
                                os.path.join(content_dir, yml_file),
                                new_file_path)
                            self.add_from_version_to_yml(new_file_path)

                else:
                    self.copy_dir_files(content_dir,
                                        dest_dir,
                                        is_legacy_bundle=False)
Example No. 20
    def is_context_different_in_yml(self) -> bool:
        """
        Checks if there has been a corresponding change to the integration's README
        when changing the context paths of an integration.
        This validation might run together with is_context_change_in_readme in Integration's validation.
        Returns:
            True if the README has a corresponding change when the integration's context is changed
            (or the check does not apply), False otherwise
        """
        valid = True

        # disregards scripts as the structure of the files is different:
        dir_path = os.path.dirname(self.file_path)
        if 'Scripts' in dir_path:
            return True

        # Get YML file, assuming only one yml in integration
        yml_file_paths = get_yml_paths_in_dir(dir_path)

        # Handles case of Pack's Readme, so no YML file is found in pack.
        if not yml_file_paths[0]:
            return True

        # yml_file_paths[1] should contain the first yml file found in dir
        yml_file_path = yml_file_paths[1]

        # If get_yml_paths_in_dir does not return full path, dir_path should be added to path.
        if dir_path not in yml_file_path:
            yml_file_path = os.path.join(dir_path, yml_file_path)

        # Getting the relevant error_code:
        error, missing_from_readme_error_code = Errors.readme_missing_output_context(
            '', '')
        error, missing_from_yml_error_code = Errors.missing_output_context(
            '', '')

        # Only run validation if the validation has not run with is_context_change_in_readme on integration
        # so no duplicates errors will be created:
        errors, ignored_errors = self._get_error_lists()
        if f'{self.file_path} - [{missing_from_readme_error_code}]' in ignored_errors \
                or f'{self.file_path} - [{missing_from_readme_error_code}]' in errors \
                or f'{yml_file_path} - [{missing_from_yml_error_code}]' in ignored_errors \
                or f'{yml_file_path} - [{missing_from_yml_error_code}]' in errors:
            return False

        # get YML file's content:
        yml_as_dict = get_yaml(yml_file_path)

        difference_context_paths = compare_context_path_in_yml_and_readme(
            yml_as_dict, self.readme_content)

        # Add errors to error's list
        for command_name in difference_context_paths:
            if difference_context_paths[command_name].get('only in yml'):
                error, code = Errors.readme_missing_output_context(
                    command_name,
                    ", ".join(difference_context_paths[command_name].get(
                        'only in yml')))
                if self.handle_error(error, code, file_path=self.file_path):
                    valid = False

            if difference_context_paths[command_name].get('only in readme'):
                error, code = Errors.missing_output_context(
                    command_name,
                    ", ".join(difference_context_paths[command_name].get(
                        'only in readme')))
                if self.handle_error(error, code, file_path=yml_file_path):
                    valid = False

        return valid
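
The loop above suggests that compare_context_path_in_yml_and_readme returns a per-command dictionary with 'only in yml' and 'only in readme' lists. Below is a hypothetical return value and how it would be consumed, for illustration only; the real function may return additional keys.

# Hypothetical structure of the comparison result; the real function may differ.
difference_context_paths = {
    'helloworld-say-hello': {
        'only in yml': ['HelloWorld.Hello'],
        'only in readme': [],
    },
    'helloworld-search-alerts': {
        'only in yml': [],
        'only in readme': ['HelloWorld.AlertId'],
    },
}

for command_name, diff in difference_context_paths.items():
    if diff.get('only in yml'):
        print('{}: README is missing outputs: {}'.format(command_name, ', '.join(diff['only in yml'])))
    if diff.get('only in readme'):
        print('{}: yml is missing outputs: {}'.format(command_name, ', '.join(diff['only in readme'])))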