Example No. 1
    def validate_all_files(self):
        """Validate all files in the repo are in the right format."""
        # go over packs
        for root, dirs, _ in os.walk(PACKS_DIR):
            for dir_in_dirs in dirs:
                for directory in PACKS_DIRECTORIES:
                    for inner_root, inner_dirs, files in os.walk(
                            os.path.join(root, dir_in_dirs, directory)):
                        for inner_dir in inner_dirs:
                            if inner_dir.startswith('.'):
                                continue

                            project_dir = os.path.join(inner_root, inner_dir)
                            _, file_path = get_yml_paths_in_dir(
                                os.path.normpath(project_dir),
                                Errors.no_yml_file(project_dir))
                            if file_path:
                                print("Validating {}".format(file_path))
                                structure_validator = StructureValidator(
                                    file_path)
                                if not structure_validator.is_valid_scheme():
                                    self._is_valid = False

        # go over regular content entities
        for directory in DIR_LIST_FOR_REGULAR_ENTETIES:
            print_color('Validating {} directory:'.format(directory),
                        LOG_COLORS.GREEN)
            for root, dirs, files in os.walk(directory):
                for file_name in files:
                    file_path = os.path.join(root, file_name)
                    # skip files that are not yml
                    if not file_name.endswith('.yml'):
                        continue

                    print('Validating ' + file_name)
                    structure_validator = StructureValidator(file_path)
                    if not structure_validator.is_valid_scheme():
                        self._is_valid = False

        # go over regular PACKAGE_SUPPORTING_DIRECTORIES entities
        for directory in PACKAGE_SUPPORTING_DIRECTORIES:
            for root, dirs, files in os.walk(directory):
                for inner_dir in dirs:
                    if inner_dir.startswith('.'):
                        continue

                    project_dir = os.path.join(root, inner_dir)
                    _, file_path = get_yml_paths_in_dir(
                        project_dir, Errors.no_yml_file(project_dir))
                    if file_path:
                        print('Validating ' + file_path)
                        structure_validator = StructureValidator(file_path)
                        if not structure_validator.is_valid_scheme():
                            self._is_valid = False
Example No. 2
    def copy_packs_content_to_packs_bundle(self, packs):
        '''
        Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
        except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
        prefixes according to how the server expects to ingest the files, e.g. 'integration-' is prepended to
        integration yml filenames and 'script-' is prepended to script yml filenames, and so on.
        '''
        for pack in packs:
            pack_name = os.path.basename(pack)
            if pack_name in self.packs_to_skip:
                continue
            pack_dst = os.path.join(self.packs_bundle, pack_name)
            os.mkdir(pack_dst)
            pack_dirs = get_child_directories(pack)
            pack_files = get_child_files(pack)
            # copy first level pack files over
            for file_path in pack_files:
                shutil.copy(
                    file_path,
                    os.path.join(pack_dst, os.path.basename(file_path)))
            # handle content directories in the pack
            for content_dir in pack_dirs:
                dir_name = os.path.basename(content_dir)
                dest_dir = os.path.join(pack_dst, dir_name)
                os.mkdir(dest_dir)
                if dir_name in DIR_TO_PREFIX:
                    packages_dirs = get_child_directories(content_dir)
                    for package_dir in packages_dirs:
                        ymls, _ = get_yml_paths_in_dir(package_dir,
                                                       error_msg='')
                        if not ymls or (len(ymls) == 1
                                        and ymls[0].endswith('_unified.yml')):
                            msg = 'Skipping package: {} -'.format(package_dir)
                            if not ymls:
                                print_warning(
                                    '{} No yml files found in the package directory'
                                    .format(msg))
                            else:
                                print_warning(
                                    '{} Only unified yml found in the package directory'
                                    .format(msg))
                            continue
                        package_dir_name = os.path.basename(package_dir)
                        unifier = Unifier(package_dir, dir_name, dest_dir)
                        unifier.merge_script_package_to_yml()

                        # also copy CHANGELOG markdown files over (should only be one per package)
                        package_files = get_child_files(package_dir)
                        changelog_files = [
                            file_path for file_path in package_files
                            if 'CHANGELOG.md' in file_path
                        ]
                        for md_file_path in changelog_files:
                            md_out_name = '{}-{}_CHANGELOG.md'.format(
                                DIR_TO_PREFIX.get(dir_name), package_dir_name)
                            shutil.copyfile(
                                md_file_path,
                                os.path.join(dest_dir, md_out_name))
                else:
                    self.copy_dir_files(content_dir, dest_dir)
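
A note on the prefixes mentioned in the docstring: only the 'integration-' and 'script-' prefixes are stated explicitly, so the mapping below is an assumption about the shape of DIR_TO_PREFIX; Example No. 5 shows how it is combined with the package directory name to build the unified filename.

    # Assumed shape of DIR_TO_PREFIX -- only the integration/script prefixes are
    # confirmed by the docstring above; the keys and remaining values are illustrative.
    DIR_TO_PREFIX = {
        'Integrations': 'integration',
        'Beta_Integrations': 'integration',
        'Scripts': 'script',
    }

    # Example No. 5 builds the unified output name from this mapping:
    package_dir_name = 'MyIntegration'  # hypothetical package directory name
    output_filename = '{}-{}.yml'.format(DIR_TO_PREFIX['Integrations'], package_dir_name)
    # -> 'integration-MyIntegration.yml'
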
Example No. 3
    def copy_dir_yml(self, dir_path, bundle):
        """
        Copy the yml files inside a directory to a bundle.

        :param dir_path: source directory
        :param bundle: destination bundle
        :return: None
        """
        scan_files, _ = get_yml_paths_in_dir(dir_path, error_msg='')
        content_files = 0
        dir_name = os.path.basename(dir_path)
        copy_func = self.copy_playbook_yml if dir_name in [
            'Playbooks', 'TestPlaybooks'
        ] else self.copy_content_yml
        for path in scan_files:
            if len(os.path.basename(path)) >= self.file_name_max_size:
                self.long_file_names.append(path)

            with open(path, 'r') as file_:
                yml_info = yaml.safe_load(file_)

            ver = yml_info.get('fromversion', '0')
            print(f' - processing: {ver} ({path})')
            copy_func(path, os.path.join(bundle, os.path.basename(path)),
                      yml_info)
            content_files += 1
        print(f' - total files: {content_files}')
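
A side note on the `fromversion` lookup above: it falls back to the string '0' when the key is absent. A quick, self-contained illustration (the yml content is made up):

    import yaml

    # Illustrative yml content; real playbook/integration ymls carry many more keys.
    yml_info = yaml.safe_load('name: MyPlaybook\nfromversion: 4.1.0\n')
    print(yml_info.get('fromversion', '0'))  # -> '4.1.0'; would print '0' if the key were missing
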
Example No. 4
    def test_get_yml_paths_in_dir(self, dir_path):
        yml_paths, first_yml_path = tools.get_yml_paths_in_dir(dir_path, error_msg='')
        yml_paths_test = glob.glob(os.path.join(dir_path, '*yml'))
        assert sorted(yml_paths) == sorted(yml_paths_test)
        if yml_paths_test:
            assert first_yml_path == yml_paths_test[0]
        else:
            assert not first_yml_path
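
The test above effectively pins down the helper's contract: it returns every yml path in the directory plus a "first" path that is falsy when the directory has none. A minimal sketch consistent with that contract (the error handling is an assumption; the real implementation may differ):

    import glob
    import os


    def get_yml_paths_in_dir(project_dir, error_msg=''):
        """Return (all yml paths in project_dir, first yml path or '')."""
        yml_paths = glob.glob(os.path.join(project_dir, '*yml'))
        if not yml_paths and error_msg:
            # Assumption: error_msg is only surfaced when nothing is found.
            print(error_msg)
        return yml_paths, yml_paths[0] if yml_paths else ''
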
Example No. 5
    def merge_script_package_to_yml(self):
        """Merge the various components to create an output yml file
        """
        print("Merging package: {}".format(self.package_path))
        if self.package_path.endswith('/'):
            self.package_path = self.package_path.rstrip('/')
        package_dir_name = os.path.basename(self.package_path)
        output_filename = '{}-{}.yml'.format(DIR_TO_PREFIX[self.dir_name],
                                             package_dir_name)
        if self.dest_path:
            self.dest_path = os.path.join(self.dest_path, output_filename)
        else:
            self.dest_path = os.path.join(self.dir_name, output_filename)

        yml_paths, yml_path = get_yml_paths_in_dir(
            self.package_path, Errors.no_yml_file(self.package_path))
        for path in yml_paths:
            # The plugin creates a unified YML file for the package. If this script runs locally
            # and a unified YML file already exists in the package, we need to ignore it. We also
            # don't take the unified file by default, because some packages were not created by the plugin.
            if 'unified' not in path:
                yml_path = path
                break

        with open(yml_path, 'r') as yml_file:
            yml_data = yaml.safe_load(yml_file)

        script_obj = yml_data

        if self.dir_name != SCRIPTS_DIR:
            script_obj = yml_data['script']
        script_type = TYPE_TO_EXTENSION[script_obj['type']]

        with io.open(yml_path, mode='r', encoding='utf-8') as yml_file:
            yml_text = yml_file.read()

        yml_text, script_path = self.insert_script_to_yml(
            script_type, yml_text, yml_data)
        image_path = None
        desc_path = None
        if self.dir_name in (INTEGRATIONS_DIR, BETA_INTEGRATIONS_DIR):
            yml_text, image_path = self.insert_image_to_yml(yml_data, yml_text)
            yml_text, desc_path = self.insert_description_to_yml(
                yml_data, yml_text)

        output_map = self.write_yaml_with_docker(yml_text, yml_data,
                                                 script_obj)
        unifier_outputs = list(
            output_map.keys()), yml_path, script_path, image_path, desc_path
        print_color("Created unified yml: {}".format(unifier_outputs[0][0]),
                    LOG_COLORS.GREEN)
        return unifier_outputs
Example No. 6
    def get_script_package_data(self):
        _, yml_path = get_yml_paths_in_dir(self.package_path, error_msg='')
        if not yml_path:
            raise Exception("No yml files found in package path: {}. "
                            "Is this really a package dir? If not remove it.".format(self.package_path))
        code_type = get_yaml(yml_path).get('type')
        unifier = Unifier(self.package_path)
        code_path = unifier.get_code_file(TYPE_TO_EXTENSION[code_type])
        with open(code_path, 'r') as code_file:
            code = code_file.read()

        return yml_path, code
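
TYPE_TO_EXTENSION maps the yml `type` field to the extension of the package's code file. Only 'python' is relied on directly in these examples, so the mapping below is an assumption for illustration:

    # Assumed shape of TYPE_TO_EXTENSION; 'python' is the only key these examples
    # depend on directly, the other entries are illustrative.
    TYPE_TO_EXTENSION = {
        'python': '.py',
        'javascript': '.js',
        'powershell': '.ps1',
    }
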
Example No. 7
    def create_unifieds_and_copy(self,
                                 package_dir,
                                 dest_dir='',
                                 skip_dest_dir=''):
        '''
        For directories that contain packages, i.e. a subdirectory per integration/script,
        e.g. "Integrations", "Beta_Integrations", "Scripts": creates a unified yml for each
        package and writes it to dest_dir.

        Arguments:
            package_dir: (str)
                Path to directory in which there are package subdirectories. e.g. "Integrations",
                "Beta_Integrations", "Scripts"
            dest_dir: (str)
                Path to destination directory to which the unified yml for a package should be written
            skip_dest_dir: (str)
                Path to the directory to which the unified yml for a package should be written in the
                case the package is part of the skipped list
        '''
        dest_dir = dest_dir if dest_dir else self.content_bundle
        skip_dest_dir = skip_dest_dir if skip_dest_dir else self.test_bundle

        scanned_packages = glob.glob(os.path.join(package_dir, '*/'))
        package_dir_name = os.path.basename(package_dir)
        for package in scanned_packages:
            ymls, _ = get_yml_paths_in_dir(package, error_msg='')
            if not ymls or (len(ymls) == 1
                            and ymls[0].endswith('_unified.yml')):
                msg = 'Skipping package: {} -'.format(package)
                if not ymls:
                    print_warning(
                        '{} No yml files found in the package directory'.
                        format(msg))
                else:
                    print_warning(
                        '{} Only unified yml found in the package directory'.
                        format(msg))
                continue
            unification_tool = Unifier(package, package_dir_name, dest_dir)
            if any(package_to_skip in package
                   for package_to_skip in self.packages_to_skip):
                # there are some packages that we don't want to include in the content zip
                # for example HelloWorld integration
                unification_tool = Unifier(package, package_dir_name,
                                           skip_dest_dir)
                print('skipping {}'.format(package))
            unification_tool.merge_script_package_to_yml()
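
The "no ymls, or only an already-unified yml" check above is duplicated verbatim in Example No. 2; if one wanted to factor it out, a small helper (hypothetical name) could look like this:

    def should_skip_package(ymls):
        """Hypothetical helper factoring out the skip check shared by Examples No. 2 and No. 7:
        a package is skipped when it has no yml files, or its only yml is an already-unified one."""
        return not ymls or (len(ymls) == 1 and ymls[0].endswith('_unified.yml'))
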
Example No. 8
    def run_dev_packages(self) -> int:
        return_code = 0
        # load yaml
        _, yml_path = get_yml_paths_in_dir(
            self.project_dir, Errors.no_yml_file(self.project_dir))
        if not yml_path:
            return 1
        print_v('Using yaml file: {}'.format(yml_path))
        with open(yml_path, 'r') as yml_file:
            yml_data = yaml.safe_load(yml_file)
        script_obj = yml_data
        if isinstance(script_obj.get('script'), dict):
            script_obj = script_obj.get('script')
        script_type = script_obj.get('type')
        if script_type != 'python':
            if script_type == 'powershell':
                # TODO powershell linting
                return 0

            print(
                'Script is not of type "python". Found type: {}. Nothing to do.'
                .format(script_type))
            return 0

        dockers = get_all_docker_images(script_obj)
        py_num = get_python_version(dockers[0], self.log_verbose)
        self.lock.acquire()
        print_color(
            "============ Starting process for: {} ============\n".format(
                self.project_dir), LOG_COLORS.YELLOW)
        if self.lock.locked():
            self.lock.release()
        self._setup_dev_files(py_num)
        if self.run_args['flake8']:
            result_val = self.run_flake8(py_num)
            if result_val:
                return_code = result_val

        if self.run_args['mypy']:
            result_val = self.run_mypy(py_num)
            if result_val:
                return_code = result_val

        if self.run_args['bandit']:
            result_val = self.run_bandit(py_num)
            if result_val:
                return_code = result_val

        for docker in dockers:
            for try_num in (1, 2):
                print_v("Using docker image: {}".format(docker))
                py_num = get_python_version(docker, self.log_verbose)
                try:
                    if self.run_args['tests'] or self.run_args['pylint']:
                        if py_num == 2.7:
                            requirements = self.requirements_2
                        else:
                            requirements = self.requirements_3

                        docker_image_created = self._docker_image_create(
                            docker, requirements)
                        output, status_code = self._docker_run(
                            docker_image_created)

                        self.lock.acquire()
                        print_color(
                            "\n========== Running tests/pylint for: {} ========="
                            .format(self.project_dir), LOG_COLORS.YELLOW)
                        if status_code == 1:
                            raise subprocess.CalledProcessError(*output)

                        else:
                            print(output)
                            print_color(
                                "============ Finished process for: {}  "
                                "with docker: {} ============\n".format(
                                    self.project_dir, docker),
                                LOG_COLORS.GREEN)

                        if self.lock.locked():
                            self.lock.release()

                    break  # all is good no need to retry
                except subprocess.CalledProcessError as ex:
                    if ex.output:
                        print_color(
                            "=========================== ERROR IN {}==========================="
                            "\n{}\n".format(self.project_dir, ex.output),
                            LOG_COLORS.RED)
                    else:
                        print_color(
                            "========= Test Failed on {}, Look at the error/s above ========\n"
                            .format(self.project_dir), LOG_COLORS.RED)
                        return_code = 1

                    if not self.log_verbose:
                        sys.stderr.write(
                            "Need a more detailed log? try running with the -v options as so: \n{} -v\n\n"
                            .format(" ".join(sys.argv[:])))

                    if self.lock.locked():
                        self.lock.release()

                    # Circle CI docker setup sometimes fails on 'connection reset by peer'; retry once in that case.
                    if try_num > 1 or not ex.output or 'read: connection reset by peer' not in ex.output:
                        return 2
                    else:
                        sys.stderr.write(
                            "Retrying as failure seems to be docker communication related...\n"
                        )

                finally:
                    sys.stdout.flush()
                    sys.stderr.flush()

        return return_code