Example #1
    def is_script_condition_branches_handled(self, task: Dict) -> bool:
        """Checks whether a script conditional task branches are handled properly

        Args:
            task (dict): task json loaded from a yaml

        Returns:
            bool. Whether the task handles all condition branches correctly.
        """
        is_all_condition_branches_handled: bool = True
        next_tasks: Dict = task.get('nexttasks', {})
        if '#default#' not in next_tasks:
            print_error(
                f'Playbook conditional task with id:{task.get("id")} has unhandled condition: else'
            )
            self.is_valid = is_all_condition_branches_handled = False
        if len(next_tasks) < 2:
            # there should be at least 2 next tasks; we don't know which condition is missing, only that one is
            print_error(
                f'Playbook conditional task with id:{task.get("id")} has unhandled condition'
            )
            self.is_valid = is_all_condition_branches_handled = False
        return is_all_condition_branches_handled
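
For intuition, the 'nexttasks' shape the check reads looks like this (hand-built, hypothetical task dicts):

# Hypothetical task dicts illustrating the branch check above.
handled_task = {
    'id': '3',
    'nexttasks': {'#default#': ['4'], 'yes': ['5']},  # the else branch is handled
}
unhandled_task = {
    'id': '7',
    'nexttasks': {'yes': ['8']},  # no '#default#' and fewer than 2 branches
}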
Example #2
    def load_data_from_file(self):
        # type: () -> dict
        """Loads data according to function defined in FILE_SUFFIX_TO_LOAD_FUNCTION
        Returns:
             (dict)
        """
        file_extension = os.path.splitext(self.file_path)[1]
        if file_extension in ACCEPTED_FILE_EXTENSIONS:
            if file_extension in self.FILE_SUFFIX_TO_LOAD_FUNCTION:
                load_function = self.FILE_SUFFIX_TO_LOAD_FUNCTION[
                    file_extension]
                with open(self.file_path, 'r') as file_obj:
                    loaded_file_data = load_function(file_obj)  # type: ignore
                    return loaded_file_data

            # Ignore loading image and markdown
            elif file_extension in ['.png', '.md']:
                return {}

        print_error(
            Errors.wrong_file_extension(
                file_extension, self.FILE_SUFFIX_TO_LOAD_FUNCTION.keys()))
        return {}
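
For context, FILE_SUFFIX_TO_LOAD_FUNCTION maps file suffixes to loader callables; its exact contents are an assumption, but a plausible sketch is:

import json
import yaml

# Assumed shape of the suffix-to-loader mapping consumed above; each loader
# takes an open file object, matching the load_function(file_obj) call.
FILE_SUFFIX_TO_LOAD_FUNCTION = {
    '.yml': yaml.safe_load,
    '.yaml': yaml.safe_load,
    '.json': json.load,
}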
Example #3
    def get_master_version(self) -> str:
        """
            Gets the current version from origin/master or origin/main if available; otherwise returns '0.0.0'.

            :rtype: ``str``
            :return: The master version.

        """
        master_current_version = '0.0.0'
        master_metadata = None
        try:
            master_metadata = get_remote_file(self.metadata_path,
                                              tag=self.main_branch)
        except Exception as e:
            print_error(
                f"master branch is unreachable.\n The reason is:{e} \n "
                f"The updated version will be taken from local metadata file instead of master"
            )
        if master_metadata:
            master_current_version = master_metadata.get(
                'currentVersion', '0.0.0')
        return master_current_version
Example #4
    def update_fromversion_by_user(self):
        """If no fromversion is specified, asks the user for it's value and updates the playbook."""
        print('Updating fromversion tag')

        if not self.data.get('fromVersion', ''):
            print_color(
                'No fromversion is specified for this playbook. Would you like to set it now? [Y/n]',
                LOG_COLORS.RED)
            user_answer = input()
            if user_answer in ['n', 'N', 'no', 'No']:
                print_error('Moving forward without updating fromversion tag')
                return

            is_input_version_valid = False
            while not is_input_version_valid:
                print_color('Please specify the desired version X.X.X',
                            LOG_COLORS.YELLOW)
                user_desired_version = input()
                if re.fullmatch(r'\d+\.\d+\.\d+', user_desired_version):  # whole string must match
                    self.data['fromVersion'] = user_desired_version
                    is_input_version_valid = True
                else:
                    print_error('Version format is not valid')
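
Note that re.match anchors only at the start of the string, so a trailing suffix would slip through; re.fullmatch (used above) requires the whole string to match:

import re

assert re.match(r'\d+\.\d+\.\d+', '4.5.0-beta') is not None   # prefix matches
assert re.fullmatch(r'\d+\.\d+\.\d+', '4.5.0-beta') is None   # whole string does not
assert re.fullmatch(r'\d+\.\d+\.\d+', '4.5.0') is not None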
Example #5
    def create_incident_with_playbook(self, incident_name, playbook_id):
        # type: (str, str) -> int
        """Create an incident in Demisto with the given incident_name and the given playbook_id

        Args:
            incident_name (str): The name of the incident
            playbook_id (str): The id of the playbook

        Raises:
            ApiException: if the client has failed to create an incident

        Returns:
            int. The new incident's ID.
        """

        create_incident_request = demisto_client.demisto_api.CreateIncidentRequest()
        create_incident_request.create_investigation = True
        create_incident_request.playbook_id = playbook_id
        create_incident_request.name = incident_name

        try:
            response = self.demisto_client.create_incident(
                create_incident_request=create_incident_request)
        except ApiException as e:
            print_error(
                f'Failed to create incident with playbook id: "{playbook_id}". '
                'Possible reasons are:\n'
                '1. The playbook does not exist\n'
                '2. Schema problems in the playbook\n'
                '3. Unauthorized API key')
            raise e

        print_color(
            f'The playbook: {playbook_id} was triggered successfully.',
            LOG_COLORS.GREEN)
        return response.id
Example #6
def download_and_extract_index(storage_bucket, extract_destination_path):
    """Downloads and extracts index zip from cloud storage.

    Args:
        storage_bucket (google.cloud.storage.bucket.Bucket): google storage bucket where index.zip is stored.
        extract_destination_path (str): the full path of extract folder.
    Returns:
        str: extracted index folder full path.
        Blob: google cloud storage object that represents index.zip blob.
        str: downloaded index generation.

    """
    index_storage_path = os.path.join(GCPConfig.STORAGE_BASE_PATH,
                                      f"{GCPConfig.INDEX_NAME}.zip")
    download_index_path = os.path.join(extract_destination_path,
                                       f"{GCPConfig.INDEX_NAME}.zip")

    index_blob = storage_bucket.blob(index_storage_path)
    index_folder_path = os.path.join(extract_destination_path,
                                     GCPConfig.INDEX_NAME)
    index_generation = 0  # Setting to 0 makes the operation succeed only if there are no live versions of the blob

    if not os.path.exists(extract_destination_path):
        os.mkdir(extract_destination_path)

    if not index_blob.exists():
        print_error("The blob does not exist.")
        os.mkdir(index_folder_path)
        return index_folder_path, index_blob, index_generation

    index_blob.reload()
    index_generation = index_blob.generation

    index_blob.download_to_filename(download_index_path,
                                    if_generation_match=index_generation)

    if os.path.exists(download_index_path):
        with ZipFile(download_index_path, 'r') as index_zip:
            index_zip.extractall(extract_destination_path)

        if not os.path.exists(index_folder_path):
            print_error(
                f"Failed creating {GCPConfig.INDEX_NAME} folder with extracted data."
            )
            sys.exit(1)

        os.remove(download_index_path)
        print(
            f"Finished downloading and extracting {GCPConfig.INDEX_NAME} file to {extract_destination_path}"
        )

        return index_folder_path, index_blob, index_generation
    else:
        print_error(
            f"Failed to download {GCPConfig.INDEX_NAME}.zip file from cloud storage."
        )
        sys.exit(1)
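
A usage sketch, assuming default Google Cloud credentials; the bucket name and destination path are placeholders:

from google.cloud import storage

storage_client = storage.Client()
storage_bucket = storage_client.bucket('marketplace-dist')  # hypothetical bucket name
index_folder, index_blob, index_generation = download_and_extract_index(
    storage_bucket, '/tmp/index_extract')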
Example #7
def parse_json(data, command_name, prefix, verbose=False, interactive=False):
    if data == '':
        raise ValueError('Invalid input JSON - got empty string')

    try:
        data = json.loads(data)
    except ValueError as ex:
        if verbose:
            print_error(str(ex))

        raise ValueError('Invalid input JSON')

    flattened_data = flatten_json(data)
    if prefix:
        flattened_data = {f'{prefix}.{key}': value for key, value in flattened_data.items()}

    arg_json = []
    for key, value in flattened_data.items():
        description = ''
        if interactive:
            print(f'Enter description for: [{key}]')
            description = input_multiline()

        arg_json.append(jsonise(key, value, description))

    if verbose:
        print(f'JSON before converting to YAML: {arg_json}')

    yaml_output = yaml.safe_dump(
        {
            'name': command_name.lstrip('!'),
            'arguments': [],
            'outputs': arg_json
        },
        default_flow_style=False
    )
    return yaml_output
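
A usage sketch, assuming this module's helpers (flatten_json, jsonise, input_multiline) are importable; the sample JSON is made up:

sample = '{"user": {"name": "jdoe", "id": 42}}'
yml = parse_json(sample, command_name='!get-user', prefix='User')
# Produces a YAML command definition named 'get-user' whose outputs are the
# flattened, prefixed keys, e.g. User.user.name and User.user.id.
print(yml)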
Example #8
    def run_flake8(self, py_num) -> int:
        """Runs flake8

        Args:
            py_num (int): The python version in use

        Returns:
            int. 0 if flake8 is successful, 1 otherwise.
        """
        lint_files = self._get_lint_files()
        python_exe = 'python2' if py_num < 3 else 'python3'
        print_v('Using: {} to run flake8'.format(python_exe))
        output = run_command(f'{python_exe} -m flake8 {self.project_dir}',
                             cwd=self.configuration.env_dir)
        with self.lock:
            print("\n========= Running flake8 on: {}===============".format(
                lint_files))
            if len(output) == 0:
                print_color("flake8 completed for: {}\n".format(lint_files),
                            LOG_COLORS.GREEN)
                return 0
            else:
                print_error(output)
                return 1
Example #9
    def is_current_valid_from_version(self):
        # type: () -> bool
        """Check if the current file fromversion is valid.
        Returns:
            bool. Whether the current fromversion is valid or not.
        """
        is_valid = True

        # new files (no old_file) are checked here;
        # files with an old_file are checked in the BC (backwards compatibility) checks
        if not self.old_file:
            try:
                from_version = self.current_file.get("fromVersion", "0.0.0")
                if LooseVersion(from_version) < LooseVersion("5.0.0"):
                    print_error(
                        f'{self.file_path}: fromVersion must be at least 5.0.0'
                    )
                    is_valid = False
            except (AttributeError, ValueError):
                print_error(
                    f'{self.file_path}: "fromVersion" has an invalid value.')
                is_valid = False

        return is_valid
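
The comparison relies on distutils' LooseVersion, which orders dotted version strings numerically rather than lexicographically:

from distutils.version import LooseVersion

assert LooseVersion('4.5.0') < LooseVersion('5.0.0')
assert LooseVersion('5.10.0') > LooseVersion('5.9.9')  # 10 > 9 as numbers, unlike string order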
Example #10
    def save_image_and_desc(self, directory: str) -> tuple:
        """
        Writes template image and description.
        Args:
            directory: The directory to save the file to.

        Returns:
            image_path: The path to the image file.
            desc_path: The path to the description file.
        """
        self.print_with_verbose('Creating image and description files...')
        image_path = os.path.join(directory, f'{self.base_name}_image.png')
        desc_path = os.path.join(directory, f'{self.base_name}_description.md')
        try:
            shutil.copy(
                os.path.join(os.path.dirname(__file__), 'resources',
                             'Generated_image.png'), image_path)
            shutil.copy(
                os.path.join(os.path.dirname(__file__), 'resources',
                             'Generated_description.md'), desc_path)
            return image_path, desc_path
        except Exception as err:
            print_error(f'Error copying image and description files - {err}')
            return '', ''
Example #11
    def create_new_directory(self) -> bool:
        """Creates a new directory for the integration/script/pack.

        Returns:
            bool. True if directory was successfully created, False otherwise.
        """
        try:
            os.mkdir(self.full_output_path)

        except FileExistsError:
            to_delete = str(input(f"The directory {self.full_output_path} "
                                  f"already exists.\nDo you want to overwrite it? Y/N ")).lower()
            while to_delete not in ('y', 'n'):
                to_delete = str(input("Your response was invalid.\nDo you want to overwrite it? Y/N ")).lower()

            if to_delete == 'y':
                shutil.rmtree(path=self.full_output_path, ignore_errors=True)
                os.mkdir(self.full_output_path)

            else:
                print_error(f"Pack not created in {self.full_output_path}")
                return False

        return True
Example #12
    def is_changed_context_path(self):
        # type: () -> bool
        """Check if a context path as been changed.

        Returns:
            bool. Whether a context path has been changed.
        """
        current_command_to_context_paths = self._get_command_to_context_paths(self.current_file)
        old_command_to_context_paths = self._get_command_to_context_paths(self.old_file)

        for old_command, old_context_paths in old_command_to_context_paths.items():
            if old_command in current_command_to_context_paths:
                if not self._is_sub_set(
                        current_command_to_context_paths[old_command],
                        old_context_paths):
                    print_error(
                        Errors.breaking_backwards_command(
                            self.file_path, old_command))
                    self.is_valid = False
                    return True
        return False
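
For intuition, the mappings compared above have roughly this shape (hypothetical values); a removed path means the old outputs are no longer a subset of the new ones, i.e. a breaking change:

# Hypothetical command-to-context-paths mappings.
old_paths = {'ip': ['IP.Address', 'IP.ASN']}
new_paths = {'ip': ['IP.Address']}  # 'IP.ASN' was removed: breaking backwards compatibility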
Example #13
def get_pr_comments_url(pr_number: str) -> str:
    """
    Get the comments URL for a PR. If the PR contains a comment about an instance test (for contrib PRs),
    that comment's URL is returned instead.
    Args:
        pr_number: The pull request number

    Returns:
        The comments URL for the PR.
    """
    pr_url = f'https://api.github.com/repos/demisto/content/pulls/{pr_number}'
    response = requests.get(pr_url)
    response.raise_for_status()
    pr = response.json()
    if not pr:
        print_error('Could not find the pull request to reply on.')
        sys.exit(1)
    page = 1
    comments_url = pr['comments_url']
    while True:
        response = requests.get(comments_url, params={'page': str(page)})
        response.raise_for_status()
        comments = response.json()
        if not comments:
            break

        link_comments = [
            comment for comment in comments
            if 'Instance is ready.' in comment.get('body', '')
        ]
        if link_comments:
            comments_url = link_comments[0]['url']
            break
        page += 1

    return comments_url
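
A usage sketch for replying on the PR; the PR number and token are placeholders, and note that when the returned URL points at an existing comment, GitHub expects PATCH rather than POST:

import requests

comments_url = get_pr_comments_url('12345')  # hypothetical PR number
response = requests.post(
    comments_url,
    json={'body': 'Build finished successfully.'},
    headers={'Authorization': 'Bearer <GITHUB_TOKEN>'},  # placeholder token
)
response.raise_for_status()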
Example #14
    def run_vulture(self, py_num) -> int:
        """Run vulture

        Args:
            py_num: The python version in use

        Returns:
            int. 0 on successful vulture run, 1 otherwise.
        """
        lint_files = self._get_lint_files()
        python_exe = 'python2' if py_num < 3 else 'python3'
        cmd_args = [
            python_exe, '-m', 'vulture', lint_files, '--min-confidence',
            os.environ.get('VULTURE_MIN_CONFIDENCE_LEVEL', '100')
        ]
        vulture_whitelist_path = os.path.join(self.project_dir,
                                              '.vulture_whitelist.py')
        if os.path.isfile(vulture_whitelist_path):
            cmd_args.insert(4, vulture_whitelist_path)
        output = run_command(' '.join(cmd_args), cwd=self.project_dir)
        with self.lock:
            print("========= Running vulture on: {} ===============".format(
                lint_files))
            print_v('Using: {} to run vulture'.format(python_exe))
            if len(output) == 0:
                print_color("vulture completed for: {}\n".format(lint_files),
                            LOG_COLORS.GREEN)
                return 0
            else:
                print_error(output)
                return 1
Example #15
    def is_duplicate_description(self):
        """Check if the integration has a non-duplicate description ."""
        is_description_in_yml = False
        is_description_in_package = False
        package_path = None
        md_file_path = None
        if not re.match(INTEGRATION_REGEX, self.file_path, re.IGNORECASE) \
                and not re.match(BETA_INTEGRATION_REGEX, self.file_path, re.IGNORECASE):
            package_path = os.path.dirname(self.file_path)
            try:
                md_file_path = glob.glob(
                    os.path.join(os.path.dirname(self.file_path),
                                 '*_description.md'))[0]
            except IndexError:
                print_warning(
                    "No detailed description file was found in the package {}."
                    " Consider adding one.".format(package_path))
            if md_file_path:
                is_description_in_package = True

        data_dictionary = get_yaml(self.file_path)

        if not data_dictionary:
            return is_description_in_package

        if data_dictionary.get('detaileddescription'):
            is_description_in_yml = True

        if is_description_in_package and is_description_in_yml:
            self._is_valid = False
            print_error(
                "A description was found both in the package and in the yml, "
                "please update the package {}.".format(package_path))
            return False

        return True
Example #16
    def zip_pack(self):
        """ Zips pack folder.

        Returns:
            bool: whether the operation succeeded.
            str: full path to created pack zip.
        """
        zip_pack_path = f"{self._pack_path}.zip"
        task_status = False

        try:
            with ZipFile(zip_pack_path, 'w', ZIP_DEFLATED) as pack_zip:
                for root, dirs, files in os.walk(self._pack_path, topdown=True):
                    for f in files:
                        full_file_path = os.path.join(root, f)
                        relative_file_path = os.path.relpath(full_file_path, self._pack_path)
                        pack_zip.write(filename=full_file_path, arcname=relative_file_path)

            task_status = True
            print_color(f"Finished zipping {self._pack_name} pack.", LOG_COLORS.GREEN)
        except Exception as e:
            print_error(f"Failed in zipping {self._pack_name} folder.\n Additional info: {e}")
        finally:
            return task_status, zip_pack_path
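
The relpath/arcname pairing above is what keeps archive entries rooted at the pack folder rather than its absolute path; a standalone sketch of the same pattern ('MyPack' is a hypothetical folder):

import os
from zipfile import ZipFile, ZIP_DEFLATED

with ZipFile('MyPack.zip', 'w', ZIP_DEFLATED) as pack_zip:
    for root, _dirs, files in os.walk('MyPack', topdown=True):
        for f in files:
            full_file_path = os.path.join(root, f)
            # arcname strips the leading folder prefix inside the archive
            pack_zip.write(full_file_path, arcname=os.path.relpath(full_file_path, 'MyPack'))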
Example #17
    def _run_query(self, playground_id: str):
        """Runs a query on Demisto instance and prints the output.

        Args:
            playground_id: The investigation ID of the playground.

        Returns:
            list. A list of the log IDs if debug mode is on, otherwise an empty list.
        """
        update_entry = {
            'investigationId': playground_id,
            'data': self.query
        }

        answer = self.client.investigation_add_entries_sync(update_entry=update_entry)
        if not answer:
            raise DemistoRunTimeError('Command did not run, make sure it was written correctly.')

        log_ids = []

        for entry in answer:
            # answer should have entries with `contents` - the readable output of the command
            if entry.parent_content:
                print_color('### Command:', LOG_COLORS.YELLOW)
            if entry.contents:
                print_color('## Readable Output', LOG_COLORS.YELLOW)
                if entry.type == self.ERROR_ENTRY_TYPE:
                    print_error(entry.contents + '\n')
                else:
                    print(entry.contents + '\n')

            # and entries with `file_id`s defined, that is the fileID of the debug log file
            if entry.type == self.DEBUG_FILE_ENTRY_TYPE:
                log_ids.append(entry.id)

        return log_ids
Example #18
    def integration_uploader(self, path: str):
        """Uploads an integration (file or package directory) to Cortex XSOAR, tracking success and failure."""
        is_dir = False
        file_name = os.path.basename(path)
        docker45_path = ''

        try:
            if os.path.isdir(path):  # Create a temporary unified yml file
                try:
                    is_dir = True
                    unifier = Unifier(input=path, output=path)
                    unified_paths = unifier.merge_script_package_to_yml()
                    path = unified_paths[0]
                    docker45_path = unified_paths[1] if len(
                        unified_paths) > 1 else ''
                    file_name = os.path.basename(path)
                except IndexError:
                    print_error(
                        f'Error uploading integration from pack. '
                        f'Check that the given integration path contains a valid integration: {path}.'
                    )

                    self.status_code = 1
                    self.failed_uploaded_files.append(
                        (file_name, 'Integration'))
                    return

                except Exception as err:
                    print_error('Upload integration failed\n')
                    print_error(str(err))
                    self.failed_uploaded_files.append(
                        (file_name, 'Integration'))
                    self.status_code = 1
                    return

            # Upload the file to Cortex XSOAR
            result = self.client.integration_upload(file=path)

            # Print results
            print_v(f'Result:\n{result.to_str()}', self.log_verbose)
            print_color(
                f'Uploaded integration \'{file_name}\' successfully',
                LOG_COLORS.GREEN)
            self.successfully_uploaded_files.append((file_name, 'Integration'))

        except Exception as err:
            self._parse_error_response(err, 'integration', file_name)
            self.failed_uploaded_files.append((file_name, 'Integration'))
            self.status_code = 1

        finally:
            # Remove the temporary file
            if is_dir:
                self._remove_temp_file(path)
                if docker45_path:
                    self._remove_temp_file(docker45_path)
Example #19
    def __init__(
        self,
        input: str,
        output: Optional[str] = None,
        force: bool = False,
        marketplace: Optional[str] = None,
    ):
        directory_name = ''
        # Changing relative path to current abspath fixed problem with default output file name.
        input = os.path.abspath(input)
        if not os.path.isdir(input):
            print_error(UNSUPPORTED_INPUT_ERR_MSG)
            sys.exit(1)
        for optional_dir_name in DIR_TO_PREFIX:
            if optional_dir_name in input:
                directory_name = optional_dir_name

        if not directory_name:
            print_error(UNSUPPORTED_INPUT_ERR_MSG)
            sys.exit(1)

        self.package_path = input
        self.package_path = self.package_path.rstrip(os.sep)

        self.use_force = force
        self.dest_path = output
        self.dir_name = ''
        self.marketplace = marketplace
        if marketplace:
            MARKETPLACE_TAG_PARSER.marketplace = marketplace

        yml_paths, self.yml_path = get_yml_paths_in_dir(
            self.package_path, Errors.no_yml_file(self.package_path))
        for path in yml_paths:
            # The plugin creates a unified YML file for the package.
            # When this script runs locally and a unified YML file exists in the package,
            # we need to ignore it. We also don't take the unified file by default,
            # because some packages were not created by the plugin.
            if 'unified' not in path and os.path.basename(
                    os.path.dirname(path)) not in [SCRIPTS_DIR, INTEGRATIONS_DIR]:
                self.yml_path = path
                break

        self.yaml = YAML_Handler(
            width=50000
        )  # make sure long lines will not break (relevant for code section)

        if self.yml_path:
            with io.open(self.yml_path, 'r', encoding='utf8') as yml_file:
                self.yml_data = self.yaml.load(yml_file)
        else:
            self.yml_data = {}
            print_error(f'No yml found in path: {self.package_path}')
Example #20
def validate_inputs_examples(input_path):
    if not input_path:
        print_error(
            'To use the generate_integration_context version of this command please include an `input` argument')
        return 1

    if input_path and not os.path.isfile(input_path):
        print_error(F'Input file {input_path} was not found.')
        return 1

    if not input_path.lower().endswith('.yml'):
        print_error(F'Input {input_path} is not a valid yml file.')
        return 1

    file_type = find_type(input_path, ignore_sub_categories=True)
    if file_type is not FileType.INTEGRATION:
        print_error('File is not an Integration.')
        return 1

    return 0
Example #21
    def run_playbook(self):
        # type: () -> int
        """Run a playbook in Demisto.

        Returns:
            int. 0 in success, 1 in a failure.
        """
        # create an incident with the given playbook
        try:
            incident_id = self.create_incident_with_playbook(
                incident_name=f'inc_{self.playbook_id}',
                playbook_id=self.playbook_id)
        except ApiException as a:
            print_error(str(a))
            return 1

        work_plan_link = self.base_link_to_workplan + str(incident_id)
        if self.should_wait:
            print_color(
                f'Waiting for the playbook to finish running...\n'
                f'To see the playbook run in real time, go to: {work_plan_link}',
                LOG_COLORS.GREEN)

            elapsed_time = 0
            start_time = time.time()

            while elapsed_time < self.timeout:
                playbook_results = self.get_playbook_results_dict(incident_id)
                if playbook_results["state"] == "inprogress":
                    time.sleep(10)
                    elapsed_time = int(time.time() - start_time)
                else:  # the playbook has finished running
                    break

            # Ended the loop because of timeout
            if elapsed_time >= self.timeout:
                print_error(
                    f'The command timed out while the playbook was still in progress.\n'
                    f'To keep tracking the playbook, go to: {work_plan_link}'
                )
            else:
                if playbook_results["state"] == "failed":
                    print_error(
                        "The playbook finished running with status: FAILED")
                else:
                    print_color(
                        "The playbook has completed its run successfully",
                        LOG_COLORS.GREEN)

        # The command does not wait for the playbook to finish running
        else:
            print(f'To see the results, go to: {work_plan_link}')

        return 0
Example #22
def upload_index_to_storage(index_folder_path, extract_destination_path, index_blob, build_number, private_packs,
                            current_commit_hash, index_generation):
    """Upload updated index zip to cloud storage.

    Args:
        index_folder_path (str): index folder full path.
        extract_destination_path (str): extract folder full path.
        index_blob (Blob): google cloud storage object that represents index.zip blob.
        build_number (str): circleCI build number, used as an index revision.
        private_packs (list): List of private packs and their price.
        current_commit_hash (str): last commit hash of head.
        index_generation (str): downloaded index generation.

    """
    with open(os.path.join(index_folder_path, f"{GCPConfig.INDEX_NAME}.json"), "w+") as index_file:
        index = {
            'revision': build_number,
            'modified': datetime.utcnow().strftime(Metadata.DATE_FORMAT),
            'packs': private_packs,
            'commit': current_commit_hash
        }
        json.dump(index, index_file, indent=4)

    index_zip_name = os.path.basename(index_folder_path)
    index_zip_path = shutil.make_archive(base_name=index_folder_path, format="zip",
                                         root_dir=extract_destination_path, base_dir=index_zip_name)
    try:
        index_blob.reload()
        current_index_generation = index_blob.generation
        index_blob.cache_control = "no-cache,max-age=0"  # disabling caching for index blob

        if current_index_generation == index_generation:
            index_blob.upload_from_filename(index_zip_path)
            print_color(f"Finished uploading {GCPConfig.INDEX_NAME}.zip to storage.", LOG_COLORS.GREEN)
        else:
            print_error(f"Failed in uploading {GCPConfig.INDEX_NAME}, mismatch in index file generation")
            print_error(f"Downloaded index generation: {index_generation}")
            print_error(f"Current index generation: {current_index_generation}")
            sys.exit(0)
    except Exception as e:
        print_error(f"Failed in uploading {GCPConfig.INDEX_NAME}, additional info: {e}\n")
        sys.exit(1)
    finally:
        shutil.rmtree(index_folder_path)
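
The generation comparison above is a hand-rolled optimistic lock; Google Cloud Storage can also enforce it server-side by passing if_generation_match to the upload itself (a sketch, not this function's actual behavior):

from google.api_core.exceptions import PreconditionFailed

try:
    # The server rejects the upload if the blob changed since `index_generation`.
    index_blob.upload_from_filename(index_zip_path,
                                    if_generation_match=index_generation)
except PreconditionFailed:
    print_error("index.zip changed since it was downloaded; aborting upload.")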
Example #23
def print_packs_summary(packs_list):
    """Prints summary of packs uploaded to gcs.

    Args:
        packs_list (list): list of initialized packs.

    """
    successful_packs = [pack for pack in packs_list if pack.status == PackStatus.SUCCESS.name]
    skipped_packs = [pack for pack in packs_list if
                     pack.status == PackStatus.PACK_ALREADY_EXISTS.name
                     or pack.status == PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
                     or pack.status == PackStatus.FAILED_DETECTING_MODIFIED_FILES.name]
    failed_packs = [pack for pack in packs_list if pack not in successful_packs and pack not in skipped_packs]

    print("\n")
    print("------------------------------------------ Packs Upload Summary ------------------------------------------")
    print(f"Total number of packs: {len(packs_list)}")
    print("----------------------------------------------------------------------------------------------------------")

    if successful_packs:
        print_color(f"Number of successful uploaded packs: {len(successful_packs)}", LOG_COLORS.GREEN)
        print_color("Uploaded packs:\n", LOG_COLORS.GREEN)
        successful_packs_table = _build_summary_table(successful_packs)
        print_color(successful_packs_table, LOG_COLORS.GREEN)
    if skipped_packs:
        print_warning(f"Number of skipped packs: {len(skipped_packs)}")
        print_warning("Skipped packs:\n")
        skipped_packs_table = _build_summary_table(skipped_packs)
        print_warning(skipped_packs_table)
    if failed_packs:
        print_error(f"Number of failed packs: {len(failed_packs)}")
        print_error("Failed packs:\n")
        failed_packs_table = _build_summary_table(failed_packs, include_pack_status=True)
        print_error(failed_packs_table)
        sys.exit(1)

    # for external pull requests - when there are no failed packs, add the build summary to the pull request
    branch_name = os.environ['CIRCLE_BRANCH']
    if branch_name.startswith('pull/'):
        successful_packs_table = build_summary_table_md(successful_packs)

        build_num = os.environ['CIRCLE_BUILD_NUM']

        bucket_path = f'https://console.cloud.google.com/storage/browser/' \
                      f'marketplace-ci-build/content/builds/{branch_name}/{build_num}'

        pr_comment = f'Number of successfully uploaded packs: {len(successful_packs)}\n' \
            f'Uploaded packs:\n{successful_packs_table}\n\n' \
            f'Browse to the build bucket with this address:\n{bucket_path}'

        add_pr_comment(pr_comment)
Example #24
def update_pack_releasenotes(**kwargs):
    _pack = kwargs.get('pack')
    update_type = kwargs.get('update_type')
    pre_release = kwargs.get('pre_release')
    is_all = kwargs.get('all')
    modified, added, old, _packs = FilesValidator(use_git=True).get_modified_and_added_files()
    packs_existing_rn = set()
    for pf in added:
        if 'ReleaseNotes' in pf:
            pack_with_existing_rn = get_pack_name(pf)
            packs_existing_rn.add(pack_with_existing_rn)
    if packs_existing_rn:
        existing_rns = ', '.join(packs_existing_rn)
        print_warning(f"Found existing release notes for the following packs: {existing_rns}")
    if len(_packs) > 1 and not is_all and not _pack:
        pack_list = ', '.join(_packs)
        print_error(f"Detected changes in the following packs: {pack_list}\n"
                    f"To update release notes in a specific pack, please use the -p parameter "
                    f"along with the pack name.")
        sys.exit(0)
    if not modified and not added:
        print_warning('No changes were detected.')
        sys.exit(0)
    if is_all and not _pack:
        packs = list(_packs - packs_existing_rn)
        packs_list = ', '.join(packs)
        print_warning(f"Adding release notes to the following packs: {packs_list}")
        for pack in packs:
            update_pack_rn = UpdateRN(pack=pack, update_type=update_type, pack_files=modified,
                                      pre_release=pre_release, added_files=added)
            update_pack_rn.execute_update()
    elif is_all and _pack:
        print_error("Please remove the --all flag when specifying only one pack.")
        sys.exit(0)
    else:
        if _pack:
            if _pack in packs_existing_rn and update_type is not None:
                print_error(f"New release notes file already found for {_pack}. "
                            f"Please update manually or run `demisto-sdk update-release-notes "
                            f"-p {_pack}` without specifying the update_type.")
            else:
                update_pack_rn = UpdateRN(pack=_pack, update_type=update_type, pack_files=modified,
                                          pre_release=pre_release, added_files=added)
                update_pack_rn.execute_update()
Example #25
    def is_valid_beta_description(self):
        """Check if beta disclaimer exists in detailed description"""
        data_dictionary = get_yaml(self.file_path)
        description_in_yml = data_dictionary.get('detaileddescription',
                                                 '') if data_dictionary else ''

        if not re.match(BETA_INTEGRATION_REGEX, self.file_path, re.IGNORECASE):
            package_path = os.path.dirname(self.file_path)
            try:
                md_file_path = glob.glob(
                    os.path.join(os.path.dirname(self.file_path),
                                 '*_description.md'))[0]
            except IndexError:
                self._is_valid = False
                print_error(
                    "No detailed description file was found in the package {}. Please add one, "
                    "and make sure it includes the beta disclaimer note. "
                    "It should contain the string in the constant "
                    "\"BETA_INTEGRATION_DISCLAIMER\".".format(package_path))
                return False

            with open(md_file_path) as description_file:
                description = description_file.read()
            if BETA_INTEGRATION_DISCLAIMER not in description:
                self._is_valid = False
                print_error(
                    "The detailed description in beta integration package {} "
                    "does not contain the beta disclaimer note. "
                    "It should contain the string in the constant "
                    "\"BETA_INTEGRATION_DISCLAIMER\".".format(package_path))
                return False
            else:
                return True
        elif BETA_INTEGRATION_DISCLAIMER not in description_in_yml:
            self._is_valid = False
            print_error("Detailed description field in beta integration {} "
                        "dose not contain the beta disclaimer note."
                        "It should contain the string in constant"
                        " \"BETA_INTEGRATION_DISCLAIMER\".".format(
                            self.file_path))
            return False
        return True
Example #26
    def load_image_from_yml(self):
        """Extracts the base64-encoded image data from the integration YML, or returns None on failure."""
        data_dictionary = get_yaml(self.file_path)

        if not data_dictionary:
            print_error("{} isn't an image file or unified integration file.".format(self.file_path))
            self._is_valid = False
            return None  # early return: .get on a missing dictionary would raise

        image = data_dictionary.get('image', '')

        if not image:
            print_error("{} is a yml file but has no image field.".format(self.file_path))
            self._is_valid = False
            return None

        image_data = image.split('base64,')
        if len(image_data) == 2:
            return image_data[1]

        print_error("{}'s image field isn't in base64 encoding.".format(self.file_path))
        self._is_valid = False
        return None
Example #27
    def run(self):
        """Runs an integration command on Demisto and prints the result.
        """
        playground_id = self._get_playground_id()

        try:
            log_ids = self._run_query(playground_id)
        except DemistoRunTimeError as err:
            log_ids = None
            print_error(str(err))

        if self.debug:
            if not log_ids:
                print_warning('Entry with debug log not found')
            else:
                self._export_debug_log(log_ids)

        if self.json2outputs:
            if not self.prefix:
                print_error(
                    "A prefix for the outputs is needed for this command. Please provide one."
                )
                return 1
            else:
                raw_output_json = self._return_context_dict_from_log(log_ids)
                if raw_output_json:
                    with tempfile.NamedTemporaryFile(mode='w+') as f:
                        if isinstance(raw_output_json, dict):
                            f.write(json.dumps(raw_output_json))
                        if isinstance(raw_output_json, list):
                            f.write(json.dumps(raw_output_json[0]))
                        f.seek(0)
                        file_path = f.name
                        command = self.query.split(' ')[0]
                        json_to_outputs(command,
                                        json=file_path,
                                        prefix=self.prefix)
                else:
                    print_error(
                        "Could not extract raw output as JSON from command")
                    return 1
Example #28
    def is_bump_required(self):
        """
        Checks whether the currentVersion in the pack metadata has been changed.
        Additionally, verifies that there is no conflict with the currentVersion on the
        master branch.
        """
        try:
            if self.only_docs_changed():
                return False
            new_metadata = self.get_pack_metadata()
            new_version = new_metadata.get('currentVersion', '99.99.99')
            master_metadata = run_command(
                f"git show origin/master:{str(PurePosixPath(PureWindowsPath(self.metadata_path)))}"
            )
            if len(master_metadata) > 0:
                master_metadata_json = json.loads(master_metadata)
                master_current_version = master_metadata_json.get(
                    'currentVersion', '0.0.0')
            else:
                print_error(
                    f"Unable to locate the metadata on the master branch.\nThe reason is: {master_metadata}"
                )
                sys.exit(0)
            if LooseVersion(master_current_version) == LooseVersion(new_version):
                return True
            elif LooseVersion(master_current_version) > LooseVersion(new_version):
                print_error(
                    "The master branch is currently ahead of your pack's version. "
                    "Please pull from master and re-run the command.")
                sys.exit(0)
            elif LooseVersion(master_current_version) < LooseVersion(new_version):
                return False
        except RuntimeError:
            print_error(
                f"Unable to locate a pack with the name {self.pack} in the git diff.\n"
                f"Please verify the pack exists and the pack name is correct.")
            sys.exit(0)
        return True
Example #29
    def collect_content_items(self):
        """ Iterates over content items folders inside pack and collects content items data.

        Returns:
            dict: Parsed content items
            .
        """
        task_status = False
        content_items_result = {}

        try:
            # the format is defined in issue #19786, may change in the future
            content_item_name_mapping = {
                PackFolders.SCRIPTS.value: "automation",
                PackFolders.PLAYBOOKS.value: "playbook",
                PackFolders.INTEGRATIONS.value: "integration",
                PackFolders.INCIDENT_FIELDS.value: "incidentfield",
                PackFolders.INCIDENT_TYPES.value: "incidenttype",
                PackFolders.DASHBOARDS.value: "dashboard",
                PackFolders.INDICATOR_FIELDS.value: "indicatorfield",
                PackFolders.REPORTS.value: "reports",
                PackFolders.MISC.value: "reputation"
            }

            for root, pack_dirs, pack_files_names in os.walk(self._pack_path, topdown=False):
                pack_dirs[:] = [d for d in pack_dirs if d != PackFolders.TEST_PLAYBOOKS.value]  # exact match, not substring
                current_directory = root.split(os.path.sep)[-1]

                if current_directory not in PackFolders.pack_displayed_items():
                    continue

                folder_collected_items = []
                for pack_file_name in pack_files_names:
                    if not pack_file_name.endswith(('.json', '.yml')):
                        continue

                    pack_file_path = os.path.join(root, pack_file_name)

                    # reputations in the old format aren't supported from server version 6.0.0
                    if current_directory == PackFolders.MISC.value and not fnmatch.fnmatch(pack_file_name,
                                                                                           'reputation-*.json'):
                        os.remove(pack_file_path)
                        print(f"Deleted pack {pack_file_name} reputation file for {self._pack_name} pack")
                        continue

                    with open(pack_file_path, 'r') as pack_file:
                        if current_directory in PackFolders.yml_supported_folders():
                            content_item = yaml.safe_load(pack_file)
                        elif current_directory in PackFolders.json_supported_folders():
                            content_item = json.load(pack_file)

                    # check if the content item has a 'toversion' field
                    to_version = content_item.get('toversion') or content_item.get('toVersion')

                    if to_version and LooseVersion(to_version) < LooseVersion(Metadata.SERVER_DEFAULT_MIN_VERSION):
                        os.remove(pack_file_path)
                        print(f"{self._pack_name} pack content item {pack_file_name} has to version: {to_version}. "
                              f"{pack_file_name} file was deleted.")
                        continue

                    print(f"Iterating over {pack_file_path} file and collecting items of {self._pack_name} pack")
                    # update the min server version based on the current content item
                    self._sever_min_version = get_higher_server_version(self._sever_min_version, content_item,
                                                                        self._pack_name)

                    if current_directory == PackFolders.SCRIPTS.value:
                        folder_collected_items.append({
                            'name': content_item.get('name', ""),
                            'description': content_item.get('comment', ""),
                            'tags': content_item.get('tags', [])
                        })
                    elif current_directory == PackFolders.PLAYBOOKS.value:
                        folder_collected_items.append({
                            'name': content_item.get('name', ""),
                            'description': content_item.get('description', "")
                        })
                    elif current_directory == PackFolders.INTEGRATIONS.value:
                        integration_commands = content_item.get('script', {}).get('commands', [])

                        folder_collected_items.append({
                            'name': content_item.get('display', ""),
                            'description': content_item.get('description', ""),
                            'category': content_item.get('category', ""),
                            'commands': [
                                {'name': c.get('name', ""), 'description': c.get('description', "")}
                                for c in integration_commands]
                        })
                    elif current_directory == PackFolders.INCIDENT_FIELDS.value:
                        folder_collected_items.append({
                            'name': content_item.get('name', ""),
                            'type': content_item.get('type', ""),
                            'description': content_item.get('description', "")
                        })
                    elif current_directory == PackFolders.INCIDENT_TYPES.value:
                        folder_collected_items.append({
                            'name': content_item.get('name', ""),
                            'playbook': content_item.get('playbookId', ""),
                            'closureScript': content_item.get('closureScript', ""),
                            'hours': int(content_item.get('hours', 0)),
                            'days': int(content_item.get('days', 0)),
                            'weeks': int(content_item.get('weeks', 0))
                        })
                    elif current_directory == PackFolders.DASHBOARDS.value:
                        folder_collected_items.append({
                            'name': content_item.get('name', "")
                        })
                    elif current_directory == PackFolders.INDICATOR_FIELDS.value:
                        folder_collected_items.append({
                            'name': content_item.get('name', ""),
                            'type': content_item.get('type', ""),
                            'description': content_item.get('description', "")
                        })
                    elif current_directory == PackFolders.REPORTS.value:  # todo finalize this part with server side
                        dash_board_section = content_item.get('dashboard', {})

                        folder_collected_items.append({
                            'name': content_item.get('name', ""),
                            'fromDate': dash_board_section.get('fromDate', ""),
                            'toDate': dash_board_section.get('toDate', ""),
                            'period': dash_board_section.get('period', {}),
                            'fromDateLicense': dash_board_section.get('fromDateLicense', "")
                        })
                    elif current_directory == PackFolders.MISC.value:
                        folder_collected_items.append({
                            'details': content_item.get('details', ""),
                            'reputationScriptName': content_item.get('reputationScriptName', ""),
                            'enhancementScriptNames': content_item.get('enhancementScriptNames', [])
                        })

                content_item_key = content_item_name_mapping[current_directory]
                content_items_result[content_item_key] = folder_collected_items

            print_color(f"Finished collecting content items for {self._pack_name} pack", LOG_COLORS.GREEN)
            task_status = True
        except Exception as e:
            print_error(f"Failed collecting content items in {self._pack_name} pack. Additional info:\n {e}")
        finally:
            return task_status, content_items_result
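
The resulting structure maps each display key from content_item_name_mapping to that folder's collected summaries, roughly like this (the values are illustrative):

# Illustrative output shape; the values are made up.
content_items_result = {
    'automation': [{'name': 'ParseCSV', 'description': '...', 'tags': []}],
    'playbook': [{'name': 'Phishing Investigation', 'description': '...'}],
    'integration': [{'name': 'VirusTotal', 'description': '...',
                     'category': 'Data Enrichment', 'commands': []}],
}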