Example 1
    def insert_script_to_yml(self, script_type, yml_unified, yml_data):
        script_path = self.get_code_file(script_type)
        with io.open(script_path, mode='r', encoding='utf-8') as script_file:
            script_code = script_file.read()

        # Check if the script imports an API module. If it does,
        # the API module code will be pasted in place of the import.
        module_import, module_name = self.check_api_module_imports(script_code)
        if module_import:
            script_code = self.insert_module_code(script_code, module_import,
                                                  module_name)

        if script_type == '.py':
            clean_code = self.clean_python_code(script_code)
        elif script_type == '.ps1':
            clean_code = self.clean_pwsh_code(script_code)

        if self.is_script_package:
            if yml_data.get('script', '') not in ('', '-'):
                print_warning(
                    f'Script section is not empty in package {self.package_path}. '
                    f'It should be blank or a dash (-).')

            yml_unified['script'] = FoldedScalarString(clean_code)

        else:
            if yml_data['script'].get('script', '') not in ('', '-'):
                print_warning(
                    f'Script section is not empty in package {self.package_path}. '
                    f'It should be blank or a dash (-).')

            yml_unified['script']['script'] = FoldedScalarString(clean_code)

        return yml_unified, script_path
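For context, a minimal sketch (assuming ruamel.yaml, which provides FoldedScalarString; not part of the original code) of what the wrapping above achieves: when the unified yml is later dumped, the script body is emitted as a YAML folded block scalar rather than a quoted string.

import sys
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import FoldedScalarString

yaml = YAML()
yaml.dump({'script': FoldedScalarString('demisto.results("ok")')}, sys.stdout)
# script: >-
#   demisto.results("ok")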
Example 2
def has_duplicate(id_set, id_to_check, object_type=None):
    duplicates = [duplicate for duplicate in id_set if duplicate.get(id_to_check)]

    if len(duplicates) < 2:
        return False

    for dup1, dup2 in itertools.combinations(duplicates, 2):
        dict1 = list(dup1.values())[0]
        dict2 = list(dup2.values())[0]
        dict1_from_version = LooseVersion(dict1.get('fromversion', '0.0.0'))
        dict2_from_version = LooseVersion(dict2.get('fromversion', '0.0.0'))
        dict1_to_version = LooseVersion(dict1.get('toversion', '99.99.99'))
        dict2_to_version = LooseVersion(dict2.get('toversion', '99.99.99'))

        if dict1['name'] != dict2['name']:
            print_warning('The following {} have the same ID ({}) but different names: '
                          '"{}", "{}".'.format(object_type, id_to_check, dict1['name'], dict2['name']))

        # A: 3.0.0 - 3.6.0
        # B: 3.5.0 - 4.5.0
        # C: 3.5.2 - 3.5.4
        # D: 4.5.0 - 99.99.99
        if any([
                dict1_from_version <= dict2_from_version < dict1_to_version,  # will catch (B, C), (A, B), (A, C)
                dict1_from_version < dict2_to_version <= dict1_to_version,  # will catch (B, C), (A, C)
                dict2_from_version <= dict1_from_version < dict2_to_version,  # will catch (C, B), (B, A), (C, A)
                dict2_from_version < dict1_to_version <= dict2_to_version,  # will catch (C, B), (C, A)
        ]):
            return True

    return False
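The four comparisons above implement a half-open interval overlap test on the fromversion/toversion ranges. A standalone sketch (hypothetical helper, not part of the original code) that reproduces the A-D cases from the comments:

from distutils.version import LooseVersion

def ranges_overlap(a_from, a_to, b_from, b_to):
    a_from, a_to = LooseVersion(a_from), LooseVersion(a_to)
    b_from, b_to = LooseVersion(b_from), LooseVersion(b_to)
    return any([
        a_from <= b_from < a_to,
        a_from < b_to <= a_to,
        b_from <= a_from < b_to,
        b_from < a_to <= b_to,
    ])

# A (3.0.0 - 3.6.0) overlaps B (3.5.0 - 4.5.0); B does not overlap D (4.5.0 - 99.99.99)
assert ranges_overlap('3.0.0', '3.6.0', '3.5.0', '4.5.0')
assert not ranges_overlap('3.5.0', '4.5.0', '4.5.0', '99.99.99')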
Example 3
def main():
    option = option_handler()
    storage_bucket_name = option.bucket_name
    zip_path = option.zip_path
    artifacts_path = option.artifacts_path
    service_account = option.service_account
    circle_build = option.circle_build
    branch_name = option.branch_name
    gcp_path = option.gcp_path
    remove_test_playbooks = option.remove_test_playbooks

    # initialize the Google Cloud Storage client
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if not circle_build or not branch_name:
        # Ignore build properties
        circle_build = ''
        branch_name = ''

    if not gcp_path:
        gcp_path = BUILD_GCP_PATH

    zipped_packs = []
    success = True
    try:
        zipped_packs = download_packs_from_gcp(storage_bucket, gcp_path,
                                               zip_path, circle_build,
                                               branch_name)
    except Exception as e:
        print_error(f'Failed downloading packs: {e}')
        success = False

    if remove_test_playbooks:
        try:
            remove_test_playbooks_if_exist(zip_path, zipped_packs)
        except Exception as e:
            print_error(f'Failed removing test playbooks from packs: {e}')
            success = False

    if zipped_packs and success:
        try:
            zip_packs(zipped_packs, zip_path)
        except Exception as e:
            print_error(f'Failed zipping packs: {e}')
            success = False

        if success:
            print_success('Successfully zipped packs.')
            if artifacts_path:
                # Save in the artifacts
                shutil.copy(os.path.join(zip_path, ARTIFACT_NAME),
                            os.path.join(artifacts_path, ARTIFACT_NAME))
        else:
            print_error('Failed zipping packs.')
            sys.exit(1)
    else:
        print_warning('Did not find any packs to zip.')

    cleanup(zip_path)
Example 4
def get_command_examples(commands_examples_input, specific_commands):
    """
    get command examples from command file

    @param commands_examples_input: commands examples file or a comma-separated list of commands
    @param specific_commands: commands specified by the user

    @return: a list of command examples
    """

    if not commands_examples_input:
        return []

    if os.path.isfile(commands_examples_input):
        with open(commands_examples_input, 'r') as examples_file:
            command_examples = examples_file.read().splitlines()
    else:
        print_warning(
            'Failed to open the commands file, using the input as a comma-separated list of commands'
        )
        command_examples = commands_examples_input.split(',')

    # Filter from the examples only the commands specified by the user
    if specific_commands:
        command_examples = [
            command_ex for command_ex in command_examples
            if command_ex.split(' ')[0].strip('!') in specific_commands
        ]

    command_examples = list(
        filter(None, map(command_example_filter, command_examples))) or []

    print('found the following commands:\n{}'.format(
        '\n'.join(command_examples)))
    return command_examples
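A small illustration (invented example strings) of the filtering step above: only examples whose leading token, stripped of the '!' prefix, matches a user-specified command are kept.

examples = ['!whois query=example.com', '!ip ip=1.2.3.4 verbose=true']
specific_commands = ['whois']
kept = [ex for ex in examples
        if ex.split(' ')[0].strip('!') in specific_commands]
assert kept == ['!whois query=example.com']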
Example 5
def get_base_branch(pr_num):
    """Fetches the base branch name of PR num {pr_num}

    Args:
        pr_num (string): The string representation of the pr number

    Returns:
        string. The name of the base branch of the pr if succeeds, '' otherwise.
    """

    # Disable insecure warnings
    requests.packages.urllib3.disable_warnings()  # pylint: disable=no-member

    url = 'https://api.github.com/repos/demisto/content/pulls/{}'.format(
        pr_num)

    try:
        res = requests.get(url, verify=False)
        res.raise_for_status()
        pr = res.json()
        if pr and isinstance(pr, list) and len(pr) == 1:
            # GitHub may return a list of PRs; otherwise pr is already a dict
            pr = pr[0]
        return pr.get('base', {}).get('ref', '')

    except (requests.exceptions.HTTPError, ValueError) as e:
        # If fetching the PR failed (HTTP error or invalid JSON response),
        # warn but do not fail the build
        print_warning('Unable to fetch pull request #{0}.\nError: {1}'.format(
            pr_num, str(e)))
        return ''
Example 6
def get_new_entity_record(entity_path: str) -> Tuple[str, str]:
    data, _ = get_dict_from_file(entity_path)

    if 'layouts' in entity_path.lower():
        layout_kind = LAYOUT_TYPE_TO_NAME.get(data.get('kind', ''))
        type_id = data.get('typeId', '')
        return f'{type_id} - {layout_kind}', ''

    name = data.get('name', entity_path)
    if 'integrations' in entity_path.lower() and data.get('display'):
        name = data.get('display')

    if 'classifiers' in entity_path.lower():
        name = data.get('name')
        if not name:
            name = data.get('brandName')

    if name == entity_path:
        print_error(f'missing name for {entity_path}')

    # script entities have "comment" instead of "description"
    description = data.get('description', '') or data.get('comment', '')
    if not description:
        print_warning(f'missing description for {entity_path}')

    return name, description
Example 7
def check_docker_image_changed(main_branch: str,
                               packfile: str) -> Optional[str]:
    """ Checks whether the docker image was changed in master.

        :param
            main_branch: The git main branch
            packfile: The added or modified yml path

        :rtype: ``Optional[str]``
        :return
        The latest docker image
    """
    try:
        diff = run_command(f'git diff {main_branch} -- {packfile}',
                           exit_on_error=False)
    except RuntimeError as e:
        if any(['is outside repository' in exp for exp in e.args]):
            return None
        else:
            print_warning(
                f'Skipping docker image check. Encountered the following error:\n{e.args[0]}'
            )
            return None
    else:
        diff_lines = diff.splitlines()
        for diff_line in diff_lines:
            # look for a line noting that the Docker image was changed
            if 'dockerimage:' in diff_line:
                split_line = diff_line.split()
                if split_line[0].startswith('+'):
                    return split_line[-1]
        return None
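A worked illustration (hypothetical diff content) of the parsing above: an added line that mentions 'dockerimage:' and whose first token starts with '+' yields the new image as its last token.

diff = ('-  dockerimage: demisto/python3:3.9.7.24076\n'
        '+  dockerimage: demisto/python3:3.10.1.25933')
new_image = None
for line in diff.splitlines():
    split_line = line.split()
    if 'dockerimage:' in line and split_line[0].startswith('+'):
        new_image = split_line[-1]
assert new_image == 'demisto/python3:3.10.1.25933'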
Example 8
def create_file_release_notes(change_type, full_file_name):
    """
    Create release note for changed file.

    :param change_type: git change status (A, M, R*)
    :param full_file_name: path to file in repository
    :return: None
    """
    if isinstance(full_file_name, tuple):
        _, full_file_name = full_file_name

    is_pack = is_file_path_in_pack(full_file_name)
    if is_pack:
        file_type = full_file_name.split("/")[2]
    else:
        file_type = full_file_name.split("/")[0]
    base_name = os.path.basename(full_file_name)
    file_suffix = os.path.splitext(base_name)[-1]
    file_type_mapping = RELEASE_NOTE_GENERATOR.get(file_type)

    if file_type_mapping is None or file_suffix not in CONTENT_FILE_SUFFIXES:
        print_warning("Unsupported file type: {}".format(full_file_name))
        return

    if change_type != "R100":  # only the file name changed (no actual data was modified)
        if 'R' in change_type:
            # handle the same as modified
            change_type = 'M'

        file_type_mapping.add(change_type, CONTENT_LIB_PATH + full_file_name)
Example 9
def create_markdown(release_notes_path: str, rn_string: str):
    if os.path.exists(release_notes_path):
        print_warning(
            f"Release notes were found at {release_notes_path}. Skipping")
    else:
        with open(release_notes_path, 'w') as fp:
            fp.write(rn_string)
Example 10
    def execute_update(self):
        if self.pack in IGNORED_PACK_NAMES:
            print_warning(
                f"Release notes are not required for the {self.pack} pack since this pack"
                f" is not versioned.")
        else:
            try:
                new_version, new_metadata = self.bump_version_number(
                    self.pre_release)
            except ValueError as e:
                print_error(e)
                sys.exit(1)
            rn_path = self.return_release_notes_path(new_version)
            self.check_rn_dir(rn_path)
            changed_files = {}
            self.find_added_pack_files()
            for packfile in self.pack_files:
                file_name, file_type = self.identify_changed_file_type(
                    packfile)
                changed_files[file_name] = file_type
            rn_string = self.build_rn_template(changed_files)
            if len(rn_string) > 0:
                self.commit_to_bump(new_metadata)
                self.create_markdown(rn_path, rn_string, changed_files)
            else:
                print_warning(
                    "No changes which would belong in release notes were detected."
                )
Example 11
    def are_modules_installed_for_verify(self) -> bool:
        """ Check the following:
            1. npm packages installed - see the packs var for the specific pack details.
            2. node interpreter exists.
        Returns:
            bool: True if all requirements are satisfied, False otherwise.
        """
        missing_module = []
        valid = True
        # Check that node exists
        stdout, stderr, exit_code = run_command_os('node -v',
                                                   cwd=self.content_path)
        if exit_code:
            print_warning(
                f'There is no node installed on the machine, test skipped, error - {stderr}, {stdout}'
            )
            valid = False
        else:
            # Check that the npm modules exist
            packs = ['@mdx-js/mdx', 'fs-extra', 'commander']
            for pack in packs:
                stdout, stderr, exit_code = run_command_os(
                    f'npm ls {pack}', cwd=self.content_path)
                if exit_code:
                    missing_module.append(pack)
        if missing_module:
            valid = False
            print_warning(
                f"The npm modules: {missing_module} are not installed, test skipped, use "
                f"'npm install <module>' to install all required node dependencies"
            )
        return valid
Example 12
    def is_docker_image_latest_tag(self):
        if 'demisto/python:1.3-alpine' == f'{self.docker_image_name}:{self.docker_image_tag}':
            # the docker image is the default one
            self.is_latest_tag = False
            print_error(
                'The current docker image in the yml file is the default one: demisto/python:1.3-alpine,\n'
                'Please create or use another docker image\n')
            return self.is_latest_tag

        if not self.docker_image_name or not self.docker_image_latest_tag:
            # If the docker image isn't in the format we expect or we failed fetching the tag,
            # we don't print any error messages to the user because they have already been printed
            self.is_latest_tag = False
            return self.is_latest_tag

        if self.docker_image_latest_tag != self.docker_image_tag:
            # If docker image tag is not the most updated one that exists in docker-hub
            print_warning(
                'The docker image tag is not the latest, please update it.\n'
                'The docker image tag in the yml file is: {}\n'
                'The latest docker image tag in docker hub is: {}\n'
                'You can check for the most updated version of {} here: https://hub.docker.com/r/{}/tags\n'
                .format(self.docker_image_tag, self.docker_image_latest_tag,
                        self.docker_image_name, self.docker_image_name))

        # the most updated tag should be numeric and not labeled "latest"
        if self.docker_image_latest_tag == "latest":
            self.is_latest_tag = False
            print_error(
                '"latest" tag is not allowed,\n'
                'Please create or update to an updated versioned image\n'
                'You can check for the most updated version of {} here: https://hub.docker.com/r/{}/tags\n'
                .format(self.docker_image_tag, self.docker_image_name))

        return self.is_latest_tag
Example 13
def get_private_packs(private_index_path):
    """ Get the list of ID and price of the private packs.

    Args:
        private_index_path: The path for the index of the private packs.

    Returns:
        private_packs: A list of ID and price of the private packs.
    """
    try:
        metadata_files = glob.glob(f"{private_index_path}/**/metadata.json")
    except Exception as e:
        print_warning(f'Could not find metadata files in {private_index_path}: {str(e)}')
        return []

    if not metadata_files:
        print_warning(f'No metadata files found in [{private_index_path}]')

    private_packs = []
    for metadata_file_path in metadata_files:
        try:
            with open(metadata_file_path, "r") as metadata_file:
                metadata = json.load(metadata_file)
            if metadata:
                private_packs.append({
                    'id': metadata.get('id'),
                    'price': metadata.get('price')
                })
        except ValueError as e:
            print_error(f'Invalid JSON in the metadata file [{metadata_file_path}]: {str(e)}')

    return private_packs
Example 14
def test_instances(secret_conf_path, server, username, password):
    integrations = get_integrations(secret_conf_path)

    instance_ids = []
    failed_integrations = []
    integrations_counter = 0

    prints_manager = ParallelPrintsManager(1)

    content_installation_client = demisto_client.configure(base_url=server,
                                                           username=username,
                                                           password=password,
                                                           verify_ssl=False)
    install_new_content(content_installation_client, server)
    for integration in integrations:
        c = demisto_client.configure(base_url=server,
                                     username=username,
                                     password=password,
                                     verify_ssl=False)
        integrations_counter += 1
        integration_name = integration.get('name')
        integration_instance_name = integration.get('instance_name', '')
        integration_params = integration.get('params')
        devops_comments = integration.get('devops_comments')
        product_description = integration.get('product_description', '')
        is_byoi = integration.get('byoi', True)
        has_integration = integration.get('has_integration', True)
        validate_test = integration.get('validate_test', True)

        if has_integration:
            instance_id, failure_message, _ = __create_integration_instance(
                c,
                integration_name,
                integration_instance_name,
                integration_params,
                is_byoi,
                prints_manager,
                validate_test=validate_test)
            if failure_message == 'No configuration':
                print_warning(
                    "Warning: skipping {} as it exists in content-test-conf conf.json but not "
                    "in content repo".format(integration_name))
                continue
            if not instance_id:
                print_error(
                    'Failed to create instance of {} with message: {}'.format(
                        integration_name, failure_message))
                failed_integrations.append(
                    "{} {} - devops comments: {}".format(
                        integration_name, product_description,
                        devops_comments))
            else:
                instance_ids.append(instance_id)
                print('Created integration %s successfully' % integration_name)
                __delete_integrations_instances(c, instance_ids,
                                                prints_manager)

            prints_manager.execute_thread_prints(0)

    return failed_integrations, integrations_counter
Example 15
def upload_core_packs_config(storage_bucket, packs_list):
    """Uploads corepacks.json file configuration to bucket. corepacks file includes core packs for server installation.

     Args:
        storage_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is uploaded.
        packs_list (list): list of initialized packs.

    """
    # TODO: later, check that this is not a pre-release and only then upload corepacks.json
    core_packs_public_urls = [c.public_storage_path for c in packs_list if
                              c.name in GCPConfig.CORE_PACKS_LIST and c.public_storage_path]

    if not core_packs_public_urls:
        print(f"No core packs detected, skipping {GCPConfig.CORE_PACK_FILE_NAME} upload")
        return

    if len(core_packs_public_urls) != len(GCPConfig.CORE_PACKS_LIST):
        print_warning(f"The core packs found do not match the configured core packs. "
                      f"Found {len(core_packs_public_urls)} but {len(GCPConfig.CORE_PACKS_LIST)} are configured, "
                      f"skipping {GCPConfig.CORE_PACK_FILE_NAME} upload")
        return

    # construct core pack data with public gcs urls
    core_packs_data = {
        'corePacks': core_packs_public_urls
    }

    core_packs_config_path = os.path.join(GCPConfig.STORAGE_BASE_PATH, GCPConfig.CORE_PACK_FILE_NAME)
    blob = storage_bucket.blob(core_packs_config_path)
    blob.upload_from_string(json.dumps(core_packs_data, indent=4))

    print_color(f"Finished uploading {GCPConfig.CORE_PACK_FILE_NAME} to storage.", LOG_COLORS.GREEN)
Example 16
    def update_content_version(self, content_ver: str = '', path: str = ''):
        regex = r'CONTENT_RELEASE_VERSION = .*'
        if not content_ver:
            try:
                with open('content-descriptor.json') as file_:
                    descriptor = json.load(file_)
                content_ver = descriptor['release']
            except (FileNotFoundError, json.JSONDecodeError, KeyError):
                print_error(
                    'Invalid descriptor file. Make sure the file content is valid JSON with a "release" key.'
                )
                return

        try:
            if self.no_update_commonserverpython:
                return

            if not path:
                path = get_common_server_path('.')
            with open(path, 'r+') as file_:
                content = file_.read()
                # pass re.M via the flags keyword; passed positionally it
                # would be interpreted as the count argument
                content = re.sub(regex,
                                 f"CONTENT_RELEASE_VERSION = '{content_ver}'",
                                 content, flags=re.M)
                file_.seek(0)
                file_.write(content)
                file_.truncate()  # drop leftover bytes if the content shrank
        except Exception as ex:
            print_warning(f'Could not open CommonServerPython File - {ex}')
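A quick check of the flags fix above: in re.sub the fourth positional argument is count, so passing re.M positionally would silently cap the number of substitutions instead of enabling multiline matching.

import re

content = 'foo = 1\nCONTENT_RELEASE_VERSION = "old"\n'
fixed = re.sub(r'CONTENT_RELEASE_VERSION = .*',
               "CONTENT_RELEASE_VERSION = '20.5.0'", content, flags=re.M)
assert "CONTENT_RELEASE_VERSION = '20.5.0'" in fixed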
Example 17
    def copy_docs_files(content_bundle_path, packs_bundle_path):
        for doc_file in ('./Documentation/doc-CommonServer.json',
                         './Documentation/doc-howto.json'):
            if os.path.exists(doc_file):
                if content_bundle_path:
                    print(f'copying {doc_file} doc to content bundle\n')
                    shutil.copyfile(
                        doc_file,
                        os.path.join(content_bundle_path,
                                     os.path.basename(doc_file)))

                # copy doc to packs bundle
                print(f'copying {doc_file} doc to content pack bundle\n')
                base_pack_doc_path = os.path.join(packs_bundle_path, BASE_PACK,
                                                  "Documentation")

                if not os.path.exists(base_pack_doc_path):
                    os.mkdir(base_pack_doc_path)
                shutil.copy(
                    doc_file,
                    os.path.join(base_pack_doc_path,
                                 os.path.basename(doc_file)))
            else:
                print_warning(f'{doc_file} was not found and '
                              'therefore was not added to the content bundle')
Example 18
    def handle_existing_rn_with_docker_image(new_rn: str, header_by_type: str,
                                             docker_image: str,
                                             content_name: str) -> str:
        """
        Receives the new RN to be written, performs operations to add the docker image to the given RN.
        Args:
            new_rn (str): The new RN.
            header_by_type (str): Header of the RN to add the docker image to, e.g. 'Integrations', 'Scripts'.
            docker_image (str): Docker image to add.
            content_name (str): The content name to add the docker image entry to, e.g. integration name, script name.

        Returns:
            (str): Updated RN
        """
        # Writing or re-writing docker image to release notes.
        rn_parts = new_rn.split(header_by_type)
        new_rn_part = f'- Updated the Docker image to: *{docker_image}*.'
        if len(rn_parts) > 1:
            # Splitting again by content name to append the docker image release note to corresponding
            # content entry only
            content_parts = rn_parts[1].split(f'{content_name}\n')
            new_rn = f'{rn_parts[0]}{header_by_type}{content_parts[0]}{content_name}\n{new_rn_part}\n' \
                     f'{content_parts[1]}'
        else:
            print_warning(
                f'Could not parse release notes {new_rn} by header type: {header_by_type}'
            )
        return new_rn
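A worked example (hypothetical release-note text) of the split-and-rejoin logic above: the docker image line is placed right after the content entry's header inside the matching section.

new_rn = ('#### Integrations\n'
          '##### My Integration\n'
          '- Fixed an issue.\n')
header_by_type = '#### Integrations'
content_name = '##### My Integration'
new_rn_part = '- Updated the Docker image to: *demisto/python3:3.10.1.25933*.'

rn_parts = new_rn.split(header_by_type)
content_parts = rn_parts[1].split(f'{content_name}\n')
result = (f'{rn_parts[0]}{header_by_type}{content_parts[0]}{content_name}\n'
          f'{new_rn_part}\n{content_parts[1]}')
# result now reads:
# #### Integrations
# ##### My Integration
# - Updated the Docker image to: *demisto/python3:3.10.1.25933*.
# - Fixed an issue.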
Example 19
    def get_remote_templates(self, files_list):
        """
        Download the object's related template files and save them in the output path.
        Args:
            files_list: List of files to download.
        Returns:
            bool. True if the files were downloaded and saved successfully, False otherwise.
        """
        if self.is_integration:
            path = os.path.join('Packs', 'HelloWorld', 'Integrations',
                                'HelloWorld')
            os.mkdir(os.path.join(self.full_output_path, self.TEST_DATA_DIR))
        else:
            path = os.path.join('Packs', 'HelloWorld', 'Scripts',
                                'HelloWorldScript')

        for file in files_list:
            try:
                file_content = tools.get_remote_file(os.path.join(path, file),
                                                     return_content=True)
                with open(os.path.join(self.full_output_path, file),
                          'wb') as f:
                    f.write(file_content)
            except Exception:
                print_warning(
                    f"Could not fetch remote template - {file}. Using local templates instead."
                )
                return False

        return True
Example 20
    def is_duplicate_description(self):
        """Check if the integration has a non-duplicate description."""
        is_description_in_yml = False
        is_description_in_package = False
        package_path = None
        md_file_path = None
        if not re.match(PACKS_INTEGRATION_YML_REGEX, self.file_path, re.IGNORECASE):
            package_path = os.path.dirname(self.file_path)
            try:
                md_file_path = glob.glob(os.path.join(os.path.dirname(self.file_path), '*_description.md'))[0]
            except IndexError:
                print_warning("No detailed description file was found in the package {}."
                              " Consider adding one.".format(package_path))
            if md_file_path:
                is_description_in_package = True

        data_dictionary = get_yaml(self.file_path)

        if not data_dictionary:
            return is_description_in_package

        if data_dictionary.get('detaileddescription'):
            is_description_in_yml = True

        if is_description_in_package and is_description_in_yml:
            error_message, error_code = Errors.description_in_package_and_yml()
            if self.handle_error(error_message, error_code, file_path=package_path):
                self._is_valid = False
                return False

        return True
Example 21
def are_modules_installed_for_verify(content_path: str) -> bool:
    """ Check the following:
        1. npm packages installed - see REQUIRED_MDX_PACKS for the specific pack details.
        2. node interpreter exists.
    Returns:
        bool: True if all requirements are satisfied, False otherwise.
    """
    missing_module = []
    valid = True
    # Check that node exists
    stdout, stderr, exit_code = run_command_os('node -v', cwd=content_path)
    if exit_code:
        print_warning(
            f'There is no node installed on the machine, test skipped, error - {stderr}, {stdout}'
        )
        valid = False
    else:
        # Check that the npm modules exist
        stdout, stderr, exit_code = run_command_os(
            f'npm ls --json {" ".join(REQUIRED_MDX_PACKS)}',
            cwd=content_path)
        if exit_code:  # all are missing
            missing_module.extend(REQUIRED_MDX_PACKS)
        else:
            deps = json.loads(stdout).get('dependencies', {})
            for pack in REQUIRED_MDX_PACKS:
                if pack not in deps:
                    missing_module.append(pack)
    if missing_module:
        valid = False
        print_warning(
            f"The npm modules: {missing_module} are not installed, Readme mdx validation skipped. Use "
            f"'npm install' to install all required node dependencies")
    return valid
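For illustration (hypothetical npm output): npm ls --json reports installed packages under a top-level "dependencies" key, so a missing pack is simply absent from that dict.

import json

stdout = '{"dependencies": {"fs-extra": {"version": "9.0.0"}}}'
deps = json.loads(stdout).get('dependencies', {})
missing = [p for p in ('@mdx-js/mdx', 'fs-extra', 'commander')
           if p not in deps]
assert missing == ['@mdx-js/mdx', 'commander']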
Example 22
    def _parse_stats_result(cls, stats_lines):
        """Parses the docker stats string and converts memory usage to MiB.

            Args:
                stats_lines (str): String that contains docker stats.
            Returns:
                list: List of dictionaries with parsed docker container statistics.

        """
        stats_result = []
        try:
            containers_stats = [json.loads(c) for c in stats_lines.splitlines()]

            for container_stat in containers_stats:
                memory_usage_stats = container_stat.get(cls.MEMORY_USAGE, '').split('/')[0].lower()

                if 'kib' in memory_usage_stats:
                    mib_usage = float(memory_usage_stats.replace('kib', '').strip()) / 1024
                elif 'gib' in memory_usage_stats:
                    mib_usage = float(memory_usage_stats.replace('gib', '').strip()) * 1024
                else:
                    mib_usage = float(memory_usage_stats.replace('mib', '').strip())

                stats_result.append({
                    'memory_usage': mib_usage,
                    'pids': int(container_stat.get(cls.PIDS_USAGE)),
                    'container_name': container_stat.get(cls.CONTAINER_NAME),
                    'container_id': container_stat.get(cls.CONTAINER_ID)
                })
        except Exception as e:
            print_warning("Failed to parse the docker stats result. Additional info: {}".format(e))
        finally:
            return stats_result
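A minimal check of the KiB/GiB-to-MiB normalization above (hypothetical stats line; assumes MEMORY_USAGE maps to docker's MemUsage column):

import json

stats_line = json.dumps({'MemUsage': '512KiB / 7.5GiB'})
mem = json.loads(stats_line)['MemUsage'].split('/')[0].lower()
mib_usage = float(mem.replace('kib', '').strip()) / 1024
assert mib_usage == 0.5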
Example 23
def update_api_modules_dependents_rn(_pack,
                                     pre_release,
                                     update_type,
                                     added,
                                     modified,
                                     id_set_path=None):
    print_warning(
        "Changes introduced to APIModule, trying to update dependent integrations."
    )
    if not id_set_path:
        if not os.path.isfile('./Tests/id_set.json'):
            print_error(
                "Failed to update integrations dependent on the APIModule pack - no id_set.json is "
                "available. Please run `demisto-sdk create-id-set` to generate it, and rerun this command."
            )
            return
        id_set_path = './Tests/id_set.json'
    with open(id_set_path, 'r') as conf_file:
        id_set = json.load(conf_file)
    api_module_set = get_api_module_ids(added)
    api_module_set = api_module_set.union(get_api_module_ids(modified))
    integrations = get_api_module_integrations_set(
        api_module_set, id_set.get('integrations', []))
    for integration in integrations:
        integration_path = integration.get('file_path')
        integration_pack = integration.get('pack')
        update_pack_rn = UpdateRN(pack_path=integration_pack,
                                  update_type=update_type,
                                  modified_files_in_pack={integration_path},
                                  pre_release=pre_release,
                                  added_files=set(),
                                  pack=integration_pack)
        update_pack_rn.execute_update()
Example 24
def add_pr_comment(comment):
    """Add comment to the pull request.

    Args:
        comment (string): The comment text.

    """
    token = os.environ['CONTENT_GITHUB_TOKEN']
    branch_name = os.environ['CIRCLE_BRANCH']
    sha1 = os.environ['CIRCLE_SHA1']

    query = f'?q={sha1}+repo:demisto/content+is:pr+is:open+head:{branch_name}+is:open'
    url = 'https://api.github.com/search/issues'
    headers = {'Authorization': 'Bearer ' + token}
    try:
        res = requests.get(url + query, headers=headers, verify=False)
        res = handle_github_response(res)
        if res and res.get('total_count', 0) == 1:
            issue_url = res['items'][0].get('comments_url') if res.get(
                'items', []) else None
            if issue_url:
                res = requests.post(issue_url,
                                    json={'body': comment},
                                    headers=headers,
                                    verify=False)
                handle_github_response(res)
        else:
            print_warning(
                'Add pull request comment failed: did not find exactly one open pull request for branch {}.'
                .format(branch_name))
    except Exception as e:
        print_warning('Add pull request comment failed: {}'.format(e))
Example 25
    def copy_packs_content_to_packs_bundle(self, packs):
        """
        Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
        except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
        prefixes according to how server expects to ingest the files, e.g. 'integration-' is prepended to integration
        yml filenames and 'script-' is prepended to script yml filenames and so on and so forth.
        """
        for pack in packs:
            pack_name = os.path.basename(pack)
            if pack_name in self.packs_to_skip:
                continue
            pack_dst = os.path.join(self.packs_bundle, pack_name)
            os.mkdir(pack_dst)
            pack_dirs = get_child_directories(pack)
            pack_files = get_child_files(pack)
            # copy first level pack files over
            for file_path in pack_files:
                shutil.copy(
                    file_path,
                    os.path.join(pack_dst, os.path.basename(file_path)))
            # handle content directories in the pack
            for content_dir in pack_dirs:
                dir_name = os.path.basename(content_dir)
                dest_dir = os.path.join(pack_dst, dir_name)
                os.mkdir(dest_dir)
                if dir_name in DIR_TO_PREFIX:
                    packages_dirs = get_child_directories(content_dir)
                    for package_dir in packages_dirs:
                        ymls, _ = get_yml_paths_in_dir(package_dir,
                                                       error_msg='')
                        if not ymls or (len(ymls) == 1
                                        and ymls[0].endswith('_unified.yml')):
                            msg = 'Skipping package: {} -'.format(package_dir)
                            if not ymls:
                                print_warning(
                                    '{} No yml files found in the package directory'
                                    .format(msg))
                            else:
                                print_warning(
                                    '{} Only unified yml found in the package directory'
                                    .format(msg))
                            continue
                        package_dir_name = os.path.basename(package_dir)
                        unifier = Unifier(package_dir, dir_name, dest_dir)
                        unifier.merge_script_package_to_yml()

                        # also copy CHANGELOG markdown files over (should only be one per package)
                        package_files = get_child_files(package_dir)
                        changelog_files = [
                            file_path for file_path in package_files
                            if 'CHANGELOG.md' in file_path
                        ]
                        for md_file_path in changelog_files:
                            md_out_name = '{}-{}_CHANGELOG.md'.format(
                                DIR_TO_PREFIX.get(dir_name), package_dir_name)
                            shutil.copyfile(
                                md_file_path,
                                os.path.join(dest_dir, md_out_name))
                else:
                    self.copy_dir_files(content_dir, dest_dir)
Example 26
def get_file_description(path, file_type) -> str:
    """ Gets the file description.

        :param
            path: The file path
            file_type: The file type

        :rtype: ``str``
        :return
        The file description if the file exists and has one, an empty string if the file is missing, otherwise %%UPDATE_RN%%
    """
    if not os.path.isfile(path):
        print_warning(
            f'Cannot get file description: "{path}" file does not exist')
        return ''

    elif file_type in (FileType.PLAYBOOK, FileType.INTEGRATION):
        yml_file = get_yaml(path)
        return yml_file.get('description', '')

    elif file_type == FileType.SCRIPT:
        yml_file = get_yaml(path)
        return yml_file.get('comment', '')

    elif file_type in (FileType.CLASSIFIER, FileType.REPORT, FileType.WIDGET,
                       FileType.DASHBOARD, FileType.JOB):
        json_file = get_json(path)
        return json_file.get('description', '')

    return '%%UPDATE_RN%%'
Example 27
def print_packs_summary(packs_list):
    """Prints summary of packs uploaded to gcs.

    Args:
        packs_list (list): list of initialized packs.

    """
    successful_packs = [pack for pack in packs_list if pack.status == PackStatus.SUCCESS.name]
    skipped_packs = [pack for pack in packs_list if pack.status == PackStatus.PACK_ALREADY_EXISTS.name]
    failed_packs = [pack for pack in packs_list if pack not in successful_packs and pack not in skipped_packs]

    print("\n")
    print("--------------------------------------- Packs Upload Summary ---------------------------------------")
    print(f"Total number of packs: {len(packs_list)}")

    if successful_packs:
        print_color(f"Number of successfully uploaded packs: {len(successful_packs)}", LOG_COLORS.GREEN)
        successful_packs_table = _build_summary_table(successful_packs)
        print_color(successful_packs_table, LOG_COLORS.GREEN)
    if skipped_packs:
        print_warning(f"Number of skipped packs: {len(skipped_packs)}")
        skipped_packs_table = _build_summary_table(skipped_packs)
        print_warning(skipped_packs_table)
    if failed_packs:
        print_error(f"Number of failed packs: {len(failed_packs)}")
        failed_packs_table = _build_summary_table(failed_packs)
        print_error(failed_packs_table)
        sys.exit(1)
Example 28
    def is_aliased_fields_are_valid(self) -> bool:
        """
        Validates that the aliased fields (fields that appear as Aliases in another field) are valid.
        Invalid aliased fields are:
        1. fields that are in another field's Aliases list and present in the same marketplace as that field.
        2. fields that are in another field's Aliases list and also contain Aliases themselves (nested aliasing).

        Returns:
            (bool): True if aliased fields are valid.
        """

        if not self.id_set_file:
            print_warning('Validation will be skipped since an id set file was not provided')
            return True

        aliases = self.current_file.get('Aliases', [])
        if not aliases:
            return True

        is_valid = True
        validators_and_error_generators = [
            (self.is_alias_has_invalid_marketplaces, Errors.invalid_marketplaces_in_alias),
            (self.is_alias_has_inner_alias, Errors.aliases_with_inner_alias),
        ]
        for validator, error_generator in validators_and_error_generators:
            invalid_aliases = [alias.get("cliname") for alias in self._get_incident_fields_by_aliases(aliases) if validator(alias)]
            if invalid_aliases:
                error_message, error_code = error_generator(invalid_aliases)
                if self.handle_error(error_message, error_code, file_path=self.file_path, warning=self.structure_validator.quiet_bc):
                    is_valid = False

        return is_valid
Example 29
def main():
    """ Main function for iterating over existing packs folder in content repo and creating json of all
    packs dependencies. The logic of pack dependency is identical to sdk find-dependencies command.

    """
    option = option_handler()
    output_path = option.output_path
    id_set_path = option.id_set_path
    IGNORED_FILES.append(
        GCPConfig.BASE_PACK)  # skip dependency calculation of Base pack
    # loading id set json
    with open(id_set_path, 'r') as id_set_file:
        id_set = json.load(id_set_file)

    pack_dependencies_result = {}

    print("Starting dependencies calculation")
    # starting iteration over pack folders
    for pack in os.scandir(PACKS_FULL_PATH):
        if not pack.is_dir() or pack.name in IGNORED_FILES:
            print_warning(
                f"Skipping dependency calculation of {pack.name} pack.")
            continue  # skipping ignored packs
        print(f"Calculating {pack.name} pack dependencies.")

        try:
            dependency_graph = PackDependencies.build_dependency_graph(
                pack_id=pack.name,
                id_set=id_set,
                verbose_file=VerboseFile(''),
            )
            first_level_dependencies, all_level_dependencies = parse_for_pack_metadata(
                dependency_graph, pack.name)

        except Exception as e:
            print_error(
                f"Failed calculating {pack.name} pack dependencies. Additional info:\n{e}"
            )
            continue

        pack_dependencies_result[pack.name] = {
            "dependencies": first_level_dependencies,
            "displayedImages": list(first_level_dependencies.keys()),
            "allLevelDependencies": all_level_dependencies,
            "path": os.path.join(PACKS_FOLDER, pack.name),
            "fullPath": pack.path
        }

    print(
        f"Number of created pack dependencies entries: {len(pack_dependencies_result.keys())}"
    )
    # finished iteration over pack folders
    print_color("Finished dependencies calculation", LOG_COLORS.GREEN)

    with open(output_path, 'w') as pack_dependencies_file:
        json.dump(pack_dependencies_result, pack_dependencies_file, indent=4)

    print_color(f"Created packs dependencies file at: {output_path}",
                LOG_COLORS.GREEN)
Example 30
    def create_pack_rn(self, rn_path: str, changed_files: dict,
                       new_metadata: dict, new_version: str) -> bool:
        """ Checks whether the pack requires a new rn and if so, creates it.

            :param
                rn_path (str): The rn path
                changed_files (dict): The changed files details
                new_metadata (dict): The new pack metadata
                new_version (str): The new version string representation, e.g. 1.0.2, 1.11.2.


            :rtype: ``bool``
            :return
                Whether the RN was updated successfully or not
        """
        rn_string = self.handle_existing_rn_version_path(rn_path)
        if not rn_string:
            rn_string = self.build_rn_template(changed_files)
        if len(rn_string) > 0 or self.is_force:
            if self.is_bump_required():
                self.write_metadata_to_file(new_metadata)
            self.create_markdown(rn_path, rn_string, changed_files)
            self.build_rn_config_file(new_version)
            try:
                run_command(f'git add {rn_path}', exit_on_error=False)
            except RuntimeError:
                print_warning(
                    f'Could not add the release note files to git: {rn_path}')
            if self.is_bc and self.bc_path:
                try:
                    run_command(f'git add {self.bc_path}', exit_on_error=False)
                except RuntimeError:
                    print_warning(
                        f'Could not add the release note config file to git: {self.bc_path}'
                    )
            if self.existing_rn_changed:
                print_color(
                    f"Finished updating release notes for {self.pack}.",
                    LOG_COLORS.GREEN)
                if not self.text:
                    print_color(
                        f"\nNext Steps:\n - Please review the "
                        f"created release notes found at {rn_path} and document any changes you "
                        f"made by replacing '%%UPDATE_RN%%'.\n - Commit "
                        f"the new release notes to your branch.\nFor information regarding proper"
                        f" format of the release notes, please refer to "
                        f"https://xsoar.pan.dev/docs/integrations/changelog",
                        LOG_COLORS.GREEN)
                return True
            else:
                print_color(
                    f"No changes to {self.pack} pack files were detected from the previous time "
                    "this command was run. The release notes have not been "
                    "changed.", LOG_COLORS.GREEN)
        else:
            print_color(
                "No changes which would belong in release notes were detected.",
                LOG_COLORS.YELLOW)
        return False