예제 #1
0
 def __init__(self,
              input: str,
              git: bool,
              all_packs: bool,
              quiet: bool,
              verbose: int,
              log_path: str,
              prev_ver: str,
              json_file_path: str = ''):
     """Set up logging, gather environment facts and resolve the packages to lint.

     Args:
         input: Path(s) of content to lint (passed through to ``_get_packages``).
         git: Lint only packages changed in git.
         all_packs: Lint every pack.
         quiet: Suppress console output (takes precedence over ``verbose``).
         verbose: Numeric verbosity level.
         log_path: Directory for the log file, if any.
         prev_ver: Base branch/version used when collecting git changes.
         json_file_path: File or directory for the JSON lint report.
     """
     # Configure the module-level logger (level, quiet mode, optional file handler).
     global logger
     logger = logging_setup(verbose=verbose, quiet=quiet, log_path=log_path)
     # Effective verbosity: quiet wins over the numeric verbose level.
     self._verbose = not quiet if quiet else verbose
     # Facts about the environment consumed when resolving packages below.
     self._facts: dict = self._gather_facts()
     self._prev_ver = prev_ver
     self._all_packs = all_packs
     # With no explicit input, no -g and no -a, plain 'lint' behaves as 'lint -g'.
     if not (git or all_packs or input):
         git = True
     # Resolve the final list of packages to lint and test.
     self._pkgs: List[Path] = self._get_packages(
         content_repo=self._facts["content_repo"],
         input=input,
         git=git,
         all_packs=all_packs,
         base_branch=self._prev_ver)
     # When a directory was given for the report, place a default file inside it.
     if json_file_path and os.path.isdir(json_file_path):
         json_file_path = os.path.join(json_file_path, 'lint_outputs.json')
     self.json_file_path = json_file_path
     self.linters_error_list: list = []
예제 #2
0
def test_sign_pack_error_from_subprocess(repo, capsys, fake_process):
    """Verify a failing signer subprocess is reported through the logger.

    When:
        - Signing a pack.

    Given:
        - Pack object.
        - subprocess is failing due to an error.

    Then:
        - Verify that exceptions are written to the logger.
    """
    import demisto_sdk.commands.common.content.objects.pack_objects.pack as pack_class
    from demisto_sdk.commands.common.content.objects.pack_objects.pack import Pack

    pack_class.logger = logging_setup(3)

    created_pack = repo.create_pack('Pack1')
    pack_object = Pack(created_pack.path)
    signer = Path('./signer')

    # Simulate the external signer process emitting an error on stderr.
    fake_process.register_subprocess(
        f'{signer} {created_pack.path} keyfile base64', stderr=["error"])

    pack_object.sign_pack(pack_class.logger, pack_object.path, signer)

    captured = capsys.readouterr()
    assert 'Failed to sign pack for Pack1 -' in captured.err
def test_contains_indicator_type():
    """Check zip-packs keeps the new indicator type and drops the old one.

    Given
    - A pack with old and new indicator type.

    When
    - Running zip-packs on it.

    Then
    - Ensure that the new indicator type is added to the zipped pack, and that the old one is not.
    """
    import demisto_sdk.commands.create_artifacts.content_artifacts_creator as cca
    from demisto_sdk.commands.zip_packs.packs_zipper import PacksZipper

    cca.logger = logging_setup(0)

    with temp_dir() as output_dir:
        zipper = PacksZipper(
            pack_paths=str(TEST_DATA / PACKS_DIR / 'TestIndicatorTypes'),
            output=output_dir,
            content_version='6.0.0',
            zip_all=False)
        zipper.zip_packs()
        metadata = zipper.artifacts_manager.packs['TestIndicatorTypes'].metadata
        assert metadata.content_items != {}
        assert metadata.content_items['reputation'] == [{
            "details": "Good Sample",
            "reputationScriptName": "",
            "enhancementScriptNames": []
        }]
    def create_content_artifacts(self) -> int:
        """Dump every content artifact in parallel, then return the exit code."""
        global logger
        logger = logging_setup(3)

        with ArtifactsDirsHandler(self), ProcessPoolHandler(self) as pool:
            # content/Packs
            pending: List[ProcessFuture] = list(dump_packs(self, pool))
            # content/TestPlaybooks
            if not self.remove_test_playbooks:
                pending.append(pool.schedule(dump_tests_conditionally, args=(self,)))
            # content/content-descriptor.json
            pending.append(pool.schedule(dump_content_descriptor, args=(self,)))
            # content/Documentation/doc-*.json
            pending.append(pool.schedule(dump_content_documentations, args=(self,)))
            # Block until every scheduled dump has completed.
            wait_futures_complete(pending, self)
            # Append the configured suffix to the produced artifacts.
            suffix_handler(self)

        # Clean up the temporary signing key file, if one was written.
        if os.path.exists('keyfile'):
            os.remove('keyfile')
        logger.info(f"\nExecution time: {time.time() - self.execution_start} seconds")

        return self.exit_code
예제 #5
0
def run_lint(file_path: str, json_output_file: str) -> None:
    """Run the demisto-sdk linters on ``file_path`` and write a JSON report.

    Args:
        file_path: Path of the content item to lint.
        json_output_file: Path of the JSON report file; its directory is also
            used for the lint log file and the failure report.
    """
    lint_log_dir = os.path.dirname(json_output_file)
    logging_setup(verbose=3, quiet=False, log_path=lint_log_dir)
    # BUGFIX: LintManager.__init__ requires `log_path` (it has no default);
    # it was previously omitted here, raising a TypeError at call time.
    lint_manager = LintManager(input=file_path,
                               git=False,
                               all_packs=False,
                               quiet=False,
                               verbose=1,
                               log_path=lint_log_dir,
                               prev_ver='',
                               json_file_path=json_output_file)
    lint_manager.run_dev_packages(parallel=1,
                                  no_flake8=False,
                                  no_xsoar_linter=False,
                                  no_bandit=False,
                                  no_mypy=False,
                                  no_vulture=False,
                                  keep_container=False,
                                  no_pylint=True,
                                  no_test=True,
                                  no_pwsh_analyze=True,
                                  no_pwsh_test=True,
                                  test_xml='',
                                  failure_report=lint_log_dir)
예제 #6
0
 def __init__(self, input: str, git: bool, all_packs: bool, quiet: bool,
              verbose: int, log_path: str):
     """Set up logging, gather environment facts and resolve the packages to lint.

     Args:
         input: Path(s) of content to lint (passed through to ``_get_packages``).
         git: Lint only packages changed in git.
         all_packs: Lint every pack.
         quiet: Suppress console output (takes precedence over ``verbose``).
         verbose: Numeric verbosity level.
         log_path: Directory for the log file, if any.
     """
     # Set logging level and file handler if required
     global logger
     logger = logging_setup(verbose=verbose, quiet=quiet, log_path=log_path)
     # Verbosity level: quiet takes precedence over the numeric verbose level.
     self._verbose = not quiet if quiet else verbose
     # Gather facts for manager (content repo and related environment data).
     self._facts: dict = self._gather_facts()
     # Filter packages to lint and test check
     self._pkgs: List[Path] = self._get_packages(
         content_repo=self._facts["content_repo"],
         input=input,
         git=git,
         all_packs=all_packs)
예제 #7
0
def test_load_user_metadata_advanced(repo):
    """Verify the advanced pack metadata fields are loaded correctly.

    When:
        - Dumping a specific pack, processing the pack's metadata.

    Given:
        - Pack object.

    Then:
        - Verify that pack's metadata information was loaded successfully.
    """
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import \
        ArtifactsManager

    sample_pack = repo.setup_one_pack('Pack1')
    sample_pack.pack_metadata.write_json({
        'name': 'Pack Number 1',
        'price': 10,
        'tags': ['tag1'],
        'useCases': ['usecase1'],
        'vendorId': 'vendorId',
        'vendorName': 'vendorName',
    })

    with ChangeCWD(repo.path), temp_dir() as temp:
        manager = ArtifactsManager(artifacts_path=temp,
                                   content_version='6.0.0',
                                   zip=False,
                                   suffix='',
                                   cpus=1,
                                   packs=True)

    metadata = manager.content.packs['Pack1'].metadata
    metadata.load_user_metadata('Pack1', 'Pack Number 1', sample_pack.path,
                                logging_setup(3))

    assert metadata.id == 'Pack1'
    assert metadata.name == 'Pack Number 1'
    assert metadata.price == 10
    assert metadata.vendor_id == 'vendorId'
    assert metadata.vendor_name == 'vendorName'
    assert metadata.tags == ['tag1', 'Use Case']
def test_sign_packs_failure(repo, capsys, key, tool):
    """Verify a missing signature_key/sign_directory pair logs an error.

    When:
        - Signing a pack.

    Given:
        - Pack object.
        - Signature key without the signing tool, or vice-versa.

    Then:
        - Verify that exceptions are written to the logger.
    """
    import demisto_sdk.commands.create_artifacts.content_artifacts_creator as cca
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import (
        ArtifactsManager, sign_packs)

    cca.logger = logging_setup(3)

    with ChangeCWD(repo.path), temp_dir() as temp:
        manager = ArtifactsManager(artifacts_path=temp,
                                   content_version='6.0.0',
                                   zip=False,
                                   suffix='',
                                   cpus=1,
                                   packs=True,
                                   signature_key=key)

        if tool:
            # Create a dummy signing tool on disk and point the manager at it.
            with open('./tool', 'w') as tool_file:
                tool_file.write('some tool')
            manager.signDirectory = Path(temp / 'tool')

    sign_packs(manager)

    captured = capsys.readouterr()
    assert ('Failed to sign packs. In order to do so, you need to provide both signature_key and '
            'sign_directory arguments.') in captured.err
def test_dump_pack(mock_git):
    """Dump a single pack and compare the result against the expected tree."""
    import demisto_sdk.commands.create_artifacts.content_artifacts_creator as cca
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import (
        ArtifactsManager, Pack, create_dirs, dump_pack)

    cca.logger = logging_setup(0)

    with temp_dir() as temp:
        manager = ArtifactsManager(artifacts_path=temp,
                                   content_version='6.0.0',
                                   zip=False,
                                   suffix='',
                                   cpus=1,
                                   packs=False)

        create_dirs(artifact_manager=manager)
        dump_pack(artifact_manager=manager,
                  pack=Pack(TEST_CONTENT_REPO / PACKS_DIR / 'Sample01'))

        assert same_folders(
            src1=temp / 'content_packs' / 'Sample01',
            src2=ARTIFACTS_EXPECTED_RESULTS / 'content' / 'content_packs' / 'Sample01')
def create_content_artifacts(artifact_manager: ArtifactsManager) -> int:
    """Dump every content artifact in parallel, then return the exit code."""
    global logger
    logger = logging_setup(3)

    with ArtifactsDirsHandler(artifact_manager), ProcessPoolHandler(artifact_manager) as pool:
        # content/Packs
        pending: List[ProcessFuture] = list(dump_packs(artifact_manager, pool))
        # content/TestPlaybooks
        pending.append(pool.schedule(dump_tests_conditionally, args=(artifact_manager,)))
        # content/content-descriptor.json
        pending.append(pool.schedule(dump_content_descriptor, args=(artifact_manager,)))
        # content/Documentation/doc-*.json
        pending.append(pool.schedule(dump_content_documentations, args=(artifact_manager,)))
        # Block until every scheduled dump has completed.
        wait_futures_complete(pending, artifact_manager)
        # Append the configured suffix to the produced artifacts.
        suffix_handler(artifact_manager)

    logger.info(f"\nExecution time: {time.time() - artifact_manager.execution_start} seconds")

    return artifact_manager.exit_code
예제 #11
0
    def test_with_two_files(self, caplog, tmpdir, cov_file_names):
        """Combine two coverage files, then verify fix_file_path logs an ERROR
        about the unexpected file list and a DEBUG about removing the report,
        and that the combined data file is deleted."""
        logging_setup(3).propagate = True
        cov_files_paths = []
        for cov_file_name in cov_file_names:
            named_coverage_path = tmpdir.join(cov_file_name)
            copy_file(os.path.join(COVERAGE_FILES_DIR, cov_file_name),
                      named_coverage_path)
            cov_files_paths.append(named_coverage_path)
        dot_cov_file_path = tmpdir.join('.covergae')
        cov_obj = coverage.Coverage(data_file=dot_cov_file_path)
        cov_obj.combine(cov_files_paths)

        # BUGFIX: the original used `logging.ERROR & logging.DEBUG`, a bitwise
        # AND (40 & 10 == 8) that only worked by accident as a capture level.
        # Capture at DEBUG so both the DEBUG and ERROR records are collected.
        with caplog.at_level(logging.DEBUG, logger='demisto-sdk'):
            fix_file_path(dot_cov_file_path, 'some_path')

        assert len(caplog.records) == 2
        assert caplog.records[
            0].msg == 'unexpected file list in coverage report'
        assert caplog.records[0].levelname == 'ERROR'
        assert caplog.records[
            1].msg == 'removing coverage report for some_path'
        assert caplog.records[1].levelname == 'DEBUG'
        assert not os.path.exists(dot_cov_file_path)
예제 #12
0
'''
from datetime import datetime, timedelta
from typing import Optional

import requests

from demisto_sdk.commands.common.handlers import JSON_Handler
from demisto_sdk.commands.common.logger import logging_setup

json = JSON_Handler()

ONE_DAY = timedelta(days=1)
LATEST_URL = 'https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json'
HISTORY_URL = 'https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/history/coverage-min/{date}.json'

logger = logging_setup(2)


def get_total_coverage(filename: Optional[str] = None,
                       date: Optional[datetime] = None) -> float:
    '''
        Args:
            filename:   The path to the coverage.json/coverage-min.json file.
            date:       A datetime object.
        Returns:
            A float representing the total coverage that was found.
                from file in case that filename was given.
                from history bucket in case that date was given.
                from latest bucket in any other case.
            Or
                0.0 if any errors were encountered.
예제 #13
0
def test_load_user_metadata_basic(repo):
    """Verify the basic pack metadata fields are loaded correctly.

    When:
        - Dumping a specific pack, processing the pack's metadata.

    Given:
        - Pack object.

    Then:
        - Verify that pack's metadata information was loaded successfully.
    """
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import \
        ArtifactsManager

    sample_pack = repo.setup_one_pack('Pack1')
    sample_pack.pack_metadata.write_json({
        'name': 'Pack Number 1',
        'description': 'A description for the pack',
        'created': '2020-06-08T15:37:54Z',
        'price': 0,
        'support': 'xsoar',
        'url': 'some url',
        'email': 'some email',
        'currentVersion': '1.1.1',
        'author': 'Cortex XSOAR',
        'tags': ['tag1'],
        'dependencies': [{'dependency': {'dependency': '1'}}],
    })

    with ChangeCWD(repo.path), temp_dir() as temp:
        manager = ArtifactsManager(artifacts_path=temp,
                                   content_version='6.0.0',
                                   zip=False,
                                   suffix='',
                                   cpus=1,
                                   packs=True)

    metadata = manager.content.packs['Pack1'].metadata
    metadata.load_user_metadata('Pack1', 'Pack Number 1', sample_pack.path,
                                logging_setup(3))

    assert metadata.id == 'Pack1'
    assert metadata.name == 'Pack Number 1'
    assert metadata.description == 'A description for the pack'
    assert metadata.created == datetime(2020, 6, 8, 15, 37, 54)
    assert metadata.price == 0
    assert metadata.support == 'xsoar'
    assert metadata.url == 'some url'
    assert metadata.email == 'some email'
    assert metadata.certification == 'certified'
    assert metadata.current_version == parse('1.1.1')
    assert metadata.author == 'Cortex XSOAR'
    assert metadata.tags == ['tag1']
    assert metadata.dependencies == [{'dependency': {'dependency': '1'}}]
예제 #14
0
def test_verbose(verbose: int, logging_level: int):
    """The logger built by logging_setup carries the level mapped from `verbose`."""
    assert logging_setup(verbose=verbose).level == logging_level
예제 #15
0
 def setup(self):
     """Configure the logger at verbosity 3 and enable record propagation."""
     configured_logger = logging_setup(3)
     configured_logger.propagate = True