Example No. 1
def main():
    install_logging('Prepare_Content_Packs_For_Testing.log')
    option = option_handler()
    packs_artifacts_path = option.artifacts_path
    extract_destination_path = option.extract_path
    storage_bucket_name = option.bucket_name
    service_account = option.service_account
    target_packs = option.pack_names if option.pack_names else ""
    build_number = option.ci_build_number if option.ci_build_number else str(
        uuid.uuid4())
    override_all_packs = option.override_all_packs
    signature_key = option.key_string
    id_set_path = option.id_set_path
    packs_dependencies_mapping = load_json(
        option.pack_dependencies) if option.pack_dependencies else {}
    storage_base_path = option.storage_base_path
    remove_test_playbooks = option.remove_test_playbooks
    is_bucket_upload_flow = option.bucket_upload
    private_bucket_name = option.private_bucket_name
    circle_branch = option.circle_branch
    force_upload = option.force_upload

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    # content repo client initialized
    content_repo = get_content_git_client(CONTENT_ROOT_PATH)
    current_commit_hash, previous_commit_hash = get_recent_commits_data(
        content_repo, index_folder_path, is_bucket_upload_flow, circle_branch)

    # detect packs to upload
    pack_names = get_packs_names(target_packs, previous_commit_hash)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    if not option.override_all_packs:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash, previous_commit_hash,
                                  storage_bucket)

    # google cloud bigquery client initialized
    bq_client = init_bigquery_client(service_account)
    packs_statistic_df = get_packs_statistics_dataframe(bq_client)
    if private_bucket_name:  # Add private packs to the index
        private_storage_bucket = storage_client.bucket(private_bucket_name)
        private_packs, _, _ = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path, pack_names)
    else:  # skipping private packs
        logging.debug("Skipping index update of priced packs")
        private_packs = []

    # remove non-existing or invalid packs from the index and GCS
    clean_non_existing_packs(index_folder_path, private_packs, storage_bucket)

    # starting iteration over packs
    for pack in packs_list:
        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status, pack_content_items = pack.collect_content_items()
        if not task_status:
            pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
            pack.cleanup()
            continue

        task_status, integration_images = pack.upload_integration_images(
            storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status, author_image = pack.upload_author_image(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.format_metadata(
            user_metadata=user_metadata,
            pack_content_items=pack_content_items,
            integration_images=integration_images,
            author_image=author_image,
            index_folder_path=index_folder_path,
            packs_dependencies_mapping=packs_dependencies_mapping,
            build_number=build_number,
            commit_hash=current_commit_hash,
            packs_statistic_df=packs_statistic_df)
        if not task_status:
            pack.status = PackStatus.FAILED_METADATA_PARSING.name
            pack.cleanup()
            continue

        task_status, not_updated_build = pack.prepare_release_notes(
            index_folder_path, build_number)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        if not_updated_build:
            pack.status = PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
            pack.cleanup()
            continue

        task_status = pack.remove_unwanted_files(remove_test_playbooks)
        if not task_status:
            pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
            pack.cleanup()
            continue

        task_status = pack.sign_pack(signature_key)
        if not task_status:
            pack.status = PackStatus.FAILED_SIGNING_PACKS.name
            pack.cleanup()
            continue

        task_status, zip_pack_path = pack.zip_pack()
        if not task_status:
            pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
            pack.cleanup()
            continue

        task_status, pack_was_modified = pack.detect_modified(
            content_repo, index_folder_path, current_commit_hash,
            previous_commit_hash)
        if not task_status:
            pack.status = PackStatus.FAILED_DETECTING_MODIFIED_FILES.name
            pack.cleanup()
            continue

        (task_status, skipped_pack_uploading, full_pack_path) = \
            pack.upload_to_storage(zip_pack_path, pack.latest_version,
                                   storage_bucket, override_all_packs
                                   or pack_was_modified)

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        task_status, exists_in_index = pack.check_if_exists_in_index(
            index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_SEARCHING_PACK_IN_INDEX.name
            pack.cleanup()
            continue

        # if the pack already exists in cloud storage and in the index, skip the remaining steps
        if skipped_pack_uploading and exists_in_index:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        task_status = pack.prepare_for_index_upload()
        if not task_status:
            pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        task_status = update_index_folder(index_folder_path=index_folder_path,
                                          pack_name=pack.name,
                                          pack_path=pack.path,
                                          pack_version=pack.latest_version,
                                          hidden_pack=pack.hidden)
        if not task_status:
            pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(storage_bucket, build_number, index_folder_path)

    # finished iteration over content packs
    upload_index_to_storage(index_folder_path=index_folder_path,
                            extract_destination_path=extract_destination_path,
                            index_blob=index_blob,
                            build_number=build_number,
                            private_packs=private_packs,
                            current_commit_hash=current_commit_hash,
                            index_generation=index_generation,
                            force_upload=force_upload,
                            previous_commit_hash=previous_commit_hash)

    # upload id_set.json to bucket
    upload_id_set(storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # Store successful and failed packs list in CircleCI artifacts - to be used in Upload Packs To Marketplace job
    packs_results_file_path = os.path.join(
        os.path.dirname(packs_artifacts_path),
        BucketUploadFlow.PACKS_RESULTS_FILE)
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING,
        successful_packs, failed_packs)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs,
                        not is_bucket_upload_flow)
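
The helper init_storage_client used above is defined elsewhere. A minimal sketch of what it plausibly does, assuming the google-cloud-storage package and that service_account is a path to a service-account JSON file (an illustrative reconstruction, not the project's actual implementation):

from google.cloud import storage


def init_storage_client(service_account=None):
    """Return a Google Cloud Storage client (sketch).

    Uses the given service-account JSON file when provided, otherwise
    falls back to application-default credentials.
    """
    if service_account:
        return storage.Client.from_service_account_json(service_account)
    return storage.Client()
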
Example No. 2
def main():
    install_logging('Copy_and_Upload_Packs.log')
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names

    # Google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize build and prod base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX, circle_branch, build_number)
    GCPConfig.BUILD_BASE_PATH = os.path.join(build_bucket_path, GCPConfig.STORAGE_BASE_PATH)
    if production_base_path:
        GCPConfig.STORAGE_BASE_PATH = production_base_path

    # Relevant when triggering test upload flow
    if production_bucket_name:
        GCPConfig.PRODUCTION_BUCKET = production_bucket_name

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    packs_results_file_path = os.path.join(os.path.dirname(packs_artifacts_path), BucketUploadFlow.PACKS_RESULTS_FILE)
    pc_successful_packs_dict, pc_failed_packs_dict, pc_successful_private_packs_dict, \
        pc_uploaded_images = get_upload_data(packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING)

    logging.debug(f"Successful packs from Prepare Content: {pc_successful_packs_dict}")
    logging.debug(f"Failed packs from Prepare Content: {pc_failed_packs_dict}")
    logging.debug(f"Successful private packs from Prepare Content: {pc_successful_private_packs_dict}")
    logging.debug(f"Images from Prepare Content: {pc_uploaded_images}")

    # Check whether an upload is needed at all
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict, pc_successful_private_packs_dict,
                            pc_uploaded_images)

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    # Starting iteration over packs
    for pack in packs_list:
        # Check whether the pack failed to upload in the Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket, build_bucket, pc_uploaded_images)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket, pc_uploaded_images)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(production_bucket, build_bucket,
                                                                              pc_successful_packs_dict)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number, extract_destination_path, build_bucket)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob, build_index_generation, production_bucket,
               build_bucket)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(packs_list)

    # Store successful and failed packs list in CircleCI artifacts
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path, BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE, successful_packs, failed_packs,
        list(pc_successful_private_packs_dict)
    )

    # verify that the packs that succeeded in Prepare Content are the ones that were copied
    verify_copy(successful_packs, pc_successful_packs_dict)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
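
The copy helpers above (copy_index, copy_id_set, Pack.copy_and_upload_to_storage) ultimately come down to copying blobs from the build bucket to the production bucket. A hedged sketch of that core operation with google-cloud-storage; the bucket and object names in the usage comment are placeholders:

from google.cloud import storage


def copy_blob_between_buckets(src_bucket, src_blob_name, dst_bucket, dst_blob_name):
    """Server-side copy of one object from the build bucket to the production bucket (sketch)."""
    src_blob = src_bucket.blob(src_blob_name)
    # copy_blob copies within GCS; nothing is downloaded locally
    src_bucket.copy_blob(src_blob, dst_bucket, new_name=dst_blob_name)


# illustrative usage:
# client = storage.Client()
# copy_blob_between_buckets(client.bucket('build-bucket'), 'content/packs/HelloWorld/1.0.0/HelloWorld.zip',
#                           client.bucket('prod-bucket'), 'content/packs/HelloWorld/1.0.0/HelloWorld.zip')
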
Example No. 3
def main():
    install_logging('upload_packs_private.log')
    upload_config = option_handler()
    packs_artifacts_path = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    storage_bucket_name = upload_config.bucket_name
    private_bucket_name = upload_config.private_bucket_name
    service_account = upload_config.service_account
    target_packs = upload_config.pack_names
    build_number = upload_config.ci_build_number
    id_set_path = upload_config.id_set_path
    packs_dependencies_mapping = load_json(
        upload_config.pack_dependencies
    ) if upload_config.pack_dependencies else {}
    storage_base_path = upload_config.storage_base_path
    is_private_build = upload_config.is_private

    print(f"Packs artifact path is: {packs_artifacts_path}")

    prepare_test_directories(packs_artifacts_path)

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = private_storage_bucket if is_private_build else storage_bucket

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    # content repo client initialized
    if not is_private_build:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(
            content_repo,
            index_folder_path,
            is_bucket_upload_flow=False,
            is_private_build=True,
            force_previous_commit="")
    else:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash,
                                  remote_previous_commit_hash, storage_bucket)

    if private_bucket_name:  # Add private packs to the index
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path, pack_names, is_private_build)
    else:  # skipping private packs
        logging.info("Skipping index update of priced packs")
        private_packs = []

    # google cloud bigquery client initialized
    packs_statistic_df = None
    if not is_private_build:
        bq_client = init_bigquery_client(service_account)
        packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    # remove non-existing or invalid packs from the index and GCS
    clean_non_existing_packs(index_folder_path, private_packs,
                             default_storage_bucket)
    # starting iteration over packs
    for pack in packs_list:
        create_and_upload_marketplace_pack(
            upload_config,
            pack,
            storage_bucket,
            index_folder_path,
            packs_dependencies_mapping,
            private_bucket_name,
            private_storage_bucket=private_storage_bucket,
            content_repo=content_repo,
            current_commit_hash=current_commit_hash,
            remote_previous_commit_hash=remote_previous_commit_hash,
            packs_statistic_df=packs_statistic_df)

    # upload core packs json to bucket
    if should_upload_core_packs(storage_bucket_name):
        upload_core_packs_config(default_storage_bucket, build_number,
                                 index_folder_path)
    # finished iteration over content packs
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path, extract_destination_path,
                                private_index_blob, build_number,
                                private_packs, current_commit_hash,
                                index_generation, is_private_build)

    else:
        upload_index_to_storage(index_folder_path, extract_destination_path,
                                index_blob, build_number, private_packs,
                                current_commit_hash, index_generation)

    # upload id_set.json to bucket
    upload_id_set(default_storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
    """
    Validates that besides the 'description' and 'sections' keys - all keys in the file are sections names that appear
    in the 'sections' part of the file.
    Raises: Exception if the file has non allowed key.
    Args:
        landing_page_sections_json: The content of the landingPage_sections.json file
    """
    logging.info('Validating file keys are valid sections')
    allowed_keys = {'description', 'sections'}
    allowed_keys.update(landing_page_sections_json['sections'])
    not_allowed_key = [
        key for key in landing_page_sections_json.keys()
        if key not in allowed_keys
    ]
    assert not not_allowed_key, f'Unsupported keys found: {not_allowed_key}, please add ' \
                                f'these keys under the "sections" key or remove them.'


def parse_landing_page_sections_to_json():
    try:
        with open(LANDING_PAGE_SECTIONS_PAGE_PATH, 'r') as file:
            return json.load(file)
    except Exception:
        logging.critical('Could not parse the file as json file')
        sys.exit(1)


if __name__ in ("__main__", "__builtin__", "builtins"):
    install_logging('ValidateLandingPageSections.log')
    main()
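
A small usage sketch of the validation shown above, using the reconstructed function name and an inline sample instead of the real landingPage_sections.json:

# illustrative input mirroring the expected landingPage_sections.json shape
sample_sections = {
    'description': 'Landing page sections',
    'sections': ['Featured', 'Getting Started'],
    'Featured': ['HelloWorld'],
    'Getting Started': ['Base'],
}
validate_file_keys_are_sections(sample_sections)  # passes: every extra key is a declared section

sample_sections['Trending'] = []  # a key that is not listed under 'sections'
# validate_file_keys_are_sections(sample_sections)  # would raise AssertionError: Unsupported keys found: ['Trending'] ...
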
Example No. 5
def main():
    install_logging('Zip_Content_Packs_From_GCS.log')
    option = option_handler()
    storage_bucket_name = option.bucket_name
    zip_path = option.zip_path
    artifacts_path = option.artifacts_path
    service_account = option.service_account
    circle_build = option.circle_build
    branch_name = option.branch_name
    gcp_path = option.gcp_path
    remove_test_playbooks = option.remove_test_playbooks
    private_build = option.private
    if private_build:
        packs_dir = '/home/runner/work/content-private/content-private/content/artifacts/packs'
        zip_path = '/home/runner/work/content-private/content-private/content/temp-dir'
        if not os.path.exists(packs_dir):
            logging.debug("Packs dir not found. Creating.")
            os.mkdir(packs_dir)
        if not os.path.exists(zip_path):
            logging.debug("Temp dir not found. Creating.")
            os.mkdir(zip_path)
        artifacts_path = '/home/runner/work/content-private/content-private/content/artifacts'

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if not circle_build or not branch_name:
        # Ignore build properties
        circle_build = ''
        branch_name = ''

    if not gcp_path:
        gcp_path = BUILD_GCP_PATH

    zipped_packs = []
    success = True
    try:
        zipped_packs = download_packs_from_gcp(storage_bucket, gcp_path,
                                               zip_path, circle_build,
                                               branch_name)
    except Exception:
        logging.exception('Failed downloading packs')
        success = False

    if remove_test_playbooks:
        try:
            remove_test_playbooks_if_exist(zip_path, zipped_packs)
        except Exception:
            logging.exception('Failed removing test playbooks from packs')
            success = False

    if zipped_packs and success:
        try:
            zip_packs(zipped_packs, zip_path)
        except Exception:
            logging.exception('Failed zipping packs')
            success = False

        if success:
            logging.info('Successfully zipped packs.')
            if artifacts_path:
                # Save in the artifacts
                shutil.copy(os.path.join(zip_path, ARTIFACT_NAME),
                            os.path.join(artifacts_path, ARTIFACT_NAME))
        else:
            logging.critical('Failed zipping packs.')
            sys.exit(1)
    else:
        logging.warning('Did not find any packs to zip.')

    cleanup(zip_path)
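
zip_packs is not shown in this example; conceptually it bundles the individual pack zips downloaded from GCS into a single artifact archive. A minimal sketch using only the standard library, where ARTIFACT_NAME and the shape of zipped_packs (a list of {pack_name: path_to_pack_zip} dicts) are assumptions:

import os
from zipfile import ZIP_DEFLATED, ZipFile

ARTIFACT_NAME = 'uploadable_packs.zip'  # assumed artifact file name


def zip_packs(zipped_packs, destination_path):
    """Bundle the downloaded pack zips into one artifact archive (sketch)."""
    archive_path = os.path.join(destination_path, ARTIFACT_NAME)
    with ZipFile(archive_path, 'w', ZIP_DEFLATED) as archive:
        for pack in zipped_packs:
            for pack_name, pack_zip_path in pack.items():
                # store each pack zip under its pack name inside the artifact
                archive.write(pack_zip_path, arcname=f'{pack_name}.zip')
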
Example No. 6
def main():
    install_logging('Wait_Until_Server_Ready.log')
    global SETUP_TIMEOUT
    instance_name_to_wait_on = sys.argv[1]

    ready_ami_list = []
    env_results_path = os.path.join(ARTIFACTS_FOLDER, 'env_results.json')
    with open(env_results_path, 'r') as json_file:
        env_results = json.load(json_file)
        instance_ips = [(env.get('Role'), env.get('InstanceDNS'),
                         env.get('TunnelPort')) for env in env_results]

    loop_start_time = time.time()
    last_update_time = loop_start_time
    instance_ips_to_poll = [
        ami_instance_ip
        for ami_instance_name, ami_instance_ip, _ in instance_ips
        if ami_instance_name == instance_name_to_wait_on
    ]

    logging.info('Starting wait loop')
    try:
        while instance_ips_to_poll:
            current_time = time.time()
            exit_if_timed_out(loop_start_time, current_time)

            for ami_instance_name, ami_instance_ip, tunnel_port in instance_ips:
                if ami_instance_ip in instance_ips_to_poll:
                    url = f"https://localhost:{tunnel_port}/health"
                    method = 'GET'
                    try:
                        res = requests.request(method=method,
                                               url=url,
                                               verify=False)
                    except (requests.exceptions.RequestException,
                            requests.exceptions.HTTPError) as exp:
                        logging.error(
                            f'{ami_instance_name} encountered an error: {str(exp)}\n'
                        )
                        if SETUP_TIMEOUT != 60 * 10:
                            logging.warning(
                                'Setting SETUP_TIMEOUT to 10 minutes.')
                            SETUP_TIMEOUT = 60 * 10
                        continue
                    except Exception:
                        logging.exception(
                            f'{ami_instance_name} encountered an error, Will retry this step later'
                        )
                        continue
                    if res.status_code == 200:
                        if SETUP_TIMEOUT != 60 * 60:
                            logging.info('Resetting SETUP_TIMEOUT to an hour.')
                            SETUP_TIMEOUT = 60 * 60
                        logging.info(f'{ami_instance_name} is ready to use')
                        instance_ips_to_poll.remove(ami_instance_ip)
                    # printing the message every 30 seconds
                    elif current_time - last_update_time > PRINT_INTERVAL_IN_SECONDS:
                        logging.info(
                            f'{ami_instance_name} at ip {ami_instance_ip} is not ready yet - waiting for it to start'
                        )

            if current_time - last_update_time > PRINT_INTERVAL_IN_SECONDS:
                # The interval has passed, which means we printed a status update.
                last_update_time = current_time
            if len(instance_ips) > len(ready_ami_list):
                sleep(1)
    finally:
        instance_ips_to_download_log_files = [
            ami_instance_ip
            for ami_instance_name, ami_instance_ip, _ in instance_ips
            if ami_instance_name == instance_name_to_wait_on
        ]
        for ip in instance_ips_to_download_log_files:
            download_cloud_init_logs_from_server(ip)
            docker_login(ip)
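
exit_if_timed_out is referenced but not shown. A plausible sketch, assuming it simply aborts the job once the elapsed wait exceeds SETUP_TIMEOUT:

import logging
import sys

SETUP_TIMEOUT = 60 * 60  # assumed default of one hour


def exit_if_timed_out(loop_start_time, current_time):
    """Abort the wait loop when the server takes too long to become ready (sketch)."""
    if current_time - loop_start_time > SETUP_TIMEOUT:
        logging.critical('Timed out while waiting for the server to become ready')
        sys.exit(1)
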
Example No. 7
def main():
    install_logging('Destroy_instances.log')
    circle_artifact = sys.argv[1]
    env_file = sys.argv[2]
    instance_role = sys.argv[3]
    time_to_live = sys.argv[4]
    with open(env_file, 'r') as json_file:
        env_results = json.load(json_file)

    filtered_results = [
        env_result for env_result in env_results
        if env_result["Role"] == instance_role
    ]
    for env in filtered_results:
        logging.info(
            f'Downloading server log from {env.get("Role", "Unknown role")}')
        ssh_string = 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null {}@{} ' \
                     '"sudo chmod -R 755 /var/log/demisto"'
        scp_string = 'scp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ' \
                     '{}@{}:/var/log/demisto/server.log {} || echo "WARN: Failed downloading server.log"'

        try:
            logging.debug(
                f'Changing permissions of folder /var/log/demisto on server {env["InstanceDNS"]}'
            )
            subprocess.check_output(ssh_string.format(env["SSHuser"],
                                                      env["InstanceDNS"]),
                                    shell=True)

        except subprocess.CalledProcessError:
            logging.exception(
                f'Failed changing permissions of folder /var/log/demisto on server {env["InstanceDNS"]}'
            )

        try:
            logging.debug(
                f'Downloading server logs from server {env["InstanceDNS"]}')
            server_ip = env["InstanceDNS"].split('.')[0]
            subprocess.check_output(scp_string.format(
                env["SSHuser"], env["InstanceDNS"],
                "{}/server_{}_{}.log".format(circle_aritfact,
                                             env["Role"].replace(' ', ''),
                                             server_ip)),
                                    shell=True)

        except subprocess.CalledProcessError:
            logging.exception(
                f'Failed downloading server logs from server {env["InstanceDNS"]}'
            )

        if time_to_live:
            logging.info(
                f'Skipping - Time to live was set to {time_to_live} minutes')
            continue
        if os.path.isfile("./Tests/is_build_passed_{}.txt".format(
                env["Role"].replace(' ', ''))):
            logging.info(
                f'Destroying instance with role - {env.get("Role", "Unknown role")} and IP - '
                f'{env["InstanceDNS"]}')
            rminstance = aws_functions.destroy_instance(
                env["Region"], env["InstanceID"])
            if aws_functions.isError(rminstance):
                logging.error(rminstance['Message'])
        else:
            logging.warning(
                f'Tests failed on {env.get("Role", "Unknown role")}, keeping instance alive'
            )
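
aws_functions.destroy_instance is external to this snippet. A hedged sketch of the underlying boto3 call; the {'Message': ...} error shape is an assumption made to match the isError()/['Message'] usage above:

import boto3
from botocore.exceptions import ClientError


def destroy_instance(region, instance_id):
    """Terminate a single EC2 instance, returning an error-style dict on failure (sketch)."""
    try:
        ec2 = boto3.client('ec2', region_name=region)
        return ec2.terminate_instances(InstanceIds=[instance_id])
    except ClientError as error:
        # mimic the {'Message': ...} shape that isError() is assumed to check
        return {'Message': str(error), 'Error': True}
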
Example No. 8
def main():
    install_logging('Copy and Upload Packs.log')
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    override_all_packs = options.override_all_packs
    production_base_path = options.production_base_path
    target_packs = options.pack_names

    # Google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX,
                                     circle_branch, build_number)
    GCPConfig.BUILD_BASE_PATH = os.path.join(build_bucket_path,
                                             GCPConfig.STORAGE_BASE_PATH)
    if production_base_path:
        GCPConfig.STORAGE_BASE_PATH = production_base_path

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    successful_packs_dict, failed_packs_dict = get_successful_and_failed_packs(
        os.path.join(os.path.dirname(packs_artifacts_path),
                     PACKS_RESULTS_FILE))

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # Starting iteration over packs
    for pack in packs_list:
        # Check whether the pack failed to upload in the Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket,
                                                   build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        # Create a local copy of the pack's index changelog
        task_status = pack.create_local_changelog(build_index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(
            production_bucket, build_bucket, override_all_packs,
            pack.latest_version, successful_packs_dict)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number,
                             extract_destination_path, build_bucket)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob,
               build_index_generation, production_bucket, build_bucket)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
Example No. 9
def main():
    install_logging('Run_Tests.log', logger=logging)
    tests_settings = options_handler()
    logging.info(f"Build Name: {tests_settings.buildName}")
    logging.info(f" Build Number: {tests_settings.buildNumber}")
    manage_tests(tests_settings)
Example No. 10
        json={
            'channel': 'dmst-content-lab',
            'username': '******',
            'as_user': '******',
            'text': "Detailed list of failing instances could be found in the following link:\n"
                    "https://{}-60525392-gh.circle-artifacts.com/0/artifacts/failed_instances.txt".format(build_number)
        })


if __name__ == "__main__":
    install_logging('Instance-Test.log')
    options = options_handler()
    if options.instance_tests:
        env_results_path = os.path.join(
            os.getenv('ARTIFACTS_FOLDER', './artifacts'), 'env_results.json')
        with open(env_results_path, 'r') as json_file:
            env_results = json.load(json_file)
            server = f'https://localhost:{env_results[0]["TunnelPort"]}'

        slack_notifier(options.slack, options.secret, server, options.user,
                       options.password, options.buildUrl, options.buildNumber)
        # create this file for destroy_instances script
        with open(
                "./Tests/is_build_passed_{}.txt".format(
                    env_results[0]["Role"].replace(' ', '')), 'a'):
            pass
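
The fragment at the top of this example is the tail of a requests.post call whose beginning is not shown. A hedged reconstruction of that kind of Slack notification through the chat.postMessage Web API; the endpoint and token handling are assumptions:

import requests


def notify_failed_instances(slack_token, build_number):
    """Post the failing-instances notice to Slack (illustrative sketch)."""
    requests.post(
        'https://slack.com/api/chat.postMessage',
        headers={'Authorization': f'Bearer {slack_token}'},
        json={
            'channel': 'dmst-content-lab',
            'username': '******',
            'as_user': '******',
            'text': "Detailed list of failing instances could be found in the following link:\n"
                    "https://{}-60525392-gh.circle-artifacts.com/0/artifacts/failed_instances.txt".format(build_number),
        },
    )
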
    """
    Validates that besides the 'description' and 'sections' keys - all keys in the file are sections names that appear
    in the 'sections' part of the file.
    Raises: Exception if the file has non allowed key.
    Args:
        landing_page_sections_json: The content of the landingPage_sections.json file
    """
    logging.info('Validating file keys are valid sections')
    allowed_keys = {'description', 'sections'}
    allowed_keys.update(landing_page_sections_json['sections'])
    not_allowed_key = [
        key for key in landing_page_sections_json.keys()
        if key not in allowed_keys
    ]
    assert not not_allowed_key, f'Unsupported keys found: {not_allowed_key}, please add ' \
                                f'these keys under the "sections" key or remove them.'


def parse_landing_page_sections_to_json():
    try:
        with open(LANDING_PAGE_SECTIONS_PAGE_PATH, 'r') as file:
            return json.load(file)
    except Exception:
        logging.critical('Could not parse the file as json file')
        sys.exit(1)


if __name__ in ("__main__", "__builtin__", "builtins"):
    install_logging('ValidateLandingPageSections.log', logger=logging)
    main()
Example No. 12
def main():
    install_logging("TriggerPrivateBuild.log", logger=logging)
    # get github token parameter
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--github-token', help='Github token')
    args = arg_parser.parse_args()

    github_token = args.github_token

    # get branch name
    branches = tools.run_command("git branch")
    branch_name_regex = re.search(r"\* (.*)", branches)
    if branch_name_regex:
        branch_name = branch_name_regex.group(1)

    if branch_has_private_build_infra_change(branch_name):
        # get the workflows ids before triggering the build
        pre_existing_workflow_ids = get_dispatch_workflows_ids(
            github_token, 'master')

        # trigger private build
        payload = {
            'event_type': f'Trigger private build from content/{branch_name}',
            'client_payload': {
                'commit_sha1': branch_name,
                'is_infra_build': 'True'
            }
        }

        res = requests.post(TRIGGER_BUILD_URL,
                            headers={
                                'Accept':
                                'application/vnd.github.everest-preview+json',
                                'Authorization': f'Bearer {github_token}'
                            },
                            data=json.dumps(payload),
                            verify=False)

        if res.status_code != 204:
            logging.critical(
                f'Failed to trigger private repo build, request to '
                f'{TRIGGER_BUILD_URL} failed with error: {str(res.content)}')
            sys.exit(1)

        workflow_ids_diff = []
        for i in range(GET_WORKFLOWS_MAX_RETRIES):
            # wait 5 seconds and get the workflow ids again
            time.sleep(5)
            workflow_ids_after_dispatch = get_dispatch_workflows_ids(
                github_token, 'master')

            # compare with the first workflows list to get the current id
            workflow_ids_diff = [
                x for x in workflow_ids_after_dispatch
                if x not in pre_existing_workflow_ids
            ]
            if workflow_ids_diff:
                break

        if len(workflow_ids_diff) == 1:
            workflow_id = workflow_ids_diff[0]
            logging.success(
                f'Private repo build triggered successfully, workflow id: {workflow_id}\n URL:'
                f' {WORKFLOW_HTML_URL}/{workflow_id}')

            # write the workflow id to text file to use it in get_private_build_status.py
            with open(PRIVATE_REPO_WORKFLOW_ID_FILE, "w") as f:
                f.write(str(workflow_id))
            sys.exit(0)

        else:
            logging.critical('Could not find the private repo workflow')
            sys.exit(1)

    else:
        logging.info('Build private repo skipped')
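
get_dispatch_workflows_ids is not defined in this snippet. A hedged sketch using the GitHub Actions REST API; the repository slug below is a placeholder for whatever constant the real script uses:

import requests

PRIVATE_REPO = 'demisto/content-private'  # placeholder repository slug


def get_dispatch_workflows_ids(github_token, branch):
    """Return the ids of repository_dispatch workflow runs on the given branch (sketch)."""
    res = requests.get(
        f'https://api.github.com/repos/{PRIVATE_REPO}/actions/runs',
        headers={'Authorization': f'Bearer {github_token}'},
        params={'event': 'repository_dispatch', 'branch': branch},
        verify=False,
    )
    res.raise_for_status()
    return [run['id'] for run in res.json().get('workflow_runs', [])]
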
Example No. 13
def main():
    install_logging('Run_Tests.log')
    tests_settings = options_handler()
    manage_tests(tests_settings)