def parse_landing_page_sections_to_json():
    try:
        with open(LANDING_PAGE_SECTIONS_PAGE_PATH, 'r') as file:
            return json.load(file)
    except Exception:
        logging.critical('Could not parse the file as JSON')
        sys.exit(1)
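# --- Illustrative context (assumption, not part of the original example): the
# function above relies on these module-level names. The path value below is a
# placeholder, not taken from the original.
import json
import logging
import sys

LANDING_PAGE_SECTIONS_PAGE_PATH = 'Tests/Marketplace/landingPage_sections.json'  # hypothetical value

# Example call: returns the parsed dict, or exits the process on any error.
landing_page_sections = parse_landing_page_sections_to_json()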
Example #2
def main():
    install_logging("Validate index.log", logger=logging)
    options = options_handler()
    exit_code = 0
    index_data, index_file_path = get_index_json_data(
        service_account=options.service_account,
        production_bucket_name=options.production_bucket_name,
        extract_path=options.extract_path,
        storage_base_path=options.storage_base_path)

    # Validate index.json file
    index_is_valid = check_index_data(index_data)
    log_message_if_statement(
        statement=index_is_valid,
        error_message=f"The packs in the {index_file_path} file were found invalid.",
        success_message=f"{index_file_path} file was found valid")

    # Validate commit hash in master history
    commit_hash_is_valid = log_message_if_statement(
        statement=("commit" in index_data),
        error_message="No commit field was found in the index.json")
    if commit_hash_is_valid:
        commit_hash_is_valid = check_commit_in_branch_history(
            index_data.get("commit", ""), options.circle_branch)

    if not all([index_is_valid, commit_hash_is_valid]):
        logging.critical("Index content is invalid. Aborting.")
        exit_code = 1

    # Delete the service account file before exiting
    if exit_code == 1 and os.path.exists(options.service_account):
        os.remove(options.service_account)
    sys.exit(exit_code)
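# --- Hedged sketch (assumption, inferred from how main() uses it) of the
# log_message_if_statement helper: log success_message when the statement is
# truthy, error_message otherwise, and return the statement so that callers
# can chain validation results.
def log_message_if_statement(statement, error_message, success_message=None):
    if statement:
        if success_message:
            logging.success(success_message)
    else:
        logging.error(error_message)
    return statement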
Example #3
def upload_core_packs_config(production_bucket: Bucket, build_number: str, extract_destination_path: str,
                             build_bucket: Bucket, storage_base_path: str, build_bucket_base_path: str):
    """Uploads the corepacks.json file to the target bucket. This files contains all of the server's core packs, under
     the key corepacks, and specifies which core packs should be upgraded upon XSOAR upgrade, under the key upgradeCorePacks.

     Args:
        production_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is uploaded.
        build_number (str): CircleCI build number.
        extract_destination_path (str): Full path of the folder to extract the corepacks file to.
        build_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is downloaded from.
        storage_base_path (str): the path to upload the corepacks.json to.
        build_bucket_base_path (str): the path in the build bucket of the corepacks.json.

    """
    # download the corepacks.json stored in the build bucket to temp dir
    build_corepacks_file_path = os.path.join(build_bucket_base_path, GCPConfig.CORE_PACK_FILE_NAME)
    build_corepacks_blob = build_bucket.blob(build_corepacks_file_path)

    if not build_corepacks_blob.exists():
        logging.critical(f"{GCPConfig.CORE_PACK_FILE_NAME} is missing in {build_bucket.name} bucket, exiting...")
        sys.exit(1)

    temp_corepacks_file_path = os.path.join(extract_destination_path, GCPConfig.CORE_PACK_FILE_NAME)
    build_corepacks_blob.download_to_filename(temp_corepacks_file_path)
    corepacks_file = load_json(temp_corepacks_file_path)

    # change the storage paths to the prod bucket
    corepacks_list = corepacks_file.get('corePacks', [])
    try:
        corepacks_list = [os.path.join(GCPConfig.GCS_PUBLIC_URL, production_bucket.name, storage_base_path,
                                       LATEST_ZIP_REGEX.findall(corepack_path)[0]) for corepack_path in corepacks_list]
    except IndexError:
        corepacks_list_str = '\n'.join(corepacks_list)
        logging.exception(f"GCS paths in build bucket corepacks.json file are not of format: "
                          f"{GCPConfig.GCS_PUBLIC_URL}/<BUCKET_NAME>/.../content/packs/...\n"
                          f"List of build bucket corepacks paths:\n{corepacks_list_str}")
        sys.exit(1)

    # construct core pack data with public gcs urls
    core_packs_data = {
        'corePacks': corepacks_list,
        'upgradeCorePacks': corepacks_file.get('upgradeCorePacks', []),
        'buildNumber': build_number
    }

    # upload core pack json file to gcs
    prod_corepacks_file_path = os.path.join(storage_base_path, GCPConfig.CORE_PACK_FILE_NAME)
    prod_corepacks_blob = production_bucket.blob(prod_corepacks_file_path)
    prod_corepacks_blob.upload_from_string(json.dumps(core_packs_data, indent=4))

    logging.success(f"Finished uploading {GCPConfig.CORE_PACK_FILE_NAME} to storage.")
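# --- Illustrative assumption: LATEST_ZIP_REGEX is defined elsewhere in the
# module. A pattern of roughly this shape would capture the
# "content/packs/<pack>/<version>/<pack>.zip" suffix that the list
# comprehension above re-roots onto the production bucket:
import re
LATEST_ZIP_REGEX = re.compile(r'(content/packs/[A-Za-z0-9._-]+/\d+\.\d+\.\d+/[A-Za-z0-9._-]+\.zip)')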
Example #4
def main():
    install_logging("GetPrivateBuildStatus.log", logger=logging)

    if not os.path.isfile(PRIVATE_REPO_WORKFLOW_ID_FILE):
        logging.info('Private repo build skipped')
        sys.exit(0)

    # get the workflow id from the file
    with open(PRIVATE_REPO_WORKFLOW_ID_FILE, 'r') as f:
        workflow_id = f.read()

    # get github_token parameter
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--github-token', help='Github token')
    args = arg_parser.parse_args()
    github_token = args.github_token

    # get the workflow status
    status, conclusion, step = get_workflow_status(github_token, workflow_id)

    # initialize timer
    start = time.time()
    elapsed: float = 0

    # poll the workflow status while it is in progress
    while status in ['queued', 'in_progress'] and elapsed < GET_WORKFLOWS_TIMEOUT_THRESHOLD:
        logging.info(
            f'Workflow {workflow_id} status is {status}, current step: {step}')
        time.sleep(60)
        status, conclusion, step = get_workflow_status(github_token, workflow_id)
        elapsed = time.time() - start

    if elapsed >= GET_WORKFLOWS_TIMEOUT_THRESHOLD:
        logging.critical(
            f'Timeout reached while waiting for private content build to complete, build url:'
            f' {WORKFLOW_HTML_URL}/{workflow_id}')
        sys.exit(1)

    logging.info(f'Workflow {workflow_id} conclusion is {conclusion}')
    if conclusion != 'success':
        logging.critical(
            f'Private repo build failed, build url: {WORKFLOW_HTML_URL}/{workflow_id}'
        )
        sys.exit(1)

    logging.success('Private repo build finished successfully')
    sys.exit(0)
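# --- Illustrative module-level constants assumed by main() above; the values
# below are placeholders, not taken from the original.
GET_WORKFLOWS_TIMEOUT_THRESHOLD = 60 * 60  # seconds; give up after an hour of polling
WORKFLOW_HTML_URL = 'https://github.com/demisto/content-private/actions/runs'  # hypothetical URL
PRIVATE_REPO_WORKFLOW_ID_FILE = 'PRIVATE_REPO_WORKFLOW_ID.txt'  # hypothetical filename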
Example #5
def search_pack_and_its_dependencies(client: demisto_client,
                                     pack_id: str,
                                     packs_to_install: list,
                                     installation_request_body: list,
                                     lock: Lock):
    """ Searches for the pack of the specified file path, as well as its dependencies,
        and updates the list of packs to be installed accordingly.

    Args:
        client (demisto_client): The configured client to use.
        pack_id (str): The id of the pack to be installed.
        packs_to_install (list): A list of the packs to be installed in this iteration.
        installation_request_body (list): A list of packs to be installed, in the request format.
        lock (Lock): A lock object.
    """
    pack_data = {}
    if pack_id not in packs_to_install:
        pack_display_name = get_pack_display_name(pack_id)
        if pack_display_name:
            pack_data = search_pack(client, pack_display_name, pack_id, lock)
        if pack_data is None:
            pack_data = {
                'id': pack_id,
                'version': '1.0.0'
            }

    if pack_data:
        dependencies = get_pack_dependencies(client, pack_data, lock)

        current_packs_to_install = [pack_data]
        if dependencies:
            # Check that the dependencies don't include a deprecated pack:
            has_deprecated_dependency = False
            for dependency in dependencies:
                pack_path = os.path.join(PACKS_FOLDER, dependency.get('id'))
                if is_pack_deprecated(pack_path):
                    logging.critical(f'Pack {pack_id} depends on pack {dependency.get("id")}, '
                                     f'which is deprecated.')
                    global SUCCESS_FLAG
                    SUCCESS_FLAG = False
                    has_deprecated_dependency = True
            if not has_deprecated_dependency:
                # Extend once, outside the loop, so the dependency list is not
                # appended repeatedly (once per non-deprecated dependency).
                current_packs_to_install.extend(dependencies)

        with lock:
            for pack in current_packs_to_install:
                if pack['id'] not in packs_to_install:
                    packs_to_install.append(pack['id'])
                    installation_request_body.append(pack)
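# --- Illustrative usage (assumption, not from the original): the shared lists
# plus the Lock argument suggest one thread per pack id, all appending to the
# same installation request.
from threading import Lock, Thread

client = demisto_client.configure(base_url='https://xsoar.example.com', api_key='***')  # hypothetical server
packs_to_install: list = []
installation_request_body: list = []
lock = Lock()

threads = [Thread(target=search_pack_and_its_dependencies,
                  args=(client, pack_id, packs_to_install, installation_request_body, lock))
           for pack_id in ('HelloWorld', 'CommonScripts')]  # hypothetical pack ids
for thread in threads:
    thread.start()
for thread in threads:
    thread.join()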
Example #6
def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path):
    """Blocks until the dummy index lock is free, then takes it."""
    total_seconds_waited = 0
    while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path):
        if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK:
            logging.critical("Failed to acquire lock: exceeded the maximum wait time.")
            sys.exit(1)

        if total_seconds_waited % 60 == 0:
            # Print a message every minute so the machine is not killed for lack of output
            logging.info("Waiting to acquire lock.")

        total_seconds_waited += 10
        time.sleep(10)

    lock_dummy_index(public_storage_bucket, dummy_index_lock_path)
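# --- Hedged sketch (assumption) of the two helpers used above, modeled as a
# GCS lock file: the lock is held for as long as the blob exists.
def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path):
    return public_storage_bucket.blob(dummy_index_lock_path).exists()


def lock_dummy_index(public_storage_bucket, dummy_index_lock_path):
    public_storage_bucket.blob(dummy_index_lock_path).upload_from_string('locked')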
Example #7
def get_workflow_status(github_token: str, workflow_id: str) -> Tuple[str, str, str]:
    """ Returns a set with the workflow job status, job conclusion and current step that running now in the job
        for the given workflow id.

    Args:
        github_token: Github bearer token.
        workflow_id: Github workflow id.

    Returns: (Workflow job status, Workflow job conclusion - only if the job completed otherwise its None,
              Current step that running now - only if the job is running otherwise its None )

    """

    # get the workflow run status
    workflow_url = GET_WORKFLOW_URL.format(workflow_id)
    res = requests.get(workflow_url,
                       headers={'Authorization': f'Bearer {github_token}'},
                       verify=False)
    if res.status_code != 200:
        logging.critical(
            f'Failed to get the private repo workflow; request to {workflow_url} failed with error: {str(res.content)}'
        )
        sys.exit(1)

    # parse response
    try:
        workflow = json.loads(res.content)
    except ValueError:
        logging.exception('Unable to parse the private repo workflows response')
        sys.exit(1)

    # get the workflow job from the response to know what step is in progress now
    jobs = workflow.get('jobs', [])

    if not jobs:
        logging.critical(
            f'Failed to get the private repo workflow jobs, build url: {WORKFLOW_HTML_URL}/{workflow_id}'
        )
        sys.exit(1)

    curr_job = jobs[0]
    job_status = curr_job.get('status')
    job_conclusion = curr_job.get('conclusion')

    if job_status == 'completed':
        return 'completed', job_conclusion, ''

    # check for failure steps
    failure_steps = [
        step for step in curr_job.get('steps', [])
        if step.get('conclusion') == 'failure'
    ]
    if failure_steps:
        return 'completed', 'failure', failure_steps[0].get('name')

    # if the job is still in progress - get the current step (defaulting to an
    # empty dict in case no step is marked in_progress at this instant)
    curr_step = next((step for step in curr_job.get('steps', [])
                      if step.get('status') == 'in_progress'), {})

    return job_status, job_conclusion, curr_step.get('name')
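# --- Illustrative assumption: GET_WORKFLOW_URL is defined elsewhere. Since the
# response is read through workflow.get('jobs', ...), it presumably points at
# GitHub's "list jobs for a workflow run" endpoint (repo name hypothetical):
GET_WORKFLOW_URL = 'https://api.github.com/repos/demisto/content-private/actions/runs/{}/jobs'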
Example #8
def main():
    install_logging("TriggerPrivateBuild.log", logger=logging)
    # get github token parameter
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--github-token', help='Github token')
    args = arg_parser.parse_args()

    github_token = args.github_token

    # get the current branch name
    branches = tools.run_command("git branch")
    branch_name_regex = re.search(r"\* (.*)", branches)
    branch_name = branch_name_regex.group(1) if branch_name_regex else ''

    if branch_has_private_build_infra_change(branch_name):
        # get the workflows ids before triggering the build
        pre_existing_workflow_ids = get_dispatch_workflows_ids(
            github_token, 'master')

        # trigger private build
        payload = {
            'event_type': f'Trigger private build from content/{branch_name}',
            'client_payload': {
                'commit_sha1': branch_name,
                'is_infra_build': 'True'
            }
        }

        res = requests.post(TRIGGER_BUILD_URL,
                            headers={
                                'Accept': 'application/vnd.github.everest-preview+json',
                                'Authorization': f'Bearer {github_token}'
                            },
                            data=json.dumps(payload),
                            verify=False)

        if res.status_code != 204:
            logging.critical(
                f'Failed to trigger private repo build, request to '
                f'{TRIGGER_BUILD_URL} failed with error: {str(res.content)}')
            sys.exit(1)

        workflow_ids_diff = []
        for _ in range(GET_WORKFLOWS_MAX_RETRIES):
            # wait 5 seconds and get the workflow ids again
            time.sleep(5)
            workflow_ids_after_dispatch = get_dispatch_workflows_ids(
                github_token, 'master')

            # compare with the first workflows list to get the current id
            workflow_ids_diff = [
                x for x in workflow_ids_after_dispatch
                if x not in pre_existing_workflow_ids
            ]
            if workflow_ids_diff:
                break

        if len(workflow_ids_diff) == 1:
            workflow_id = workflow_ids_diff[0]
            logging.success(
                f'Private repo build triggered successfully, workflow id: {workflow_id}\n URL:'
                f' {WORKFLOW_HTML_URL}/{workflow_id}')

            # write the workflow id to text file to use it in get_private_build_status.py
            with open(PRIVATE_REPO_WORKFLOW_ID_FILE, "w") as f:
                f.write(str(workflow_id))
            sys.exit(0)

        else:
            logging.critical('Could not find the private repo workflow')
            sys.exit(1)

    else:
        logging.info('Private repo build skipped')
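# --- Hedged sketch (assumption) of the get_dispatch_workflows_ids helper used
# above: list recent repository_dispatch workflow runs on a branch and return
# their ids. GET_WORKFLOWS_URL is a hypothetical ".../actions/runs" endpoint.
def get_dispatch_workflows_ids(github_token: str, branch: str) -> list:
    res = requests.get(GET_WORKFLOWS_URL,
                       headers={'Authorization': f'Bearer {github_token}'},
                       params={'branch': branch, 'event': 'repository_dispatch'},
                       verify=False)
    workflow_runs = json.loads(res.content).get('workflow_runs', [])
    return [run.get('id') for run in workflow_runs]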