Example #1
def generate_deploy_frontend(stage, edp, artifact_location, purge_cache):
    """
    Adds a job to the given stage that deploys a frontend app to an S3 bucket.
    """
    deploy_job = stage.ensure_job(
        constants.DEPLOY_FRONTEND_JOB_NAME_TPL(
            env=edp.environment,
            app=edp.play,
        )
    )
    tasks.generate_package_install(deploy_job, 'tubular')
    tasks.retrieve_artifact(artifact_location, deploy_job)
    cmd_args = [
        '--env-config-file',
        '{}/frontends/{}/{}_config.yml'.format(
            constants.INTERNAL_CONFIGURATION_LOCAL_DIR,
            edp.play,
            edp.environment,
        ),
        '--app-name',
        edp.play,
        '--app-dist',
        '{}/{}'.format(
            constants.ARTIFACT_PATH,
            constants.FRONTEND_DIST_DIR_NAME
        ),
    ]
    if purge_cache:
        cmd_args.append('--purge-cache')

    deploy_job.add_task(tasks.tubular_task(
        'frontend_deploy.py',
        cmd_args,
        working_dir=None,
    ))
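A minimal usage sketch for this generator, assuming the EDP and ArtifactLocation helpers from edxpipelines.utils and a GoCD server reachable via gomatic; every concrete name below is illustrative, not from the source:

# Hypothetical wiring for the frontend deploy job (all names are assumptions).
from gomatic import GoCdConfigurator, HostRestClient
from edxpipelines.utils import EDP, ArtifactLocation

configurator = GoCdConfigurator(HostRestClient('gocd.example.com'))
pipeline = configurator.ensure_pipeline_group('frontends') \
                       .ensure_replacement_of_pipeline('deploy_profile_frontend')
edp = EDP(environment='stage', deployment='edx', play='profile')
artifact_location = ArtifactLocation(
    'build_profile_frontend',  # upstream pipeline (assumed)
    'build_stage',             # upstream stage (assumed)
    'build_job',               # upstream job (assumed)
    'dist',                    # artifact name (assumed)
)
deploy_stage = pipeline.ensure_stage('deploy_frontend_stage')
generate_deploy_frontend(deploy_stage, edp, artifact_location, purge_cache=False)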
Example #2
def make_release_candidate(edxapp_group, config):
    """
    Variables needed for this pipeline:
    - git_token
    """
    pipeline = edxapp_group.ensure_replacement_of_pipeline(
        'edxapp_cut_release_candidate')

    edx_platform_master = EDX_PLATFORM(material_name='edx-platform',
                                       branch=None,
                                       ignore_patterns=frozenset())
    pipeline.ensure_material(edx_platform_master)
    pipeline.ensure_material(TUBULAR())
    stage = pipeline.ensure_stage(constants.MAKE_RELEASE_CANDIDATE_STAGE_NAME)
    job = stage.ensure_job(constants.MAKE_RELEASE_CANDIDATE_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    tasks.generate_merge_branch(
        pipeline,
        job,
        config['git_token'],
        'edx',
        'edx-platform',
        "origin/{}".format(edx_platform_master.branch),
        EDX_PLATFORM().branch,
        fast_forward_only=True,
        reference_repo='edx-platform',
    )

    # These two options together make sure that the pipeline only triggers
    # from the timer trigger.
    pipeline.set_timer('0 0/5 15-18 ? * MON-FRI', only_on_changes=True)
    stage.set_has_manual_approval()

    return pipeline
Example #3
def generate_rollback_asgs(stage, edp, deployment_artifact_location, config):
    """
    Generates a job for rolling back ASGs (code).

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        edp (EDP): The EDP that this job should roll back.
        deployment_artifact_location (edxpipelines.utils.ArtifactLocation): Where to find
            the AMI artifact to roll back.
        config (dict): Environment-independent secure config.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.ROLLBACK_ASGS_JOB_NAME_TPL(edp))

    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Retrieve build info from the upstream deploy stage.
    tasks.retrieve_artifact(deployment_artifact_location, job)
    deployment_artifact_path = path_to_artifact(deployment_artifact_location.file_name)

    tasks.generate_rollback_asg(
        job,
        deployment_artifact_path,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
    )

    return job
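A hedged sketch of calling this rollback generator; the upstream artifact coordinates and config keys mirror the lookups in the function body but are otherwise assumptions:

# Hypothetical rollback wiring (pipeline object created elsewhere).
from edxpipelines.utils import EDP, ArtifactLocation

edp = EDP(environment='prod', deployment='edx', play='edxapp')  # illustrative
deployment_artifact_location = ArtifactLocation(
    'deploy_pipeline', 'deploy_stage', 'deploy_job',  # upstream coordinates (assumed)
    'ami_deploy_info.yml',                            # build-info file name (assumed)
)
config = {
    'asgard_api_endpoints': 'https://asgard.example.com/task',  # placeholder
    'asgard_token': 'REDACTED',
    'aws_access_key_id': 'REDACTED',
    'aws_secret_access_key': 'REDACTED',
}
rollback_stage = pipeline.ensure_stage('rollback_asgs_stage')
generate_rollback_asgs(rollback_stage, edp, deployment_artifact_location, config)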
Example #4
def generate_fetch_tag_name(stage, acquia_env):
    """
    Creates a job that fetches the tag name from a Drupal environment.

    Arguments:
        stage (gomatic.Stage): The stage to which the job is being added.
        acquia_env (str): The name of the environment from which the tag name should
            be fetched.

    Returns:
        gomatic.gocd.pipelines.Job: The new job.

    """
    fetch_tag_job = stage.ensure_job(constants.FETCH_TAG_JOB_NAME)
    fetch_tag_job.ensure_resource("edxMarketing")
    tasks.generate_package_install(fetch_tag_job, 'tubular')
    tasks.generate_target_directory(fetch_tag_job)
    path_name = '../target/{env}_tag_name.txt'
    marketing_tasks.generate_fetch_tag(fetch_tag_job, acquia_env, path_name)

    artifact_path = 'target/{tag_name}.txt'.format(
        tag_name=constants.ACQUIA_ENV_TAG_NAME.format(acquia_env=acquia_env)
    )
    fetch_tag_job.ensure_artifacts(set([BuildArtifact(artifact_path)]))

    return fetch_tag_job
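A short usage sketch, assuming a pipeline object and the constants module used above; the environment name is illustrative:

# Hypothetical usage: fetch the tag currently deployed to Acquia 'prod'.
fetch_stage = pipeline.ensure_stage(constants.FETCH_TAG_STAGE_NAME)
fetch_job = generate_fetch_tag_name(fetch_stage, 'prod')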
Example #5
def generate_build_frontend(stage, edp):
    """
    Adds a job to the given stage that builds a frontend app.
    """
    app_artifact_path = constants.FRONTEND_DIST_DIR_PATH_TPL(name=edp.play)
    build_job = stage.ensure_job(
        constants.BUILD_FRONTEND_JOB_NAME_TPL(
            env=edp.environment,
            app=edp.play,
        )
    )
    build_job.ensure_resource(constants.FRONTEND_RESOURCE)
    tasks.generate_package_install(build_job, 'tubular')
    build_job.add_task(tasks.tubular_task(
        'frontend_build.py',
        [
            '--common-config-file',
            '{}/frontends/common/{}_config.yml'.format(
                constants.INTERNAL_CONFIGURATION_LOCAL_DIR,
                edp.environment,
            ),
            '--env-config-file',
            '{}/frontends/{}/{}_config.yml'.format(
                constants.INTERNAL_CONFIGURATION_LOCAL_DIR,
                edp.play,
                edp.environment,
            ),
            '--app-name',
            edp.play,
            '--version-file',
            '{}/dist/version.json'.format(edp.play)
        ],
        working_dir=None,
    ))
    build_job.ensure_artifacts(set([BuildArtifact(app_artifact_path)]))
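Build and deploy compose naturally: the dist directory published here is what Example #1's deploy job retrieves. A hedged sketch of that chaining, with all concrete names assumed:

# Hypothetical build stage feeding the deploy job from Example #1.
from edxpipelines.utils import EDP

edp = EDP(environment='stage', deployment='edx', play='profile')  # illustrative
build_stage = pipeline.ensure_stage('build_frontend_stage')
generate_build_frontend(build_stage, edp)
# A downstream pipeline can then point an ArtifactLocation at this
# stage/job pair to fetch constants.FRONTEND_DIST_DIR_NAME.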
Example #6
def make_release_candidate(edxapp_group, config):
    """
    Variables needed for this pipeline:
    - git_token
    """
    pipeline = edxapp_group.ensure_replacement_of_pipeline('edxapp_cut_release_candidate')

    edx_platform_master = EDX_PLATFORM(material_name='edx-platform', branch=None, ignore_patterns=frozenset())
    pipeline.ensure_material(edx_platform_master)
    pipeline.ensure_material(TUBULAR())
    stage = pipeline.ensure_stage(constants.MAKE_RELEASE_CANDIDATE_STAGE_NAME)
    job = stage.ensure_job(constants.MAKE_RELEASE_CANDIDATE_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    tasks.generate_merge_branch(
        pipeline,
        job,
        config['git_token'],
        'edx',
        'edx-platform',
        "origin/{}".format(edx_platform_master.branch),
        EDX_PLATFORM().branch,
        fast_forward_only=True,
        reference_repo='edx-platform',
    )

    # These two options together make sure that the pipeline only triggers
    # from the timer trigger.
    # Times are UTC and run every 5 minutes.  During EDT 11:00am-5:55pm, during EST 10:00am-4:55pm
    pipeline.set_timer('0 0/5 15-21 ? * MON-FRI', only_on_changes=True)
    stage.set_has_manual_approval()

    return pipeline
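The timer string is a Quartz-style cron expression, which GoCD uses for pipeline timers; a brief annotated restatement:

# Field order: seconds minutes hours day-of-month month day-of-week.
# '0 0/5 15-21 ? * MON-FRI' fires at second 0 of every 5th minute between
# 15:00 and 21:55 UTC, Monday through Friday; only_on_changes=True skips
# runs when no material has new commits, so the timer plus the
# manual-approval stage make the timer the sole automatic trigger.
pipeline.set_timer('0 0/5 15-21 ? * MON-FRI', only_on_changes=True)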
Example #7
def generate_run_jenkins_job(stage, config):
    """
    Generates a Job that runs a Jenkins job.

    Args:
        stage (gomatic.Stage): Stage to which the Job will be added.
        config (dict): Environment-specific secure config.

    Returns:
        gomatic.Job
    """
    job = stage.ensure_job('run_jenkins_job')
    # FIXME: Remove once https://github.com/gocd-contrib/gomatic/pull/27 is released.
    # pylint: disable=protected-access
    job.ensure_unencrypted_secure_environment_variables(
        {
            'JENKINS_USER_TOKEN': config['jenkins_user_token'],
            'JENKINS_JOB_TOKEN': config['jenkins_job_token'],
        }
    )

    tasks.generate_package_install(job, 'tubular')
    tasks.trigger_jenkins_build(
        job,
        config['jenkins_url'],
        config['jenkins_username'],
        config['jenkins_job_name'],
    )

    return job
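A sketch of the config dict this generator expects, with keys taken from the lookups above and placeholder values:

# Hypothetical config for generate_run_jenkins_job (values are placeholders).
config = {
    'jenkins_user_token': 'REDACTED',
    'jenkins_job_token': 'REDACTED',
    'jenkins_url': 'https://jenkins.example.com',
    'jenkins_username': 'gocd-bot',
    'jenkins_job_name': 'downstream-job',
}
job = generate_run_jenkins_job(stage, config)  # stage created elsewhere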
Example #8
def generate_find_and_advance_release(
        pipeline,
        advance_pipeline_name,
        advance_pipeline_stage_name,
        gocd_user,
        gocd_password,
        gocd_url,
        slack_token,
        slack_room=constants.SLACK_ROOM
):
    """
    Generates a stage used to find the next release to advance and "manually" advance it.

    Args:
        pipeline (gomatic.Pipeline):
        advance_pipeline_name (str): Name of pipeline to advance.
        advance_pipeline_stage_name (str): Name of stage within pipeline to advance.
        gocd_user (str): GoCD username
        gocd_password (str): GoCD user's password
        gocd_url (str): URL of the GoCD instance
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.RELEASE_ADVANCER_STAGE_NAME)
    job = stage.ensure_job(constants.RELEASE_ADVANCER_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    # Add task to generate the directory where the artifact file will be written.
    tasks.generate_target_directory(job)

    pipeline.ensure_unencrypted_secure_environment_variables(
        {
            'GOCD_PASSWORD': gocd_password
        }
    )

    job.ensure_encrypted_environment_variables(
        {
            'SLACK_TOKEN': slack_token,
        }
    )

    tasks.generate_find_and_advance_release(
        job,
        gocd_user,
        gocd_url,
        advance_pipeline_name,
        advance_pipeline_stage_name,
        slack_room,
        out_file=constants.FIND_ADVANCE_PIPELINE_OUT_FILENAME
    )

    return stage
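A hedged invocation sketch; the pipeline/config wiring is assumed and the target pipeline/stage names are illustrative:

# Hypothetical call to advance a named release pipeline.
stage = generate_find_and_advance_release(
    pipeline,
    advance_pipeline_name='prerelease_edxapp_materials_latest',  # assumed target
    advance_pipeline_stage_name='initial_verification',          # assumed stage
    gocd_user=config['gocd_username'],
    gocd_password=config['gocd_password'],
    gocd_url=config['gocd_url'],
    slack_token=config['slack_token'],
)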
Example #9
def generate_e2e_test_stage(pipeline, config):
    """
    Add a stage that runs end-to-end tests against edxapp to the specified ``pipeline``.

    Required Config Parameters:
        jenkins_user_token
        jenkins_job_token
        jenkins_user_name
    """
    # For now, you can only trigger builds on a single jenkins server, because you can only
    # define a single username/token.
    # And all the jobs that you want to trigger need the same job token defined.
    # TODO: refactor when required so that each job can define their own user and job tokens
    pipeline.ensure_unencrypted_secure_environment_variables({
        'JENKINS_USER_TOKEN': config['jenkins_user_token'],
        'JENKINS_JOB_TOKEN': config['jenkins_job_token']
    })

    # Create the stage with the Jenkins jobs
    jenkins_stage = pipeline.ensure_stage(
        constants.JENKINS_VERIFICATION_STAGE_NAME)
    jenkins_user_name = config['jenkins_user_name']

    jenkins_url = "https://build.testeng.edx.org"
    jenkins_job_timeout = 60 * 60

    e2e_tests = jenkins_stage.ensure_job('edx-e2e-test')
    e2e_tests.timeout = str(jenkins_job_timeout + 60)
    tasks.generate_package_install(e2e_tests, 'tubular')
    tasks.trigger_jenkins_build(
        e2e_tests,
        jenkins_url,
        jenkins_user_name,
        'edx-e2e-tests',
        {},
        timeout=jenkins_job_timeout,
    )

    microsites_tests = jenkins_stage.ensure_job('microsites-staging-tests')
    microsites_tests.timeout = str(jenkins_job_timeout + 60)
    tasks.generate_package_install(microsites_tests, 'tubular')
    tasks.trigger_jenkins_build(
        microsites_tests,
        jenkins_url,
        jenkins_user_name,
        'microsites-staging-tests',
        {
            'CI_BRANCH': 'kashif/white_label',
        },
        timeout=jenkins_job_timeout,
    )
Example #10
def generate_cleanup_dangling_instances(pipeline,
                                        aws_access_key_id,
                                        aws_secret_access_key,
                                        name_match_pattern,
                                        max_run_hours,
                                        skip_if_tag,
                                        ec2_region=constants.EC2_REGION,
                                        runif='any',
                                        manual_approval=False):
    """
    Generate the stage that terminates all EC2 instances running for longer than a specified time.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the instance cleanup stage.
        aws_access_key_id (str): The AWS access key ID
        aws_secret_access_key (str): The AWS secret access key
        name_match_pattern (str): pattern to match the name of the instances that should be terminated
        max_run_hours (int): number of hours that should pass before terminating matching instances
        skip_if_tag (str): if this tag exists on an instance, it will not be terminated
        ec2_region (str): the EC2 region to connect to
        runif (str): one of ['passed', 'failed', 'any'] Default: any - controls when the
            stage's terminate task is triggered in the pipeline
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage

    """
    pipeline.ensure_encrypted_environment_variables(
        {
            'AWS_ACCESS_KEY_ID': aws_access_key_id,
            'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
        }
    )

    stage = pipeline.ensure_stage(constants.INSTANCE_JANITOR_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Fetch the instance info to use in reaching the EC2 instance.
    job = stage.ensure_job(constants.INSTANCE_JANITOR_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    tasks.generate_janitor_instance_cleanup(job,
                                            name_match_pattern,
                                            max_run_hours,
                                            skip_if_tag,
                                            ec2_region,
                                            runif=runif)

    return stage
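A usage sketch under the same assumptions (pipeline and config created elsewhere); the match pattern and tag name are illustrative:

# Hypothetical janitor: reap instances named like GoCD automation runs
# that have been up more than 24 hours, unless tagged 'do_not_delete'.
stage = generate_cleanup_dangling_instances(
    pipeline,
    config['aws_access_key_id'],
    config['aws_secret_access_key'],
    name_match_pattern='gocd automation run*',  # assumed pattern
    max_run_hours=24,
    skip_if_tag='do_not_delete',                # assumed tag name
)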
Example #11
def generate_terminate_instance(pipeline,
                                instance_info_location,
                                aws_access_key_id,
                                aws_secret_access_key,
                                slack_token,
                                ec2_region=constants.EC2_REGION,
                                artifact_path=constants.ARTIFACT_PATH,
                                runif='any',
                                manual_approval=False):
    """
    Generate the stage that terminates an EC2 instance.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the terminate instance stage.
        instance_info_location (ArtifactLocation): Location of YAML file containing
            instance info from the AMI-building stage, for fetching.
        aws_access_key_id (str): The AWS access key ID
        aws_secret_access_key (str): The AWS secret access key
        slack_token (str): Token used to authenticate to Slack.
        ec2_region (str): the EC2 region to connect to
        artifact_path (str): path where artifacts are stored
        runif (str): one of ['passed', 'failed', 'any'] Default: any - controls when the
            stage's terminate task is triggered in the pipeline
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage

    """
    pipeline.ensure_encrypted_environment_variables(
        {
            'AWS_ACCESS_KEY_ID': aws_access_key_id,
            'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
        }
    )
    pipeline.ensure_environment_variables(
        {
            'ARTIFACT_PATH': artifact_path,
            'EC2_REGION': ec2_region,
        }
    )

    stage = pipeline.ensure_stage(constants.TERMINATE_INSTANCE_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Fetch the instance info to use in reaching the EC2 instance.
    job = stage.ensure_job(constants.TERMINATE_INSTANCE_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.retrieve_artifact(instance_info_location, job, constants.ARTIFACT_PATH)

    tasks.generate_ami_cleanup(job, slack_token, runif=runif)

    return stage
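A sketch of the usual pairing: the launch stage writes the instance-info YAML, and this stage fetches it via the constants used elsewhere in this document; pipeline/config wiring is assumed:

# Hypothetical terminate-instance wiring using the launch-stage constants.
from edxpipelines.utils import ArtifactLocation

instance_info_location = ArtifactLocation(
    pipeline.name,
    constants.LAUNCH_INSTANCE_STAGE_NAME,
    constants.LAUNCH_INSTANCE_JOB_NAME,
    constants.LAUNCH_INSTANCE_FILENAME,
)
stage = generate_terminate_instance(
    pipeline,
    instance_info_location,
    aws_access_key_id=config['aws_access_key_id'],
    aws_secret_access_key=config['aws_secret_access_key'],
    slack_token=config['slack_token'],
)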
Example #12
def generate_deploy_to_acquia(pipeline, acquia_env, use_tag=False):
    """
    Creates a stage that deploys to an Acquia environment.

    Arguments:
        pipeline (gomatic.Pipeline): The pipeline to which the stage is being added.
        acquia_env (str): The environment name to which to deploy.
        use_tag (bool): True if deploying off a tag, False otherwise.

    Returns:
        gomatic.Stage: The new stage.

    """
    deploy_stage_for_acquia_env = pipeline.ensure_stage(
        constants.DEPLOY_ACQUIA_ENV_STAGE_NAME.format(acquia_env=acquia_env)
    )
    deploy_job_for_acquia_env = deploy_stage_for_acquia_env.ensure_job(
        constants.DEPLOY_ACQUIA_ENV_JOB_NAME.format(acquia_env=acquia_env)
    )
    deploy_job_for_acquia_env.ensure_resource("edxMarketing")

    if use_tag:
        tasks.generate_target_directory(deploy_job_for_acquia_env)

        # fetch the tag name
        new_tag_name_artifact_params = {
            'pipeline': pipeline.name,
            'stage': constants.BUILD_AND_PUSH_TO_ACQUIA_STAGE_NAME,
            'job': constants.BUILD_AND_PUSH_TO_ACQUIA_JOB_NAME,
            'src': FetchArtifactFile('deploy_tag_name.txt'),
            'dest': 'target'
        }
        deploy_job_for_acquia_env.add_task(FetchArtifactTask(**new_tag_name_artifact_params))

        deploy_source = '$(cat ../{artifact_path}/deploy_tag_name.txt)'.format(
            artifact_path=constants.ARTIFACT_PATH
        )
    else:
        deploy_source = '$MARKETING_REPOSITORY_VERSION'

    tasks.generate_package_install(deploy_job_for_acquia_env, 'tubular')
    marketing_tasks.generate_drupal_deploy(
        deploy_job_for_acquia_env,
        acquia_env,
        deploy_source
    )

    return deploy_stage_for_acquia_env
Example #13
def generate_tag_commit(
        stage, tag_id, deploy_artifact, org, repo,
        head_sha=None, head_sha_variable=None, head_sha_artifact=None
):
    """
    Generates a stage that is used to tag a release SHA.

    Either ``head_sha``, or both ``head_sha_variable`` and ``head_sha_artifact`` are required.

    Args:
        stage (gomatic.Stage): The stage to add the job to
        tag_id (str): A name to use to disambiguate instances of the tagging job
        deploy_artifact (ArtifactLocation): Location of deployment artifact file
        org (str): Name of the github organization that holds the repository (e.g. edx)
        repo (str): Name of repository (e.g. edx-platform)
        head_sha (str): commit SHA or environment variable holding the SHA to tag as the release. Optional.
        head_sha_variable (str): The variable in ``head_sha_artifact`` that contains the SHA to tag. Optional.
        head_sha_artifact (ArtifactLocation): The file containing the head_sha_variable. Optional.

    Returns:
        gomatic.Job
    """
    # Generate a job/task which tags the head commit of the source branch.
    # Instruct the task to auto-generate tag name/message by not sending them in.
    tag_job = stage.ensure_job(constants.GIT_TAG_SHA_JOB_NAME_TPL(tag_id))
    tasks.generate_package_install(tag_job, 'tubular')

    if deploy_artifact:
        # Fetch the AMI-deployment artifact to extract deployment time.
        tasks.retrieve_artifact(deploy_artifact, tag_job, constants.ARTIFACT_PATH)

    if head_sha_artifact:
        tasks.retrieve_artifact(head_sha_artifact, tag_job, constants.ARTIFACT_PATH)

    tasks.generate_tag_commit(
        tag_job,
        org,
        repo,
        commit_sha=head_sha,
        deploy_artifact_filename=deploy_artifact.file_name if deploy_artifact else None,
        commit_sha_variable=head_sha_variable,
        input_file=head_sha_artifact.file_name if head_sha_artifact else None,
    )

    return tag_job
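Two hedged invocation sketches covering the either/or contract in the docstring; the GO_REVISION variable and the artifact names are assumptions:

# (a) Tag a SHA held in an environment variable (GoCD exposes material
#     revisions as GO_REVISION_<MATERIAL> variables).
generate_tag_commit(
    stage, 'edxapp', deploy_artifact=None, org='edx', repo='edx-platform',
    head_sha='$GO_REVISION_EDX_PLATFORM',
)
# (b) Read the SHA out of an upstream artifact file instead.
generate_tag_commit(
    stage, 'edxapp', deploy_artifact=None, org='edx', repo='edx-platform',
    head_sha_variable='sha',            # variable name inside the file (assumed)
    head_sha_artifact=sha_artifact,     # an ArtifactLocation (assumed)
)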
Example #14
def generate_asg_cleanup(pipeline,
                         asgard_api_endpoints,
                         asgard_token,
                         aws_access_key_id,
                         aws_secret_access_key,
                         manual_approval=False):
    """
    Generates stage which calls the ASG cleanup script.

    Args:
        pipeline (gomatic.Pipeline):
        asgard_api_endpoints (str): canonical URL for asgard.
        asgard_token (str): Asgard token to use for authentication
        aws_access_key_id (str): AWS key ID for auth
        aws_secret_access_key (str): AWS secret key for auth
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables({'ASGARD_API_ENDPOINTS': asgard_api_endpoints})
    pipeline.ensure_encrypted_environment_variables(
        {
            'AWS_ACCESS_KEY_ID': aws_access_key_id,
            'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
            'ASGARD_API_TOKEN': asgard_token,
        }
    )

    stage = pipeline.ensure_stage("ASG-Cleanup-Stage")
    if manual_approval:
        stage.set_has_manual_approval()

    job = stage.ensure_job("Cleanup-ASGS")
    tasks.generate_package_install(job, 'tubular')
    job.add_task(ExecTask(
        [
            'cleanup-asgs.py'
        ],
        working_dir="tubular"
    ))

    return stage
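A usage sketch with placeholder credentials; the endpoint format follows Asgard's region-scoped URLs but is an assumption:

# Hypothetical ASG-cleanup wiring (values are placeholders).
stage = generate_asg_cleanup(
    pipeline,
    asgard_api_endpoints='https://asgard.example.com/us-east-1/task',
    asgard_token=config['asgard_token'],
    aws_access_key_id=config['aws_access_key_id'],
    aws_secret_access_key=config['aws_secret_access_key'],
)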
Example #15
def generate_rollback_in_acquia(pipeline, acquia_env, source_pipeline_env):
    """
    Creates a stage that rolls back an Acquia environment.

    NOTE: Rollback is only implemented for tag based deployments.

    Arguments:
        pipeline (gomatic.Pipeline): The pipeline to which the stage is being added.
        acquia_env (str): The environment name to roll back.
        source_pipeline_env (str): The pipeline environment from which the
            rollback is sourced (e.g. 'prod').

    Returns:
        gomatic.Stage: The new stage.

    """
    rollback_stage = pipeline.ensure_stage(constants.ROLLBACK_STAGE_NAME.format(acquia_env=acquia_env))
    rollback_stage.set_has_manual_approval()
    rollback_job = rollback_stage.ensure_job(constants.ROLLBACK_JOB_NAME.format(acquia_env=acquia_env))
    rollback_job.ensure_resource("edxMarketing")

    rollback_tag_name = constants.ACQUIA_ENV_TAG_NAME.format(acquia_env=acquia_env)
    rollback_tag_name_artifact_params = {
        'pipeline': constants.DEPLOY_MARKETING_PIPELINE_NAME.format(acquia_env=source_pipeline_env),
        'stage': constants.FETCH_TAG_STAGE_NAME,
        'job': constants.FETCH_TAG_JOB_NAME,
        'src': FetchArtifactFile('{rollback_tag_name}.txt'.format(rollback_tag_name=rollback_tag_name)),
        'dest': 'target'
    }

    tasks.generate_package_install(rollback_job, 'tubular')
    tasks.generate_target_directory(rollback_job)
    rollback_job.add_task(FetchArtifactTask(**rollback_tag_name_artifact_params))
    marketing_tasks.generate_drupal_deploy(
        rollback_job,
        acquia_env,
        '$(cat ../{artifact_path}/{rollback_tag_name}.txt)'.format(
            artifact_path=constants.ARTIFACT_PATH, rollback_tag_name=rollback_tag_name
        )
    )

    return rollback_stage
Example #16
def generate_check_ci(
        pipeline,
        token,
        materials_to_check
):
    """
    Generates a stage used to check the CI combined test status of a commit.
    Used to gate a release on whether CI has successfully passed for the release code.

    Args:
        pipeline (gomatic.Pipeline):
        token (str): GitHub token used to check CI.
        materials_to_check (list(GitMaterial)): List of materials.

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_unencrypted_secure_environment_variables(
        {
            'GIT_TOKEN': token
        }
    )
    stage = pipeline.ensure_stage(constants.CHECK_CI_STAGE_NAME)
    # Add a separate checking job for each org/repo.
    for material in materials_to_check:
        org, repo = github_id(material)
        repo_underscore = repo.replace('-', '_')
        job = stage.ensure_job(constants.CHECK_CI_JOB_NAME + '_' + repo_underscore)
        tasks.generate_package_install(job, 'tubular')

        cmd_args = [
            '--token', '$GIT_TOKEN',
            '--org', org,
            '--repo', repo,
            '--commit_hash', material_envvar_bash(material)
        ]
        job.add_task(tasks.tubular_task(
            'check_pr_tests_status.py',
            cmd_args
        ))

    return stage
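A hedged sketch gating a release pipeline on CI for two of the material factories seen elsewhere in this document:

# Hypothetical CI gate over the edx-platform and tubular materials.
materials = [
    EDX_PLATFORM(material_name='edx-platform'),  # material factory (see other examples)
    TUBULAR(),
]
stage = generate_check_ci(pipeline, config['git_token'], materials)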
Example #17
def generate_build_and_push_to_acquia(pipeline, create_tag=False):
    """
    Creates a stage that builds static assets and then pushes source and assets to Acquia.

    Arguments:
        pipeline (gomatic.Pipeline): The pipeline to which the stage is being added.
        create_tag (bool): True if a tag should first be created, False to push a branch directly.

    Returns:
        gomatic.Stage: The new stage.

    """
    build_and_push_to_acquia_stage = pipeline.ensure_stage(
        constants.BUILD_AND_PUSH_TO_ACQUIA_STAGE_NAME
    )
    build_and_push_to_acquia_job = build_and_push_to_acquia_stage.ensure_job(
        constants.BUILD_AND_PUSH_TO_ACQUIA_JOB_NAME
    )
    build_and_push_to_acquia_job.ensure_resource("edxMarketing")

    marketing_tasks.setup_git_config(build_and_push_to_acquia_job)
    marketing_tasks.add_acquia_remote_if_needed(build_and_push_to_acquia_job)

    if create_tag:
        # Ensures the tag name is accessible in future jobs.
        build_and_push_to_acquia_job.ensure_artifacts(set([BuildArtifact('target/deploy_tag_name.txt')]))

        tasks.generate_package_install(build_and_push_to_acquia_job, 'tubular')
        tasks.generate_target_directory(build_and_push_to_acquia_job)

        marketing_tasks.create_tag_without_static_assets(build_and_push_to_acquia_job)
        marketing_tasks.build_and_commit_static_assets(build_and_push_to_acquia_job)
        marketing_tasks.create_and_push_tag_with_static_assets_to_acquia(build_and_push_to_acquia_job)
        marketing_tasks.push_original_tag_to_mktg(build_and_push_to_acquia_job)
    else:
        build_and_push_to_acquia_stage.set_has_manual_approval()

        marketing_tasks.build_and_commit_static_assets(build_and_push_to_acquia_job)
        marketing_tasks.push_branch_to_acquia(build_and_push_to_acquia_job)

    return build_and_push_to_acquia_stage
Example #18
def generate_backup_database(pipeline, acquia_env):
    """
    Creates a stage that creates a database backup for an Acquia environment.

    Arguments:
        pipeline (gomatic.Pipeline): The pipeline to which the stage is being added.
        acquia_env (str): The environment name for which the database will be backed up.

    Returns:
        gomatic.Stage: The new stage.

    """
    backup_acquia_env_database_stage = pipeline.ensure_stage(
        constants.BACKUP_ACQUIA_ENV_DATABASE_STAGE_NAME.format(acquia_env=acquia_env)
    )
    backup_acquia_env_database_job = backup_acquia_env_database_stage.ensure_job(
        constants.BACKUP_ACQUIA_ENV_DATABASE_JOB_NAME.format(acquia_env=acquia_env)
    )
    backup_acquia_env_database_job.ensure_resource("edxMarketing")

    tasks.generate_package_install(backup_acquia_env_database_job, 'tubular')
    marketing_tasks.generate_backup_drupal_database(backup_acquia_env_database_job, acquia_env)

    return backup_acquia_env_database_stage
Example #19
def generate_merge_release_candidate(
        pipeline, stage, token, org, repo, target_branch, head_sha,
        fast_forward_only, reference_repo=None,
):
    """
    Generates a job that is used to merge a Git source branch into a target branch,
    optionally ensuring that the merge is a fast-forward merge.

    Args:
        pipeline (gomatic.Pipeline): The pipeline containing ``stage``.
        stage (gomatic.Stage): The stage to add the job to
        org (str): Name of the github organization that holds the repository (e.g. edx)
        repo (str): Name of repository (e.g. edx-platform)
        target_branch (str): Name of the branch into which to merge the source branch
        head_sha (str): commit SHA or environment variable holding the SHA to tag as the release
        token (str): the github token used to create all these things. Will be an env_var 'GIT_TOKEN'
        fast_forward_only (bool): If True, force a fast-forward merge or fail.

    Returns:
        gomatic.Job
    """
    merge_branch_job = stage.ensure_job(constants.GIT_MERGE_RC_BRANCH_JOB_NAME)
    tasks.generate_package_install(merge_branch_job, 'tubular')
    tasks.generate_target_directory(merge_branch_job)
    tasks.generate_merge_branch(
        pipeline,
        merge_branch_job,
        token,
        org,
        repo,
        head_sha,
        target_branch,
        fast_forward_only,
        reference_repo=reference_repo,
    )
    return merge_branch_job
Example #20
def prerelease_materials(edxapp_group, config):
    """
    Generate the prerelease materials pipeline

    Args:
        edxapp_group (gomatic.PipelineGroup): Pipeline group to which this new pipeline will be attached.
        config (dict): the general configuration for this pipeline, including the
            stage_edx_edxapp, prod_edx_edxapp, and prod_edge_edxapp sub-configs
            used by the base-AMI selection jobs.

    Returns:
        gomatic.Pipeline

    Variables needed for this pipeline:
    - git_token
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id

    Optional variables:
    - configuration_secure_version
    """
    pipeline = edxapp_group.ensure_replacement_of_pipeline("prerelease_edxapp_materials_latest")
    pipeline.set_label_template('${edx-platform[:7]}-${COUNT}')

    for material in (
            CONFIGURATION, EDX_SECURE, EDGE_SECURE,
            EDX_MICROSITE, EDX_INTERNAL, EDGE_INTERNAL,
    ):
        pipeline.ensure_material(material())

    pipeline.ensure_material(TUBULAR())
    pipeline.ensure_material(EDX_PLATFORM(material_name='edx-platform', ignore_patterns=frozenset()))

    stage = pipeline.ensure_stage(constants.PRERELEASE_MATERIALS_STAGE_NAME)
    job = stage.ensure_job(constants.PRERELEASE_MATERIALS_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    private_releases.generate_create_private_release_candidate(
        job,
        config['git_token'],
        ('edx', 'edx-platform'),
        'master',
        EDX_PLATFORM().branch,
        ('edx', 'edx-platform-private'),
        'security-release',
        'release-candidate',
        target_reference_repo='edx-platform-private',
    )

    # This prevents the commit being released from being lost when the new
    # release-candidate is cut. However, this will require a janitor job to
    # deal with any releases that are never completed.
    tasks.generate_create_branch(
        pipeline, job, config['git_token'], 'edx', 'edx-platform',
        target_branch="release-candidate-$GO_PIPELINE_COUNTER",
        sha=material_envvar_bash(EDX_PLATFORM()))

    # Move the AMI selection jobs here in a single stage.
    stage = pipeline.ensure_stage(constants.BASE_AMI_SELECTION_STAGE_NAME)
    for edp in (
            STAGE_EDX_EDXAPP,
            PROD_EDX_EDXAPP,
            PROD_EDGE_EDXAPP,
    ):
        localized_config = config[edp]
        job = stage.ensure_job(constants.BASE_AMI_SELECTION_EDP_JOB_NAME(edp))
        tasks.generate_package_install(job, 'tubular')
        tasks.generate_base_ami_selection(
            job,
            localized_config['aws_access_key_id'],
            localized_config['aws_secret_access_key'],
            edp,
            config.get('base_ami_id')
        )

    return pipeline
Example #21
def generate_build_ami(stage,
                       edp,
                       app_repo_url,
                       configuration_secure_material,
                       configuration_internal_material,
                       playbook_path,
                       config,
                       version_tags=None,
                       **kwargs):
    """
    Generates a job for creating a new AMI.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            for which an AMI will be created.
        app_repo_url (str): App repo's URL.
        configuration_secure_material (gomatic.gomatic.gocd.materials.GitMaterial): Secure
            configuration material. Destination directory expected to be 'configuration-secure'.
        configuration_internal_material (gomatic.gomatic.gocd.materials.GitMaterial): Internal
            configuration material. Destination directory expected to be 'configuration-internal'.
        playbook_path (str): Path to the Ansible playbook to run when creating the AMI.
        config (dict): Environment-specific secure config.
        version_tags (dict): An optional {app_name: (repo, version), ...} dict that
            specifies what versions to tag the AMI with.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Locate the base AMI.
    tasks.generate_base_ami_selection(
        job,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        edp=edp
    )

    # Launch a new instance on which to build the AMI.
    tasks.generate_launch_instance(
        job,
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
        ec2_security_group_id=config['ec2_security_group_id'],
        ec2_instance_profile_name=config['ec2_instance_profile_name'],
        variable_override_path=path_to_artifact(constants.BASE_AMI_OVERRIDE_FILENAME),
    )

    # Run the Ansible play for the service.
    tasks.generate_run_app_playbook(
        job,
        playbook_path,
        edp,
        app_repo_url,
        private_github_key=config['github_private_key'],
        hipchat_token=config['hipchat_token'],
        configuration_secure_dir=configuration_secure_material.destination_directory,
        configuration_internal_dir=configuration_internal_material.destination_directory,
        disable_edx_services='true',
        COMMON_TAG_EC2_INSTANCE='true',
        **kwargs
    )

    # Create an AMI from the instance.
    tasks.generate_create_ami(
        job,
        edp.play,
        edp.deployment,
        edp.environment,
        app_repo_url,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        path_to_artifact(constants.LAUNCH_INSTANCE_FILENAME),
        hipchat_token=config['hipchat_token'],
        version_tags=version_tags,
        **kwargs
    )

    tasks.generate_ami_cleanup(job, config['hipchat_token'], runif='any')

    return job
Example #22
def generate_poll_tests_and_merge_pr(pipeline,
                                     stage,
                                     job,
                                     stage_name,
                                     job_name,
                                     pr_artifact_params,
                                     artifact_filename,
                                     org,
                                     repo,
                                     token,
                                     initial_poll_wait,
                                     max_poll_tries,
                                     poll_interval,
                                     manual_approval):
    """
    Generates a stage that is used to:
    - poll for successful completion of PR tests
    - merge the PR

    Args:
        pipeline (gomatic.Pipeline): Pipeline to attach this stage to
        stage (gomatic.Stage): Stage to use when adding tasks -or- None
        job (gomatic.Job): Job to use when adding tasks -or- None
        stage_name (str): Name of the stage
        job_name (str): Name of the job
        pr_artifact_params (dict): Params to use in creation of artifact-fetching task.
        artifact_filename (str): Filename of the artifact to fetch/read-in.
        org (str): Name of the github organization that holds the repository (e.g. edx)
        repo (str): Name of repository (e.g. edx-platform)
        token (str): the github token used to create all these things. Will be an env_var 'GIT_TOKEN'
        initial_poll_wait (int): Number of seconds that will pass between 1st/2nd poll attempts.
        max_poll_tries (int): Maximum number of poll attempts that should occur before failing.
        poll_interval (int): Number of seconds between all poll attempts (after the 1st/2nd attempt interval).
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables(
        {
            'PR_TEST_INITIAL_WAIT_INTERVAL': str(initial_poll_wait),
            'MAX_PR_TEST_POLL_TRIES': str(max_poll_tries),
            'PR_TEST_POLL_INTERVAL': str(poll_interval)
        }
    )
    pipeline.ensure_unencrypted_secure_environment_variables(
        {
            'GIT_TOKEN': token
        }
    )
    if stage is None:
        git_stage = pipeline.ensure_stage(stage_name)
        if manual_approval:
            git_stage.set_has_manual_approval()
        git_job = git_stage.ensure_job(job_name)
    else:
        git_stage = stage
        git_job = job
    tasks.generate_package_install(git_job, 'tubular')
    tasks.generate_target_directory(git_job)

    # Fetch the PR-creation material.
    git_job.add_task(FetchArtifactTask(**pr_artifact_params))

    # Generate a task that polls the status of combined tests for a PR.
    tasks.generate_poll_pr_tests(
        git_job,
        org,
        repo,
        artifact_filename
    )

    # Generate a task that merges a PR that has passed all its tests in the previous task.
    tasks.generate_merge_pr(
        git_job,
        org,
        repo,
        artifact_filename
    )

    return git_stage
Example #23
def generate_create_branch_and_pr(pipeline,
                                  stage_name,
                                  org,
                                  repo,
                                  source_branch,
                                  new_branch,
                                  target_branch,
                                  pr_title,
                                  pr_body,
                                  token,
                                  manual_approval):
    """
    Generates a stage that is used to:
    - create a new branch off the HEAD of a source branch
    - create a PR to merge the new branch into a target branch

    Args:
        pipeline (gomatic.Pipeline): Pipeline to attach this stage to
        stage_name (str): Name of the stage
        org (str): Name of the github organization that holds the repository (e.g. edx)
        repo (str): Name of repository (e.g. edx-platform)
        source_branch (str): Name of the branch to use in creating the new branch
        new_branch (str): Name of the branch to create off the HEAD of the source branch
        target_branch (str): Name of the branch into which to merge the source branch
        pr_title (str): Title of the new PR
        pr_body (str): Body of the new PR
        token (str): the github token used to create all these things. Will be an env_var 'GIT_TOKEN'
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    git_stage = pipeline.ensure_stage(stage_name)
    if manual_approval:
        git_stage.set_has_manual_approval()
    git_job = git_stage.ensure_job(constants.CREATE_MASTER_MERGE_PR_JOB_NAME)
    tasks.generate_package_install(git_job, 'tubular')
    tasks.generate_target_directory(git_job)

    # Generate a task that creates a new branch off the HEAD of a source branch.
    tasks.generate_create_branch(
        pipeline,
        git_job,
        token,
        org,
        repo,
        target_branch=new_branch,
        source_branch=source_branch
    )

    # Generate a task that creates a pull request merging the new branch from above into a target branch.
    tasks.generate_create_pr(
        git_job,
        org,
        repo,
        new_branch,
        target_branch,
        pr_title,
        pr_body
    )

    return git_stage
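A usage sketch; the branch and PR strings are illustrative:

# Hypothetical call: branch off master and open a PR back into 'release'.
stage = generate_create_branch_and_pr(
    pipeline,
    stage_name='create_release_pr_stage',
    org='edx',
    repo='edx-platform',
    source_branch='master',
    new_branch='release-candidate',
    target_branch='release',
    pr_title='Merge master into release',
    pr_body='Automated PR created by the release pipeline.',
    token=config['git_token'],
    manual_approval=True,
)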
Example #24
def generate_run_play(pipeline,
                      playbook_with_path,
                      edp,
                      app_repo,
                      slack_token='',
                      slack_room=constants.SLACK_ROOM,
                      manual_approval=False,
                      configuration_secure_dir=constants.PRIVATE_CONFIGURATION_LOCAL_DIR,
                      configuration_internal_dir=constants.INTERNAL_CONFIGURATION_LOCAL_DIR,
                      override_artifacts=None,
                      timeout=None,
                      **kwargs):
    """
    TODO: This currently runs from the configuration/playbooks/continuous_delivery/ directory. Need to figure out how to
    pass in a configuration file to ansible-play correctly. TE-1608

    Assumes:
        - generate_launch_instance stage was used to launch the instance preceding this stage.
        - Requires the ansible_inventory and key.pem files to be in the constants.ARTIFACT_DIRECTORY path
        - Play is run from the constants.PUBLIC_CONFIGURATION_DIR
        - Play is run using the constants.ANSIBLE_CONFIG configuration file

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the run play stage.
        playbook_with_path (str): Path of the playbook to run.
        edp (EDP): Environment, deployment, and play for this play.
        app_repo (str): App repo's URL.
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.
        manual_approval (bool): Should this stage require manual approval?
        configuration_secure_dir (str): The secure config directory to use for this play.
        configuration_internal_dir (str): The internal config directory to use for this play.
        override_artifacts (list): ArtifactLocations to fetch and pass to the playbook as override files.
        timeout (int): GoCD job level inactivity timeout setting.
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.RUN_PLAY_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.RUN_PLAY_JOB_NAME)
    if timeout:
        job.timeout = str(timeout)

    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    for file_name in (
            constants.KEY_PEM_FILENAME,
            constants.LAUNCH_INSTANCE_FILENAME,
            constants.ANSIBLE_INVENTORY_FILENAME
    ):
        tasks.retrieve_artifact(
            ArtifactLocation(
                pipeline.name,
                constants.LAUNCH_INSTANCE_STAGE_NAME,
                constants.LAUNCH_INSTANCE_JOB_NAME,
                file_name,
            ),
            job,
            constants.ARTIFACT_PATH
        )

    override_files = []
    if not override_artifacts:
        override_artifacts = []

    for artifact in override_artifacts:
        tasks.retrieve_artifact(artifact, job, constants.ARTIFACT_PATH)
        override_files.append('{}/{}'.format(constants.ARTIFACT_PATH, artifact.file_name))

    tasks.generate_run_app_playbook(
        job=job,
        playbook_with_path=playbook_with_path,
        edp=edp,
        app_repo=app_repo,
        slack_token=slack_token,
        slack_room=slack_room,
        configuration_secure_dir=configuration_secure_dir,
        configuration_internal_dir=configuration_internal_dir,
        override_files=override_files,
        **kwargs)
    return stage
Example #25
def generate_create_ami_from_instance(pipeline,
                                      edp,
                                      app_repo,
                                      aws_access_key_id,
                                      aws_secret_access_key,
                                      ami_creation_timeout=3600,
                                      ami_wait='yes',
                                      cache_id='',
                                      artifact_path=constants.ARTIFACT_PATH,
                                      slack_token='',
                                      slack_room=constants.SLACK_ROOM,
                                      manual_approval=False,
                                      version_tags=None,
                                      **kwargs):
    """
    Generates an artifact ami.yml:
        ami_id: ami-abcdefg
        ami_message: AMI creation operation complete
        ami_state: available

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the build AMI stage.
        edp (EDP): Environment, deployment, and play for the AMI.
        app_repo (str): App repo's URL.
        aws_access_key_id (str): AWS access key ID for auth.
        aws_secret_access_key (str): AWS secret access key for auth.
        ami_creation_timeout (int): Timeout in seconds for AMI creation.
        ami_wait (str): Whether to wait for the AMI to become available ('yes'/'no').
        cache_id (str): Cache identifier passed to the AMI creation task.
        artifact_path (str): Path where artifacts are stored.
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.
        manual_approval (bool): Should this stage require manual approval?
        version_tags (dict): An optional {app_name: (repo, version), ...} dict that
            specifies what versions to tag the AMI with.
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.BUILD_AMI_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    launch_info_artifact = ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.LAUNCH_INSTANCE_FILENAME,
    )

    tasks.retrieve_artifact(launch_info_artifact, job)

    # Create an AMI from the instance
    tasks.generate_create_ami(
        job=job,
        play=edp.play,
        deployment=edp.deployment,
        edx_environment=edp.environment,
        app_repo=app_repo,
        launch_info_path='{}/{}'.format(constants.ARTIFACT_PATH, constants.LAUNCH_INSTANCE_FILENAME),
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        ami_creation_timeout=ami_creation_timeout,
        ami_wait=ami_wait,
        cache_id=cache_id,
        artifact_path=artifact_path,
        slack_token=slack_token,
        slack_room=slack_room,
        version_tags=version_tags,
        **kwargs)

    return stage
Example #26
def private_public_merge_sync(edxapp_group, config):
    """
    Variables needed for this pipeline:
    - git_token
    - github_token
    - initial_poll_wait
    - max_poll_tries
    - poll_interval
    """
    pipeline = edxapp_group.ensure_replacement_of_pipeline('edxapp_private_public_merge_sync')

    edx_platform_private_sr = EDX_PLATFORM_PRIVATE(
        material_name='edx-platform-private',
        destination_directory='edx-platform-priv',
        ignore_patterns=frozenset()
    )
    pipeline.ensure_material(edx_platform_private_sr)
    pipeline.ensure_material(EDX_PLATFORM(branch='master', destination_directory='edx-plat'))
    pipeline.ensure_material(TUBULAR())
    stage = pipeline.ensure_stage(constants.PRIV_PUB_CREATE_MERGE_PR_STAGE_NAME)
    job = stage.ensure_job(constants.PRIV_PUB_CREATE_MERGE_PR_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    private_releases.generate_private_public_create_pr(
        job,
        config['git_token'],
        ('edx', 'edx-platform-private'),
        edx_platform_private_sr.branch,
        ('edx', 'edx-platform'),
        "master",
    )

    stage = pipeline.ensure_stage(constants.PUB_PRIV_POLL_MERGE_STAGE_NAME)
    job = stage.ensure_job(constants.PUB_PRIV_POLL_MERGE_JOB_NAME)

    pr_artifact_params = {
        'pipeline': pipeline.name,
        'stage': constants.PRIV_PUB_CREATE_MERGE_PR_STAGE_NAME,
        'job': constants.PRIV_PUB_CREATE_MERGE_PR_JOB_NAME,
        'src': FetchArtifactFile(constants.PRIVATE_PUBLIC_PR_FILENAME),
        'dest': constants.ARTIFACT_PATH
    }
    stages.generate_poll_tests_and_merge_pr(
        pipeline=pipeline,
        stage=stage,
        job=job,
        stage_name=None,
        job_name=None,
        pr_artifact_params=pr_artifact_params,
        artifact_filename=constants.PRIVATE_PUBLIC_PR_FILENAME,
        org='edx',
        repo='edx-platform',
        token=config['github_token'],
        initial_poll_wait=config['initial_poll_wait'],
        max_poll_tries=config['max_poll_tries'],
        poll_interval=config['poll_interval'],
        manual_approval=False
    )

    # Generate a task that keeps the private branch up-to-date by pushing the public branch to it.
    private_releases.generate_public_private_merge(
        job,
        ('edx', 'edx-platform-private'),
        edx_platform_private_sr.branch,
        ('edx', 'edx-platform'),
        "master",
    )

    return pipeline
Example #27
def generate_deploy_ami(stage, ami_artifact_location, edp, config, has_migrations=True, application_user=None):
    """
    Generates a job for deploying an AMI. Migrations are applied as part of this job.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): Where to find
            the AMI artifact to deploy.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            to which the AMI belongs.
        config (dict): Environment-specific secure config.
        has_migrations (bool): Whether to generate Gomatic for applying migrations.
        application_user (str): application user if different from the play name.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.DEPLOY_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Retrieve the AMI ID from the upstream build stage.
    tasks.retrieve_artifact(ami_artifact_location, job)
    variable_override_path = path_to_artifact(ami_artifact_location.file_name)

    if has_migrations:
        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )

        # SSH key used to access the instance needs specific permissions.
        job.ensure_task(tasks.bash_task(
            'chmod 600 {key_pem_path}',
            key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
        ))

        if application_user is None:
            application_user = edp.play

        tasks.generate_run_migrations(
            job,
            application_user=application_user,
            application_name=application_user,
            application_path='/edx/app/{}'.format(application_user),
            db_migration_user=constants.DB_MIGRATION_USER,
            db_migration_pass=config['db_migration_pass'],
        )

        tasks.generate_ami_cleanup(job, config['hipchat_token'], runif='any')

    tasks.generate_deploy_ami(
        job,
        variable_override_path,
        config['asgard_api_endpoints'],
        config['asgard_token'],
    )

    return job
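A hedged wiring sketch for the deploy job, reusing the ami.yml artifact and the stage/job constants from the AMI-building examples above; the upstream pipeline name is an assumption:

# Hypothetical deploy wiring: fetch ami.yml from an upstream build pipeline.
from edxpipelines.utils import EDP, ArtifactLocation

edp = EDP(environment='stage', deployment='edx', play='edxapp')  # illustrative
ami_artifact_location = ArtifactLocation(
    'edxapp_build_pipeline',          # upstream pipeline name (assumed)
    constants.BUILD_AMI_STAGE_NAME,
    constants.BUILD_AMI_JOB_NAME,
    'ami.yml',
)
deploy_stage = pipeline.ensure_stage('deploy_ami_stage')
generate_deploy_ami(deploy_stage, ami_artifact_location, edp, config)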
def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - jenkins_user_token
    - jenkins_job_token
    """

    pipeline = configurator.ensure_pipeline_group(constants.ORA2_PIPELINE_GROUP_NAME) \
                           .ensure_replacement_of_pipeline(constants.BUILD_ORA2_SANDBOX_PIPELINE_NAME) \
                           .set_timer('0 30 9 * * ?')

    for material in (TUBULAR, EDX_ORA2):
        pipeline.ensure_material(material())

    pipeline.ensure_environment_variables({
        'DNS_NAME': 'ora2',
        'NAME_TAG': 'ora2',
        'EDXAPP_VERSION': 'master',
        'ORA2_VERSION': 'master',
        'CONFIGURATION_VERSION': 'master',
        'CONFIGURATION_SOURCE_REPO': 'https://github.com/edx/configuration.git',
        'CONFIGURATION_SECURE_VERSION': 'master',
        'CONFIGURATION_INTERNAL_VERSION': 'master',
        'NOTIFY_ON_FAILURE': '*****@*****.**'
    })

    pipeline.ensure_unencrypted_secure_environment_variables({
        'JENKINS_USER_TOKEN': config['jenkins_user_token'],
        'JENKINS_JOB_TOKEN': config['jenkins_job_token']
    })

    # Create the Create Sandbox stage, job, and task
    jenkins_create_ora2_sandbox_stage = pipeline.ensure_stage(
        constants.CREATE_ORA2_SANDBOX_STAGE_NAME)
    jenkins_create_ora2_sandbox_job = jenkins_create_ora2_sandbox_stage.ensure_job(
        constants.CREATE_ORA2_SANDBOX_JOB_NAME)
    tasks.generate_package_install(jenkins_create_ora2_sandbox_job, 'tubular')

    # Keys need to be lower case for this job to use them
    create_ora2_sandbox_jenkins_params = {
        'dns_name': '$DNS_NAME',
        'name_tag': '$NAME_TAG',
        'edxapp_version': '$EDXAPP_VERSION',
        'configuration_version': '$CONFIGURATION_VERSION',
        'configuration_source_repo': '$CONFIGURATION_SOURCE_REPO',
        'configuration_secure_version': '$CONFIGURATION_SECURE_VERSION',
        'configuration_internal_version': '$CONFIGURATION_INTERNAL_VERSION',
        'basic_auth': 'false'
    }
    jenkins_timeout = 75 * 60
    jenkins_create_ora2_sandbox_job.timeout = str(jenkins_timeout + 60)
    tasks.trigger_jenkins_build(jenkins_create_ora2_sandbox_job,
                                constants.ORA2_JENKINS_URL,
                                constants.ORA2_JENKINS_USER_NAME,
                                constants.CREATE_ORA2_SANDBOX_JENKINS_JOB_NAME,
                                create_ora2_sandbox_jenkins_params,
                                timeout=jenkins_timeout)

    # Create the Set Ora2 Version stage, job, and task
    jenkins_set_ora2_version_stage = pipeline.ensure_stage(
        constants.SET_ORA2_VERSION_STAGE_NAME)
    jenkins_set_ora2_version_job = jenkins_set_ora2_version_stage.ensure_job(
        constants.SET_ORA2_VERSION_JOB_NAME)
    tasks.generate_package_install(jenkins_set_ora2_version_job, 'tubular')
    # Keys need to be upper case for this job to use them
    set_ora2_version_jenkins_params = {
        # The set-version job uses a different variable name (SANDBOX_HOST) for the sandbox address.
        'SANDBOX_HOST': '${DNS_NAME}.sandbox.edx.org',
        'ORA2_VERSION': '$ORA2_VERSION',
        'NOTIFY_ON_FAILURE': '$NOTIFY_ON_FAILURE'
    }
    tasks.trigger_jenkins_build(jenkins_set_ora2_version_job,
                                constants.ORA2_JENKINS_URL,
                                constants.ORA2_JENKINS_USER_NAME,
                                constants.SET_ORA2_VERSION_JENKINS_JOB_NAME,
                                set_ora2_version_jenkins_params)

    # Create the Ora2 Add Course to Sandbox stage, job, and task
    jenkins_add_course_to_ora2_stage = pipeline.ensure_stage(
        constants.ADD_COURSE_TO_ORA2_STAGE_NAME)
    jenkins_add_course_to_ora2_job = jenkins_add_course_to_ora2_stage.ensure_job(
        constants.ADD_COURSE_TO_ORA2_JOB_NAME)
    tasks.generate_package_install(jenkins_add_course_to_ora2_job, 'tubular')
    # Keys need to be upper case for this job to use them
    add_course_to_ora2_jenkins_params = {
        'SANDBOX_BASE': '$DNS_NAME',
    }
    tasks.trigger_jenkins_build(jenkins_add_course_to_ora2_job,
                                constants.ORA2_JENKINS_URL,
                                constants.ORA2_JENKINS_USER_NAME,
                                constants.ADD_COURSE_TO_ORA2_JENKINS_JOB_NAME,
                                add_course_to_ora2_jenkins_params)

    # Create the Enable Auto Auth stage, job, and task
    jenkins_enable_auto_auth_stage = pipeline.ensure_stage(
        constants.ENABLE_AUTO_AUTH_STAGE_NAME)
    jenkins_enable_auto_auth_job = jenkins_enable_auto_auth_stage.ensure_job(
        constants.ENABLE_AUTO_AUTH_JOB_NAME)
    tasks.generate_package_install(jenkins_enable_auto_auth_job, 'tubular')
    # Keys need to be upper case for this job to use them
    enable_auto_auth_jenkins_params = {'SANDBOX_BASE': '$DNS_NAME'}
    tasks.trigger_jenkins_build(jenkins_enable_auto_auth_job,
                                constants.ORA2_JENKINS_URL,
                                constants.ORA2_JENKINS_USER_NAME,
                                constants.ENABLE_AUTO_AUTH_JENKINS_JOB_NAME,
                                enable_auto_auth_jenkins_params)

    # Create the Ora2 Run Tests stage, job, and task
    jenkins_run_ora2_tests_stage = pipeline.ensure_stage(
        constants.RUN_ORA2_TESTS_STAGE_NAME)
    jenkins_run_ora2_tests_job = jenkins_run_ora2_tests_stage.ensure_job(
        constants.RUN_ORA2_TESTS_JOB_NAME)
    tasks.generate_package_install(jenkins_run_ora2_tests_job, 'tubular')
    # Keys need to be upper case for this job to use them
    run_ora2_tests_jenkins_params = {
        'TEST_HOST': '${DNS_NAME}.sandbox.edx.org',
        'BRANCH': '$ORA2_VERSION',
        'SLEEP_TIME': 300
    }
    jenkins_timeout = 75 * 60
    tasks.trigger_jenkins_build(jenkins_run_ora2_tests_job,
                                constants.ORA2_JENKINS_URL,
                                constants.ORA2_JENKINS_USER_NAME,
                                constants.RUN_ORA2_TESTS_JENKINS_JOB_NAME,
                                run_ora2_tests_jenkins_params,
                                timeout=jenkins_timeout)
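For reference, a sketch of the config dict this installer expects; all values are placeholders. Note also that set_timer('0 30 9 * * ?') is a Quartz-style cron spec (seconds, minutes, hours, day-of-month, month, day-of-week), so the pipeline fires daily at 09:30.

# Placeholder values; only the key names matter.
config = {
    'gocd_username': 'gocd-user',
    'gocd_password': 'not-a-real-password',
    'gocd_url': 'https://gocd.example.com/go',
    'configuration_secure_repo': 'git@<host>:<org>/configuration-secure.git',
    'jenkins_user_token': 'not-a-real-token',
    'jenkins_job_token': 'not-a-real-token',
}
install_pipelines(configurator, config)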
Example #32
def generate_deploy_ami(
        pipeline,
        asgard_api_endpoints,
        asgard_token,
        aws_access_key_id,
        aws_secret_access_key,
        upstream_ami_artifact=None,
        manual_approval=True
):
    """
    Generates a stage which deploys an AMI via Asgard.

    If upstream_ami_artifact is set, information about which AMI to deploy will be pulled
    from that pipeline/stage/file artifact.

    If upstream_ami_artifact is not set, the environment variable AMI_ID will be used to
    determine which AMI to deploy.

    Args:
        pipeline (gomatic.Pipeline):
        asgard_api_endpoints (str): canonical URL for asgard.
        asgard_token (str):
        aws_access_key_id (str):
        aws_secret_access_key (str):
        upstream_ami_artifact (ArtifactLocation): The location of the YAML artifact that has the `ami_id`
        manual_approval (bool): Should this stage require manual approval?
    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables(
        {
            'ASGARD_API_ENDPOINTS': asgard_api_endpoints,
            'WAIT_SLEEP_TIME': constants.TUBULAR_SLEEP_WAIT_TIME
        }
    ).ensure_encrypted_environment_variables(
        {
            'AWS_ACCESS_KEY_ID': aws_access_key_id,
            'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
            'ASGARD_API_TOKEN': asgard_token,
        }
    )

    stage = pipeline.ensure_stage(constants.DEPLOY_AMI_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(constants.DEPLOY_AMI_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')
    # Make the artifact directory if it does not exist
    job.add_task(ExecTask(
        [
            '/bin/bash',
            '-c',
            'mkdir -p ../{}'.format(constants.ARTIFACT_PATH),
        ],
        working_dir="tubular"
    ))

    # Setup the deployment output file
    artifact_path = '{}/{}'.format(
        constants.ARTIFACT_PATH,
        constants.DEPLOY_AMI_OUT_FILENAME
    )
    job.ensure_artifacts(set([BuildArtifact(artifact_path)]))

    deploy_command = 'asgard-deploy.py --out_file ../{} '.format(artifact_path)

    if upstream_ami_artifact:
        tasks.retrieve_artifact(upstream_ami_artifact, job, 'tubular')
        deploy_command += '--config-file {}'.format(upstream_ami_artifact.file_name)

    else:
        pipeline.ensure_environment_variables({'AMI_ID': None})
        deploy_command += '--ami_id $AMI_ID'

    # Execute the deployment script
    job.add_task(ExecTask(['/bin/bash', '-c', deploy_command], working_dir="tubular"))
    return stage
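Two hedged invocation sketches for the two modes described in the docstring; the pipeline name and config keys are assumptions.

# Mode 1: the AMI id comes from an upstream artifact (hypothetical location).
ami_artifact = ArtifactLocation(
    'build_edxapp_ami',                  # upstream pipeline name (assumed)
    constants.BUILD_AMI_STAGE_NAME,
    constants.BUILD_AMI_JOB_NAME,
    constants.BUILD_AMI_FILENAME,
)
generate_deploy_ami(
    pipeline, config['asgard_api_endpoints'], config['asgard_token'],
    config['aws_access_key_id'], config['aws_secret_access_key'],
    upstream_ami_artifact=ami_artifact,
)

# Mode 2: no artifact; the AMI_ID environment variable is supplied when the
# stage is triggered (the generator ensures the variable exists on the pipeline).
generate_deploy_ami(
    pipeline, config['asgard_api_endpoints'], config['asgard_token'],
    config['aws_access_key_id'], config['aws_secret_access_key'],
)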
Example #33
def generate_e2e_test_stage(pipeline, config):
    """
    Add stages to run end-to-end tests against edxapp to the specified ``pipeline``.

    Required Config Parameters:
        jenkins_user_token
        jenkins_job_token
        jenkins_user_name
    """
    # For now, you can only trigger builds on a single jenkins server, because you can only
    # define a single username/token.
    # And all the jobs that you want to trigger need the same job token defined.
    # TODO: refactor when required so that each job can define their own user and job tokens
    pipeline.ensure_unencrypted_secure_environment_variables(
        {
            'JENKINS_USER_TOKEN': config['jenkins_user_token'],
            'JENKINS_JOB_TOKEN': config['jenkins_job_token']
        }
    )

    # Create the stage with the Jenkins jobs
    jenkins_stage = pipeline.ensure_stage(constants.JENKINS_VERIFICATION_STAGE_NAME)
    jenkins_user_name = config['jenkins_user_name']

    jenkins_url = "https://build.testeng.edx.org"
    jenkins_job_timeout = 60 * 60

    e2e_tests = jenkins_stage.ensure_job('edx-e2e-test')
    e2e_tests.timeout = str(jenkins_job_timeout + 60)
    tasks.generate_package_install(e2e_tests, 'tubular')
    tasks.trigger_jenkins_build(
        e2e_tests,
        jenkins_url,
        jenkins_user_name,
        'edx-e2e-tests',
        {},
        timeout=jenkins_job_timeout,
        custom_error_message="Need help troubleshooting e2e tests failures? "
                             "See here: https://openedx.atlassian.net/wiki/display/MBT/What+to+do+when+e2e+tests+fail"
    )

    microsites_tests = jenkins_stage.ensure_job('microsites-staging-tests')
    microsites_tests.timeout = str(jenkins_job_timeout + 60)
    tasks.generate_package_install(microsites_tests, 'tubular')
    tasks.trigger_jenkins_build(
        microsites_tests,
        jenkins_url,
        jenkins_user_name,
        'microsites-staging-tests', {
            'CI_BRANCH': 'master',
        },
        timeout=jenkins_job_timeout,
    )

    # Create the Message PRs tasks, which run only if the Jenkins test jobs fail
    base_ami_artifact = utils.ArtifactLocation(
        utils.build_artifact_path(["prerelease_edxapp_materials_latest", "PROD_edx_edxapp_B"]),
        constants.BASE_AMI_SELECTION_STAGE_NAME,
        constants.BASE_AMI_SELECTION_EDP_JOB_NAME(STAGE_EDX_EDXAPP),
        constants.BASE_AMI_OVERRIDE_FILENAME,
    )
    head_ami_artifact = utils.ArtifactLocation(
        "STAGE_edxapp_B",
        constants.BUILD_AMI_STAGE_NAME,
        constants.BUILD_AMI_JOB_NAME,
        constants.BUILD_AMI_FILENAME,
    )
    for job in [e2e_tests, microsites_tests]:
        tasks.generate_message_pull_requests_in_commit_range(
            pipeline, job=job, org='edx', repo='edx-platform',
            token=config['github_token'], release_status=constants.ReleaseStatus.E2E_FAILED,
            runif="failed", base_ami_artifact=base_ami_artifact, base_ami_tag_app='edxapp-from-pipeline',
            head_ami_artifact=head_ami_artifact, head_ami_tag_app='edxapp-from-pipeline',
            extra_text="**E2E tests have failed.** https://gocd.tools.edx.org/go/tab/pipeline/history/STAGE_edxapp_M-D"
        )
Example #34
def generate_rollback_asg_stage(
        pipeline,
        asgard_api_endpoints,
        asgard_token,
        aws_access_key_id,
        aws_secret_access_key,
        slack_token,
        slack_room,
        deploy_file_location
):
    """
    Generates a stage which performs a rollback to a previous ASG (or ASGs) via Asgard.
    If the previous ASG(s) fail health checks for some reason, new ASG(s) built from the
    provided AMI ID are created and used as the rollback ASG(s).
    This stage *always* requires manual approval.

    Args:
        pipeline (gomatic.Pipeline):
        asgard_api_endpoints (str): canonical URL for asgard.
        asgard_token (str):
        aws_access_key_id (str):
        aws_secret_access_key (str):
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.
        deploy_file_location (ArtifactLocation): The location of the YAML artifact from the previous
            deploy that has the previous ASG info along with the `ami_id`, for rollback and re-deploy respectively.
    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables(
        {
            'ASGARD_API_ENDPOINTS': asgard_api_endpoints,
            'SLACK_ROOM': slack_room,
        }
    ).ensure_encrypted_environment_variables(
        {
            'AWS_ACCESS_KEY_ID': aws_access_key_id,
            'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
            'SLACK_TOKEN': slack_token,
            'ASGARD_API_TOKEN': asgard_token,
        }
    )

    stage = pipeline.ensure_stage(constants.ROLLBACK_ASGS_STAGE_NAME)
    # Important: Do *not* automatically rollback! Always manual...
    stage.set_has_manual_approval()
    job = stage.ensure_job(constants.ROLLBACK_ASGS_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    tasks.retrieve_artifact(deploy_file_location, job, 'tubular')

    job.add_task(ExecTask(
        [
            '/bin/bash',
            '-c',
            'mkdir -p ../target',
        ],
        working_dir="tubular"
    ))

    artifact_path = '{}/{}'.format(
        constants.ARTIFACT_PATH,
        constants.ROLLBACK_AMI_OUT_FILENAME
    )
    job.ensure_artifacts(set([BuildArtifact(artifact_path)]))

    job.add_task(ExecTask(
        [
            'rollback_asg.py',
            '--config_file', deploy_file_location.file_name,
            '--out_file', '../{}'.format(artifact_path),
        ],
        working_dir="tubular"
    ))
    return stage
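An invocation sketch, assuming the deploy output artifact produced by the deploy stage shown in Example #32; the Slack room is a placeholder.

# Hypothetical rollback wiring: feed the deploy stage's output file back in,
# so the previous ASGs (and the ami_id) are known at rollback time.
deploy_file_location = ArtifactLocation(
    pipeline.name,
    constants.DEPLOY_AMI_STAGE_NAME,
    constants.DEPLOY_AMI_JOB_NAME,
    constants.DEPLOY_AMI_OUT_FILENAME,
)
generate_rollback_asg_stage(
    pipeline,
    config['asgard_api_endpoints'],
    config['asgard_token'],
    config['aws_access_key_id'],
    config['aws_secret_access_key'],
    config['slack_token'],
    'release-notifications',             # slack_room (placeholder)
    deploy_file_location,
)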
Example #35
def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - jenkins_user_token
    - jenkins_job_token
    """

    pipeline = configurator.ensure_pipeline_group(constants.ORA2_PIPELINE_GROUP_NAME) \
                           .ensure_replacement_of_pipeline(constants.BUILD_ORA2_SANDBOX_PIPELINE_NAME) \
                           .set_timer('0 30 9 * * ?')

    for material in (TUBULAR, EDX_ORA2):
        pipeline.ensure_material(material())

    pipeline.ensure_environment_variables(
        {
            'DNS_NAME': 'ora2',
            'NAME_TAG': 'ora2',
            'EDXAPP_VERSION': 'master',
            'ORA2_VERSION': 'master',
            'CONFIGURATION_VERSION': 'master',
            'CONFIGURATION_SOURCE_REPO': 'https://github.com/edx/configuration.git',
            'CONFIGURATION_SECURE_VERSION': 'master',
            'CONFIGURATION_INTERNAL_VERSION': 'master',
            'NOTIFY_ON_FAILURE': '*****@*****.**'
        }
    )

    pipeline.ensure_unencrypted_secure_environment_variables(
        {
            'JENKINS_USER_TOKEN': config['jenkins_user_token'],
            'JENKINS_JOB_TOKEN': config['jenkins_job_token']
        }
    )

    # Create the Create Sandbox stage, job, and task
    jenkins_create_ora2_sandbox_stage = pipeline.ensure_stage(
        constants.CREATE_ORA2_SANDBOX_STAGE_NAME
    )
    jenkins_create_ora2_sandbox_job = jenkins_create_ora2_sandbox_stage.ensure_job(
        constants.CREATE_ORA2_SANDBOX_JOB_NAME
    )
    tasks.generate_package_install(jenkins_create_ora2_sandbox_job, 'tubular')

    # Keys need to be lower case for this job to use them
    create_ora2_sandbox_jenkins_params = {
        'dns_name': '$DNS_NAME',
        'name_tag': '$NAME_TAG',
        'edxapp_version': '$EDXAPP_VERSION',
        'configuration_version': '$CONFIGURATION_VERSION',
        'configuration_source_repo': '$CONFIGURATION_SOURCE_REPO',
        'configuration_secure_version': '$CONFIGURATION_SECURE_VERSION',
        'configuration_internal_version': '$CONFIGURATION_INTERNAL_VERSION'
    }
    jenkins_timeout = 75 * 60
    jenkins_create_ora2_sandbox_job.timeout = str(jenkins_timeout + 60)
    tasks.trigger_jenkins_build(
        jenkins_create_ora2_sandbox_job,
        constants.ORA2_JENKINS_URL,
        constants.ORA2_JENKINS_USER_NAME,
        constants.CREATE_ORA2_SANDBOX_JENKINS_JOB_NAME,
        create_ora2_sandbox_jenkins_params,
        timeout=jenkins_timeout
    )

    # Create the Set Ora2 Version stage, job, and task
    jenkins_set_ora2_version_stage = pipeline.ensure_stage(
        constants.SET_ORA2_VERSION_STAGE_NAME
    )
    jenkins_set_ora2_version_job = jenkins_set_ora2_version_stage.ensure_job(
        constants.SET_ORA2_VERSION_JOB_NAME
    )
    tasks.generate_package_install(jenkins_set_ora2_version_job, 'tubular')
    # Keys need to be upper case for this job to use them
    set_ora2_version_jenkins_params = {
        'ORA2_VERSION': '$ORA2_VERSION',
        'SANDBOX_HOST': '${DNS_NAME}.sandbox.edx.org',
        'NOTIFY_ON_FAILURE': '$NOTIFY_ON_FAILURE'
    }
    tasks.trigger_jenkins_build(
        jenkins_set_ora2_version_job,
        constants.ORA2_JENKINS_URL,
        constants.ORA2_JENKINS_USER_NAME,
        constants.SET_ORA2_VERSION_JENKINS_JOB_NAME,
        set_ora2_version_jenkins_params
    )

    # Create the Ora2 Add Course to Sandbox stage, job, and task
    jenkins_add_course_to_ora2_stage = pipeline.ensure_stage(
        constants.ADD_COURSE_TO_ORA2_STAGE_NAME
    )
    jenkins_add_course_to_ora2_job = jenkins_add_course_to_ora2_stage.ensure_job(
        constants.ADD_COURSE_TO_ORA2_JOB_NAME
    )
    tasks.generate_package_install(jenkins_add_course_to_ora2_job, 'tubular')
    # Keys need to be upper case for this job to use them
    add_course_to_ora2_jenkins_params = {
        'SANDBOX_HOST': '${DNS_NAME}.sandbox.edx.org',
        'NOTIFY_ON_FAILURE': '$NOTIFY_ON_FAILURE'
    }
    tasks.trigger_jenkins_build(
        jenkins_add_course_to_ora2_job,
        constants.ORA2_JENKINS_URL,
        constants.ORA2_JENKINS_USER_NAME,
        constants.ADD_COURSE_TO_ORA2_JENKINS_JOB_NAME,
        add_course_to_ora2_jenkins_params
    )

    # Create the Enable Auto Auth stage, job, and task
    jenkins_enable_auto_auth_stage = pipeline.ensure_stage(
        constants.ENABLE_AUTO_AUTH_STAGE_NAME
    )
    jenkins_enable_auto_auth_job = jenkins_enable_auto_auth_stage.ensure_job(
        constants.ENABLE_AUTO_AUTH_JOB_NAME
    )
    tasks.generate_package_install(jenkins_enable_auto_auth_job, 'tubular')
    # Keys need to be upper case for this job to use them
    enable_auto_auth_jenkins_params = {
        'SANDBOX_HOST': '${DNS_NAME}.sandbox.edx.org',
        'NOTIFY_ON_FAILURE': '$NOTIFY_ON_FAILURE'
    }
    tasks.trigger_jenkins_build(
        jenkins_enable_auto_auth_job,
        constants.ORA2_JENKINS_URL,
        constants.ORA2_JENKINS_USER_NAME,
        constants.ENABLE_AUTO_AUTH_JENKINS_JOB_NAME,
        enable_auto_auth_jenkins_params
    )

    # Create the Ora2 Run Tests stage, job, and task
    jenkins_run_ora2_tests_stage = pipeline.ensure_stage(
        constants.RUN_ORA2_TESTS_STAGE_NAME
    )
    jenkins_run_ora2_tests_job = jenkins_run_ora2_tests_stage.ensure_job(
        constants.RUN_ORA2_TESTS_JOB_NAME
    )
    tasks.generate_package_install(jenkins_run_ora2_tests_job, 'tubular')
    # Keys need to be upper case for this job to use them
    run_ora2_tests_jenkins_params = {
        'SANDBOX_HOST': '${DNS_NAME}.sandbox.edx.org',
        'BRANCH': '$ORA2_VERSION',
        'SLEEP_TIME': 300,
        'NOTIFY_ON_FAILURE': '$NOTIFY_ON_FAILURE'
    }
    jenkins_timeout = 75 * 60
    tasks.trigger_jenkins_build(
        jenkins_run_ora2_tests_job,
        constants.ORA2_JENKINS_URL,
        constants.ORA2_JENKINS_USER_NAME,
        constants.RUN_ORA2_TESTS_JENKINS_JOB_NAME,
        run_ora2_tests_jenkins_params,
        timeout=jenkins_timeout
    )
Example #36
def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    materials: A list of dictionaries of the materials used in this pipeline.
    upstream_pipelines: A list of dictionaries of the upstream pipelines that feed into the manual verification.
    """
    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
                           .ensure_replacement_of_pipeline(config['pipeline_name'])

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=set(material['ignore_patterns'])))

    for material in config['upstream_pipelines']:
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    # What this accomplishes:
    # When an upstream pipeline (such as the edx stage pipeline) runs, this pipeline runs
    # downstream. Since the first stage is automatic, the git materials are carried over
    # from the upstream pipeline.
    #
    # The second stage in this pipeline requires manual approval.
    #
    # This allows the overall workflow to remain paused while manual verification is
    # completed, and allows the git materials to stay pinned.
    #
    # Once the second stage is approved, the workflow continues and downstream pipelines
    # execute with the same pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline,
                                constants.INITIAL_VERIFICATION_STAGE_NAME)

    # For now, you can only trigger builds on a single jenkins server, because you can only
    # define a single username/token.
    # And all the jobs that you want to trigger need the same job token defined.
    # TODO: refactor when required so that each job can define their own user and job tokens
    pipeline.ensure_unencrypted_secure_environment_variables({
        'JENKINS_USER_TOKEN': config['jenkins_user_token'],
        'JENKINS_JOB_TOKEN': config['jenkins_job_token']
    })

    # Create the stage with the Jenkins jobs
    jenkins_stage = pipeline.ensure_stage(
        constants.JENKINS_VERIFICATION_STAGE_NAME)
    jenkins_stage.set_has_manual_approval()
    jenkins_user_name = config['jenkins_user_name']

    for jenkins in config['jenkins_verifications']:
        pipeline_job_name = jenkins['pipeline_job_name']
        jenkins_url = jenkins['url']
        jenkins_job_name = jenkins['job_name']
        key, _, param = jenkins['param'].partition(' ')
        jenkins_param = {key: param}

        job = jenkins_stage.ensure_job(pipeline_job_name)
        tasks.generate_package_install(job, 'tubular')
        tasks.trigger_jenkins_build(job, jenkins_url, jenkins_user_name,
                                    jenkins_job_name, jenkins_param)

    manual_verification_stage = pipeline.ensure_stage(
        constants.MANUAL_VERIFICATION_STAGE_NAME)
    manual_verification_stage.set_has_manual_approval()
    manual_verification_job = manual_verification_stage.ensure_job(
        constants.MANUAL_VERIFICATION_JOB_NAME)
    manual_verification_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
        ])
    )
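A sketch of the jenkins_verifications config entries the loop above consumes. The 'param' value is a single 'KEY value' string; partition(' ') splits it on the first space into one Jenkins parameter. All values are placeholders.

config['jenkins_verifications'] = [
    {
        'pipeline_job_name': 'run_smoke_tests',     # GoCD job name (placeholder)
        'url': 'https://jenkins.example.com',
        'job_name': 'smoke-tests',                  # Jenkins job name (placeholder)
        'param': 'CI_BRANCH master',                # becomes {'CI_BRANCH': 'master'}
    },
]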
Example #37
def generate_launch_instance(
        pipeline,
        aws_access_key_id,
        aws_secret_access_key,
        ec2_vpc_subnet_id,
        ec2_security_group_id,
        ec2_instance_profile_name,
        base_ami_id,
        manual_approval=False,
        ec2_region=constants.EC2_REGION,
        ec2_instance_type=constants.EC2_INSTANCE_TYPE,
        ec2_timeout=constants.EC2_LAUNCH_INSTANCE_TIMEOUT,
        ec2_ebs_volume_size=constants.EC2_EBS_VOLUME_SIZE,
        base_ami_id_artifact=None
):
    """
    Pattern to launch an instance from a base AMI. Generates 3 artifacts:
        key.pem             - Private key material generated for this instance launch
        launch_info.yml     - YAML file that contains information about the instance launched
        ansible_inventory   - a list of private AWS IP addresses that can be fed into Ansible to run playbooks

        Please check here for further information:
        https://github.com/edx/configuration/blob/master/playbooks/continuous_delivery/launch_instance.yml

    Args:
        pipeline (gomatic.Pipeline):
        aws_access_key_id (str): AWS key ID for auth
        aws_secret_access_key (str): AWS secret key for auth
        ec2_vpc_subnet_id (str):
        ec2_security_group_id (str):
        ec2_instance_profile_name (str):
        base_ami_id (str): the ami-id used to launch the instance
        manual_approval (bool): Should this stage require manual approval?
        ec2_region (str):
        ec2_instance_type (str):
        ec2_timeout (str):
        ec2_ebs_volume_size (str):
        base_ami_id_artifact (edxpipelines.utils.ArtifactLocation): overrides base_ami_id and forces
                                                                       the task to run with the AMI built upstream.

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.LAUNCH_INSTANCE_STAGE_NAME)

    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.LAUNCH_INSTANCE_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')

    if base_ami_id_artifact:
        tasks.retrieve_artifact(base_ami_id_artifact, job, constants.ARTIFACT_PATH)

    # Create the instance-launching task.
    tasks.generate_launch_instance(
        job,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        ec2_vpc_subnet_id=ec2_vpc_subnet_id,
        ec2_security_group_id=ec2_security_group_id,
        ec2_instance_profile_name=ec2_instance_profile_name,
        base_ami_id=base_ami_id,
        ec2_region=ec2_region,
        ec2_instance_type=ec2_instance_type,
        ec2_timeout=ec2_timeout,
        ec2_ebs_volume_size=ec2_ebs_volume_size,
        variable_override_path='{}/{}'.format(
            constants.ARTIFACT_PATH, base_ami_id_artifact.file_name
        ) if base_ami_id_artifact else None,
    )

    tasks.generate_ensure_python2(job)

    return stage
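An invocation sketch using the base-AMI override artifact named elsewhere in these examples; the surrounding pipeline and config keys are assumptions.

# Hypothetical: launch an instance from the AMI id recorded by an upstream
# base-AMI selection job, overriding base_ami_id via base_ami_id_artifact.
base_ami_artifact = ArtifactLocation(
    'prerelease_edxapp_materials_latest',
    constants.BASE_AMI_SELECTION_STAGE_NAME,
    constants.BASE_AMI_SELECTION_EDP_JOB_NAME(STAGE_EDX_EDXAPP),
    constants.BASE_AMI_OVERRIDE_FILENAME,
)
generate_launch_instance(
    pipeline,
    aws_access_key_id=config['aws_access_key_id'],
    aws_secret_access_key=config['aws_secret_access_key'],
    ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
    ec2_security_group_id=config['ec2_security_group_id'],
    ec2_instance_profile_name=config['ec2_instance_profile_name'],
    base_ami_id=config.get('base_ami_id'),
    base_ami_id_artifact=base_ami_artifact,
)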
Example #38
def install_pipelines(configurator, config):
    """
    Install pipelines that can rollback the stage edx-mktg site.
    """
    pipeline = configurator \
        .ensure_pipeline_group(constants.DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline('rollback-stage-marketing-site') \
        .ensure_material(TUBULAR()) \
        .ensure_material(EDX_MKTG()) \
        .ensure_material(ECOM_SECURE()) \
        .ensure_material(PipelineMaterial(constants.DEPLOY_MARKETING_PIPELINE_NAME, constants.FETCH_TAG_STAGE_NAME))

    pipeline.ensure_environment_variables(
        {
            'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
        }
    )

    pipeline.ensure_encrypted_environment_variables(
        {
            'PRIVATE_GITHUB_KEY': config['github_private_key'],
            'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
            'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
            'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key'],
        }
    )

    stage_tag_name_artifact_params = {
        'pipeline': constants.DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': constants.FETCH_TAG_STAGE_NAME,
        'job': constants.FETCH_TAG_JOB_NAME,
        'src': FetchArtifactFile('{stage_tag}.txt'.format(stage_tag=constants.STAGE_TAG_NAME)),
        'dest': 'target'
    }

    # Stage to rollback stage to its last stable tag
    rollback_stage = pipeline.ensure_stage(constants.ROLLBACK_STAGE_NAME)
    rollback_stage.set_has_manual_approval()
    rollback_job = rollback_stage.ensure_job(constants.ROLLBACK_JOB_NAME)

    tasks.generate_package_install(rollback_job, 'tubular')
    tasks.generate_target_directory(rollback_job)
    rollback_job.add_task(FetchArtifactTask(**stage_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        rollback_job,
        constants.STAGE_ENV,
        '{stage_tag}.txt'.format(stage_tag=constants.STAGE_TAG_NAME)
    )

    # Stage to clear the caches
    clear_stage_caches_stage = pipeline.ensure_stage(constants.CLEAR_STAGE_CACHES_STAGE_NAME)
    clear_stage_caches_job = clear_stage_caches_stage.ensure_job(constants.CLEAR_STAGE_CACHES_JOB_NAME)

    tasks.generate_package_install(clear_stage_caches_job, 'tubular')
    clear_stage_caches_job.add_task(
        tasks.bash_task(
            """
            chmod 600 ecom-secure/acquia/acquia_github_key.pem &&
            cp {ecom_secure}/acquia/acquia_github_key.pem {edx_mktg}/docroot/
            """,
            ecom_secure=ECOM_SECURE().destination_directory,
            edx_mktg=EDX_MKTG().destination_directory
        )
    )
    tasks.generate_flush_drupal_caches(clear_stage_caches_job, constants.STAGE_ENV)
    tasks.generate_clear_varnish_cache(clear_stage_caches_job, constants.STAGE_ENV)
Example #39
def install_pipelines(configurator, config):
    """
    Install pipelines that can deploy the edx-mktg site.
    """
    pipeline = configurator \
        .ensure_pipeline_group(constants.DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline(constants.DEPLOY_MARKETING_PIPELINE_NAME) \
        .ensure_material(TUBULAR()) \
        .ensure_material(EDX_MKTG()) \
        .ensure_material(ECOM_SECURE())

    pipeline.ensure_environment_variables(
        {
            'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
        }
    )

    pipeline.ensure_encrypted_environment_variables(
        {
            'PRIVATE_GITHUB_KEY': config['github_private_key'],
            'PRIVATE_ACQUIA_REMOTE': config['acquia_remote_url'],
            'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
            'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
            'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key']
        }
    )

    # Stage to fetch the current tag names from stage and prod
    fetch_tag_stage = pipeline.ensure_stage(constants.FETCH_TAG_STAGE_NAME)
    fetch_tag_stage.set_has_manual_approval()
    fetch_tag_job = fetch_tag_stage.ensure_job(constants.FETCH_TAG_JOB_NAME)
    tasks.generate_package_install(fetch_tag_job, 'tubular')
    tasks.generate_target_directory(fetch_tag_job)
    path_name = '../target/{env}_tag_name.txt'
    tasks.generate_fetch_tag(fetch_tag_job, constants.STAGE_ENV, path_name)
    tasks.generate_fetch_tag(fetch_tag_job, constants.PROD_ENV, path_name)

    fetch_tag_job.ensure_artifacts(
        set([BuildArtifact('target/{stage_tag}.txt'.format(stage_tag=constants.STAGE_TAG_NAME)),
             BuildArtifact('target/{prod_tag}.txt'.format(prod_tag=constants.PROD_TAG_NAME))])
    )

    # Stage to create and push a tag to Acquia.
    push_to_acquia_stage = pipeline.ensure_stage(constants.PUSH_TO_ACQUIA_STAGE_NAME)
    push_to_acquia_job = push_to_acquia_stage.ensure_job(constants.PUSH_TO_ACQUIA_JOB_NAME)
    # Ensures the tag name is accessible in future jobs.
    push_to_acquia_job.ensure_artifacts(
        set([BuildArtifact('target/{new_tag}.txt'.format(new_tag=constants.NEW_TAG_NAME))])
    )

    tasks.generate_package_install(push_to_acquia_job, 'tubular')
    tasks.generate_target_directory(push_to_acquia_job)

    # Create a tag from MARKETING_REPOSITORY_VERSION branch of marketing repo
    push_to_acquia_job.add_task(
        tasks.bash_task(
            # Writing the date to a file (computed once, then reused) avoids
            # inconsistencies when a job straddles a rollover (23:59:59 -> 00:00:00),
            # since every later step reads the same tag name from the file.
            # NOTE: Uses UTC
            """\
            echo -n "release-$(date +%Y-%m-%d-%H.%M)" > ../target/{new_tag}.txt &&
            TAG_NAME=$(cat ../target/{new_tag}.txt) &&
            /usr/bin/git config user.email "*****@*****.**" &&
            /usr/bin/git config user.name "edx-secure" &&
            /usr/bin/git tag -a $TAG_NAME -m "Release for $(date +%B\\ %d,\\ %Y). Created by $GO_TRIGGER_USER." &&
            /usr/bin/git push origin $TAG_NAME
            """,
            new_tag=constants.NEW_TAG_NAME,
            working_dir='edx-mktg'
        )
    )

    # Set up Acquia remote repo and push tag to Acquia. Change new tag file to contain "tags/" for deployment.
    push_to_acquia_job.add_task(
        tasks.bash_task(
            """\
            chmod 600 ../ecom-secure/acquia/acquia_github_key.pem &&
            if [[ $(git remote) != *"acquia"*  ]]; then
                /usr/bin/git remote add acquia $PRIVATE_ACQUIA_REMOTE ;
            fi &&
            GIT_SSH_COMMAND="/usr/bin/ssh -o StrictHostKeyChecking=no -i ../{ecom_secure}/acquia/acquia_github_key.pem"
            /usr/bin/git push acquia $(cat ../target/{new_tag}.txt) &&
            echo -n "tags/" | cat - ../target/{new_tag}.txt > temp &&
            mv temp ../target/{new_tag}.txt
            """,
            new_tag=constants.NEW_TAG_NAME,
            ecom_secure=ECOM_SECURE().destination_directory,
            working_dir='edx-mktg'
        )
    )

    # Stage to backup database in stage
    backup_stage_database_stage = pipeline.ensure_stage(constants.BACKUP_STAGE_DATABASE_STAGE_NAME)
    backup_stage_database_job = backup_stage_database_stage.ensure_job(constants.BACKUP_STAGE_DATABASE_JOB_NAME)

    tasks.generate_package_install(backup_stage_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_stage_database_job, constants.STAGE_ENV)

    # Stage to deploy to stage
    deploy_stage_for_stage = pipeline.ensure_stage(constants.DEPLOY_STAGE_STAGE_NAME)
    deploy_job_for_stage = deploy_stage_for_stage.ensure_job(constants.DEPLOY_STAGE_JOB_NAME)

    tasks.generate_package_install(deploy_job_for_stage, 'tubular')
    tasks.generate_target_directory(deploy_job_for_stage)

    # fetch the tag name
    new_tag_name_artifact_params = {
        'pipeline': constants.DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': constants.PUSH_TO_ACQUIA_STAGE_NAME,
        'job': constants.PUSH_TO_ACQUIA_JOB_NAME,
        'src': FetchArtifactFile('{new_tag}.txt'.format(new_tag=constants.NEW_TAG_NAME)),
        'dest': 'target'
    }
    deploy_job_for_stage.add_task(FetchArtifactTask(**new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        deploy_job_for_stage,
        constants.STAGE_ENV,
        '{new_tag}.txt'.format(new_tag=constants.NEW_TAG_NAME)
    )

    # Stage to clear caches in stage
    clear_stage_caches_stage = pipeline.ensure_stage(constants.CLEAR_STAGE_CACHES_STAGE_NAME)
    clear_stage_caches_job = clear_stage_caches_stage.ensure_job(constants.CLEAR_STAGE_CACHES_JOB_NAME)

    tasks.generate_package_install(clear_stage_caches_job, 'tubular')
    clear_stage_caches_job.add_task(
        tasks.bash_task(
            """
            chmod 600 ecom-secure/acquia/acquia_github_key.pem &&
            cp {ecom_secure}/acquia/acquia_github_key.pem {edx_mktg}/docroot/
            """,
            ecom_secure=ECOM_SECURE().destination_directory,
            edx_mktg=EDX_MKTG().destination_directory
        )
    )
    tasks.generate_flush_drupal_caches(clear_stage_caches_job, constants.STAGE_ENV)
    tasks.generate_clear_varnish_cache(clear_stage_caches_job, constants.STAGE_ENV)

    # Stage to backup database in prod
    backup_prod_database_stage = pipeline.ensure_stage(constants.BACKUP_PROD_DATABASE_STAGE_NAME)
    backup_prod_database_stage.set_has_manual_approval()
    backup_prod_database_job = backup_prod_database_stage.ensure_job(constants.BACKUP_PROD_DATABASE_JOB_NAME)

    tasks.generate_package_install(backup_prod_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_prod_database_job, constants.PROD_ENV)

    # Stage to deploy to prod
    deploy_stage_for_prod = pipeline.ensure_stage(constants.DEPLOY_PROD_STAGE_NAME)
    deploy_job_for_prod = deploy_stage_for_prod.ensure_job(constants.DEPLOY_PROD_JOB_NAME)

    tasks.generate_package_install(deploy_job_for_prod, 'tubular')
    tasks.generate_target_directory(deploy_job_for_prod)
    deploy_job_for_prod.add_task(FetchArtifactTask(**new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        deploy_job_for_prod,
        constants.PROD_ENV,
        '{new_tag}.txt'.format(new_tag=constants.NEW_TAG_NAME)
    )

    # Stage to clear caches in prod
    clear_prod_caches_stage = pipeline.ensure_stage(constants.CLEAR_PROD_CACHES_STAGE_NAME)
    clear_prod_caches_job = clear_prod_caches_stage.ensure_job(constants.CLEAR_PROD_CACHES_JOB_NAME)

    tasks.generate_package_install(clear_prod_caches_job, 'tubular')
    clear_prod_caches_job.add_task(
        tasks.bash_task(
            """
            chmod 600 ecom-secure/acquia/acquia_github_key.pem &&
            cp {ecom_secure}/acquia/acquia_github_key.pem {edx_mktg}/docroot/
            """,
            ecom_secure=ECOM_SECURE().destination_directory,
            edx_mktg=EDX_MKTG().destination_directory
        )
    )
    tasks.generate_flush_drupal_caches(clear_prod_caches_job, constants.PROD_ENV)
    tasks.generate_clear_varnish_cache(clear_prod_caches_job, constants.PROD_ENV)
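For illustration, the tag-creation task in the push-to-Acquia stage above produces date-stamped tag names; the example timestamp below is made up.

# echo -n "release-$(date +%Y-%m-%d-%H.%M)" run at 14:30 on 2019-06-01 yields:
#   release-2019-06-01-14.30          (written to ../target/<new_tag>.txt)
# After the push task prepends "tags/", the file used for deployment contains:
#   tags/release-2019-06-01-14.30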
Example #40
def generate_deployment_messages(
        pipeline, ami_pairs, stage_deploy_pipeline_artifact,
        release_status, confluence_user, confluence_password, github_token,
        base_ami_artifact, head_ami_artifact, message_tags,
        manual_approval=False,
        wiki_parent_title=None, wiki_space=None, wiki_title=None
):
    """
    Creates a stage that will message the pull requests for a range of commits, notifying them
    that the respective pull requests have been deployed to the staging environment.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to attach this stage
        ami_pairs (tuple): a tuple that consists of the artifact locations for the ami selection and the ami built
            for all environments.
        stage_deploy_pipeline_artifact(ArtifactLocation): The artifact location of the stage deploy yaml output.
        release_status (ReleaseStatus): the current status of the release
        confluence_user (str): The confluence user to create the release page with
        confluence_password (str): The confluence password to create the release page with
        github_token (str): The github token to fetch PR data with
        base_ami_artifact (ArtifactLocation): The location of the artifact that specifies
            the base_ami and tags
        head_ami_artifact (ArtifactLocation): The location of the artifact that specifies
            the head_ami and tags
        message_tags (list of (org, repo, version_tag)): The list of org/repo pairs that should
            be messaged based on changes in the specified version tag between the base and head ami
            artifacts.
        manual_approval (bool): Should this stage require manual approval?
        wiki_parent_title (str): The title of the parent page to publish the release page
            under (defaults to 'LMS/Studio Release Pages')
        wiki_space (str): The space to publish the release page in (defaults to 'RELEASES')
        wiki_title (str): The title of the release wiki page (defaults to '<Next Release Date> Release')

    Returns:
        gomatic.stage.Stage

    """
    message_stage = pipeline.ensure_stage(constants.MESSAGE_PR_STAGE_NAME)
    if manual_approval:
        message_stage.set_has_manual_approval()
    message_job = message_stage.ensure_job(constants.MESSAGE_PR_JOB_NAME)
    tasks.generate_package_install(message_job, 'tubular')

    for org, repo, version_tag in message_tags:
        tasks.generate_message_pull_requests_in_commit_range(
            pipeline, message_job, org, repo, github_token, release_status,
            base_ami_artifact=base_ami_artifact, base_ami_tag_app=version_tag,
            head_ami_artifact=head_ami_artifact, head_ami_tag_app=version_tag,
        )
    wiki_job = message_stage.ensure_job(constants.PUBLISH_WIKI_JOB_NAME)
    tasks.generate_package_install(wiki_job, 'tubular')

    if release_status == constants.ReleaseStatus.STAGED:
        parent_title = wiki_parent_title
        title = wiki_title
        space = wiki_space
        input_artifact = None
    else:
        parent_title = None
        title = None
        space = None
        input_artifact = stage_deploy_pipeline_artifact

    tasks.generate_release_wiki_page(
        pipeline,
        wiki_job,
        confluence_user=confluence_user,
        confluence_password=confluence_password,
        github_token=github_token,
        release_status=release_status,
        ami_pairs=ami_pairs,
        parent_title=parent_title,
        space=space,
        title=title,
        input_artifact=input_artifact,
    )

    return message_stage
Example #41
def generate_run_migrations(pipeline,
                            db_migration_pass,
                            inventory_location,
                            instance_key_location,
                            launch_info_location,
                            application_user,
                            application_name,
                            application_path,
                            duration_threshold=None,
                            from_address=None,
                            to_addresses=None,
                            sub_application_name=None,
                            manual_approval=False):
    """
    Generate the stage that applies/runs migrations.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the run migrations stage.
        db_migration_pass (str): Password for the DB user used to run migrations.
        inventory_location (ArtifactLocation): Location of inventory containing the IP
            address of the EC2 instance, for fetching.
        instance_key_location (ArtifactLocation): Location of SSH key used to access the
            EC2 instance, for fetching.
        launch_info_location (ArtifactLocation): Location of the launch_info.yml file for fetching
        application_user (str): Username to use while running the migrations
        application_name (str): Name of the application (e.g. edxapp, ecommerce, etc...)
        application_path (str): path of the application installed on the target machine
        duration_threshold (int): Threshold in seconds; migrations running longer trigger an email alert.
        from_address (str): Any migration duration email alert will be sent from this address.
        to_addresses (list(str)): List of To: addresses for migration duration email alerts.
        sub_application_name (str): any sub-application to insert into the migration commands {cms|lms}
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables(
        {
            'ARTIFACT_PATH': constants.ARTIFACT_PATH,
            'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG
        }
    )
    if duration_threshold:
        pipeline.ensure_environment_variables(
            {
                'MAX_EMAIL_TRIES': constants.MAX_EMAIL_TRIES
            }
        )

    if sub_application_name is not None:
        stage_name = "{}_{}".format(constants.APPLY_MIGRATIONS_STAGE, sub_application_name)
    else:
        stage_name = constants.APPLY_MIGRATIONS_STAGE
    stage = pipeline.ensure_stage(stage_name)

    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(constants.APPLY_MIGRATIONS_JOB)
    tasks.generate_package_install(job, 'tubular')

    # Fetch the Ansible inventory to use in reaching the EC2 instance.
    tasks.retrieve_artifact(inventory_location, job, constants.ARTIFACT_PATH)

    # Fetch the SSH key to use in reaching the EC2 instance.
    tasks.retrieve_artifact(instance_key_location, job, constants.ARTIFACT_PATH)

    # Ensure the target directory exists.
    tasks.generate_target_directory(job)

    # fetch the launch_info.yml
    tasks.retrieve_artifact(launch_info_location, job, constants.ARTIFACT_PATH)

    # The SSH key used to access the EC2 instance needs specific permissions.
    job.add_task(
        ExecTask(
            ['/bin/bash', '-c', 'chmod 600 {}'.format(instance_key_location.file_name)],
            working_dir=constants.ARTIFACT_PATH
        )
    )

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_run_migrations(
        job,
        application_user,
        application_name,
        application_path,
        constants.DB_MIGRATION_USER,
        db_migration_pass,
        sub_application_name
    )

    if duration_threshold:
        tasks.generate_check_migration_duration(
            job,
            application_name,
            constants.MIGRATION_RESULT_FILENAME,
            duration_threshold,
            from_address,
            to_addresses
        )

    return stage
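An invocation sketch, assuming the three artifacts come from a launch-instance stage earlier in the same pipeline (artifact file names follow Example #37's docstring; the application values are placeholders).

# Hypothetical wiring for an edxapp LMS migration stage.
launch_stage = constants.LAUNCH_INSTANCE_STAGE_NAME
launch_job = constants.LAUNCH_INSTANCE_JOB_NAME
inventory = ArtifactLocation(pipeline.name, launch_stage, launch_job, 'ansible_inventory')
instance_key = ArtifactLocation(pipeline.name, launch_stage, launch_job, constants.KEY_PEM_FILENAME)
launch_info = ArtifactLocation(pipeline.name, launch_stage, launch_job, 'launch_info.yml')
generate_run_migrations(
    pipeline,
    db_migration_pass=config['db_migration_pass'],
    inventory_location=inventory,
    instance_key_location=instance_key,
    launch_info_location=launch_info,
    application_user='edxapp',
    application_name='edxapp',
    application_path='/edx/app/edxapp',
    sub_application_name='lms',
)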
Example #42
def prerelease_materials(edxapp_group, config):
    """
    Generate the prerelease materials pipeline

    Args:
        edxapp_group (gomatic.PipelineGroup): Pipeline group to which this new pipeline will be attached.
        config (dict): the general configuration for this pipeline, which also carries the
            per-EDP configs (stage_edx_edxapp, prod_edx_edxapp, prod_edge_edxapp) keyed by EDP.

    Returns:
        gomatic.Pipeline

    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id

    - ec2_instance_profile_name
    - base_ami_id

    Optional variables:
    - configuration_secure_version
    """
    pipeline = edxapp_group.ensure_replacement_of_pipeline(
        "prerelease_edxapp_materials_latest")
    pipeline.set_label_template('${edx-platform[:7]}-${COUNT}')

    for material in (
            CONFIGURATION,
            EDX_SECURE,
            EDGE_SECURE,
            EDX_MICROSITE,
            EDX_INTERNAL,
            EDGE_INTERNAL,
    ):
        pipeline.ensure_material(material())

    pipeline.ensure_material(TUBULAR())
    pipeline.ensure_material(
        EDX_PLATFORM(material_name='edx-platform',
                     ignore_patterns=frozenset()))

    stage = pipeline.ensure_stage(constants.PRERELEASE_MATERIALS_STAGE_NAME)
    job = stage.ensure_job(constants.PRERELEASE_MATERIALS_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    private_releases.generate_create_private_release_candidate(
        job,
        config['git_token'],
        ('edx', 'edx-platform'),
        'master',
        EDX_PLATFORM().branch,
        ('edx', 'edx-platform-private'),
        'security-release',
        'release-candidate',
        target_reference_repo='edx-platform-private',
    )

    # This prevents the commit being released from being lost when the new
    # release-candidate is cut. However, this will require a janitor job to
    # deal with any releases that are never completed.
    tasks.generate_create_branch(
        pipeline,
        job,
        config['git_token'],
        'edx',
        'edx-platform',
        target_branch="release-candidate-$GO_PIPELINE_COUNTER",
        sha=material_envvar_bash(EDX_PLATFORM()))

    # Move the AMI selection jobs here in a single stage.
    stage = pipeline.ensure_stage(constants.BASE_AMI_SELECTION_STAGE_NAME)
    for edp in (
            STAGE_EDX_EDXAPP,
            PROD_EDX_EDXAPP,
            PROD_EDGE_EDXAPP,
    ):
        localized_config = config[edp]
        job = stage.ensure_job(constants.BASE_AMI_SELECTION_EDP_JOB_NAME(edp))
        tasks.generate_package_install(job, 'tubular')
        tasks.generate_base_ami_selection(
            job, localized_config['aws_access_key_id'],
            localized_config['aws_secret_access_key'], edp,
            config.get('base_ami_id'))

    return pipeline
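A closing note on the label template above, as I understand GoCD's label-template syntax:

# '${edx-platform[:7]}' expands to the first seven characters of the
# edx-platform material revision and '${COUNT}' to the pipeline counter,
# yielding labels like "a1b2c3d-42" (example value only).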