Example #1
def generate_rollback_asgs(stage, edp, deployment_artifact_location, config):
    """
    Generates a job for rolling back ASGs (code).

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        edp (EDP): The EDP that this job should roll back.
        deployment_artifact_location (edxpipelines.utils.ArtifactLocation): Where to find
            the AMI artifact to roll back.
        config (dict): Environment-independent secure config.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.ROLLBACK_ASGS_JOB_NAME_TPL(edp))

    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Retrieve build info from the upstream deploy stage.
    tasks.retrieve_artifact(deployment_artifact_location, job)
    deployment_artifact_path = path_to_artifact(deployment_artifact_location.file_name)

    tasks.generate_rollback_asg(
        job,
        deployment_artifact_path,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
    )

    return job
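
The snippet below is a minimal wiring sketch for this job generator. It assumes the edxpipelines package layout (a utils module exposing the EDP and ArtifactLocation types referenced in the docstrings), and every name, URL, and config value in it is an illustrative placeholder rather than part of the original example.

# Hypothetical usage sketch; module paths, constructor argument order, and
# config keys are assumptions inferred from the docstrings above.
from gomatic import GoCdConfigurator, HostRestClient
from edxpipelines import utils

configurator = GoCdConfigurator(HostRestClient('gocd.example.com'))
pipeline = configurator \
    .ensure_pipeline_group('deploy') \
    .ensure_replacement_of_pipeline('rollback-asgs')
stage = pipeline.ensure_stage('rollback_asgs_stage')

edp = utils.EDP('stage', 'edx', 'edxapp')  # environment, deployment, play
deployment_artifact_location = utils.ArtifactLocation(
    'deploy-pipeline',       # upstream pipeline that deployed the AMI
    'deploy_ami_stage',      # upstream stage name
    'deploy_ami_job',        # upstream job name
    'ami_deploy_info.yml',   # illustrative artifact file name
)
config = {
    'asgard_api_endpoints': 'https://asgard.example.com/api',
    'asgard_token': 'XXXX',
    'aws_access_key_id': 'XXXX',
    'aws_secret_access_key': 'XXXX',
}
generate_rollback_asgs(stage, edp, deployment_artifact_location, config)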
Example #2
def generate_fetch_tag_name(stage, acquia_env):
    """
    Creates a job that fetches the tag name from a Drupal environment.

    Arguments:
        stage (gomatic.Stage): The stage to which the job is being added.
        acquia_env (str): The name of the environment from which the tag name should
            be fetched.

    Returns:
        gomatic.gocd.pipelines.Job: The new job.

    """
    fetch_tag_job = stage.ensure_job(constants.FETCH_TAG_JOB_NAME)
    fetch_tag_job.ensure_resource("edxMarketing")
    tasks.generate_package_install(fetch_tag_job, 'tubular')
    tasks.generate_target_directory(fetch_tag_job)
    path_name = '../target/{env}_tag_name.txt'
    marketing_tasks.generate_fetch_tag(fetch_tag_job, acquia_env, path_name)

    artifact_path = 'target/{tag_name}.txt'.format(
        tag_name=constants.ACQUIA_ENV_TAG_NAME.format(acquia_env=acquia_env)
    )
    fetch_tag_job.ensure_artifacts(set([BuildArtifact(artifact_path)]))

    return fetch_tag_job
Example #3
def generate_find_and_advance_release(
        pipeline,
        advance_pipeline_name,
        advance_pipeline_stage_name,
        gocd_user,
        gocd_password,
        gocd_url,
        slack_token,
        slack_room=constants.SLACK_ROOM
):
    """
    Generates a stage used to find the next release to advance and "manually" advance it.

    Args:
        pipeline (gomatic.Pipeline):
        advance_pipeline_name (str): Name of pipeline to advance.
        advance_pipeline_stage_name (str): Name of stage within pipeline to advance.
        gocd_user (str): GoCD username
        gocd_password (str): GoCD user's password
        gocd_url (str): URL of the GoCD instance
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.RELEASE_ADVANCER_STAGE_NAME)
    job = stage.ensure_job(constants.RELEASE_ADVANCER_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')

    # Add task to generate the directory where the artifact file will be written.
    tasks.generate_target_directory(job)

    pipeline.ensure_unencrypted_secure_environment_variables(
        {
            'GOCD_PASSWORD': gocd_password
        }
    )

    job.ensure_encrypted_environment_variables(
        {
            'SLACK_TOKEN': slack_token,
        }
    )

    tasks.generate_find_and_advance_release(
        job,
        gocd_user,
        gocd_url,
        advance_pipeline_name,
        advance_pipeline_stage_name,
        slack_room,
        out_file=constants.FIND_ADVANCE_PIPELINE_OUT_FILENAME
    )

    return stage
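
A hedged invocation sketch follows; it assumes a pipeline object obtained as in the sketch under Example #1, and all names and credentials are placeholders.

# Hypothetical invocation; note GOCD_PASSWORD is stored as an unencrypted
# secure variable and SLACK_TOKEN as an encrypted one, per the function body.
stage = generate_find_and_advance_release(
    pipeline,
    advance_pipeline_name='prerelease-edxapp-materials-latest',
    advance_pipeline_stage_name='initial_verification',
    gocd_user='release-bot',
    gocd_password='XXXX',
    gocd_url='https://gocd.example.com',
    slack_token='XXXX',
    slack_room='release-pipeline',
)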
Example #4
def generate_build_value_stream_map_url(pipeline):
    """
    Generates a stage that is used to build and serialize the value_stream_map URL for a pipeline.

    Generates 1 artifact:
        value_stream_map.yaml   - YAML file that contains the value_stream_map URL.

    Args:
        pipeline (gomatic.Pipeline):

    Returns:
        gomatic.Stage
    """
    # Create the stage
    stage = pipeline.ensure_stage(constants.BUILD_VALUE_STREAM_MAP_URL_STAGE_NAME)

    # Create the job
    job = stage.ensure_job(constants.BUILD_VALUE_STREAM_MAP_URL_JOB_NAME)

    # Add task to generate the directory where the value_stream_map.yaml file will be written.
    tasks.generate_target_directory(job)

    # Register the value_stream_map.yaml file as a build artifact.
    value_stream_map_filepath = '{artifact_path}/{file_name}'.format(
        artifact_path=constants.ARTIFACT_PATH,
        file_name=constants.VALUE_STREAM_MAP_FILENAME
    )
    # ensure_artifacts expects gomatic Artifact objects, not bare path strings.
    job.ensure_artifacts(set([BuildArtifact(value_stream_map_filepath)]))

    # Script to generate the value_stream_map_url and write it out to value_stream_map.yaml
    script = '''
        if [ -z "$GO_PIPELINE_NAME" ] || [ -z "$GO_PIPELINE_LABEL" ]; then
          echo "Error: missing GO_PIPELINE_NAME and/or GO_PIPELINE_LABEL" 1>&2
          exit 1
        fi
        printf -- '---\n- deploy_value_stream_map: "{base_url}/$GO_PIPELINE_NAME/$GO_PIPELINE_LABEL"' > {filepath}
    '''.format(base_url=constants.BASE_VALUE_STREAM_MAP_URL, filepath=value_stream_map_filepath)
    job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                script
            ]
        )
    )

    return stage
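
For orientation, a hedged usage sketch, with the artifact the job writes at runtime shown as a comment (URL shape and values are illustrative, not taken from constants):

# Hypothetical usage; assumes constants.BASE_VALUE_STREAM_MAP_URL points at
# the GoCD value-stream-map view.
stage = generate_build_value_stream_map_url(pipeline)
# At runtime the bash task writes roughly this to value_stream_map.yaml:
#   ---
#   - deploy_value_stream_map: "https://gocd.example.com/go/pipelines/value_stream_map/my-pipeline/42"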
Example #5
def generate_deploy_to_acquia(pipeline, acquia_env, use_tag=False):
    """
    Creates a stage that deploys to an Acquia environment.

    Arguments:
        pipeline (gomatic.Pipeline): The pipeline to which the stage is being added.
        acquia_env (str): The environment name to which to deploy.
        use_tag (bool): True if deploying off a tag, False otherwise.

    Returns:
        gomatic.Stage: The new stage.

    """
    deploy_stage_for_acquia_env = pipeline.ensure_stage(
        constants.DEPLOY_ACQUIA_ENV_STAGE_NAME.format(acquia_env=acquia_env)
    )
    deploy_job_for_acquia_env = deploy_stage_for_acquia_env.ensure_job(
        constants.DEPLOY_ACQUIA_ENV_JOB_NAME.format(acquia_env=acquia_env)
    )
    deploy_job_for_acquia_env.ensure_resource("edxMarketing")

    if use_tag:
        tasks.generate_target_directory(deploy_job_for_acquia_env)

        # fetch the tag name
        constants.new_tag_name_artifact_params = {
            'pipeline': pipeline.name,
            'stage': constants.BUILD_AND_PUSH_TO_ACQUIA_STAGE_NAME,
            'job': constants.BUILD_AND_PUSH_TO_ACQUIA_JOB_NAME,
            'src': FetchArtifactFile('deploy_tag_name.txt'),
            'dest': 'target'
        }
        deploy_job_for_acquia_env.add_task(FetchArtifactTask(**constants.new_tag_name_artifact_params))

        deploy_source = '$(cat ../{artifact_path}/deploy_tag_name.txt)'.format(
            artifact_path=constants.ARTIFACT_PATH
        )
    else:
        deploy_source = '$MARKETING_REPOSITORY_VERSION'

    tasks.generate_package_install(deploy_job_for_acquia_env, 'tubular')
    marketing_tasks.generate_drupal_deploy(
        deploy_job_for_acquia_env,
        acquia_env,
        deploy_source
    )

    return deploy_stage_for_acquia_env
Example #6
def generate_rollback_in_acquia(pipeline, acquia_env, source_pipeline_env):
    """
    Creates a stage that rolls back an Acquia environment.

    NOTE: Rollback is only implemented for tag based deployments.

    Arguments:
        pipeline (gomatic.Pipeline): The pipeline to which the stage is being added.
        acquia_env (str): The environment name to which to deploy.
        source_pipeline_env (str): The pipeline environment from which the
            rollback is sourced (e.g. 'prod').

    Returns:
        gomatic.Stage: The new stage.

    """
    rollback_stage = pipeline.ensure_stage(constants.ROLLBACK_STAGE_NAME.format(acquia_env=acquia_env))
    rollback_stage.set_has_manual_approval()
    rollback_job = rollback_stage.ensure_job(constants.ROLLBACK_JOB_NAME.format(acquia_env=acquia_env))
    rollback_job.ensure_resource("edxMarketing")

    rollback_tag_name = constants.ACQUIA_ENV_TAG_NAME.format(acquia_env=acquia_env)
    rollback_tag_name_artifact_params = {
        'pipeline': constants.DEPLOY_MARKETING_PIPELINE_NAME.format(acquia_env=source_pipeline_env),
        'stage': constants.FETCH_TAG_STAGE_NAME,
        'job': constants.FETCH_TAG_JOB_NAME,
        'src': FetchArtifactFile('{rollback_tag_name}.txt'.format(rollback_tag_name=rollback_tag_name)),
        'dest': 'target'
    }

    tasks.generate_package_install(rollback_job, 'tubular')
    tasks.generate_target_directory(rollback_job)
    rollback_job.add_task(FetchArtifactTask(**rollback_tag_name_artifact_params))
    marketing_tasks.generate_drupal_deploy(
        rollback_job,
        acquia_env,
        '$(cat ../{artifact_path}/{rollback_tag_name}.txt)'.format(
            artifact_path=constants.ARTIFACT_PATH, rollback_tag_name=rollback_tag_name
        )
    )

    return rollback_stage
Example #7
def generate_build_and_push_to_acquia(pipeline, create_tag=False):
    """
    Creates a stage that builds static assets and then pushes source and assets to Acquia.

    Arguments:
        pipeline (gomatic.Pipeline): The pipeline to which the stage is being added.
        create_tag (bool): True if a tag should first be created, False to push a branch directly.

    Returns:
        gomatic.Stage: The new stage.

    """
    build_and_push_to_acquia_stage = pipeline.ensure_stage(
        constants.BUILD_AND_PUSH_TO_ACQUIA_STAGE_NAME
    )
    build_and_push_to_acquia_job = build_and_push_to_acquia_stage.ensure_job(
        constants.BUILD_AND_PUSH_TO_ACQUIA_JOB_NAME
    )
    build_and_push_to_acquia_job.ensure_resource("edxMarketing")

    marketing_tasks.setup_git_config(build_and_push_to_acquia_job)
    marketing_tasks.add_acquia_remote_if_needed(build_and_push_to_acquia_job)

    if create_tag:
        # Ensures the tag name is accessible in future jobs.
        build_and_push_to_acquia_job.ensure_artifacts(set([BuildArtifact('target/deploy_tag_name.txt')]))

        tasks.generate_package_install(build_and_push_to_acquia_job, 'tubular')
        tasks.generate_target_directory(build_and_push_to_acquia_job)

        marketing_tasks.create_tag_without_static_assets(build_and_push_to_acquia_job)
        marketing_tasks.build_and_commit_static_assets(build_and_push_to_acquia_job)
        marketing_tasks.create_and_push_tag_with_static_assets_to_acquia(build_and_push_to_acquia_job)
        marketing_tasks.push_original_tag_to_mktg(build_and_push_to_acquia_job)
    else:
        build_and_push_to_acquia_stage.set_has_manual_approval()

        marketing_tasks.build_and_commit_static_assets(build_and_push_to_acquia_job)
        marketing_tasks.push_branch_to_acquia(build_and_push_to_acquia_job)

    return build_and_push_to_acquia_stage
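
A hedged composition sketch showing how the Acquia helpers from Examples #2 and #5-#7 might be chained in one pipeline; the environment names and stage ordering are assumptions, not taken from the original examples.

# Hypothetical composition; pipeline comes from a GoCdConfigurator as in the
# sketch under Example #1.
build_stage = generate_build_and_push_to_acquia(pipeline, create_tag=True)

# Record the tag currently deployed, so a rollback stage can return to it.
fetch_stage = pipeline.ensure_stage(constants.FETCH_TAG_STAGE_NAME)
generate_fetch_tag_name(fetch_stage, 'prod')

# Deploy the new tag, then keep a manually-triggered rollback stage ready.
generate_deploy_to_acquia(pipeline, 'prod', use_tag=True)
generate_rollback_in_acquia(pipeline, 'prod', source_pipeline_env='prod')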
Example #8
def generate_merge_release_candidate(
        pipeline, stage, token, org, repo, target_branch, head_sha,
        fast_forward_only, reference_repo=None,
):
    """
    Generates a job that is used to merge a Git source branch into a target branch,
    optionally ensuring that the merge is a fast-forward merge.

    Args:
        pipeline (gomatic.Pipeline): The pipeline containing ``stage``.
        stage (gomatic.Stage): The stage to add the job to
        org (str): Name of the github organization that holds the repository (e.g. edx)
        repo (str): Name of repository (e.g edx-platform)
        target_branch (str): Name of the branch into which to merge the source branch
        head_sha (str): commit SHA or environment variable holding the SHA to tag as the release
        token (str): the github token used to create all these things. Will be an env_var 'GIT_TOKEN'
        fast_forward_only (bool): If True, force a fast-forward merge or fail.

    Returns:
        gomatic.Job
    """
    merge_branch_job = stage.ensure_job(constants.GIT_MERGE_RC_BRANCH_JOB_NAME)
    tasks.generate_package_install(merge_branch_job, 'tubular')
    tasks.generate_target_directory(merge_branch_job)
    tasks.generate_merge_branch(
        pipeline,
        merge_branch_job,
        token,
        org,
        repo,
        head_sha,
        target_branch,
        fast_forward_only,
        reference_repo=reference_repo,
    )
    return merge_branch_job
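
A hedged call sketch; the SHA is passed as an environment-variable reference, which the docstring explicitly allows, and the token placeholder stands in for a real GitHub token.

# Hypothetical invocation; pipeline and stage come from a configurator as in
# the sketch under Example #1.
merge_job = generate_merge_release_candidate(
    pipeline,
    stage,
    token='XXXX',
    org='edx',
    repo='edx-platform',
    target_branch='release',
    head_sha='$GO_REVISION_EDX_PLATFORM',  # env var holding the RC SHA
    fast_forward_only=True,
)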
Example #9
def generate_build_ami(stage,
                       edp,
                       app_repo_url,
                       configuration_secure_material,
                       configuration_internal_material,
                       playbook_path,
                       config,
                       version_tags=None,
                       **kwargs):
    """
    Generates a job for creating a new AMI.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            for which an AMI will be created.
        app_repo_url (str): App repo's URL.
        configuration_secure_material (gomatic.gomatic.gocd.materials.GitMaterial): Secure
            configuration material. Destination directory expected to be 'configuration-secure'.
        configuration_internal_material (gomatic.gomatic.gocd.materials.GitMaterial): Internal
            configuration material. Destination directory expected to be 'configuration-internal'.
        playbook_path (str): Path to the Ansible playbook to run when creating the AMI.
        config (dict): Environment-specific secure config.
        version_tags (dict): An optional {app_name: (repo, version), ...} dict that
            specifies what versions to tag the AMI with.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Locate the base AMI.
    tasks.generate_base_ami_selection(
        job,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        edp=edp
    )

    # Launch a new instance on which to build the AMI.
    tasks.generate_launch_instance(
        job,
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
        ec2_security_group_id=config['ec2_security_group_id'],
        ec2_instance_profile_name=config['ec2_instance_profile_name'],
        variable_override_path=path_to_artifact(constants.BASE_AMI_OVERRIDE_FILENAME),
    )

    # Run the Ansible play for the service.
    tasks.generate_run_app_playbook(
        job,
        playbook_path,
        edp,
        app_repo_url,
        private_github_key=config['github_private_key'],
        hipchat_token=config['hipchat_token'],
        configuration_secure_dir=configuration_secure_material.destination_directory,
        configuration_internal_dir=configuration_internal_material.destination_directory,
        disable_edx_services='true',
        COMMON_TAG_EC2_INSTANCE='true',
        **kwargs
    )

    # Create an AMI from the instance.
    tasks.generate_create_ami(
        job,
        edp.play,
        edp.deployment,
        edp.environment,
        app_repo_url,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        path_to_artifact(constants.LAUNCH_INSTANCE_FILENAME),
        hipchat_token=config['hipchat_token'],
        version_tags=version_tags,
        **kwargs
    )

    tasks.generate_ami_cleanup(job, config['hipchat_token'], runif='any')

    return job
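
A hedged invocation sketch; the GitMaterial destinations follow the docstring's stated expectations, while the repo URLs, playbook path, and tag values are illustrative placeholders.

# Hypothetical invocation; stage, edp, and config as in the sketch under
# Example #1 (config here also needs the ec2/github/hipchat keys read above).
from gomatic import GitMaterial

configuration_secure_material = GitMaterial(
    'git@github.com:edx/configuration-secure.git',
    destination_directory='configuration-secure',
)
configuration_internal_material = GitMaterial(
    'git@github.com:edx/configuration-internal.git',
    destination_directory='configuration-internal',
)
build_job = generate_build_ami(
    stage,
    edp,
    'https://github.com/edx/edx-platform.git',
    configuration_secure_material,
    configuration_internal_material,
    'playbooks/edx-east/edxapp.yml',
    config,
    version_tags={'edxapp': ('edx/edx-platform', 'release-2020-01-01')},
)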
Example #10
def generate_poll_tests_and_merge_pr(pipeline,
                                     stage,
                                     job,
                                     stage_name,
                                     job_name,
                                     pr_artifact_params,
                                     artifact_filename,
                                     org,
                                     repo,
                                     token,
                                     initial_poll_wait,
                                     max_poll_tries,
                                     poll_interval,
                                     manual_approval):
    """
    Generates a stage that is used to:
    - poll for successful completion of PR tests
    - merge the PR

    Args:
        pipeline (gomatic.Pipeline): Pipeline to attach this stage to
        stage (gomatic.Stage): Stage to use when adding tasks -or- None
        job (gomatic.Job): Job to use when adding tasks -or- None
        stage_name (str): Name of the stage
        job_name (str): Name of the job
        pr_artifact_params (dict): Params to use in creation of artifact-fetching task.
        artifact_filename (str): Filename of the artifact to fetch/read-in.
        org (str): Name of the github organization that holds the repository (e.g. edx)
        repo (str): Name of repository (e.g edx-platform)
        token (str): the github token used to create all these things. Will be an env_var 'GIT_TOKEN'
        initial_poll_wait (int): Number of seconds that will pass between 1st/2nd poll attempts.
        max_poll_tries (int): Maximum number of poll attempts that should occur before failing.
        poll_interval (int): Number of seconds between all poll attempts (after the 1st/2nd attempt interval).
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables(
        {
            'PR_TEST_INITIAL_WAIT_INTERVAL': str(initial_poll_wait),
            'MAX_PR_TEST_POLL_TRIES': str(max_poll_tries),
            'PR_TEST_POLL_INTERVAL': str(poll_interval)
        }
    )
    pipeline.ensure_unencrypted_secure_environment_variables(
        {
            'GIT_TOKEN': token
        }
    )
    if stage is None:
        git_stage = pipeline.ensure_stage(stage_name)
        if manual_approval:
            git_stage.set_has_manual_approval()
        git_job = git_stage.ensure_job(job_name)
    else:
        git_stage = stage
        git_job = job
    tasks.generate_package_install(git_job, 'tubular')
    tasks.generate_target_directory(git_job)

    # Fetch the PR-creation material.
    git_job.add_task(FetchArtifactTask(**pr_artifact_params))

    # Generate a task that polls the status of combined tests for a PR.
    tasks.generate_poll_pr_tests(
        git_job,
        org,
        repo,
        artifact_filename
    )

    # Generate a task that merges a PR that has passed all its tests in the previous task.
    tasks.generate_merge_pr(
        git_job,
        org,
        repo,
        artifact_filename
    )

    return git_stage
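
A hedged invocation sketch; the artifact params point at whatever upstream job created the PR and saved its metadata, and all names are illustrative.

# Hypothetical invocation; passing stage=None/job=None makes the helper
# create its own stage and job with the given names.
from gomatic import FetchArtifactFile

pr_artifact_params = {
    'pipeline': pipeline.name,
    'stage': 'create_branch_and_pr',
    'job': 'create_pr_job',
    'src': FetchArtifactFile('pull_request.yml'),
    'dest': 'target',
}
git_stage = generate_poll_tests_and_merge_pr(
    pipeline,
    stage=None,
    job=None,
    stage_name='poll_and_merge_pr',
    job_name='poll_and_merge_pr_job',
    pr_artifact_params=pr_artifact_params,
    artifact_filename='pull_request.yml',
    org='edx',
    repo='edx-platform',
    token='XXXX',
    initial_poll_wait=30,
    max_poll_tries=100,
    poll_interval=60,
    manual_approval=False,
)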
Example #11
def generate_create_branch_and_pr(pipeline,
                                  stage_name,
                                  org,
                                  repo,
                                  source_branch,
                                  new_branch,
                                  target_branch,
                                  pr_title,
                                  pr_body,
                                  token,
                                  manual_approval):
    """
    Generates a stage that is used to:
    - create a new branch off the HEAD of a source branch
    - create a PR to merge the new branch into a target branch

    Args:
        pipeline (gomatic.Pipeline): Pipeline to attach this stage to
        stage_name (str): Name of the stage
        org (str): Name of the github organization that holds the repository (e.g. edx)
        repo (str): Name of repository (e.g edx-platform)
        source_branch (str): Name of the branch to use in creating the new branch
        new_branch (str): Name of the branch to create off the HEAD of the source branch
        target_branch (str): Name of the branch into which to merge the source branch
        pr_title (str): Title of the new PR
        pr_body (str): Body of the new PR
        token (str): the github token used to create all these things. Will be an env_var 'GIT_TOKEN'
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    git_stage = pipeline.ensure_stage(stage_name)
    if manual_approval:
        git_stage.set_has_manual_approval()
    git_job = git_stage.ensure_job(constants.CREATE_MASTER_MERGE_PR_JOB_NAME)
    tasks.generate_package_install(git_job, 'tubular')
    tasks.generate_target_directory(git_job)

    # Generate a task that creates a new branch off the HEAD of a source branch.
    tasks.generate_create_branch(
        pipeline,
        git_job,
        token,
        org,
        repo,
        target_branch=new_branch,
        source_branch=source_branch
    )

    # Generate a task that creates a pull request merging the new branch from above into a target branch.
    tasks.generate_create_pr(
        git_job,
        org,
        repo,
        new_branch,
        target_branch,
        pr_title,
        pr_body
    )

    return git_stage
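
A hedged invocation sketch; the branch names, PR text, and token are placeholders.

# Hypothetical invocation; creates release-candidate off the HEAD of release
# and opens a PR to merge it into master.
git_stage = generate_create_branch_and_pr(
    pipeline,
    stage_name='create_master_merge_pr',
    org='edx',
    repo='edx-platform',
    source_branch='release',
    new_branch='release-candidate',
    target_branch='master',
    pr_title='Merge release back into master',
    pr_body='Automated PR created by the release pipeline.',
    token='XXXX',
    manual_approval=False,
)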
Example #12
def generate_run_migrations(pipeline,
                            db_migration_pass,
                            inventory_location,
                            instance_key_location,
                            launch_info_location,
                            application_user,
                            application_name,
                            application_path,
                            duration_threshold=None,
                            from_address=None,
                            to_addresses=None,
                            sub_application_name=None,
                            manual_approval=False):
    """
    Generate the stage that applies/runs migrations.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the run migrations stage.
        db_migration_pass (str): Password for the DB user used to run migrations.
        inventory_location (ArtifactLocation): Location of inventory containing the IP
            address of the EC2 instance, for fetching.
        instance_key_location (ArtifactLocation): Location of SSH key used to access the
            EC2 instance, for fetching.
        launch_info_location (ArtifactLocation): Location of the launch_info.yml file for fetching
        application_user (str): Username to use while running the migrations
        application_name (str): Name of the application (e.g. edxapp, ecommerce, etc...)
        application_path (str): path of the application installed on the target machine
        duration_threshold (int): Threshold in seconds over which a migration duration will be alerted.
        from_address (str): Any migration duration email alert will be from this address.
        to_addresses (list(str)): List of To: addresses for migration duration email alerts.
        sub_application_name (str): any sub application to insert in to the migrations commands {cms|lms}
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables(
        {
            'ARTIFACT_PATH': constants.ARTIFACT_PATH,
            'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG
        }
    )
    if duration_threshold:
        pipeline.ensure_environment_variables(
            {
                'MAX_EMAIL_TRIES': constants.MAX_EMAIL_TRIES
            }
        )

    if sub_application_name is not None:
        stage_name = "{}_{}".format(constants.APPLY_MIGRATIONS_STAGE, sub_application_name)
    else:
        stage_name = constants.APPLY_MIGRATIONS_STAGE
    stage = pipeline.ensure_stage(stage_name)

    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(constants.APPLY_MIGRATIONS_JOB)
    tasks.generate_package_install(job, 'tubular')

    # Fetch the Ansible inventory to use in reaching the EC2 instance.
    tasks.retrieve_artifact(inventory_location, job, constants.ARTIFACT_PATH)

    # Fetch the SSH key to use in reaching the EC2 instance.
    tasks.retrieve_artifact(instance_key_location, job, constants.ARTIFACT_PATH)

    # Ensure the target directory exists.
    tasks.generate_target_directory(job)

    # fetch the launch_info.yml
    tasks.retrieve_artifact(launch_info_location, job, constants.ARTIFACT_PATH)

    # The SSH key used to access the EC2 instance needs specific permissions.
    job.add_task(
        ExecTask(
            ['/bin/bash', '-c', 'chmod 600 {}'.format(instance_key_location.file_name)],
            working_dir=constants.ARTIFACT_PATH
        )
    )

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_run_migrations(
        job,
        application_user,
        application_name,
        application_path,
        constants.DB_MIGRATION_USER,
        db_migration_pass,
        sub_application_name
    )

    if duration_threshold:
        tasks.generate_check_migration_duration(
            job,
            application_name,
            constants.MIGRATION_RESULT_FILENAME,
            duration_threshold,
            from_address,
            to_addresses
        )

    return stage
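
A hedged invocation sketch; the three ArtifactLocations would be built like the one in the sketch under Example #1, pointing at the upstream launch-instance stage, and the email settings exercise the optional duration check.

# Hypothetical invocation with migration-duration alerting enabled.
migration_stage = generate_run_migrations(
    pipeline,
    db_migration_pass='XXXX',
    inventory_location=inventory_location,
    instance_key_location=instance_key_location,
    launch_info_location=launch_info_location,
    application_user='edxapp',
    application_name='edxapp',
    application_path='/edx/app/edxapp',
    duration_threshold=1200,            # alert if migrations exceed 20 minutes
    from_address='pipeline@example.com',
    to_addresses=['ops@example.com'],
    sub_application_name='lms',
)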
Example #13
def install_pipelines(configurator, config):
    """
    Install pipelines that can deploy the edx-mktg site.
    """
    pipeline = configurator \
        .ensure_pipeline_group(constants.DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline(constants.DEPLOY_MARKETING_PIPELINE_NAME) \
        .ensure_material(TUBULAR()) \
        .ensure_material(EDX_MKTG()) \
        .ensure_material(ECOM_SECURE())

    pipeline.ensure_environment_variables(
        {
            'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
        }
    )

    pipeline.ensure_encrypted_environment_variables(
        {
            'PRIVATE_GITHUB_KEY': config['github_private_key'],
            'PRIVATE_ACQUIA_REMOTE': config['acquia_remote_url'],
            'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
            'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
            'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key']
        }
    )

    # Stage to fetch the current tag names from stage and prod
    fetch_tag_stage = pipeline.ensure_stage(constants.FETCH_TAG_STAGE_NAME)
    fetch_tag_stage.set_has_manual_approval()
    fetch_tag_job = fetch_tag_stage.ensure_job(constants.FETCH_TAG_JOB_NAME)
    tasks.generate_package_install(fetch_tag_job, 'tubular')
    tasks.generate_target_directory(fetch_tag_job)
    path_name = '../target/{env}_tag_name.txt'
    tasks.generate_fetch_tag(fetch_tag_job, constants.STAGE_ENV, path_name)
    tasks.generate_fetch_tag(fetch_tag_job, constants.PROD_ENV, path_name)

    fetch_tag_job.ensure_artifacts(
        set([BuildArtifact('target/{stage_tag}.txt'.format(stage_tag=constants.STAGE_TAG_NAME)),
             BuildArtifact('target/{prod_tag}.txt'.format(prod_tag=constants.PROD_TAG_NAME))])
    )

    # Stage to create and push a tag to Acquia.
    push_to_acquia_stage = pipeline.ensure_stage(constants.PUSH_TO_ACQUIA_STAGE_NAME)
    push_to_acquia_job = push_to_acquia_stage.ensure_job(constants.PUSH_TO_ACQUIA_JOB_NAME)
    # Ensures the tag name is accessible in future jobs.
    push_to_acquia_job.ensure_artifacts(
        set([BuildArtifact('target/{new_tag}.txt'.format(new_tag=constants.NEW_TAG_NAME))])
    )

    tasks.generate_package_install(push_to_acquia_job, 'tubular')
    tasks.generate_target_directory(push_to_acquia_job)

    # Create a tag from MARKETING_REPOSITORY_VERSION branch of marketing repo
    push_to_acquia_job.add_task(
        tasks.bash_task(
            # Writing dates to a file should help with any issues dealing with a job
            # taking place over two days (23:59:59 -> 00:00:00). Only the day can be
            # affected since we don't use minutes or seconds.
            # NOTE: Uses UTC
            """\
            echo -n "release-$(date +%Y-%m-%d-%H.%M)" > ../target/{new_tag}.txt &&
            TAG_NAME=$(cat ../target/{new_tag}.txt) &&
            /usr/bin/git config user.email "*****@*****.**" &&
            /usr/bin/git config user.name "edx-secure" &&
            /usr/bin/git tag -a $TAG_NAME -m "Release for $(date +%B\\ %d,\\ %Y). Created by $GO_TRIGGER_USER." &&
            /usr/bin/git push origin $TAG_NAME
            """,
            new_tag=constants.NEW_TAG_NAME,
            working_dir='edx-mktg'
        )
    )

    # Set up Acquia remote repo and push tag to Acquia. Change new tag file to contain "tags/" for deployment.
    push_to_acquia_job.add_task(
        tasks.bash_task(
            """\
            chmod 600 ../ecom-secure/acquia/acquia_github_key.pem &&
            if [[ $(git remote) != *"acquia"*  ]]; then
                /usr/bin/git remote add acquia $PRIVATE_ACQUIA_REMOTE ;
            fi &&
            GIT_SSH_COMMAND="/usr/bin/ssh -o StrictHostKeyChecking=no -i ../{ecom_secure}/acquia/acquia_github_key.pem"
            /usr/bin/git push acquia $(cat ../target/{new_tag}.txt) &&
            echo -n "tags/" | cat - ../target/{new_tag}.txt > temp &&
            mv temp ../target/{new_tag}.txt
            """,
            new_tag=constants.NEW_TAG_NAME,
            ecom_secure=ECOM_SECURE().destination_directory,
            working_dir='edx-mktg'
        )
    )

    # Stage to backup database in stage
    backup_stage_database_stage = pipeline.ensure_stage(constants.BACKUP_STAGE_DATABASE_STAGE_NAME)
    backup_stage_database_job = backup_stage_database_stage.ensure_job(constants.BACKUP_STAGE_DATABASE_JOB_NAME)

    tasks.generate_package_install(backup_stage_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_stage_database_job, constants.STAGE_ENV)

    # Stage to deploy to stage
    deploy_stage_for_stage = pipeline.ensure_stage(constants.DEPLOY_STAGE_STAGE_NAME)
    deploy_job_for_stage = deploy_stage_for_stage.ensure_job(constants.DEPLOY_STAGE_JOB_NAME)

    tasks.generate_package_install(deploy_job_for_stage, 'tubular')
    tasks.generate_target_directory(deploy_job_for_stage)

    # fetch the tag name
    constants.new_tag_name_artifact_params = {
        'pipeline': constants.DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': constants.PUSH_TO_ACQUIA_STAGE_NAME,
        'job': constants.PUSH_TO_ACQUIA_JOB_NAME,
        'src': FetchArtifactFile('{new_tag}.txt'.format(new_tag=constants.NEW_TAG_NAME)),
        'dest': 'target'
    }
    deploy_job_for_stage.add_task(FetchArtifactTask(**constants.new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        deploy_job_for_stage,
        constants.STAGE_ENV,
        '{new_tag}.txt'.format(new_tag=constants.NEW_TAG_NAME)
    )

    # Stage to clear caches in stage
    clear_stage_caches_stage = pipeline.ensure_stage(constants.CLEAR_STAGE_CACHES_STAGE_NAME)
    clear_stage_caches_job = clear_stage_caches_stage.ensure_job(constants.CLEAR_STAGE_CACHES_JOB_NAME)

    tasks.generate_package_install(clear_stage_caches_job, 'tubular')
    clear_stage_caches_job.add_task(
        tasks.bash_task(
            """
            chmod 600 ecom-secure/acquia/acquia_github_key.pem &&
            cp {ecom_secure}/acquia/acquia_github_key.pem {edx_mktg}/docroot/
            """,
            ecom_secure=ECOM_SECURE().destination_directory,
            edx_mktg=EDX_MKTG().destination_directory
        )
    )
    tasks.generate_flush_drupal_caches(clear_stage_caches_job, constants.STAGE_ENV)
    tasks.generate_clear_varnish_cache(clear_stage_caches_job, constants.STAGE_ENV)

    # Stage to backup database in prod
    backup_prod_database_stage = pipeline.ensure_stage(constants.BACKUP_PROD_DATABASE_STAGE_NAME)
    backup_prod_database_stage.set_has_manual_approval()
    backup_prod_database_job = backup_prod_database_stage.ensure_job(constants.BACKUP_PROD_DATABASE_JOB_NAME)

    tasks.generate_package_install(backup_prod_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_prod_database_job, constants.PROD_ENV)

    # Stage to deploy to prod
    deploy_stage_for_prod = pipeline.ensure_stage(constants.DEPLOY_PROD_STAGE_NAME)
    deploy_job_for_prod = deploy_stage_for_prod.ensure_job(constants.DEPLOY_PROD_JOB_NAME)

    tasks.generate_package_install(deploy_job_for_prod, 'tubular')
    tasks.generate_target_directory(deploy_job_for_prod)
    deploy_job_for_prod.add_task(FetchArtifactTask(**constants.new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        deploy_job_for_prod,
        constants.PROD_ENV,
        '{new_tag}.txt'.format(new_tag=constants.NEW_TAG_NAME)
    )

    # Stage to clear caches in prod
    clear_prod_caches_stage = pipeline.ensure_stage(constants.CLEAR_PROD_CACHES_STAGE_NAME)
    clear_prod_caches_job = clear_prod_caches_stage.ensure_job(constants.CLEAR_PROD_CACHES_JOB_NAME)

    tasks.generate_package_install(clear_prod_caches_job, 'tubular')
    clear_prod_caches_job.add_task(
        tasks.bash_task(
            """
            chmod 600 ecom-secure/acquia/acquia_github_key.pem &&
            cp {ecom_secure}/acquia/acquia_github_key.pem {edx_mktg}/docroot/
            """,
            ecom_secure=ECOM_SECURE().destination_directory,
            edx_mktg=EDX_MKTG().destination_directory
        )
    )
    tasks.generate_flush_drupal_caches(clear_prod_caches_job, constants.PROD_ENV)
    tasks.generate_clear_varnish_cache(clear_prod_caches_job, constants.PROD_ENV)
Example #14
def generate_ansible_stage(stage_name,
                           task,
                           pipeline,
                           inventory_location,
                           instance_key_location,
                           launch_info_location,
                           application_user,
                           application_name,
                           application_path,
                           hipchat_auth_token,
                           hipchat_room=constants.HIPCHAT_ROOM,
                           manual_approval=False):
    """
        Generate the stage with the given name, that runs the specified task.

        Args:
            stage_name (str): Name of the generated stage.
            task (function): Task to be executed by the stage.
            pipeline (gomatic.Pipeline): Pipeline to which to add the run migrations stage.
            inventory_location (ArtifactLocation): Location of inventory containing the IP address of the EC2 instance, for fetching.
            instance_key_location (ArtifactLocation): Location of SSH key used to access the EC2 instance, for fetching.
            launch_info_location (ArtifactLocation): Location of the launch_info.yml file for fetching
            application_user (str): Username to use while running the migrations
            application_name (str): Name of the application (e.g. edxapp, programs, etc...)
            application_path (str): path of the application installed on the target machine
            hipchat_auth_token (str): HipChat authentication token
            hipchat_room (str): HipChat room where announcements should be made
            manual_approval (bool): Should this stage require manual approval?

        Returns:
            gomatic.Stage
        """

    pipeline.ensure_environment_variables({
        'APPLICATION_USER': application_user,
        'APPLICATION_NAME': application_name,
        'APPLICATION_PATH': application_path,
        'HIPCHAT_ROOM': hipchat_room,
    })
    pipeline.ensure_encrypted_environment_variables({
        'HIPCHAT_TOKEN': hipchat_auth_token,
    })

    stage = pipeline.ensure_stage(stage_name)

    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(stage_name + '_job')

    # Fetch the Ansible inventory to use in reaching the EC2 instance.
    artifact_params = {
        "pipeline": inventory_location.pipeline,
        "stage": inventory_location.stage,
        "job": inventory_location.job,
        "src": FetchArtifactFile(inventory_location.file_name),
        "dest": 'configuration'
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # Fetch the SSH key to use in reaching the EC2 instance.
    artifact_params = {
        "pipeline": instance_key_location.pipeline,
        "stage": instance_key_location.stage,
        "job": instance_key_location.job,
        "src": FetchArtifactFile(instance_key_location.file_name),
        "dest": 'configuration'
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # Ensure the target directory exists.
    tasks.generate_target_directory(job)

    # fetch the launch_info.yml
    artifact_params = {
        "pipeline": launch_info_location.pipeline,
        "stage": launch_info_location.stage,
        "job": launch_info_location.job,
        "src": FetchArtifactFile(launch_info_location.file_name),
        "dest": "target"
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # The SSH key used to access the EC2 instance needs specific permissions.
    job.add_task(
        ExecTask(
            ['/bin/bash', '-c', 'chmod 600 {}'.format(instance_key_location.file_name)],
            working_dir='configuration'
        )
    )

    tasks.generate_requirements_install(job, 'configuration')
    task(job)

    return stage
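
A hedged invocation sketch; the distinguishing argument here is task, a callable that receives the job and attaches whatever tasks the play needs.

# Hypothetical invocation; the task callable below is a trivial illustration,
# and the ArtifactLocations are assumed to exist as in the earlier sketches.
from gomatic import ExecTask

def add_play_tasks(job):
    # Placeholder: a real caller would attach the play's actual tasks here.
    job.add_task(ExecTask(['/bin/bash', '-c', 'echo "running play"']))

stage = generate_ansible_stage(
    'run_play',
    add_play_tasks,
    pipeline,
    inventory_location,
    instance_key_location,
    launch_info_location,
    application_user='edxapp',
    application_name='edxapp',
    application_path='/edx/app/edxapp',
    hipchat_auth_token='XXXX',
)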
Example #15
def generate_create_ami_from_instance(
        pipeline,
        play,
        deployment,
        edx_environment,
        app_repo,
        configuration_secure_repo,
        aws_access_key_id,
        aws_secret_access_key,
        configuration_repo=constants.PUBLIC_CONFIGURATION_REPO_URL,
        ami_creation_timeout="3600",
        ami_wait='yes',
        cache_id='',
        artifact_path=constants.ARTIFACT_PATH,
        hipchat_room=constants.HIPCHAT_ROOM,
        manual_approval=False,
        **kwargs):
    """
    Generates an artifact ami.yml:
        ami_id: ami-abcdefg
        ami_message: AMI creation operation complete
        ami_state: available

    Args:
        pipeline (gomatic.Pipeline):
        play (str): Play that was run on the instance (used for tagging)
        deployment (str):
        edx_environment (str):
        app_repo (str):
        configuration_secure_repo (str):
        aws_access_key_id (str):
        aws_secret_access_key (str):
        configuration_repo (str):
        ami_creation_timeout (str):
        ami_wait (str):
        cache_id (str):
        artifact_path (str):
        hipchat_room (str):
        manual_approval (bool):
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.BUILD_AMI_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()
    pipeline.ensure_encrypted_environment_variables({
        'AWS_ACCESS_KEY_ID': aws_access_key_id,
        'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
    })

    pipeline.ensure_environment_variables({
        'PLAY': play,
        'DEPLOYMENT': deployment,
        'EDX_ENVIRONMENT': edx_environment,
        'APP_REPO': app_repo,
        'CONFIGURATION_REPO': configuration_repo,
        'CONFIGURATION_SECURE_REPO': configuration_secure_repo,
        'AMI_CREATION_TIMEOUT': ami_creation_timeout,
        'AMI_WAIT': ami_wait,
        'CACHE_ID': cache_id,  # GoCD build number
        'ARTIFACT_PATH': artifact_path,
        'HIPCHAT_ROOM': hipchat_room,
        'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG,
    })

    # Install the requirements.
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME)
    tasks.generate_requirements_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')

    tasks.generate_target_directory(job)

    # fetch the key material
    artifact_params = {
        'pipeline': pipeline.name,
        'stage': constants.LAUNCH_INSTANCE_STAGE_NAME,
        'job': constants.LAUNCH_INSTANCE_JOB_NAME,
        'src': FetchArtifactFile("launch_info.yml"),
        'dest': constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # Create an AMI from the instance
    tasks.generate_create_ami(job, **kwargs)

    return stage
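
A hedged invocation sketch; repository URLs and credentials are placeholders, and the trailing keyword argument illustrates the **kwargs pass-through to the AMI-creation task.

# Hypothetical invocation; extra keyword arguments flow into
# tasks.generate_create_ami as Ansible options.
stage = generate_create_ami_from_instance(
    pipeline,
    play='edxapp',
    deployment='edx',
    edx_environment='stage',
    app_repo='https://github.com/edx/edx-platform.git',
    configuration_secure_repo='git@github.com:edx/configuration-secure.git',
    aws_access_key_id='XXXX',
    aws_secret_access_key='XXXX',
    manual_approval=True,
    ami_tag_app_version='release-2020-01-01',  # illustrative Ansible option
)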
Example #16
def install_pipelines(configurator, config):
    """
    Install pipelines that can rollback the stage edx-mktg site.
    """
    pipeline = configurator \
        .ensure_pipeline_group(constants.DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline('rollback-stage-marketing-site') \
        .ensure_material(TUBULAR()) \
        .ensure_material(EDX_MKTG()) \
        .ensure_material(ECOM_SECURE()) \
        .ensure_material(PipelineMaterial(constants.DEPLOY_MARKETING_PIPELINE_NAME, constants.FETCH_TAG_STAGE_NAME))

    pipeline.ensure_environment_variables(
        {
            'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
        }
    )

    pipeline.ensure_encrypted_environment_variables(
        {
            'PRIVATE_GITHUB_KEY': config['github_private_key'],
            'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
            'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
            'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key'],
        }
    )

    stage_tag_name_artifact_params = {
        'pipeline': constants.DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': constants.FETCH_TAG_STAGE_NAME,
        'job': constants.FETCH_TAG_JOB_NAME,
        'src': FetchArtifactFile('{stage_tag}.txt'.format(stage_tag=constants.STAGE_TAG_NAME)),
        'dest': 'target'
    }

    # Stage to rollback stage to its last stable tag
    rollback_stage = pipeline.ensure_stage(constants.ROLLBACK_STAGE_NAME)
    rollback_stage.set_has_manual_approval()
    rollback_job = rollback_stage.ensure_job(constants.ROLLBACK_JOB_NAME)

    tasks.generate_package_install(rollback_job, 'tubular')
    tasks.generate_target_directory(rollback_job)
    rollback_job.add_task(FetchArtifactTask(**stage_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        rollback_job,
        constants.STAGE_ENV,
        '{stage_tag}.txt'.format(stage_tag=constants.STAGE_TAG_NAME)
    )

    # Stage to clear the caches
    clear_stage_caches_stage = pipeline.ensure_stage(constants.CLEAR_STAGE_CACHES_STAGE_NAME)
    clear_stage_caches_job = clear_stage_caches_stage.ensure_job(constants.CLEAR_STAGE_CACHES_JOB_NAME)

    tasks.generate_package_install(clear_stage_caches_job, 'tubular')
    clear_stage_caches_job.add_task(
        tasks.bash_task(
            """
            chmod 600 ecom-secure/acquia/acquia_github_key.pem &&
            cp {ecom_secure}/acquia/acquia_github_key.pem {edx_mktg}/docroot/
            """,
            ecom_secure=ECOM_SECURE().destination_directory,
            edx_mktg=EDX_MKTG().destination_directory
        )
    )
    tasks.generate_flush_drupal_caches(clear_stage_caches_job, constants.STAGE_ENV)
    tasks.generate_clear_varnish_cache(clear_stage_caches_job, constants.STAGE_ENV)
Example #17
def install_pipeline(save_config_locally, dry_run, variable_files, cmd_line_vars):
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'], config['gocd_username'], config['gocd_password'], ssl=True))

    pipeline = configurator \
        .ensure_pipeline_group(DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline(DEPLOY_MARKETING_PIPELINE_NAME) \
        .set_git_material(GitMaterial('https://github.com/edx/tubular',
                                      polling=True,
                                      destination_directory='tubular',
                                      ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                      )
                          )

    pipeline.ensure_environment_variables(
        {
            'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
        }
    )

    pipeline.ensure_encrypted_environment_variables(
        {
            'PRIVATE_GITHUB_KEY': config['github_private_key'],
            'PRIVATE_MARKETING_REPOSITORY_URL': config['mktg_repository_url'],
            'PRIVATE_ACQUIA_REMOTE': config['acquia_remote_url'],
            'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
            'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
            'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key']
        }
    )

    # Stage to fetch the current tag names from stage and prod
    fetch_tag_stage = pipeline.ensure_stage(FETCH_TAG_STAGE_NAME)
    fetch_tag_stage.set_has_manual_approval()
    fetch_tag_job = fetch_tag_stage.ensure_job(FETCH_TAG_JOB_NAME)
    tasks.generate_requirements_install(fetch_tag_job, 'tubular')
    tasks.generate_target_directory(fetch_tag_job)
    path_name = '../target/{env}_tag_name.txt'
    tasks.generate_fetch_tag(fetch_tag_job, STAGE_ENV, path_name)
    tasks.generate_fetch_tag(fetch_tag_job, PROD_ENV, path_name)

    fetch_tag_job.ensure_artifacts(
        set([BuildArtifact('target/{stage_tag}.txt'.format(stage_tag=STAGE_TAG_NAME)),
             BuildArtifact('target/{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME))])
    )

    # Stage to create and push a tag to Acquia.
    push_to_acquia_stage = pipeline.ensure_stage(PUSH_TO_ACQUIA_STAGE_NAME)
    push_to_acquia_job = push_to_acquia_stage.ensure_job(PUSH_TO_ACQUIA_JOB_NAME)
    # Ensures the tag name is accessible in future jobs.
    push_to_acquia_job.ensure_artifacts(
        set([BuildArtifact('target/{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))])
    )

    tasks.generate_requirements_install(push_to_acquia_job, 'tubular')
    tasks.generate_target_directory(push_to_acquia_job)
    tasks.fetch_edx_mktg(push_to_acquia_job, 'edx-mktg')

    # Create a tag from MARKETING_REPOSITORY_VERSION branch of marketing repo
    push_to_acquia_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                # Writing dates to a file should help with any issues dealing with a job
                # taking place over two days (23:59:59 -> 00:00:00). Only the day can be
                # affected since we don't use minutes or seconds.
                # NOTE: Uses UTC
                'echo -n "release-$(date +%Y-%m-%d-%H.%M)" > ../target/{new_tag}.txt && '
                'TAG_NAME=$(cat ../target/{new_tag}.txt) && '
                '/usr/bin/git config user.email "*****@*****.**" && '
                '/usr/bin/git config user.name "edx-secure" && '
                '/usr/bin/git tag -a $TAG_NAME -m "Release for $(date +%B\ %d,\ %Y). Created by $GO_TRIGGER_USER." && '
                'GIT_SSH_COMMAND="/usr/bin/ssh -o StrictHostKeyChecking=no -i ../github_key.pem" '
                '/usr/bin/git push origin $TAG_NAME'.format(new_tag=NEW_TAG_NAME)
            ],
            working_dir='edx-mktg'
        )
    )

    # Set up Acquia Github key for use in pushing tag to Acquia
    tasks.format_RSA_key(push_to_acquia_job, 'acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')

    # Set up Acquia remote repo and push tag to Acquia. Change new tag file to contain "tags/" for deployment.
    push_to_acquia_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                '/usr/bin/git remote add acquia $PRIVATE_ACQUIA_REMOTE && '
                'GIT_SSH_COMMAND="/usr/bin/ssh -o StrictHostKeyChecking=no -i ../acquia_github_key.pem" '
                '/usr/bin/git push acquia $(cat ../target/{new_tag}.txt) && '
                'echo -n "tags/" | cat - ../target/{new_tag}.txt > temp && mv temp ../target/{new_tag}.txt'.format(new_tag=NEW_TAG_NAME)
            ],
            working_dir='edx-mktg'
        )
    )

    # Stage to backup database in stage
    backup_stage_database_stage = pipeline.ensure_stage(BACKUP_STAGE_DATABASE_STAGE_NAME)
    backup_stage_database_job = backup_stage_database_stage.ensure_job(BACKUP_STAGE_DATABASE_JOB_NAME)

    tasks.generate_requirements_install(backup_stage_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_stage_database_job, STAGE_ENV)

    # Stage to deploy to stage
    deploy_stage_for_stage = pipeline.ensure_stage(DEPLOY_STAGE_STAGE_NAME)
    deploy_job_for_stage = deploy_stage_for_stage.ensure_job(DEPLOY_STAGE_JOB_NAME)

    tasks.generate_requirements_install(deploy_job_for_stage, 'tubular')
    tasks.generate_target_directory(deploy_job_for_stage)

    # fetch the tag name
    new_tag_name_artifact_params = {
        'pipeline': DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': PUSH_TO_ACQUIA_STAGE_NAME,
        'job': PUSH_TO_ACQUIA_JOB_NAME,
        'src': FetchArtifactFile('{new_tag}.txt'.format(new_tag=NEW_TAG_NAME)),
        'dest': 'target'
    }
    deploy_job_for_stage.add_task(FetchArtifactTask(**new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(deploy_job_for_stage, STAGE_ENV, '{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))

    # Stage to clear caches in stage
    clear_stage_caches_stage = pipeline.ensure_stage(CLEAR_STAGE_CACHES_STAGE_NAME)
    clear_stage_caches_job = clear_stage_caches_stage.ensure_job(CLEAR_STAGE_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_stage_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_stage_caches_job, 'tubular')
    tasks.format_RSA_key(clear_stage_caches_job, 'edx-mktg/docroot/acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_stage_caches_job, STAGE_ENV)
    tasks.generate_clear_varnish_cache(clear_stage_caches_job, STAGE_ENV)

    # Stage to backup database in prod
    backup_prod_database_stage = pipeline.ensure_stage(BACKUP_PROD_DATABASE_STAGE_NAME)
    backup_prod_database_stage.set_has_manual_approval()
    backup_prod_database_job = backup_prod_database_stage.ensure_job(BACKUP_PROD_DATABASE_JOB_NAME)

    tasks.generate_requirements_install(backup_prod_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_prod_database_job, PROD_ENV)

    # Stage to deploy to prod
    deploy_stage_for_prod = pipeline.ensure_stage(DEPLOY_PROD_STAGE_NAME)
    deploy_job_for_prod = deploy_stage_for_prod.ensure_job(DEPLOY_PROD_JOB_NAME)

    tasks.generate_requirements_install(deploy_job_for_prod, 'tubular')
    tasks.generate_target_directory(deploy_job_for_prod)
    deploy_job_for_prod.add_task(FetchArtifactTask(**new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(deploy_job_for_prod, PROD_ENV, '{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))

    # Stage to clear caches in prod
    clear_prod_caches_stage = pipeline.ensure_stage(CLEAR_PROD_CACHES_STAGE_NAME)
    clear_prod_caches_job = clear_prod_caches_stage.ensure_job(CLEAR_PROD_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_prod_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_prod_caches_job, 'tubular')
    tasks.format_RSA_key(clear_prod_caches_job, 'edx-mktg/docroot/acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_prod_caches_job, PROD_ENV)
    tasks.generate_clear_varnish_cache(clear_prod_caches_job, PROD_ENV)

    configurator.save_updated_config(save_config_locally=save_config_locally, dry_run=dry_run)
Example #18
def generate_rollback_migrations(
        stage,
        edp,
        application_user,
        application_name,
        application_path,
        db_migration_user,
        db_migration_pass,
        migration_info_location,
        inventory_location=None,
        instance_key_location=None,
        ami_artifact_location=None,
        config=None,
        sub_application_name=None,
        additional_migrations=None,
):
    """
    Generates a job for rolling back database migrations.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage this job will be part of
        edp (EDP): EDP that this job will roll back
        migration_info_location (edxpipelines.utils.ArtifactLocation): Location of
            the migration output to roll back
        inventory_location (edxpipelines.utils.ArtifactLocation): Location of the
            ansible inventory
        instance_key_location (edxpipelines.utils.ArtifactLocation): Location of
            the key used to ssh in to the instance
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): AMI to use when
            launching instance used to roll back migrations.
        config (dict): Environment-specific secure config.
        sub_application_name (str): additional command to be passed to the migrate app {cms|lms}
        additional_migrations (list[edxpipelines.utils.MigrationAppInfo]): Additional applications to migrate.
            Will only run if has_migrations=True

    Returns:
        gomatic.gocd.pipelines.Job
    """
    if not additional_migrations:
        additional_migrations = []

    job_name = constants.ROLLBACK_MIGRATIONS_JOB_NAME_TPL(edp)

    if sub_application_name is not None:
        job_name += '_{}'.format(sub_application_name)

    job = stage.ensure_job(job_name)

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    is_instance_launch_required = ami_artifact_location and config

    if is_instance_launch_required:
        # Retrieve the AMI ID from the upstream build stage.
        tasks.retrieve_artifact(ami_artifact_location, job)
        variable_override_path = path_to_artifact(ami_artifact_location.file_name)

        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )
    else:
        # The instance was launched elsewhere. Fetch the Ansible inventory to
        # use in reaching the EC2 instance.
        tasks.retrieve_artifact(inventory_location, job)

        # Fetch the SSH key to use in reaching the EC2 instance.
        tasks.retrieve_artifact(instance_key_location, job)

    # SSH key used to access the instance needs specific permissions.
    job.ensure_task(tasks.bash_task(
        'chmod 600 {key_pem_path}',
        key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
    ))

    # Fetch the migration output.
    tasks.retrieve_artifact(migration_info_location, job)

    tasks.generate_migration_rollback(
        job=job,
        application_user=application_user,
        application_name=application_name,
        application_path=application_path,
        db_migration_user=db_migration_user,
        db_migration_pass=db_migration_pass,
        sub_application_name=sub_application_name,
    )

    for migration_info in additional_migrations:
        tasks.generate_migration_rollback(
            job=job,
            application_user=migration_info.name,
            application_name=migration_info.name,
            application_path=migration_info.path,
            db_migration_user=db_migration_user,
            db_migration_pass=db_migration_pass,
            sub_application_name=migration_info.sub_application_name,
        )

    # If an instance was launched as part of this job, clean it up.
    if is_instance_launch_required:
        tasks.generate_ami_cleanup(job, config['slack_token'], runif='any')

    return job
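A hedged usage sketch, launching a fresh instance from an upstream AMI artifact; the pipeline/stage/job names inside the ArtifactLocations are illustrative, not taken from this codebase:

    from edxpipelines.utils import ArtifactLocation, EDP

    # EDP fields are (environment, deployment, play) per the docstrings above.
    edp = EDP('stage', 'edx', 'edxapp')
    rollback_stage = pipeline.ensure_stage('rollback_migrations')  # hypothetical stage name
    generate_rollback_migrations(
        rollback_stage,
        edp,
        application_user='edxapp',
        application_name='edxapp',
        application_path='/edx/app/edxapp',
        db_migration_user=constants.DB_MIGRATION_USER,
        db_migration_pass=config['db_migration_pass'],
        migration_info_location=ArtifactLocation(
            'deploy-pipeline', 'deploy', 'deploy_ami_job', 'migration_output.yml'),  # illustrative
        ami_artifact_location=ArtifactLocation(
            'build-pipeline', 'build_ami', 'build_ami_job', 'ami.yml'),  # illustrative
        config=config,
    )

Passing both ami_artifact_location and config selects the "launch an instance" branch, so the job also cleans the instance up at the end.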
Example #21
0
def generate_build_ami(stage,
                       edp,
                       app_repo_url,
                       configuration_secure_material,
                       configuration_internal_material,
                       playbook_path,
                       config,
                       version_tags=None,
                       **kwargs):
    """
    Generates a job for creating a new AMI.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            for which an AMI will be created.
        app_repo_url (str): App repo's URL.
        configuration_secure_material (gomatic.gocd.materials.GitMaterial): Secure
            configuration material. Destination directory expected to be 'configuration-secure'.
        configuration_internal_material (gomatic.gocd.materials.GitMaterial): Internal
            configuration material. Destination directory expected to be 'configuration-internal'.
        playbook_path (str): Path to the Ansible playbook to run when creating the AMI.
        config (dict): Environment-specific secure config.
        version_tags (dict): An optional {app_name: (repo, version), ...} dict that
            specifies what versions to tag the AMI with.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Locate the base AMI.
    tasks.generate_base_ami_selection(
        job,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        edp=edp
    )

    # Launch a new instance on which to build the AMI.
    tasks.generate_launch_instance(
        job,
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
        ec2_security_group_id=config['ec2_security_group_id'],
        ec2_instance_profile_name=config['ec2_instance_profile_name'],
        variable_override_path=path_to_artifact(constants.BASE_AMI_OVERRIDE_FILENAME),
    )

    tasks.generate_ensure_python2(job)

    # Run the Ansible play for the service.
    tasks.generate_run_app_playbook(
        job,
        playbook_path,
        edp,
        app_repo_url,
        slack_token=config['slack_token'],
        configuration_secure_dir=configuration_secure_material.destination_directory,
        configuration_internal_dir=configuration_internal_material.destination_directory,
        disable_edx_services='true',
        COMMON_TAG_EC2_INSTANCE='true',
        **kwargs
    )

    # Create an AMI from the instance.
    tasks.generate_create_ami(
        job,
        edp.play,
        edp.deployment,
        edp.environment,
        app_repo_url,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        path_to_artifact(constants.LAUNCH_INSTANCE_FILENAME),
        slack_token=config['slack_token'],
        version_tags=version_tags,
        **kwargs
    )

    tasks.generate_ami_cleanup(job, config['slack_token'], runif='any')

    return job
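A sketch of how this might be invoked; the repo URLs, playbook path, and version tag are illustrative assumptions based only on the parameters documented above:

    from gomatic import GitMaterial

    configuration_secure = GitMaterial(
        'git@github.com:edx-ops/example-secure.git',    # hypothetical repo
        destination_directory='configuration-secure',
    )
    configuration_internal = GitMaterial(
        'git@github.com:edx-ops/example-internal.git',  # hypothetical repo
        destination_directory='configuration-internal',
    )
    build_stage = pipeline.ensure_stage(constants.BUILD_AMI_STAGE_NAME)
    generate_build_ami(
        build_stage,
        edp,
        'https://github.com/edx/edx-platform',
        configuration_secure,
        configuration_internal,
        'playbooks/edx-east/edxapp.yml',  # illustrative playbook path
        config,
        version_tags={'edx_platform': ('https://github.com/edx/edx-platform', 'release-123')},
    )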
Example #22
0
def generate_rollback_migrations(
        stage,
        edp,
        application_user,
        application_name,
        application_path,
        db_migration_user,
        db_migration_pass,
        migration_info_location,
        inventory_location=None,
        instance_key_location=None,
        ami_artifact_location=None,
        config=None,
        sub_application_name=None
):
    """
    Generates a job for rolling back database migrations.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage this job will be part of
        edp (EDP): EDP that this job will roll back
        application_user (str): Username to use while rolling back the migrations
        application_name (str): Name of the application (e.g. edxapp, programs, etc.)
        application_path (str): Path of the application installed on the target machine
        db_migration_user (str): Name of the DB user used to run migrations
        db_migration_pass (str): Password for the DB user used to run migrations
        migration_info_location (edxpipelines.utils.ArtifactLocation): Location of
            the migration output to roll back
        inventory_location (edxpipelines.utils.ArtifactLocation): Location of the
            Ansible inventory
        instance_key_location (edxpipelines.utils.ArtifactLocation): Location of
            the key used to SSH in to the instance
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): AMI to use when
            launching the instance used to roll back migrations.
        config (dict): Environment-specific secure config.
        sub_application_name (str): Additional command to be passed to the migrate app {cms|lms}

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job_name = constants.ROLLBACK_MIGRATIONS_JOB_NAME_TPL(edp)

    if sub_application_name is not None:
        job_name += '_{}'.format(sub_application_name)

    job = stage.ensure_job(job_name)

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    is_instance_launch_required = ami_artifact_location and config

    if is_instance_launch_required:
        # Retrieve the AMI ID from the upstream build stage.
        tasks.retrieve_artifact(ami_artifact_location, job)
        variable_override_path = path_to_artifact(ami_artifact_location.file_name)

        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )
    else:
        # The instance was launched elsewhere. Fetch the Ansible inventory to
        # use in reaching the EC2 instance.
        tasks.retrieve_artifact(inventory_location, job)

        # Fetch the SSH key to use in reaching the EC2 instance.
        tasks.retrieve_artifact(instance_key_location, job)

    # SSH key used to access the instance needs specific permissions.
    job.ensure_task(tasks.bash_task(
        'chmod 600 {key_pem_path}',
        key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
    ))

    # Fetch the migration output.
    tasks.retrieve_artifact(migration_info_location, job)

    tasks.generate_migration_rollback(
        job=job,
        application_user=application_user,
        application_name=application_name,
        application_path=application_path,
        db_migration_user=db_migration_user,
        db_migration_pass=db_migration_pass,
        sub_application_name=sub_application_name,
    )

    # If an instance was launched as part of this job, clean it up.
    if is_instance_launch_required:
        tasks.generate_ami_cleanup(job, config['hipchat_token'], runif='any')

    return job
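This variant takes the same two paths as Example #20. A sketch of the "instance launched elsewhere" mode, where the Ansible inventory and SSH key come from an upstream launch job instead of a fresh AMI launch (upstream stage/job/file names are illustrative; `rollback_stage`, `edp`, `config`, and `migration_artifact` are as in the earlier sketch):

    generate_rollback_migrations(
        rollback_stage,
        edp,
        application_user='edxapp',
        application_name='edxapp',
        application_path='/edx/app/edxapp',
        db_migration_user=constants.DB_MIGRATION_USER,
        db_migration_pass=config['db_migration_pass'],
        migration_info_location=migration_artifact,  # ArtifactLocation from the deploy job
        inventory_location=ArtifactLocation(
            pipeline.name, 'launch_instance', 'launch_instance_job', 'ansible_inventory'),  # illustrative
        instance_key_location=ArtifactLocation(
            pipeline.name, 'launch_instance', 'launch_instance_job', 'key.pem'),  # illustrative
    )

With no ami_artifact_location/config pair, `is_instance_launch_required` is falsy, so the job fetches the inventory and key rather than launching (and later cleaning up) an instance.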
Example #23
0
def generate_create_ami_from_instance(pipeline,
                                      edp,
                                      app_repo,
                                      aws_access_key_id,
                                      aws_secret_access_key,
                                      ami_creation_timeout=3600,
                                      ami_wait='yes',
                                      cache_id='',
                                      artifact_path=constants.ARTIFACT_PATH,
                                      slack_token='',
                                      slack_room=constants.SLACK_ROOM,
                                      manual_approval=False,
                                      version_tags=None,
                                      **kwargs):
    """
    Generates an artifact ami.yml:
        ami_id: ami-abcdefg
        ami_message: AMI creation operation complete
        ami_state: available

    Args:
        pipeline (gomatic.Pipeline):
        edp (EDP):
        app_repo (str):
        aws_access_key_id (str):
        aws_secret_access_key (str):
        ami_creation_timeout (int):
        ami_wait (str):
        cache_id (str):
        artifact_path (str):
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.
        manual_approval (bool):
        version_tags (dict): An optional {app_name: (repo, version), ...} dict that
            specifies what versions to tag the AMI with.
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.BUILD_AMI_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    launch_info_artifact = ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.LAUNCH_INSTANCE_FILENAME,
    )

    tasks.retrieve_artifact(launch_info_artifact, job)

    # Create an AMI from the instance
    tasks.generate_create_ami(
        job=job,
        play=edp.play,
        deployment=edp.deployment,
        edx_environment=edp.environment,
        app_repo=app_repo,
        launch_info_path='{}/{}'.format(constants.ARTIFACT_PATH, constants.LAUNCH_INSTANCE_FILENAME),
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        ami_creation_timeout=ami_creation_timeout,
        ami_wait=ami_wait,
        cache_id=cache_id,
        artifact_path=artifact_path,
        slack_token=slack_token,
        slack_room=slack_room,
        version_tags=version_tags,
        **kwargs)

    return stage
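Downstream stages can consume the generated ami.yml as an ArtifactLocation. A minimal sketch; the stage/job constants match those used above, while the consuming call is an assumption:

    ami_artifact = ArtifactLocation(
        pipeline.name,
        constants.BUILD_AMI_STAGE_NAME,
        constants.BUILD_AMI_JOB_NAME,
        'ami.yml',
    )
    # e.g. feed it to a deploy job such as the one in Example #25:
    generate_deploy_ami(deploy_stage, ami_artifact, edp, config)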
Example #24
0
def generate_run_play(
        pipeline,
        playbook_with_path,
        play,
        deployment,
        edx_environment,
        app_repo,
        private_github_key='',
        hipchat_token='',
        hipchat_room=constants.HIPCHAT_ROOM,
        manual_approval=False,
        configuration_secure_dir=constants.PRIVATE_CONFIGURATION_LOCAL_DIR,
        configuration_internal_dir=constants.INTERNAL_CONFIGURATION_LOCAL_DIR,
        **kwargs):
    """
    TODO: This currently runs from the configuration/playbooks/continuous_delivery/ directory. Need to figure out how to
    pass in a configuration file to ansible-play correctly. TE-1608

    Assumes:
        - The generate_launch_instance stage was used to launch the instance preceding this stage.
        - Requires the ansible_inventory and key.pem files to be in the constants.ARTIFACT_DIRECTORY path
        - Play is run from the constants.PUBLIC_CONFIGURATION_DIR
        - Play is run using the constants.ANSIBLE_CONFIG configuration file

    Args:
        pipeline (gomatic.Pipeline):
        playbook_with_path (str):
        play (str):
        deployment (str):
        edx_environment (str):
        app_repo (str):
        private_github_key (str):
        hipchat_token (str):
        hipchat_room (str):
        manual_approval (bool):
        configuration_secure_dir (str): The secure config directory to use for this play.
        configuration_internal_dir (str): The internal config directory to use for this play.
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    # Set up the necessary environment variables.
    pipeline.ensure_encrypted_environment_variables({
        'HIPCHAT_TOKEN': hipchat_token,
        'PRIVATE_GITHUB_KEY': private_github_key,
    })
    pipeline.ensure_environment_variables({
        'PLAY': play,
        'DEPLOYMENT': deployment,
        'EDX_ENVIRONMENT': edx_environment,
        'APP_REPO': app_repo,
        'ARTIFACT_PATH': '{}/'.format(constants.ARTIFACT_PATH),
        'HIPCHAT_ROOM': hipchat_room,
        'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG,
    })

    stage = pipeline.ensure_stage(constants.RUN_PLAY_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.RUN_PLAY_JOB_NAME)
    tasks.generate_requirements_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    # fetch the key material
    artifact_params = {
        'pipeline': pipeline.name,
        'stage': constants.LAUNCH_INSTANCE_STAGE_NAME,
        'job': constants.LAUNCH_INSTANCE_JOB_NAME,
        'src': FetchArtifactFile("key.pem"),
        'dest': constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # fetch the launch_info.yml
    artifact_params['src'] = FetchArtifactFile('launch_info.yml')
    job.add_task(FetchArtifactTask(**artifact_params))

    # fetch the inventory file
    artifact_params['src'] = FetchArtifactFile('ansible_inventory')
    job.add_task(FetchArtifactTask(**artifact_params))

    tasks.generate_run_app_playbook(job, configuration_internal_dir,
                                    configuration_secure_dir,
                                    playbook_with_path, **kwargs)
    return stage
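The three fetches above mutate a single `artifact_params` dict. An equivalent, arguably clearer formulation is a behavior-preserving loop over the file names:

    for file_name in ('key.pem', 'launch_info.yml', 'ansible_inventory'):
        job.add_task(FetchArtifactTask(
            pipeline=pipeline.name,
            stage=constants.LAUNCH_INSTANCE_STAGE_NAME,
            job=constants.LAUNCH_INSTANCE_JOB_NAME,
            src=FetchArtifactFile(file_name),
            dest=constants.ARTIFACT_PATH,
        ))

Example #27 below does essentially this via `tasks.retrieve_artifact`.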
Example #25
0
def generate_deploy_ami(stage,
                        ami_artifact_location,
                        edp,
                        config,
                        has_migrations=True,
                        application_user=None,
                        additional_migrations=None,
                        management_commands=None):
    """
    Generates a job for deploying an AMI. Migrations are applied as part of this job.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): Where to find
            the AMI artifact to deploy.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            to which the AMI belongs.
        config (dict): Environment-specific secure config.
        has_migrations (bool): Whether to generate Gomatic for applying migrations.
        application_user (str): Application user, if different from the play name.
        additional_migrations (list[edxpipelines.utils.MigrationAppInfo]): Additional applications to migrate.
            Only run if has_migrations=True.
        management_commands (list): Management commands to run; only run if the list is non-empty.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    if not additional_migrations:
        additional_migrations = []

    job = stage.ensure_job(constants.DEPLOY_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Retrieve the AMI ID from the upstream build stage.
    tasks.retrieve_artifact(ami_artifact_location, job)
    variable_override_path = path_to_artifact(ami_artifact_location.file_name)

    if has_migrations or management_commands:
        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )

        # SSH key used to access the instance needs specific permissions.
        job.ensure_task(tasks.bash_task(
            'chmod 600 {key_pem_path}',
            key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
        ))

        if application_user is None:
            application_user = edp.play

    if has_migrations:
        tasks.generate_run_migrations(
            job,
            application_user=application_user,
            application_name=application_user,
            application_path='/edx/app/{}'.format(application_user),
            db_migration_user=constants.DB_MIGRATION_USER,
            db_migration_pass=config['db_migration_pass'],
        )

        for migration in additional_migrations:
            tasks.generate_run_migrations(
                job,
                application_user=migration.name,
                application_name=migration.name,
                application_path=migration.path,
                db_migration_user=constants.DB_MIGRATION_USER,
                db_migration_pass=config['db_migration_pass'],
                sub_application_name=migration.sub_application_name
            )

    if management_commands:
        for command in management_commands:
            tasks.generate_run_management_command(
                job,
                application_user=application_user,
                application_name=application_user,
                application_path='/edx/app/{}'.format(application_user),
                command=command
            )

    if has_migrations or management_commands:
        tasks.generate_ami_cleanup(job, config['slack_token'], runif='any')

    tasks.generate_deploy_ami(
        job,
        variable_override_path,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
    )

    return job
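A hedged usage sketch, deploying with both an extra migration and a management command; the app name, paths, command, and the MigrationAppInfo field order (name, path, sub_application_name) are assumptions inferred from the attribute usage above:

    from edxpipelines.utils import MigrationAppInfo  # assumed import path

    generate_deploy_ami(
        deploy_stage,
        ami_artifact,
        edp,
        config,
        additional_migrations=[
            MigrationAppInfo('notes', '/edx/app/notes', None),  # hypothetical app; assumed field order
        ],
        management_commands=['lms collectstatic --noinput'],    # hypothetical command
    )

Because has_migrations defaults to True and management_commands is non-empty, a scratch instance is launched, used for the migrations and commands, and cleaned up before the actual Asgard deployment.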
Example #26
0
def generate_run_migrations(pipeline,
                            db_migration_pass,
                            inventory_location,
                            instance_key_location,
                            launch_info_location,
                            application_user,
                            application_name,
                            application_path,
                            sub_application_name=None,
                            manual_approval=False):
    """
    Generate the stage that applies/runs migrations.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the run migrations stage.
        db_migration_pass (str): Password for the DB user used to run migrations.
        inventory_location (ArtifactLocation): Location of inventory containing the IP address of the EC2 instance, for fetching.
        instance_key_location (ArtifactLocation): Location of SSH key used to access the EC2 instance, for fetching.
        launch_info_location (ArtifactLocation): Location of the launch_info.yml file for fetching
        application_user (str): Username to use while running the migrations
        application_name (str): Name of the application (e.g. edxapp, programs, etc...)
        application_path (str): path of the application installed on the target machine
        sub_application_name (str): any sub application to insert in to the migrations commands {cms|lms}
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables({
        'APPLICATION_USER': application_user,
        'APPLICATION_NAME': application_name,
        'APPLICATION_PATH': application_path,
        'DB_MIGRATION_USER': '******',
        'ARTIFACT_PATH': constants.ARTIFACT_PATH,
        'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG,
    })
    pipeline.ensure_encrypted_environment_variables({
        'DB_MIGRATION_PASS': db_migration_pass,
    })

    if sub_application_name is not None:
        stage_name = "{}_{}".format(constants.APPLY_MIGRATIONS_STAGE,
                                    sub_application_name)
    else:
        stage_name = constants.APPLY_MIGRATIONS_STAGE
    stage = pipeline.ensure_stage(stage_name)

    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(constants.APPLY_MIGRATIONS_JOB)

    # Fetch the Ansible inventory to use in reaching the EC2 instance.
    artifact_params = {
        "pipeline": inventory_location.pipeline,
        "stage": inventory_location.stage,
        "job": inventory_location.job,
        "src": FetchArtifactFile(inventory_location.file_name),
        "dest": constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # Fetch the SSH key to use in reaching the EC2 instance.
    artifact_params = {
        "pipeline": instance_key_location.pipeline,
        "stage": instance_key_location.stage,
        "job": instance_key_location.job,
        "src": FetchArtifactFile(instance_key_location.file_name),
        "dest": constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # Ensure the target directory exists.
    tasks.generate_target_directory(job)

    # fetch the launch_info.yml
    artifact_params = {
        "pipeline": launch_info_location.pipeline,
        "stage": launch_info_location.stage,
        "job": launch_info_location.job,
        "src": FetchArtifactFile(launch_info_location.file_name),
        "dest": constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # The SSH key used to access the EC2 instance needs specific permissions.
    job.add_task(
        ExecTask(
            ['/bin/bash', '-c', 'chmod 600 {}'.format(instance_key_location.file_name)],
            working_dir=constants.ARTIFACT_PATH,
        )
    )

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_run_migrations(job, sub_application_name)

    # Cleanup EC2 instance if running the migrations failed.
    # I think this should be left for the terminate instance stage
    # tasks.generate_ami_cleanup(job, runif='failed')

    return stage
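A sketch of wiring this stage after an instance-launch stage; the ArtifactLocation fields follow the (pipeline, stage, job, file_name) pattern used above, and the upstream names are illustrative:

    inventory = ArtifactLocation('launch-pipeline', 'launch_instance', 'launch_instance_job', 'ansible_inventory')
    key = ArtifactLocation('launch-pipeline', 'launch_instance', 'launch_instance_job', 'key.pem')
    launch_info = ArtifactLocation('launch-pipeline', 'launch_instance', 'launch_instance_job', 'launch_info.yml')

    generate_run_migrations(
        pipeline,
        db_migration_pass=config['db_migration_pass'],
        inventory_location=inventory,
        instance_key_location=key,
        launch_info_location=launch_info,
        application_user='edxapp',
        application_name='edxapp',
        application_path='/edx/app/edxapp',
        sub_application_name='lms',
    )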
Example #27
0
def generate_run_play(pipeline,
                      playbook_with_path,
                      edp,
                      app_repo,
                      slack_token='',
                      slack_room=constants.SLACK_ROOM,
                      manual_approval=False,
                      configuration_secure_dir=constants.PRIVATE_CONFIGURATION_LOCAL_DIR,
                      configuration_internal_dir=constants.INTERNAL_CONFIGURATION_LOCAL_DIR,
                      override_artifacts=None,
                      timeout=None,
                      **kwargs):
    """
    TODO: This currently runs from the configuration/playbooks/continuous_delivery/ directory. Need to figure out how to
    pass in a configuration file to ansible-play correctly. TE-1608

    Assumes:
        - The generate_launch_instance stage was used to launch the instance preceding this stage.
        - Requires the ansible_inventory and key.pem files to be in the constants.ARTIFACT_DIRECTORY path
        - Play is run from the constants.PUBLIC_CONFIGURATION_DIR
        - Play is run using the constants.ANSIBLE_CONFIG configuration file

    Args:
        pipeline (gomatic.Pipeline):
        playbook_with_path (str):
        edp (EDP): Tuple indicating environment, deployment, and play for this run.
        app_repo (str):
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.
        manual_approval (bool):
        configuration_secure_dir (str): The secure config directory to use for this play.
        configuration_internal_dir (str): The internal config directory to use for this play.
        override_artifacts (bool):
        timeout (int): GoCD job level inactivity timeout setting.
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.RUN_PLAY_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.RUN_PLAY_JOB_NAME)
    if timeout:
        job.timeout = str(timeout)

    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    for file_name in (
            constants.KEY_PEM_FILENAME,
            constants.LAUNCH_INSTANCE_FILENAME,
            constants.ANSIBLE_INVENTORY_FILENAME
    ):
        tasks.retrieve_artifact(
            ArtifactLocation(
                pipeline.name,
                constants.LAUNCH_INSTANCE_STAGE_NAME,
                constants.LAUNCH_INSTANCE_JOB_NAME,
                file_name,
            ),
            job,
            constants.ARTIFACT_PATH
        )

    override_files = []
    if not override_artifacts:
        override_artifacts = []

    for artifact in override_artifacts:
        tasks.retrieve_artifact(artifact, job, constants.ARTIFACT_PATH)
        override_files.append('{}/{}'.format(constants.ARTIFACT_PATH, artifact.file_name))

    tasks.generate_run_app_playbook(
        job=job,
        playbook_with_path=playbook_with_path,
        edp=edp,
        app_repo=app_repo,
        slack_token=slack_token,
        slack_room=slack_room,
        configuration_secure_dir=configuration_secure_dir,
        configuration_internal_dir=configuration_internal_dir,
        override_files=override_files,
        **kwargs)
    return stage
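A usage sketch passing an override artifact from an upstream stage; the artifact names and playbook path are assumptions:

    ami_override = ArtifactLocation(
        pipeline.name, 'build_ami', 'build_ami_job', 'ami_overrides.yml')  # illustrative
    generate_run_play(
        pipeline,
        'playbooks/edx-east/edxapp.yml',  # illustrative playbook path
        edp,
        app_repo='https://github.com/edx/edx-platform',
        slack_token=config['slack_token'],
        override_artifacts=[ami_override],
        timeout=3600,
    )

Each override artifact is fetched into constants.ARTIFACT_PATH and its path is appended to `override_files`, which the playbook task presumably consumes as extra-vars files.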
Example #28
0
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))

    pipeline = configurator \
        .ensure_pipeline_group(DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline('rollback-prod-marketing-site') \
        .set_git_material(GitMaterial('https://github.com/edx/tubular',
                                      polling=False,
                                      destination_directory='tubular',
                                      ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                      )
                          ) \
        .ensure_material(PipelineMaterial(DEPLOY_MARKETING_PIPELINE_NAME, FETCH_TAG_STAGE_NAME))

    pipeline.ensure_environment_variables({
        'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
    })

    pipeline.ensure_encrypted_environment_variables({
        'PRIVATE_GITHUB_KEY': config['github_private_key'],
        'PRIVATE_MARKETING_REPOSITORY_URL': config['mktg_repository_url'],
        'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
        'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
        'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key'],
    })

    prod_tag_name_artifact_params = {
        'pipeline': DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': FETCH_TAG_STAGE_NAME,
        'job': FETCH_TAG_JOB_NAME,
        'src': FetchArtifactFile('{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME)),
        'dest': 'target'
    }

    # Stage to roll prod back to its last stable tag
    rollback_stage = pipeline.ensure_stage(ROLLBACK_STAGE_NAME)
    rollback_stage.set_has_manual_approval()
    rollback_job = rollback_stage.ensure_job(ROLLBACK_JOB_NAME)

    tasks.generate_requirements_install(rollback_job, 'tubular')
    tasks.generate_target_directory(rollback_job)
    rollback_job.add_task(FetchArtifactTask(**prod_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        rollback_job, PROD_ENV,
        '{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME))

    # Stage to clear caches in prod
    clear_prod_caches_stage = pipeline.ensure_stage(CLEAR_PROD_CACHES_STAGE_NAME)
    clear_prod_caches_job = clear_prod_caches_stage.ensure_job(CLEAR_PROD_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_prod_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_prod_caches_job, 'tubular')
    tasks.format_RSA_key(clear_prod_caches_job,
                         'edx-mktg/docroot/acquia_github_key.pem',
                         '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_prod_caches_job, PROD_ENV)
    tasks.generate_clear_varnish_cache(clear_prod_caches_job, PROD_ENV)

    configurator.save_updated_config(save_config_locally=save_config_locally,
                                     dry_run=dry_run)
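A minimal invocation sketch; the file names and override pair are illustrative, and in practice install_pipeline functions like this one are normally driven by a command-line wrapper that supplies these arguments:

    install_pipeline(
        save_config_locally=True,
        dry_run=True,  # render the would-be config without pushing it to the GoCD server
        variable_files=('gocd.yml', 'drupal_secure.yml'),  # hypothetical config files
        cmd_line_vars=(('mktg_repository_version', 'master'),),  # illustrative; exact pair format depends on utils.merge_files_and_dicts
    )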
Example #29
0
def generate_deploy_ami(stage, ami_artifact_location, edp, config, has_migrations=True, application_user=None):
    """
    Generates a job for deploying an AMI. Migrations are applied as part of this job.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): Where to find
            the AMI artifact to deploy.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            to which the AMI belongs.
        config (dict): Environment-specific secure config.
        has_migrations (bool): Whether to generate Gomatic for applying migrations.
        application_user (str): application user if different from the play name.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.DEPLOY_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Retrieve the AMI ID from the upstream build stage.
    tasks.retrieve_artifact(ami_artifact_location, job)
    variable_override_path = path_to_artifact(ami_artifact_location.file_name)

    if has_migrations:
        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )

        # SSH key used to access the instance needs specific permissions.
        job.ensure_task(tasks.bash_task(
            'chmod 600 {key_pem_path}',
            key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
        ))

        if application_user is None:
            application_user = edp.play

        tasks.generate_run_migrations(
            job,
            application_user=application_user,
            application_name=application_user,
            application_path='/edx/app/{}'.format(application_user),
            db_migration_user=constants.DB_MIGRATION_USER,
            db_migration_pass=config['db_migration_pass'],
        )

        tasks.generate_ami_cleanup(job, config['hipchat_token'], runif='any')

    tasks.generate_deploy_ami(
        job,
        variable_override_path,
        config['asgard_api_endpoints'],
        config['asgard_token'],
    )

    return job