Example 1

def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id
    """
    pipeline = configurator.ensure_pipeline_group(
        config['pipeline_group']
    ).ensure_replacement_of_pipeline(
        config['pipeline_name']
    ).ensure_material(
        GitMaterial(
            'https://github.com/edx/edx-gomatic',
            polling=True,
            material_name='edx-gomatic',
            destination_directory='edx-gomatic',
        )
    ).ensure_material(
        GitMaterial(
            '[email protected]:edx-ops/gomatic-secure.git',
            polling=True,
            destination_directory='gomatic-secure',
            ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
        )
    ).set_label_template('${edx-gomatic[:7]}')

    pipeline.ensure_encrypted_environment_variables(
        {
            'GOMATIC_USER': config['gomatic_user'],
            'GOMATIC_PASSWORD': config['gomatic_password']
        }
    )

    stage = pipeline.ensure_stage('deploy_gomatic_stage')
    job = stage.ensure_job('deploy_gomatic_scripts_job')
    tasks.generate_requirements_install(job, 'edx-gomatic')

    job.add_task(
        ExecTask(
            [
                '/usr/bin/python',
                './deploy_pipelines.py',
                '-v',
                'tools',
                '-f',
                'config.yml'
            ],
            working_dir='edx-gomatic'
        )
    )

Example 2

def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id
    """
    pipeline = configurator.ensure_pipeline_group(
        config['pipeline_group']
    ).ensure_replacement_of_pipeline(
        config['pipeline_name']
    ).ensure_material(
        GitMaterial(
            'https://github.com/edx/edx-gomatic',
            polling=True,
            material_name='edx-gomatic',
            destination_directory='edx-gomatic',
        )
    ).ensure_material(
        GitMaterial(
            '[email protected]:edx/gomatic-internal.git',
            polling=True,
            destination_directory='gomatic-internal',
            ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
        )
    ).set_label_template('${edx-gomatic[:7]}')

    pipeline.ensure_encrypted_environment_variables(
        {
            'GOMATIC_USER': config['gomatic_user'],
            'GOMATIC_PASSWORD': config['gomatic_password']
        }
    )

    stage = pipeline.ensure_stage('deploy_gomatic_stage')
    job = stage.ensure_job('deploy_gomatic_scripts_job')
    tasks.generate_requirements_install(job, 'edx-gomatic')

    job.add_task(
        ExecTask(
            [
                '/usr/bin/python',
                './deploy_pipelines.py',
                '-v',
                'tools',
                '-f',
                'config.yml'
            ],
            working_dir='edx-gomatic'
        )
    )
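
A minimal driver sketch showing how these install_pipelines functions might be invoked. It assumes gomatic's GoCdConfigurator and HostRestClient (as used in the install_pipeline example further down) and a config dict with the keys listed in the docstrings; every URL and credential below is a placeholder.

from gomatic import GoCdConfigurator, HostRestClient

config = {
    'gocd_url': 'gocd.example.com',            # placeholder
    'gocd_username': 'admin',                  # placeholder
    'gocd_password': 'password',               # placeholder
    'pipeline_group': 'tools',                 # placeholder
    'pipeline_name': 'deploy_gomatic_pipelines',
    'gomatic_user': 'gomatic',                 # placeholder
    'gomatic_password': 'password',            # placeholder
}

configurator = GoCdConfigurator(
    HostRestClient(config['gocd_url'],
                   config['gocd_username'],
                   config['gocd_password'],
                   ssl=True)
)
install_pipelines(configurator, config)
configurator.save_updated_config(save_config_locally=True, dry_run=True)
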
Example 3

def generate_terminate_instance(pipeline,
                                instance_info_location,
                                aws_access_key_id,
                                aws_secret_access_key,
                                hipchat_auth_token,
                                ec2_region=constants.EC2_REGION,
                                artifact_path=constants.ARTIFACT_PATH,
                                runif='any',
                                manual_approval=False):
    """
    Generate the stage that terminates an EC2 instance.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the terminate instance stage.
        instance_info_location (ArtifactLocation): Location of the YAML file containing instance info
            from the AMI-building stage, for fetching.
        runif (str): One of ['passed', 'failed', 'any'] (default: 'any'). Controls when the
            stage's terminate task is triggered in the pipeline.
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage

    """
    pipeline.ensure_encrypted_environment_variables({
        'AWS_ACCESS_KEY_ID': aws_access_key_id,
        'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
        'HIPCHAT_TOKEN': hipchat_auth_token
    })
    pipeline.ensure_environment_variables({
        'ARTIFACT_PATH': artifact_path,
        'EC2_REGION': ec2_region,
        'HIPCHAT_ROOM': constants.HIPCHAT_ROOM
    })

    stage = pipeline.ensure_stage(constants.TERMINATE_INSTANCE_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Fetch the instance info to use in reaching the EC2 instance.
    artifact_params = {
        'pipeline': instance_info_location.pipeline,
        'stage': instance_info_location.stage,
        'job': instance_info_location.job,
        'src': FetchArtifactFile(instance_info_location.file_name),
        'dest': constants.ARTIFACT_PATH
    }
    job = stage.ensure_job(constants.TERMINATE_INSTANCE_JOB_NAME)
    tasks.generate_requirements_install(job, 'configuration')
    job.add_task(FetchArtifactTask(**artifact_params))

    tasks.generate_ami_cleanup(job, runif=runif)

    return stage
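
A hedged usage sketch for this stage generator, assuming an upstream launch-instance stage published launch_info.yml and that ArtifactLocation and constants come from edxpipelines; the config lookups for credentials and tokens are placeholders.

instance_info = ArtifactLocation(
    pipeline.name,
    constants.LAUNCH_INSTANCE_STAGE_NAME,
    constants.LAUNCH_INSTANCE_JOB_NAME,
    'launch_info.yml',
)
generate_terminate_instance(
    pipeline,
    instance_info_location=instance_info,
    aws_access_key_id=config['aws_access_key_id'],          # placeholder lookup
    aws_secret_access_key=config['aws_secret_access_key'],  # placeholder lookup
    hipchat_auth_token=config['hipchat_token'],             # placeholder lookup
    runif='any',
)
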
Example 4

def generate_edp_validation(pipeline,
                            hipchat_auth_token,
                            hipchat_channels,
                            asgard_api_endpoints,
                            ami_deployment,
                            ami_environment,
                            ami_play,
                            manual_approval=False):
    """
    Generate stage which checks an AMI's environment/deployment/play (EDP) against the allowed EDP.
    Stage fails if the EDPs don't match.

    Args:
        pipeline (gomatic.Pipeline):
        hipchat_auth_token (str):
        hipchat_channels (str): The channels/users to notify
        asgard_api_endpoints (str): canonical URL for asgard.
        ami_deployment (str): typically one of: [edx, edge, etc...]
        ami_environment (str): typically one of: [stage, prod, loadtest, etc...]
        ami_play (str):
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables({'AMI_ID': None,
                                           'AMI_DEPLOYMENT': ami_deployment,
                                           'HIPCHAT_CHANNELS': hipchat_channels,
                                           'ASGARD_API_ENDPOINTS': asgard_api_endpoints,
                                           'AMI_ENVIRONMENT': ami_environment,
                                           'AMI_PLAY': ami_play}) \
        .ensure_encrypted_environment_variables({'HIPCHAT_TOKEN': hipchat_auth_token})

    stage = pipeline.ensure_stage("Validation")
    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job("EDPValidation")
    tasks.generate_requirements_install(job, 'tubular')
    job.add_task(
        ExecTask(['/usr/bin/python', 'scripts/validate_edp.py'],
                 working_dir='tubular'))
    job.add_task(
        ExecTask([
            '/bin/bash', '-c', '/usr/bin/python '
            'scripts/submit_hipchat_msg.py '
            '-m '
            '"${AMI_ID} is not tagged for ${AMI_ENVIRONMENT}-${AMI_DEPLOYMENT}-${AMI_PLAY}. '
            'Are you sure you\'re deploying the right AMI to the right app?" '
            '--color "red"'
        ],
                 working_dir='tubular',
                 runif='failed'))

    return stage
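
A usage sketch under the same assumptions as the other examples; the channel name and Asgard URL are placeholders, and the secure values are read from a config dict.

generate_edp_validation(
    pipeline,
    hipchat_auth_token=config['hipchat_token'],              # placeholder lookup
    hipchat_channels='release-pipeline',                     # placeholder channel
    asgard_api_endpoints='https://asgard.example.com/api',   # placeholder URL
    ami_deployment='edx',
    ami_environment='stage',
    ami_play='edxapp',
)
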
Example 5

def generate_terminate_instance(pipeline,
                                instance_info_location,
                                aws_access_key_id,
                                aws_secret_access_key,
                                slack_token,
                                ec2_region=constants.EC2_REGION,
                                artifact_path=constants.ARTIFACT_PATH,
                                runif='any',
                                manual_approval=False):
    """
    Generate the stage that terminates an EC2 instance.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the terminate instance stage.
        instance_info_location (ArtifactLocation): Location of YAML file containing
            instance info from the AMI-building stage, for fetching.
        runif (str): One of ['passed', 'failed', 'any'] (default: 'any'). Controls when the
            stage's terminate task is triggered in the pipeline.
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage

    """
    pipeline.ensure_encrypted_environment_variables(
        {
            'AWS_ACCESS_KEY_ID': aws_access_key_id,
            'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
        }
    )
    pipeline.ensure_environment_variables(
        {
            'ARTIFACT_PATH': artifact_path,
            'EC2_REGION': ec2_region,
        }
    )

    stage = pipeline.ensure_stage(constants.TERMINATE_INSTANCE_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Fetch the instance info to use in reaching the EC2 instance.
    job = stage.ensure_job(constants.TERMINATE_INSTANCE_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.retrieve_artifact(instance_info_location, job, constants.ARTIFACT_PATH)

    tasks.generate_ami_cleanup(job, slack_token, runif=runif)

    return stage
Example 6

def generate_asg_cleanup(pipeline,
                         asgard_api_endpoints,
                         asgard_token,
                         aws_access_key_id,
                         aws_secret_access_key,
                         manual_approval=False):
    """
    Generates stage which calls the ASG cleanup script.

    Args:
        pipeline (gomatic.Pipeline):
        asgard_api_endpoints (str): canonical URL for asgard.
        asgard_token (str): Asgard token to use for authentication
        aws_access_key_id (str): AWS key ID for auth
        aws_secret_access_key (str): AWS secret key for auth
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables(
        {'ASGARD_API_ENDPOINTS': asgard_api_endpoints})
    pipeline.ensure_encrypted_environment_variables({
        'ASGARD_API_TOKEN': asgard_token,
        'AWS_ACCESS_KEY_ID': aws_access_key_id,
        'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
    })

    stage = pipeline.ensure_stage("ASG-Cleanup-Stage")
    if manual_approval:
        stage.set_has_manual_approval()

    job = stage.ensure_job("Cleanup-ASGS")
    tasks.generate_requirements_install(job, 'tubular')
    job.add_task(
        ExecTask(['/usr/bin/python', 'scripts/cleanup-asgs.py'],
                 working_dir="tubular"))

    return stage
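
A usage sketch; the Asgard endpoint is a placeholder and the secure values come from a config dict as elsewhere in these examples.

generate_asg_cleanup(
    pipeline,
    asgard_api_endpoints='https://asgard.example.com/api',   # placeholder URL
    asgard_token=config['asgard_token'],                     # placeholder lookup
    aws_access_key_id=config['aws_access_key_id'],
    aws_secret_access_key=config['aws_secret_access_key'],
    manual_approval=True,
)
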
Example 7

def generate_deploy_ami(pipeline,
                        asgard_api_endpoints,
                        asgard_token,
                        aws_access_key_id,
                        aws_secret_access_key,
                        upstream_ami_artifact=None,
                        manual_approval=True):
    """
    Generates a stage which deploys an AMI via Asgard.

    If the variable upstream_ami_artifact is set, information about which AMI to deploy will be pulled
    from this pipeline/stage/file.

    If upstream_ami_artifact is not set, the environment variable AMI_ID will be used to determine which
    AMI to deploy.

    Args:
        pipeline (gomatic.Pipeline):
        asgard_api_endpoints (str): canonical URL for asgard.
        asgard_token (str):
        aws_access_key_id (str):
        aws_secret_access_key (str):
        upstream_ami_artifact (ArtifactLocation): The location of yaml artifact that has the `ami_id`
        manual_approval (bool): Should this stage require manual approval?
    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables({
        'ASGARD_API_ENDPOINTS': asgard_api_endpoints,
        'WAIT_SLEEP_TIME': constants.TUBULAR_SLEEP_WAIT_TIME
    }).ensure_encrypted_environment_variables({
        'ASGARD_API_TOKEN': asgard_token,
        'AWS_ACCESS_KEY_ID': aws_access_key_id,
        'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
    })

    stage = pipeline.ensure_stage(constants.DEPLOY_AMI_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(constants.DEPLOY_AMI_JOB_NAME)
    tasks.generate_requirements_install(job, 'tubular')
    # Make the artifact directory if it does not exist
    job.add_task(
        ExecTask([
            '/bin/bash',
            '-c',
            'mkdir -p ../{}'.format(constants.ARTIFACT_PATH),
        ],
                 working_dir="tubular"))

    # Setup the deployment output file
    artifact_path = '{}/{}'.format(constants.ARTIFACT_PATH,
                                   constants.DEPLOY_AMI_OUT_FILENAME)
    job.ensure_artifacts(set([BuildArtifact(artifact_path)]))

    deploy_command =\
        '/usr/bin/python ' \
        'scripts/asgard-deploy.py ' \
        '--out_file ../{} '.format(artifact_path)

    if upstream_ami_artifact:
        artifact_params = {
            "pipeline": upstream_ami_artifact.pipeline,
            "stage": upstream_ami_artifact.stage,
            "job": upstream_ami_artifact.job,
            "src": FetchArtifactFile(upstream_ami_artifact.file_name),
            "dest": 'tubular'
        }
        job.add_task(FetchArtifactTask(**artifact_params))
        deploy_command += '--config-file {}'.format(
            upstream_ami_artifact.file_name)

    else:
        pipeline.ensure_environment_variables({'AMI_ID': None})
        deploy_command += '--ami_id $AMI_ID'

    # Execute the deployment script
    job.add_task(
        ExecTask(['/bin/bash', '-c', deploy_command], working_dir="tubular"))
    return stage
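
A usage sketch for the artifact-driven path, assuming the AMI id was written to ami.yml by an upstream build stage; the artifact file name, upstream location, and Asgard URL are placeholders.

ami_artifact = ArtifactLocation(
    pipeline.name,                          # or the name of an upstream pipeline
    constants.BUILD_AMI_STAGE_NAME,
    constants.BUILD_AMI_JOB_NAME,
    'ami.yml',                              # placeholder artifact file name
)
generate_deploy_ami(
    pipeline,
    asgard_api_endpoints='https://asgard.example.com/api',   # placeholder URL
    asgard_token=config['asgard_token'],
    aws_access_key_id=config['aws_access_key_id'],
    aws_secret_access_key=config['aws_secret_access_key'],
    upstream_ami_artifact=ami_artifact,
    manual_approval=True,
)
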
Example 8

def generate_create_ami_from_instance(
        pipeline,
        play,
        deployment,
        edx_environment,
        app_repo,
        configuration_secure_repo,
        aws_access_key_id,
        aws_secret_access_key,
        configuration_repo=constants.PUBLIC_CONFIGURATION_REPO_URL,
        ami_creation_timeout="3600",
        ami_wait='yes',
        cache_id='',
        artifact_path=constants.ARTIFACT_PATH,
        hipchat_room=constants.HIPCHAT_ROOM,
        manual_approval=False,
        **kwargs):
    """
    Generates an artifact ami.yml:
        ami_id: ami-abcdefg
        ami_message: AMI creation operation complete
        ami_state: available

    Args:
        pipeline (gomatic.Pipeline):
        play (str): Play that was run on the instance (used for tagging)
        deployment (str):
        edx_environment (str):
        app_repo (str):
        configuration_secure_repo (str):
        aws_access_key_id (str):
        aws_secret_access_key (str):
        configuration_repo (str):
        ami_creation_timeout (str):
        ami_wait (str):
        cache_id (str):
        artifact_path (str):
        hipchat_room (str):
        manual_approval (bool):
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.BUILD_AMI_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()
    pipeline.ensure_encrypted_environment_variables({
        'AWS_ACCESS_KEY_ID': aws_access_key_id,
        'AWS_SECRET_ACCESS_KEY': aws_secret_access_key
    })

    pipeline.ensure_environment_variables({
        'PLAY': play,
        'DEPLOYMENT': deployment,
        'EDX_ENVIRONMENT': edx_environment,
        'APP_REPO': app_repo,
        'CONFIGURATION_REPO': configuration_repo,
        'CONFIGURATION_SECURE_REPO': configuration_secure_repo,
        'AMI_CREATION_TIMEOUT': ami_creation_timeout,
        'AMI_WAIT': ami_wait,
        'CACHE_ID': cache_id,  # gocd build number
        'ARTIFACT_PATH': artifact_path,
        'HIPCHAT_ROOM': hipchat_room,
        'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG,
    })

    # Install the requirements.
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME)
    tasks.generate_requirements_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')

    tasks.generate_target_directory(job)

    # fetch the key material
    artifact_params = {
        'pipeline': pipeline.name,
        'stage': constants.LAUNCH_INSTANCE_STAGE_NAME,
        'job': constants.LAUNCH_INSTANCE_JOB_NAME,
        'src': FetchArtifactFile("launch_info.yml"),
        'dest': constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # Create an AMI from the instance
    tasks.generate_create_ami(job, **kwargs)

    return stage
Example 9

def generate_run_play(
        pipeline,
        playbook_with_path,
        play,
        deployment,
        edx_environment,
        app_repo,
        private_github_key='',
        hipchat_token='',
        hipchat_room=constants.HIPCHAT_ROOM,
        manual_approval=False,
        configuration_secure_dir=constants.PRIVATE_CONFIGURATION_LOCAL_DIR,
        configuration_internal_dir=constants.INTERNAL_CONFIGURATION_LOCAL_DIR,
        **kwargs):
    """
    TODO: This currently runs from the configuration/playbooks/continuous_delivery/ directory. Need to figure out how to
    pass in a configuration file to ansible-play correctly. TE-1608

    Assumes:
        - generate_launch_instance stage was used to launch the instance preceding this stage.
        - Requires the ansible_inventory and key.pem files to be in the constants.ARTIFACT_DIRECTORY path
        - Play is run from the constants.PUBLIC_CONFIGURATION_DIR
        - Play is run using the constants.ANSIBLE_CONFIG configuration file

    Args:
        pipeline (gomatic.Pipeline):
        playbook_with_path (str):
        play (str):
        deployment (str):
        edx_environment (str):
        app_repo (str):
        private_github_key (str):
        hipchat_token (str):
        hipchat_room (str):
        manual_approval (bool):
        configuration_secure_dir (str): The secure config directory to use for this play.
        configuration_internal_dir (str): The internal config directory to use for this play.
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    # setup the necessary environment variables
    pipeline.ensure_encrypted_environment_variables({
        'HIPCHAT_TOKEN': hipchat_token,
        'PRIVATE_GITHUB_KEY': private_github_key
    })
    pipeline.ensure_environment_variables({
        'PLAY': play,
        'DEPLOYMENT': deployment,
        'EDX_ENVIRONMENT': edx_environment,
        'APP_REPO': app_repo,
        'ARTIFACT_PATH': '{}/'.format(constants.ARTIFACT_PATH),
        'HIPCHAT_ROOM': hipchat_room,
        'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG,
    })

    stage = pipeline.ensure_stage(constants.RUN_PLAY_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.RUN_PLAY_JOB_NAME)
    tasks.generate_requirements_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    # fetch the key material
    artifact_params = {
        'pipeline': pipeline.name,
        'stage': constants.LAUNCH_INSTANCE_STAGE_NAME,
        'job': constants.LAUNCH_INSTANCE_JOB_NAME,
        'src': FetchArtifactFile("key.pem"),
        'dest': constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # fetch the launch_info.yml
    artifact_params['src'] = FetchArtifactFile('launch_info.yml')
    job.add_task(FetchArtifactTask(**artifact_params))

    # fetch the inventory file
    artifact_params['src'] = FetchArtifactFile('ansible_inventory')
    job.add_task(FetchArtifactTask(**artifact_params))

    tasks.generate_run_app_playbook(job, configuration_internal_dir,
                                    configuration_secure_dir,
                                    playbook_with_path, **kwargs)
    return stage
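
A usage sketch, assuming a launch-instance stage ran earlier in the same pipeline; the playbook path, repo URL, and the extra EDXAPP_VERSION option are illustrative only (extra keyword arguments are forwarded to ansible as options).

generate_run_play(
    pipeline,
    playbook_with_path='playbooks/edx-east/edxapp.yml',    # placeholder playbook path
    play='edxapp',
    deployment='edx',
    edx_environment='stage',
    app_repo='https://github.com/edx/edx-platform.git',    # placeholder repo
    private_github_key=config['github_private_key'],       # placeholder lookup
    hipchat_token=config['hipchat_token'],                 # placeholder lookup
    EDXAPP_VERSION='master',                               # illustrative ansible option
)
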

Example 10

def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars, ))
    artifact_path = 'target/'

    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline = gcc.ensure_pipeline_group(config['pipeline_group'])\
                  .ensure_replacement_of_pipeline(config['pipeline_name'])\
                  .ensure_material(GitMaterial('https://github.com/edx/edx-gomatic',
                                               material_name='edx-gomatic',
                                               polling=True,
                                               destination_directory='edx-gomatic',
                                               branch='master'
                                               )
                                   ) \
                  .ensure_material(GitMaterial('[email protected]:edx-ops/gomatic-secure.git',
                                               material_name='gomatic-secure',
                                               polling=True,
                                               destination_directory='gomatic-secure',
                                               branch='master',
                                               ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                               )
                                   )

    pipeline.ensure_encrypted_environment_variables({
        'GOMATIC_USER': config['gomatic_user'],
        'GOMATIC_PASSWORD': config['gomatic_password']
    })

    stage = pipeline.ensure_stage('deploy_gomatic_stage')
    job = stage.ensure_job('deploy_gomatic_scripts_job')
    tasks.generate_requirements_install(job, 'edx-gomatic')

    job.add_task(
        ExecTask([
            '/usr/bin/python', 'deploy_pipelines.py', '-v', 'tools', '-f',
            'config.yml'
        ],
                 working_dir='edx-gomatic'))

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
Example 11

def generate_deploy_ami(stage,
                        ami_artifact_location,
                        edp,
                        config,
                        has_migrations=True,
                        application_user=None,
                        additional_migrations=None,
                        management_commands=None,):
    """
    Generates a job for deploying an AMI. Migrations are applied as part of this job.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): Where to find
            the AMI artifact to deploy.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            to which the AMI belongs.
        config (dict): Environment-specific secure config.
        has_migrations (bool): Whether to generate Gomatic for applying migrations.
        management_commands (list): management commands to run, will only run if list is defined
        application_user (str): application user if different from the play name.
        additional_migrations (list[edxpipelines.utils.MigrationAppInfo]): Additional applications to migrate.
            Will only run if has_migrations=True

    Returns:
        gomatic.gocd.pipelines.Job
    """
    if not additional_migrations:
        additional_migrations = []

    job = stage.ensure_job(constants.DEPLOY_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Retrieve the AMI ID from the upstream build stage.
    tasks.retrieve_artifact(ami_artifact_location, job)
    variable_override_path = path_to_artifact(ami_artifact_location.file_name)

    if has_migrations or management_commands:
        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )

        # SSH key used to access the instance needs specific permissions.
        job.ensure_task(tasks.bash_task(
            'chmod 600 {key_pem_path}',
            key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
        ))

        if application_user is None:
            application_user = edp.play

    if has_migrations:
        tasks.generate_run_migrations(
            job,
            application_user=application_user,
            application_name=application_user,
            application_path='/edx/app/{}'.format(application_user),
            db_migration_user=constants.DB_MIGRATION_USER,
            db_migration_pass=config['db_migration_pass'],
        )

        for migration in additional_migrations:
            tasks.generate_run_migrations(
                job,
                application_user=migration.name,
                application_name=migration.name,
                application_path=migration.path,
                db_migration_user=constants.DB_MIGRATION_USER,
                db_migration_pass=config['db_migration_pass'],
                sub_application_name=migration.sub_application_name
            )

    if management_commands:
        for command in management_commands:
            tasks.generate_run_management_command(
                job,
                application_user=application_user,
                application_name=application_user,
                application_path='/edx/app/{}'.format(application_user),
                command=command
            )

    if has_migrations or management_commands:
        tasks.generate_ami_cleanup(job, config['slack_token'], runif='any')

    tasks.generate_deploy_ami(
        job,
        variable_override_path,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
    )

    return job
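
A wiring sketch, assuming EDP and ArtifactLocation come from edxpipelines.utils and that config carries the aws_*, ec2_*, asgard_*, db_migration_pass, and slack_token keys this job reads; the upstream pipeline name and AMI artifact file name are placeholders.

edp = EDP('stage', 'edx', 'edxapp')
ami_artifact = ArtifactLocation(
    'build_edxapp_ami',                     # placeholder upstream pipeline name
    constants.BUILD_AMI_STAGE_NAME,
    constants.BUILD_AMI_JOB_NAME_TPL(edp),
    'ami.yml',                              # placeholder artifact file name
)
generate_deploy_ami(
    stage,
    ami_artifact_location=ami_artifact,
    edp=edp,
    config=config,
    has_migrations=True,
)
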
Example 12

def generate_launch_instance(
        pipeline,
        aws_access_key_id,
        aws_secret_access_key,
        ec2_vpc_subnet_id,
        ec2_security_group_id,
        ec2_instance_profile_name,
        base_ami_id,
        manual_approval=False,
        ec2_region=constants.EC2_REGION,
        ec2_instance_type=constants.EC2_INSTANCE_TYPE,
        ec2_timeout=constants.EC2_LAUNCH_INSTANCE_TIMEOUT,
        ec2_ebs_volume_size=constants.EC2_EBS_VOLUME_SIZE):
    """
    Pattern to launch an AMI. Generates 3 artifacts:
        key.pem             - Private key material generated for this instance launch
        launch_info.yml     - yaml file that contains information about the instance launched
        ansible_inventory   - a list of private aws IP addresses that can be fed in to ansible to run playbooks

        Please check here for further information:
        https://github.com/edx/configuration/blob/master/playbooks/continuous_delivery/launch_instance.yml

    Args:
        pipeline (gomatic.Pipeline):
        aws_access_key_id (str): AWS key ID for auth
        aws_secret_access_key (str): AWS secret key for auth
        ec2_vpc_subnet_id (str):
        ec2_security_group_id (str):
        ec2_instance_profile_name (str):
        base_ami_id (str): the ami-id used to launch the instance
        manual_approval (bool): Should this stage require manual approval?
        ec2_region (str):
        ec2_instance_type (str):
        ec2_timeout (str):
        ec2_ebs_volume_size (str):

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_encrypted_environment_variables({
        'AWS_ACCESS_KEY_ID': aws_access_key_id,
        'AWS_SECRET_ACCESS_KEY': aws_secret_access_key
    })

    pipeline.ensure_environment_variables({
        'EC2_VPC_SUBNET_ID': ec2_vpc_subnet_id,
        'EC2_SECURITY_GROUP_ID': ec2_security_group_id,
        'EC2_ASSIGN_PUBLIC_IP': 'no',
        'EC2_TIMEOUT': ec2_timeout,
        'EC2_REGION': ec2_region,
        'EBS_VOLUME_SIZE': ec2_ebs_volume_size,
        'EC2_INSTANCE_TYPE': ec2_instance_type,
        'EC2_INSTANCE_PROFILE_NAME': ec2_instance_profile_name,
        'NO_REBOOT': 'no',
        'BASE_AMI_ID': base_ami_id,
        'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG,
    })

    stage = pipeline.ensure_stage(constants.LAUNCH_INSTANCE_STAGE_NAME)

    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.LAUNCH_INSTANCE_JOB_NAME)
    tasks.generate_requirements_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_launch_instance(job)

    return stage
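
A usage sketch; every value is read from the same config dict used by the installers above, and the remaining parameters fall back to the documented defaults for region, instance type, timeout, and volume size.

generate_launch_instance(
    pipeline,
    aws_access_key_id=config['aws_access_key_id'],
    aws_secret_access_key=config['aws_secret_access_key'],
    ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
    ec2_security_group_id=config['ec2_security_group_id'],
    ec2_instance_profile_name=config['ec2_instance_profile_name'],
    base_ami_id=config['base_ami_id'],
    manual_approval=False,
)
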
Example 13

def generate_run_migrations(pipeline,
                            db_migration_pass,
                            inventory_location,
                            instance_key_location,
                            launch_info_location,
                            application_user,
                            application_name,
                            application_path,
                            duration_threshold=None,
                            from_address=None,
                            to_addresses=None,
                            sub_application_name=None,
                            manual_approval=False):
    """
    Generate the stage that applies/runs migrations.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the run migrations stage.
        db_migration_pass (str): Password for the DB user used to run migrations.
        inventory_location (ArtifactLocation): Location of inventory containing the IP
            address of the EC2 instance, for fetching.
        instance_key_location (ArtifactLocation): Location of SSH key used to access the
            EC2 instance, for fetching.
        launch_info_location (ArtifactLocation): Location of the launch_info.yml file for fetching
        application_user (str): Username to use while running the migrations
        application_name (str): Name of the application (e.g. edxapp, ecommerce, etc...)
        application_path (str): path of the application installed on the target machine
        duration_threshold (int): Threshold in seconds over which a migration duration will be alerted.
        from_address (str): Any migration duration email alert will be from this address.
        to_addresses (list(str)): List of To: addresses for migration duration email alerts.
        sub_application_name (str): any sub application to insert in to the migrations commands {cms|lms}
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables(
        {
            'ARTIFACT_PATH': constants.ARTIFACT_PATH,
            'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG
        }
    )
    if duration_threshold:
        pipeline.ensure_environment_variables(
            {
                'MAX_EMAIL_TRIES': constants.MAX_EMAIL_TRIES
            }
        )

    if sub_application_name is not None:
        stage_name = "{}_{}".format(constants.APPLY_MIGRATIONS_STAGE, sub_application_name)
    else:
        stage_name = constants.APPLY_MIGRATIONS_STAGE
    stage = pipeline.ensure_stage(stage_name)

    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(constants.APPLY_MIGRATIONS_JOB)
    tasks.generate_package_install(job, 'tubular')

    # Fetch the Ansible inventory to use in reaching the EC2 instance.
    tasks.retrieve_artifact(inventory_location, job, constants.ARTIFACT_PATH)

    # Fetch the SSH key to use in reaching the EC2 instance.
    tasks.retrieve_artifact(instance_key_location, job, constants.ARTIFACT_PATH)

    # Ensure the target directory exists.
    tasks.generate_target_directory(job)

    # fetch the launch_info.yml
    tasks.retrieve_artifact(launch_info_location, job, constants.ARTIFACT_PATH)

    # The SSH key used to access the EC2 instance needs specific permissions.
    job.add_task(
        ExecTask(
            ['/bin/bash', '-c', 'chmod 600 {}'.format(instance_key_location.file_name)],
            working_dir=constants.ARTIFACT_PATH
        )
    )

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_run_migrations(
        job,
        application_user,
        application_name,
        application_path,
        constants.DB_MIGRATION_USER,
        db_migration_pass,
        sub_application_name
    )

    if duration_threshold:
        tasks.generate_check_migration_duration(
            job,
            application_name,
            constants.MIGRATION_RESULT_FILENAME,
            duration_threshold,
            from_address,
            to_addresses
        )

    return stage
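
A wiring sketch, assuming the three launch-instance artifacts named in Example 12 (ansible_inventory, key.pem, launch_info.yml) and that ArtifactLocation and constants come from edxpipelines; the application values are placeholders.

inventory, key_pem, launch_info = [
    ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        file_name,
    )
    for file_name in ('ansible_inventory', 'key.pem', 'launch_info.yml')
]
generate_run_migrations(
    pipeline,
    db_migration_pass=config['db_migration_pass'],   # placeholder lookup
    inventory_location=inventory,
    instance_key_location=key_pem,
    launch_info_location=launch_info,
    application_user='edxapp',                       # placeholder application
    application_name='edxapp',
    application_path='/edx/app/edxapp',
    sub_application_name='lms',
)
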
Example 14

def generate_create_ami_from_instance(pipeline,
                                      edp,
                                      app_repo,
                                      aws_access_key_id,
                                      aws_secret_access_key,
                                      ami_creation_timeout=3600,
                                      ami_wait='yes',
                                      cache_id='',
                                      artifact_path=constants.ARTIFACT_PATH,
                                      slack_token='',
                                      slack_room=constants.SLACK_ROOM,
                                      manual_approval=False,
                                      version_tags=None,
                                      **kwargs):
    """
    Generates an artifact ami.yml:
        ami_id: ami-abcdefg
        ami_message: AMI creation operation complete
        ami_state: available

    Args:
        pipeline (gomatic.Pipeline):
        edp (EDP):
        app_repo (str):
        aws_access_key_id (str):
        aws_secret_access_key (str):
        ami_creation_timeout (str):
        ami_wait (str):
        cache_id (str):
        artifact_path (str):
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.
        manual_approval (bool):
        version_tags (dict): An optional {app_name: (repo, version), ...} dict that
            specifies what versions to tag the AMI with.
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.BUILD_AMI_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    launch_info_artifact = ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.LAUNCH_INSTANCE_FILENAME,
    )

    tasks.retrieve_artifact(launch_info_artifact, job)

    # Create an AMI from the instance
    tasks.generate_create_ami(
        job=job,
        play=edp.play,
        deployment=edp.deployment,
        edx_environment=edp.environment,
        app_repo=app_repo,
        launch_info_path='{}/{}'.format(constants.ARTIFACT_PATH, constants.LAUNCH_INSTANCE_FILENAME),
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        ami_creation_timeout=ami_creation_timeout,
        ami_wait=ami_wait,
        cache_id=cache_id,
        artifact_path=artifact_path,
        slack_token=slack_token,
        slack_room=slack_room,
        version_tags=version_tags,
        **kwargs)

    return stage
Example 15

def generate_run_play(pipeline,
                      playbook_with_path,
                      edp,
                      app_repo,
                      slack_token='',
                      slack_room=constants.SLACK_ROOM,
                      manual_approval=False,
                      configuration_secure_dir=constants.PRIVATE_CONFIGURATION_LOCAL_DIR,
                      configuration_internal_dir=constants.INTERNAL_CONFIGURATION_LOCAL_DIR,
                      override_artifacts=None,
                      timeout=None,
                      **kwargs):
    """
    TODO: This currently runs from the configuration/playbooks/continuous_delivery/ directory. Need to figure out how to
    pass in a configuration file to ansible-play correctly. TE-1608

    Assumes:
        - generate_launch_instance stage was used to launch the instance preceding this stage.
        - Requires the ansible_inventory and key.pem files to be in the constants.ARTIFACT_DIRECTORY path
        - Play is run from the constants.PUBLIC_CONFIGURATION_DIR
        - Play is run using the constants.ANSIBLE_CONFIG configuration file

    Args:
        pipeline (gomatic.Pipeline):
        playbook_with_path (str):
        edp (EDP): Environment/deployment/play for which the play is run.
        app_repo (str):
        slack_token (str): Token used to authenticate to Slack.
        slack_room (str): Slack room to which to post notifications.
        manual_approval (bool):
        configuration_secure_dir (str): The secure config directory to use for this play.
        configuration_internal_dir (str): The internal config directory to use for this play.
        override_artifacts (bool):
        timeout (int): GoCD job level inactivity timeout setting.
        **kwargs (dict):
            k,v pairs:
                k: the name of the option to pass to ansible
                v: the value to use for this option

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.RUN_PLAY_STAGE_NAME)
    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.RUN_PLAY_JOB_NAME)
    if timeout:
        job.timeout = str(timeout)

    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    for file_name in (
            constants.KEY_PEM_FILENAME,
            constants.LAUNCH_INSTANCE_FILENAME,
            constants.ANSIBLE_INVENTORY_FILENAME
    ):
        tasks.retrieve_artifact(
            ArtifactLocation(
                pipeline.name,
                constants.LAUNCH_INSTANCE_STAGE_NAME,
                constants.LAUNCH_INSTANCE_JOB_NAME,
                file_name,
            ),
            job,
            constants.ARTIFACT_PATH
        )

    override_files = []
    if not override_artifacts:
        override_artifacts = []

    for artifact in override_artifacts:
        tasks.retrieve_artifact(artifact, job, constants.ARTIFACT_PATH)
        override_files.append('{}/{}'.format(constants.ARTIFACT_PATH, artifact.file_name))

    tasks.generate_run_app_playbook(
        job=job,
        playbook_with_path=playbook_with_path,
        edp=edp,
        app_repo=app_repo,
        slack_token=slack_token,
        slack_room=slack_room,
        configuration_secure_dir=configuration_secure_dir,
        configuration_internal_dir=configuration_internal_dir,
        override_files=override_files,
        **kwargs)
    return stage
Example 16

def install_pipeline(save_config_locally, dry_run, variable_files, cmd_line_vars):
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars, ))

    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'], config['gocd_username'], config['gocd_password'], ssl=True))

    pipeline = configurator \
        .ensure_pipeline_group(DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline(DEPLOY_MARKETING_PIPELINE_NAME) \
        .set_git_material(GitMaterial('https://github.com/edx/tubular',
                                      polling=True,
                                      destination_directory='tubular',
                                      ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                      )
                          )

    pipeline.ensure_environment_variables(
        {
            'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
        }
    )

    pipeline.ensure_encrypted_environment_variables(
        {
            'PRIVATE_GITHUB_KEY': config['github_private_key'],
            'PRIVATE_MARKETING_REPOSITORY_URL': config['mktg_repository_url'],
            'PRIVATE_ACQUIA_REMOTE': config['acquia_remote_url'],
            'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
            'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
            'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key']
        }
    )

    # Stage to fetch the current tag names from stage and prod
    fetch_tag_stage = pipeline.ensure_stage(FETCH_TAG_STAGE_NAME)
    fetch_tag_stage.set_has_manual_approval()
    fetch_tag_job = fetch_tag_stage.ensure_job(FETCH_TAG_JOB_NAME)
    tasks.generate_requirements_install(fetch_tag_job, 'tubular')
    tasks.generate_target_directory(fetch_tag_job)
    path_name = '../target/{env}_tag_name.txt'
    tasks.generate_fetch_tag(fetch_tag_job, STAGE_ENV, path_name)
    tasks.generate_fetch_tag(fetch_tag_job, PROD_ENV, path_name)

    fetch_tag_job.ensure_artifacts(
        set([BuildArtifact('target/{stage_tag}.txt'.format(stage_tag=STAGE_TAG_NAME)),
             BuildArtifact('target/{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME))])
    )

    # Stage to create and push a tag to Acquia.
    push_to_acquia_stage = pipeline.ensure_stage(PUSH_TO_ACQUIA_STAGE_NAME)
    push_to_acquia_job = push_to_acquia_stage.ensure_job(PUSH_TO_ACQUIA_JOB_NAME)
    # Ensures the tag name is accessible in future jobs.
    push_to_acquia_job.ensure_artifacts(
        set([BuildArtifact('target/{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))])
    )

    tasks.generate_requirements_install(push_to_acquia_job, 'tubular')
    tasks.generate_target_directory(push_to_acquia_job)
    tasks.fetch_edx_mktg(push_to_acquia_job, 'edx-mktg')

    # Create a tag from MARKETING_REPOSITORY_VERSION branch of marketing repo
    push_to_acquia_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                # Writing dates to a file should help with any issues dealing with a job
                # taking place over two days (23:59:59 -> 00:00:00). Only the day can be
                # affected since we don't use minutes or seconds.
                # NOTE: Uses UTC
                'echo -n "release-$(date +%Y-%m-%d-%H.%M)" > ../target/{new_tag}.txt && '
                'TAG_NAME=$(cat ../target/{new_tag}.txt) && '
                '/usr/bin/git config user.email "*****@*****.**" && '
                '/usr/bin/git config user.name "edx-secure" && '
                '/usr/bin/git tag -a $TAG_NAME -m "Release for $(date +%B\ %d,\ %Y). Created by $GO_TRIGGER_USER." && '
                'GIT_SSH_COMMAND="/usr/bin/ssh -o StrictHostKeyChecking=no -i ../github_key.pem" '
                '/usr/bin/git push origin $TAG_NAME'.format(new_tag=NEW_TAG_NAME)
            ],
            working_dir='edx-mktg'
        )
    )

    # Set up Acquia Github key for use in pushing tag to Acquia
    tasks.format_RSA_key(push_to_acquia_job, 'acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')

    # Set up Acquia remote repo and push tag to Acquia. Change new tag file to contain "tags/" for deployment.
    push_to_acquia_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                '/usr/bin/git remote add acquia $PRIVATE_ACQUIA_REMOTE && '
                'GIT_SSH_COMMAND="/usr/bin/ssh -o StrictHostKeyChecking=no -i ../acquia_github_key.pem" '
                '/usr/bin/git push acquia $(cat ../target/{new_tag}.txt) && '
                'echo -n "tags/" | cat - ../target/{new_tag}.txt > temp && mv temp ../target/{new_tag}.txt'.format(new_tag=NEW_TAG_NAME)
            ],
            working_dir='edx-mktg'
        )
    )

    # Stage to backup database in stage
    backup_stage_database_stage = pipeline.ensure_stage(BACKUP_STAGE_DATABASE_STAGE_NAME)
    backup_stage_database_job = backup_stage_database_stage.ensure_job(BACKUP_STAGE_DATABASE_JOB_NAME)

    tasks.generate_requirements_install(backup_stage_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_stage_database_job, STAGE_ENV)

    # Stage to deploy to stage
    deploy_stage_for_stage = pipeline.ensure_stage(DEPLOY_STAGE_STAGE_NAME)
    deploy_job_for_stage = deploy_stage_for_stage.ensure_job(DEPLOY_STAGE_JOB_NAME)

    tasks.generate_requirements_install(deploy_job_for_stage, 'tubular')
    tasks.generate_target_directory(deploy_job_for_stage)

    # fetch the tag name
    new_tag_name_artifact_params = {
        'pipeline': DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': PUSH_TO_ACQUIA_STAGE_NAME,
        'job': PUSH_TO_ACQUIA_JOB_NAME,
        'src': FetchArtifactFile('{new_tag}.txt'.format(new_tag=NEW_TAG_NAME)),
        'dest': 'target'
    }
    deploy_job_for_stage.add_task(FetchArtifactTask(**new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(deploy_job_for_stage, STAGE_ENV, '{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))

    # Stage to clear caches in stage
    clear_stage_caches_stage = pipeline.ensure_stage(CLEAR_STAGE_CACHES_STAGE_NAME)
    clear_stage_caches_job = clear_stage_caches_stage.ensure_job(CLEAR_STAGE_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_stage_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_stage_caches_job, 'tubular')
    tasks.format_RSA_key(clear_stage_caches_job, 'edx-mktg/docroot/acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_stage_caches_job, STAGE_ENV)
    tasks.generate_clear_varnish_cache(clear_stage_caches_job, STAGE_ENV)

    # Stage to backup database in prod
    backup_prod_database_stage = pipeline.ensure_stage(BACKUP_PROD_DATABASE_STAGE_NAME)
    backup_prod_database_stage.set_has_manual_approval()
    backup_prod_database_job = backup_prod_database_stage.ensure_job(BACKUP_PROD_DATABASE_JOB_NAME)

    tasks.generate_requirements_install(backup_prod_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_prod_database_job, PROD_ENV)

    # Stage to deploy to prod
    deploy_stage_for_prod = pipeline.ensure_stage(DEPLOY_PROD_STAGE_NAME)
    deploy_job_for_prod = deploy_stage_for_prod.ensure_job(DEPLOY_PROD_JOB_NAME)

    tasks.generate_requirements_install(deploy_job_for_prod, 'tubular')
    tasks.generate_target_directory(deploy_job_for_prod)
    deploy_job_for_prod.add_task(FetchArtifactTask(**new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(deploy_job_for_prod, PROD_ENV, '{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))

    # Stage to clear caches in prod
    clear_prod_caches_stage = pipeline.ensure_stage(CLEAR_PROD_CACHES_STAGE_NAME)
    clear_prod_caches_job = clear_prod_caches_stage.ensure_job(CLEAR_PROD_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_prod_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_prod_caches_job, 'tubular')
    tasks.format_RSA_key(clear_prod_caches_job, 'edx-mktg/docroot/acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_prod_caches_job, PROD_ENV)
    tasks.generate_clear_varnish_cache(clear_prod_caches_job, PROD_ENV)

    configurator.save_updated_config(save_config_locally=save_config_locally, dry_run=dry_run)
Example 17

def generate_rollback_migrations(
        stage,
        edp,
        application_user,
        application_name,
        application_path,
        db_migration_user,
        db_migration_pass,
        migration_info_location,
        inventory_location=None,
        instance_key_location=None,
        ami_artifact_location=None,
        config=None,
        sub_application_name=None,
        additional_migrations=None,
):
    """
    Generates a job for rolling back database migrations.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage this job will be part of
        edp (EDP): EDP that this job will roll back
        migration_info_location (edxpipelines.utils.ArtifactLocation): Location of
            the migration output to roll back
        inventory_location (edxpipelines.utils.ArtifactLocation): Location of the
            ansible inventory
        instance_key_location (edxpipelines.utils.ArtifactLocation): Location of
            the key used to ssh in to the instance
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): AMI to use when
            launching instance used to roll back migrations.
        config (dict): Environment-specific secure config.
        sub_application_name (str): additional command to be passed to the migrate app {cms|lms}
        additional_migrations (list[edxpipelines.utils.MigrationAppInfo]): Additional applications to migrate.
            Will only run if has_migrations=True

    Returns:
        gomatic.gocd.pipelines.Job
    """
    if not additional_migrations:
        additional_migrations = []

    job_name = constants.ROLLBACK_MIGRATIONS_JOB_NAME_TPL(edp)

    if sub_application_name is not None:
        job_name += '_{}'.format(sub_application_name)

    job = stage.ensure_job(job_name)

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    is_instance_launch_required = ami_artifact_location and config

    if is_instance_launch_required:
        # Retrieve the AMI ID from the upstream build stage.
        tasks.retrieve_artifact(ami_artifact_location, job)
        variable_override_path = path_to_artifact(ami_artifact_location.file_name)

        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )
    else:
        # The instance was launched elsewhere. Fetch the Ansible inventory to
        # use in reaching the EC2 instance.
        tasks.retrieve_artifact(inventory_location, job)

        # Fetch the SSH key to use in reaching the EC2 instance.
        tasks.retrieve_artifact(instance_key_location, job)

    # SSH key used to access the instance needs specific permissions.
    job.ensure_task(tasks.bash_task(
        'chmod 600 {key_pem_path}',
        key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
    ))

    # Fetch the migration output.
    tasks.retrieve_artifact(migration_info_location, job)

    tasks.generate_migration_rollback(
        job=job,
        application_user=application_user,
        application_name=application_name,
        application_path=application_path,
        db_migration_user=db_migration_user,
        db_migration_pass=db_migration_pass,
        sub_application_name=sub_application_name,
    )

    for migration_info in additional_migrations:
        tasks.generate_migration_rollback(
            job=job,
            application_user=migration_info.name,
            application_name=migration_info.name,
            application_path=migration_info.path,
            db_migration_user=db_migration_user,
            db_migration_pass=db_migration_pass,
            sub_application_name=migration_info.sub_application_name,
        )

    # If an instance was launched as part of this job, clean it up.
    if is_instance_launch_required:
        tasks.generate_ami_cleanup(job, config['slack_token'], runif='any')

    return job
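
A wiring sketch for the instance-launching path (ami_artifact_location plus config); the upstream pipeline names, artifact file name, and application values are placeholders, and EDP/ArtifactLocation are assumed to come from edxpipelines.utils.

edp = EDP('stage', 'edx', 'edxapp')
migration_info = ArtifactLocation(
    'deploy_edxapp',                        # placeholder upstream pipeline name
    constants.APPLY_MIGRATIONS_STAGE,
    constants.APPLY_MIGRATIONS_JOB,
    constants.MIGRATION_RESULT_FILENAME,
)
ami_artifact = ArtifactLocation(
    'build_edxapp_ami',                     # placeholder upstream pipeline name
    constants.BUILD_AMI_STAGE_NAME,
    constants.BUILD_AMI_JOB_NAME_TPL(edp),
    'ami.yml',                              # placeholder artifact file name
)
generate_rollback_migrations(
    stage,
    edp,
    application_user='edxapp',              # placeholder application
    application_name='edxapp',
    application_path='/edx/app/edxapp',
    db_migration_user=constants.DB_MIGRATION_USER,
    db_migration_pass=config['db_migration_pass'],
    migration_info_location=migration_info,
    ami_artifact_location=ami_artifact,
    config=config,
)
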
Example 18

def generate_build_ami(stage,
                       edp,
                       app_repo_url,
                       configuration_secure_material,
                       configuration_internal_material,
                       playbook_path,
                       config,
                       version_tags=None,
                       **kwargs):
    """
    Generates a job for creating a new AMI.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            for which an AMI will be created.
        app_repo_url (str): App repo's URL.
        configuration_secure_material (gomatic.gomatic.gocd.materials.GitMaterial): Secure
            configuration material. Destination directory expected to be 'configuration-secure'.
        configuration_internal_material (gomatic.gomatic.gocd.materials.GitMaterial): Internal
            configuration material. Destination directory expected to be 'configuration-internal'.
        playbook_path (str): Path to the Ansible playbook to run when creating the AMI.
        config (dict): Environment-specific secure config.
        version_tags (dict): An optional {app_name: (repo, version), ...} dict that
            specifies what versions to tag the AMI with.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Locate the base AMI.
    tasks.generate_base_ami_selection(
        job,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        edp=edp
    )

    # Launch a new instance on which to build the AMI.
    tasks.generate_launch_instance(
        job,
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
        ec2_security_group_id=config['ec2_security_group_id'],
        ec2_instance_profile_name=config['ec2_instance_profile_name'],
        variable_override_path=path_to_artifact(constants.BASE_AMI_OVERRIDE_FILENAME),
    )

    tasks.generate_ensure_python2(job)

    # Run the Ansible play for the service.
    tasks.generate_run_app_playbook(
        job,
        playbook_path,
        edp,
        app_repo_url,
        slack_token=config['slack_token'],
        configuration_secure_dir=configuration_secure_material.destination_directory,
        configuration_internal_dir=configuration_internal_material.destination_directory,
        disable_edx_services='true',
        COMMON_TAG_EC2_INSTANCE='true',
        **kwargs
    )

    # Create an AMI from the instance.
    tasks.generate_create_ami(
        job,
        edp.play,
        edp.deployment,
        edp.environment,
        app_repo_url,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        path_to_artifact(constants.LAUNCH_INSTANCE_FILENAME),
        slack_token=config['slack_token'],
        version_tags=version_tags,
        **kwargs
    )

    tasks.generate_ami_cleanup(job, config['slack_token'], runif='any')

    return job
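
A minimal usage sketch (not part of the source): it assumes edxpipelines.utils.EDP is the (environment, deployment, play) tuple described in the docstring, and every credential, repository, and identifier below is a placeholder.

# Hypothetical caller for generate_build_ami; every value below is a placeholder.
from gomatic import GoCdConfigurator, HostRestClient, GitMaterial
from edxpipelines.utils import EDP

configurator = GoCdConfigurator(
    HostRestClient('gocd.example.com', 'username', 'password', ssl=True)
)
pipeline = configurator.ensure_pipeline_group(
    'example_group'
).ensure_replacement_of_pipeline('build-edxapp-ami')
stage = pipeline.ensure_stage('build_ami')

config = {
    'aws_access_key_id': '...',
    'aws_secret_access_key': '...',
    'ec2_vpc_subnet_id': 'subnet-00000000',
    'ec2_security_group_id': 'sg-00000000',
    'ec2_instance_profile_name': 'example-profile',
    'slack_token': '...',
}

generate_build_ami(
    stage,
    EDP('stage', 'edx', 'edxapp'),  # assumed order: environment, deployment, play
    'https://github.com/edx/edx-platform',
    GitMaterial('git@github.com:example/configuration-secure.git',
                destination_directory='configuration-secure'),
    GitMaterial('git@github.com:example/configuration-internal.git',
                destination_directory='configuration-internal'),
    'playbooks/edx-east/edxapp.yml',  # placeholder playbook path
    config,
)
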
Example n. 19
def generate_run_migrations(pipeline,
                            db_migration_pass,
                            inventory_location,
                            instance_key_location,
                            launch_info_location,
                            application_user,
                            application_name,
                            application_path,
                            sub_application_name=None,
                            manual_approval=False):
    """
    Generate the stage that applies/runs migrations.

    Args:
        pipeline (gomatic.Pipeline): Pipeline to which to add the run migrations stage.
        db_migration_pass (str): Password for the DB user used to run migrations.
        inventory_location (ArtifactLocation): Location of inventory containing the IP address of the EC2 instance, for fetching.
        instance_key_location (ArtifactLocation): Location of SSH key used to access the EC2 instance, for fetching.
        launch_info_location (ArtifactLocation): Location of the launch_info.yml file for fetching
        application_user (str): Username to use while running the migrations
        application_name (str): Name of the application (e.g. edxapp, programs, etc...)
        application_path (str): path of the application installed on the target machine
        sub_application_name (str): any sub application to insert into the migrations commands {cms|lms}
        manual_approval (bool): Should this stage require manual approval?

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables({
        'APPLICATION_USER': application_user,
        'APPLICATION_NAME': application_name,
        'APPLICATION_PATH': application_path,
        'DB_MIGRATION_USER': '******',
        'ARTIFACT_PATH': constants.ARTIFACT_PATH,
        'ANSIBLE_CONFIG': constants.ANSIBLE_CONTINUOUS_DELIVERY_CONFIG,
    })
    pipeline.ensure_encrypted_environment_variables({
        'DB_MIGRATION_PASS': db_migration_pass,
    })

    if sub_application_name is not None:
        stage_name = "{}_{}".format(constants.APPLY_MIGRATIONS_STAGE,
                                    sub_application_name)
    else:
        stage_name = constants.APPLY_MIGRATIONS_STAGE
    stage = pipeline.ensure_stage(stage_name)

    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(constants.APPLY_MIGRATIONS_JOB)

    # Fetch the Ansible inventory to use in reaching the EC2 instance.
    artifact_params = {
        "pipeline": inventory_location.pipeline,
        "stage": inventory_location.stage,
        "job": inventory_location.job,
        "src": FetchArtifactFile(inventory_location.file_name),
        "dest": constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # Fetch the SSH key to use in reaching the EC2 instance.
    artifact_params = {
        "pipeline": instance_key_location.pipeline,
        "stage": instance_key_location.stage,
        "job": instance_key_location.job,
        "src": FetchArtifactFile(instance_key_location.file_name),
        "dest": constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # ensure the target directory exists
    tasks.generate_target_directory(job)

    # fetch the launch_info.yml
    artifact_params = {
        "pipeline": launch_info_location.pipeline,
        "stage": launch_info_location.stage,
        "job": launch_info_location.job,
        "src": FetchArtifactFile(launch_info_location.file_name),
        "dest": constants.ARTIFACT_PATH
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # The SSH key used to access the EC2 instance needs specific permissions.
    job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                'chmod 600 {}'.format(instance_key_location.file_name),
            ],
            working_dir=constants.ARTIFACT_PATH
        )
    )

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_run_migrations(job, sub_application_name)

    # Cleanup EC2 instance if running the migrations failed.
    # I think this should be left for the terminate instance stage
    # tasks.generate_ami_cleanup(job, runif='failed')

    return stage
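
A sketch of how the three artifact inputs might be wired up (not from the source); ArtifactLocation is assumed to accept the pipeline/stage/job/file_name fields that the function body reads as keyword arguments, and `pipeline` is an existing gomatic.Pipeline obtained as in the previous sketch.

# Hypothetical wiring for generate_run_migrations; names and secrets are placeholders.
from edxpipelines.utils import ArtifactLocation

launch_artifacts = dict(
    pipeline='launch-instance-pipeline',
    stage='launch_instance',
    job='launch_instance_job',
)
inventory = ArtifactLocation(file_name='ansible_inventory', **launch_artifacts)
instance_key = ArtifactLocation(file_name='key.pem', **launch_artifacts)
launch_info = ArtifactLocation(file_name='launch_info.yml', **launch_artifacts)

generate_run_migrations(
    pipeline,                       # an existing gomatic.Pipeline
    db_migration_pass='...',        # placeholder secret
    inventory_location=inventory,
    instance_key_location=instance_key,
    launch_info_location=launch_info,
    application_user='edxapp',
    application_name='edxapp',
    application_path='/edx/app/edxapp',
    sub_application_name='lms',
)
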
Example n. 20
def generate_launch_instance(
        pipeline,
        aws_access_key_id,
        aws_secret_access_key,
        ec2_vpc_subnet_id,
        ec2_security_group_id,
        ec2_instance_profile_name,
        base_ami_id,
        manual_approval=False,
        ec2_region=constants.EC2_REGION,
        ec2_instance_type=constants.EC2_INSTANCE_TYPE,
        ec2_timeout=constants.EC2_LAUNCH_INSTANCE_TIMEOUT,
        ec2_ebs_volume_size=constants.EC2_EBS_VOLUME_SIZE,
        base_ami_id_artifact=None
):
    """
    Pattern to launch an EC2 instance from a base AMI. Generates 3 artifacts:
        key.pem             - private key material generated for this instance launch
        launch_info.yml     - YAML file that contains information about the launched instance
        ansible_inventory   - a list of private AWS IP addresses that can be fed into Ansible to run playbooks

    See the launch playbook for further information:
        https://github.com/edx/configuration/blob/master/playbooks/continuous_delivery/launch_instance.yml

    Args:
        pipeline (gomatic.Pipeline):
        aws_access_key_id (str): AWS key ID for auth
        aws_secret_access_key (str): AWS secret key for auth
        ec2_vpc_subnet_id (str):
        ec2_security_group_id (str):
        ec2_instance_profile_name (str):
        base_ami_id (str): the ami-id used to launch the instance
        manual_approval (bool): Should this stage require manual approval?
        ec2_region (str):
        ec2_instance_type (str):
        ec2_timeout (str):
        ec2_ebs_volume_size (str):
        base_ami_id_artifact (edxpipelines.utils.ArtifactLocation): overrides base_ami_id and forces
            the task to run with the AMI built upstream.

    Returns:
        gomatic.Stage
    """
    stage = pipeline.ensure_stage(constants.LAUNCH_INSTANCE_STAGE_NAME)

    if manual_approval:
        stage.set_has_manual_approval()

    # Install the requirements.
    job = stage.ensure_job(constants.LAUNCH_INSTANCE_JOB_NAME)
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_requirements_install(job, 'configuration')

    if base_ami_id_artifact:
        tasks.retrieve_artifact(base_ami_id_artifact, job, constants.ARTIFACT_PATH)

    # Create the instance-launching task.
    tasks.generate_launch_instance(
        job,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        ec2_vpc_subnet_id=ec2_vpc_subnet_id,
        ec2_security_group_id=ec2_security_group_id,
        ec2_instance_profile_name=ec2_instance_profile_name,
        base_ami_id=base_ami_id,
        ec2_region=ec2_region,
        ec2_instance_type=ec2_instance_type,
        ec2_timeout=ec2_timeout,
        ec2_ebs_volume_size=ec2_ebs_volume_size,
        variable_override_path='{}/{}'.format(
            constants.ARTIFACT_PATH, base_ami_id_artifact.file_name
        ) if base_ami_id_artifact else None,
    )

    tasks.generate_ensure_python2(job)

    return stage
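
A sketch of a typical call (placeholders throughout); the keyword names simply mirror the parameters documented above.

# Hypothetical call to generate_launch_instance; all identifiers are placeholders.
generate_launch_instance(
    pipeline,                                   # an existing gomatic.Pipeline
    aws_access_key_id='...',
    aws_secret_access_key='...',
    ec2_vpc_subnet_id='subnet-00000000',
    ec2_security_group_id='sg-00000000',
    ec2_instance_profile_name='example-profile',
    base_ami_id='ami-00000000',
    manual_approval=True,                       # gate instance launches behind a human
)
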
Example n. 21
def generate_ansible_stage(stage_name,
                           task,
                           pipeline,
                           inventory_location,
                           instance_key_location,
                           launch_info_location,
                           application_user,
                           application_name,
                           application_path,
                           hipchat_auth_token,
                           hipchat_room=constants.HIPCHAT_ROOM,
                           manual_approval=False):
    """
        Generate the stage with the given name, that runs the specified task.

        Args:
            stage_name (str): Name of the generated stage.
            task (function): Task to be executed by the stage.
            pipeline (gomatic.Pipeline): Pipeline to which to add the run migrations stage.
            inventory_location (ArtifactLocation): Location of inventory containing the IP address of the EC2 instance, for fetching.
            instance_key_location (ArtifactLocation): Location of SSH key used to access the EC2 instance, for fetching.
            launch_info_location (ArtifactLocation): Location of the launch_info.yml file for fetching
            application_user (str): Username to use while running the migrations
            application_name (str): Name of the application (e.g. edxapp, programs, etc...)
            application_path (str): path of the application installed on the target machine
            hipchat_auth_token (str): HipChat authentication token
            hipchat_room (str): HipChat room where announcements should be made
            manual_approval (bool): Should this stage require manual approval?

        Returns:
            gomatic.Stage
        """

    pipeline.ensure_environment_variables({
        'APPLICATION_USER': application_user,
        'APPLICATION_NAME': application_name,
        'APPLICATION_PATH': application_path,
        'HIPCHAT_ROOM': hipchat_room,
    })
    pipeline.ensure_encrypted_environment_variables({
        'HIPCHAT_TOKEN': hipchat_auth_token,
    })

    stage = pipeline.ensure_stage(stage_name)

    if manual_approval:
        stage.set_has_manual_approval()
    job = stage.ensure_job(stage_name + '_job')

    # Fetch the Ansible inventory to use in reaching the EC2 instance.
    artifact_params = {
        "pipeline": inventory_location.pipeline,
        "stage": inventory_location.stage,
        "job": inventory_location.job,
        "src": FetchArtifactFile(inventory_location.file_name),
        "dest": 'configuration'
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # Fetch the SSH key to use in reaching the EC2 instance.
    artifact_params = {
        "pipeline": instance_key_location.pipeline,
        "stage": instance_key_location.stage,
        "job": instance_key_location.job,
        "src": FetchArtifactFile(instance_key_location.file_name),
        "dest": 'configuration'
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # ensure the target directory exists
    tasks.generate_target_directory(job)

    # fetch the launch_info.yml
    artifact_params = {
        "pipeline": launch_info_location.pipeline,
        "stage": launch_info_location.stage,
        "job": launch_info_location.job,
        "src": FetchArtifactFile(launch_info_location.file_name),
        "dest": "target"
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    # The SSH key used to access the EC2 instance needs specific permissions.
    job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                'chmod 600 {}'.format(instance_key_location.file_name),
            ],
            working_dir='configuration'
        )
    )

    tasks.generate_requirements_install(job, 'configuration')
    task(job)

    return stage
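
A sketch of plugging a task callable into generate_ansible_stage (not from the source); tasks.generate_target_directory is reused here only because its single-job-argument signature appears elsewhere in these examples, and the artifact locations follow the earlier sketch.

# Hypothetical caller; the stage simply runs whatever callable it is given against the job.
generate_ansible_stage(
    'run_example_task',
    tasks.generate_target_directory,   # any callable that takes a gomatic job
    pipeline,
    inventory_location=inventory,
    instance_key_location=instance_key,
    launch_info_location=launch_info,
    application_user='edxapp',
    application_name='edxapp',
    application_path='/edx/app/edxapp',
    hipchat_auth_token='...',          # placeholder secret
)
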
Example n. 22
def generate_build_ami(stage,
                       edp,
                       app_repo_url,
                       configuration_secure_material,
                       configuration_internal_material,
                       playbook_path,
                       config,
                       version_tags=None,
                       **kwargs):
    """
    Generates a job for creating a new AMI.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            for which an AMI will be created.
        app_repo_url (str): App repo's URL.
        configuration_secure_material (gomatic.gocd.materials.GitMaterial): Secure
            configuration material. Destination directory expected to be 'configuration-secure'.
        configuration_internal_material (gomatic.gocd.materials.GitMaterial): Internal
            configuration material. Destination directory expected to be 'configuration-internal'.
        playbook_path (str): Path to the Ansible playbook to run when creating the AMI.
        config (dict): Environment-specific secure config.
        version_tags (dict): An optional {app_name: (repo, version), ...} dict that
            specifies what versions to tag the AMI with.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.BUILD_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Locate the base AMI.
    tasks.generate_base_ami_selection(
        job,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        edp=edp
    )

    # Launch a new instance on which to build the AMI.
    tasks.generate_launch_instance(
        job,
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
        ec2_security_group_id=config['ec2_security_group_id'],
        ec2_instance_profile_name=config['ec2_instance_profile_name'],
        variable_override_path=path_to_artifact(constants.BASE_AMI_OVERRIDE_FILENAME),
    )

    # Run the Ansible play for the service.
    tasks.generate_run_app_playbook(
        job,
        playbook_path,
        edp,
        app_repo_url,
        private_github_key=config['github_private_key'],
        hipchat_token=config['hipchat_token'],
        configuration_secure_dir=configuration_secure_material.destination_directory,
        configuration_internal_dir=configuration_internal_material.destination_directory,
        disable_edx_services='true',
        COMMON_TAG_EC2_INSTANCE='true',
        **kwargs
    )

    # Create an AMI from the instance.
    tasks.generate_create_ami(
        job,
        edp.play,
        edp.deployment,
        edp.environment,
        app_repo_url,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        path_to_artifact(constants.LAUNCH_INSTANCE_FILENAME),
        hipchat_token=config['hipchat_token'],
        version_tags=version_tags,
        **kwargs
    )

    tasks.generate_ami_cleanup(job, config['hipchat_token'], runif='any')

    return job
Example n. 23
def generate_rollback_asg_stage(pipeline, asgard_api_endpoints, asgard_token,
                                aws_access_key_id, aws_secret_access_key,
                                hipchat_auth_token, hipchat_room,
                                deploy_file_location):
    """
    Generates a stage which performs rollback to a previous ASG (or ASGs) via Asgard.
    If the previous ASG (or ASGs) fail health checks for some reason, new ASGs with
    the provided AMI ID are created and used as the rollback ASG(s).
    This stage *always* requires manual approval.

    Args:
        pipeline (gomatic.Pipeline):
        asgard_api_endpoints (str): canonical URL for Asgard.
        asgard_token (str):
        aws_access_key_id (str):
        aws_secret_access_key (str):
        hipchat_auth_token (str): HipChat authentication token
        hipchat_room (str): HipChat room where announcements should be made
        deploy_file_location (ArtifactLocation): The location of the YAML artifact from the previous deploy
            that has the previous ASG info along with `ami_id`, for rollback/re-deploy respectively.

    Returns:
        gomatic.Stage
    """
    pipeline.ensure_environment_variables({
        'ASGARD_API_ENDPOINTS': asgard_api_endpoints,
        'HIPCHAT_ROOM': hipchat_room,
    })
    pipeline.ensure_encrypted_environment_variables({
        'ASGARD_API_TOKEN': asgard_token,
        'AWS_ACCESS_KEY_ID': aws_access_key_id,
        'AWS_SECRET_ACCESS_KEY': aws_secret_access_key,
        'HIPCHAT_TOKEN': hipchat_auth_token,
    })

    stage = pipeline.ensure_stage(constants.ROLLBACK_ASGS_STAGE_NAME)
    # Important: Do *not* automatically rollback! Always manual...
    stage.set_has_manual_approval()
    job = stage.ensure_job(constants.ROLLBACK_ASGS_JOB_NAME)
    tasks.generate_requirements_install(job, 'tubular')

    artifact_params = {
        "pipeline": deploy_file_location.pipeline,
        "stage": deploy_file_location.stage,
        "job": deploy_file_location.job,
        "src": FetchArtifactFile(deploy_file_location.file_name),
        "dest": 'tubular'
    }
    job.add_task(FetchArtifactTask(**artifact_params))

    job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                'mkdir -p ../target',
            ],
            working_dir="tubular"
        )
    )

    artifact_path = '{}/{}'.format(constants.ARTIFACT_PATH,
                                   constants.ROLLBACK_AMI_OUT_FILENAME)
    job.ensure_artifacts(set([BuildArtifact(artifact_path)]))

    job.add_task(
        ExecTask(
            [
                '/usr/bin/python',
                'scripts/rollback_asg.py',
                '--config_file',
                deploy_file_location.file_name,
                '--out_file',
                '../{}'.format(artifact_path),
            ],
            working_dir="tubular"
        )
    )
    return stage
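
A sketch of the wiring (placeholders only, not from the source); the deploy-file artifact is assumed to be the YAML written by an earlier deploy stage, and ArtifactLocation is assumed to accept these fields as keyword arguments.

# Hypothetical call to generate_rollback_asg_stage; all values are placeholders.
from edxpipelines.utils import ArtifactLocation

deploy_file = ArtifactLocation(
    pipeline='deploy-edxapp',
    stage='deploy_ami',
    job='deploy_ami_job',
    file_name='ami_deploy_info.yml',
)

generate_rollback_asg_stage(
    pipeline,                                       # an existing gomatic.Pipeline
    asgard_api_endpoints='https://asgard.example.com/api',
    asgard_token='...',
    aws_access_key_id='...',
    aws_secret_access_key='...',
    hipchat_auth_token='...',
    hipchat_room='release-pipeline',
    deploy_file_location=deploy_file,
)
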
Example n. 24
def generate_rollback_migrations(
        stage,
        edp,
        application_user,
        application_name,
        application_path,
        db_migration_user,
        db_migration_pass,
        migration_info_location,
        inventory_location=None,
        instance_key_location=None,
        ami_artifact_location=None,
        config=None,
        sub_application_name=None
):
    """
    Generates a job for rolling back database migrations.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage this job will be part of
        edp (EDP): EDP that this job will roll back
        application_user (str): Username to use while running the migrations
        application_name (str): Name of the application (e.g. edxapp, programs, etc...)
        application_path (str): path of the application installed on the target machine
        db_migration_user (str): Username of the DB user used to run migrations
        db_migration_pass (str): Password for the DB user used to run migrations
        migration_info_location (edxpipelines.utils.ArtifactLocation): Location of
            the migration output to roll back
        inventory_location (edxpipelines.utils.ArtifactLocation): Location of the
            ansible inventory
        instance_key_location (edxpipelines.utils.ArtifactLocation): Location of
            the key used to ssh in to the instance
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): AMI to use when
            launching instance used to roll back migrations.
        config (dict): Environment-specific secure config.
        sub_application_name (str): additional command to be passed to the migrate app {cms|lms}

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job_name = constants.ROLLBACK_MIGRATIONS_JOB_NAME_TPL(edp)

    if sub_application_name is not None:
        job_name += '_{}'.format(sub_application_name)

    job = stage.ensure_job(job_name)

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_target_directory(job)

    is_instance_launch_required = ami_artifact_location and config

    if is_instance_launch_required:
        # Retrieve the AMI ID from the upstream build stage.
        tasks.retrieve_artifact(ami_artifact_location, job)
        variable_override_path = path_to_artifact(ami_artifact_location.file_name)

        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )
    else:
        # The instance was launched elsewhere. Fetch the Ansible inventory to
        # use in reaching the EC2 instance.
        tasks.retrieve_artifact(inventory_location, job)

        # Fetch the SSH key to use in reaching the EC2 instance.
        tasks.retrieve_artifact(instance_key_location, job)

    # SSH key used to access the instance needs specific permissions.
    job.ensure_task(tasks.bash_task(
        'chmod 600 {key_pem_path}',
        key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
    ))

    # Fetch the migration output.
    tasks.retrieve_artifact(migration_info_location, job)

    tasks.generate_migration_rollback(
        job=job,
        application_user=application_user,
        application_name=application_name,
        application_path=application_path,
        db_migration_user=db_migration_user,
        db_migration_pass=db_migration_pass,
        sub_application_name=sub_application_name,
    )

    # If an instance was launched as part of this job, clean it up.
    if is_instance_launch_required:
        tasks.generate_ami_cleanup(job, config['hipchat_token'], runif='any')

    return job
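
A sketch exercising the instance-launch branch (not from the source): passing both an AMI artifact and the secure config makes the job launch, use, and clean up its own instance. EDP ordering and the ArtifactLocation fields are assumptions carried over from the earlier sketches.

# Hypothetical call to generate_rollback_migrations; artifacts and config values are placeholders.
migration_output = ArtifactLocation(
    pipeline='deploy-edxapp',
    stage=constants.APPLY_MIGRATIONS_STAGE,
    job=constants.APPLY_MIGRATIONS_JOB,
    file_name='migration_output.yml',
)
ami_artifact = ArtifactLocation(
    pipeline='build-edxapp-ami',
    stage='build_ami',
    job='build_ami_job',
    file_name='ami.yml',
)

generate_rollback_migrations(
    stage,                                  # an existing gomatic stage
    EDP('stage', 'edx', 'edxapp'),          # assumed order: environment, deployment, play
    application_user='edxapp',
    application_name='edxapp',
    application_path='/edx/app/edxapp',
    db_migration_user=constants.DB_MIGRATION_USER,
    db_migration_pass='...',                # placeholder secret
    migration_info_location=migration_output,
    ami_artifact_location=ami_artifact,
    config=config,                          # secure config with the aws_* and hipchat_token keys read above
    sub_application_name='lms',
)
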
Example n. 25
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))

    pipeline = configurator \
        .ensure_pipeline_group(DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline('rollback-prod-marketing-site') \
        .set_git_material(GitMaterial('https://github.com/edx/tubular',
                                      polling=False,
                                      destination_directory='tubular',
                                      ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                      )
                          ) \
        .ensure_material(PipelineMaterial(DEPLOY_MARKETING_PIPELINE_NAME, FETCH_TAG_STAGE_NAME))

    pipeline.ensure_environment_variables({
        'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
    })

    pipeline.ensure_encrypted_environment_variables({
        'PRIVATE_GITHUB_KEY': config['github_private_key'],
        'PRIVATE_MARKETING_REPOSITORY_URL': config['mktg_repository_url'],
        'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
        'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
        'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key'],
    })

    prod_tag_name_artifact_params = {
        'pipeline': DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': FETCH_TAG_STAGE_NAME,
        'job': FETCH_TAG_JOB_NAME,
        'src': FetchArtifactFile('{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME)),
        'dest': 'target'
    }

    # Stage to roll back the prod marketing site to its last stable tag
    rollback_stage = pipeline.ensure_stage(ROLLBACK_STAGE_NAME)
    rollback_stage.set_has_manual_approval()
    rollback_job = rollback_stage.ensure_job(ROLLBACK_JOB_NAME)

    tasks.generate_requirements_install(rollback_job, 'tubular')
    tasks.generate_target_directory(rollback_job)
    rollback_job.add_task(FetchArtifactTask(**prod_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        rollback_job, PROD_ENV,
        '{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME))

    # Stage to clear the prod Drupal and Varnish caches
    clear_prod_caches_stage = pipeline.ensure_stage(
        CLEAR_PROD_CACHES_STAGE_NAME)
    clear_prod_caches_job = clear_prod_caches_stage.ensure_job(
        CLEAR_PROD_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_prod_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_prod_caches_job, 'tubular')
    tasks.format_RSA_key(clear_prod_caches_job,
                         'edx-mktg/docroot/acquia_github_key.pem',
                         '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_prod_caches_job, PROD_ENV)
    tasks.generate_clear_varnish_cache(clear_prod_caches_job, PROD_ENV)

    configurator.save_updated_config(save_config_locally=save_config_locally,
                                     dry_run=dry_run)
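
A sketch of a dry-run invocation (not from the source); the exact shape of the individual command-line overrides is not shown in this example, so cmd_line_vars is left empty, and the variable file names are placeholders.

# Hypothetical dry-run invocation of install_pipeline; file names are placeholders.
install_pipeline(
    save_config_locally=True,   # keep a local copy of the generated GoCD config
    dry_run=True,               # show the config diff without pushing it to the server
    variable_files=('gocd_credentials.yml', 'drupal_secure.yml'),
    cmd_line_vars=(),           # shape of individual overrides not shown in this example
)
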
Example n. 26
def generate_deploy_ami(stage, ami_artifact_location, edp, config, has_migrations=True, application_user=None):
    """
    Generates a job for deploying an AMI. Migrations are applied as part of this job.

    Args:
        stage (gomatic.gocd.pipelines.Stage): Stage to which this job belongs.
        ami_artifact_location (edxpipelines.utils.ArtifactLocation): Where to find
            the AMI artifact to deploy.
        edp (edxpipelines.utils.EDP): Tuple indicating environment, deployment, and play
            to which the AMI belongs.
        config (dict): Environment-specific secure config.
        has_migrations (bool): Whether to generate Gomatic for applying migrations.
        application_user (str): application user if different from the play name.

    Returns:
        gomatic.gocd.pipelines.Job
    """
    job = stage.ensure_job(constants.DEPLOY_AMI_JOB_NAME_TPL(edp))

    tasks.generate_requirements_install(job, 'configuration')
    tasks.generate_package_install(job, 'tubular')
    tasks.generate_target_directory(job)

    # Retrieve the AMI ID from the upstream build stage.
    tasks.retrieve_artifact(ami_artifact_location, job)
    variable_override_path = path_to_artifact(ami_artifact_location.file_name)

    if has_migrations:
        tasks.generate_launch_instance(
            job,
            aws_access_key_id=config['aws_access_key_id'],
            aws_secret_access_key=config['aws_secret_access_key'],
            ec2_vpc_subnet_id=config['ec2_vpc_subnet_id'],
            ec2_security_group_id=config['ec2_security_group_id'],
            ec2_instance_profile_name=config['ec2_instance_profile_name'],
            variable_override_path=variable_override_path,
        )

        # SSH key used to access the instance needs specific permissions.
        job.ensure_task(tasks.bash_task(
            'chmod 600 {key_pem_path}',
            key_pem_path=path_to_artifact(constants.KEY_PEM_FILENAME)
        ))

        if application_user is None:
            application_user = edp.play

        tasks.generate_run_migrations(
            job,
            application_user=application_user,
            application_name=application_user,
            application_path='/edx/app/{}'.format(application_user),
            db_migration_user=constants.DB_MIGRATION_USER,
            db_migration_pass=config['db_migration_pass'],
        )

        tasks.generate_ami_cleanup(job, config['hipchat_token'], runif='any')

    tasks.generate_deploy_ami(
        job,
        variable_override_path,
        config['asgard_api_endpoints'],
        config['asgard_token'],
    )

    return job
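
A sketch of a typical call (placeholders only, not from the source); the AMI artifact is assumed to be the YAML emitted by a build stage such as the one in the earlier sketch, and the EDP/ArtifactLocation assumptions from above carry over.

# Hypothetical call to generate_deploy_ami; artifact and config values are placeholders.
ami_artifact = ArtifactLocation(
    pipeline='build-edxapp-ami',
    stage='build_ami',
    job='build_ami_job',
    file_name='ami.yml',
)

generate_deploy_ami(
    stage,                              # an existing gomatic stage
    ami_artifact,
    EDP('prod', 'edx', 'edxapp'),       # assumed order: environment, deployment, play
    config,                             # secure config with aws_*, asgard_*, db, and hipchat keys
    has_migrations=True,
    application_user='edxapp',
)
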