def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    materials: A list of dictionaries of the materials used in this pipeline
    upstream_pipelines: A list of dictionaries of the upstream pipelines that feed into the manual verification pipeline
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline = gcc.ensure_pipeline_group(config['pipeline_group'])\
                  .ensure_replacement_of_pipeline(config['pipeline_name'])

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=material['ignore_patterns']))

    for material in config['upstream_pipelines']:
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    # What this accomplishes:
    # When an upstream pipeline such as edx stage runs, this pipeline sits downstream of it. Since the first
    # stage here is automatic, the git materials are carried over from the upstream pipeline.
    #
    # The second stage in this pipeline requires manual approval.
    #
    # This allows the overall workflow to remain paused while manual verification is completed, and keeps
    # the git materials pinned.
    #
    # Once the second stage is approved, the workflow continues and downstream pipelines execute
    # with the same pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline,
                                constants.INITIAL_VERIFICATION_STAGE_NAME)

    manual_verification_stage = pipeline.ensure_stage(
        constants.MANUAL_VERIFICATION_STAGE_NAME)
    manual_verification_stage.set_has_manual_approval()
    manual_verification_job = manual_verification_stage.ensure_job(
        constants.MANUAL_VERIFICATION_JOB_NAME)
    manual_verification_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
        ]))

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
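
For orientation, a minimal sketch of the variable structure this installer consumes, in the shape the two loops above expect; every value is a hypothetical placeholder, not real edX configuration.

# Hypothetical variables for the manual-verification installer above
# (placeholders only; real values come from --variable_file YAML files).
example_vars = {
    'gocd_url': 'gocd.example.com',
    'gocd_username': 'admin',
    'gocd_password': 'secret',
    'pipeline_group': 'deploy',
    'pipeline_name': 'manual_verification',
    'materials': [{
        'url': 'https://github.com/edx/tubular',
        'branch': 'master',
        'material_name': 'tubular',
        'polling': True,
        'destination_directory': 'tubular',
        'ignore_patterns': ['**/*'],
    }],
    'upstream_pipelines': [{
        'pipeline_name': 'build_edxapp_ami',
        'stage_name': 'build_ami',
        'material_name': 'build_edxapp_ami',
    }],
}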
Example #2
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - configuration_internal_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    pipelines.generate_basic_multistage_pipeline(
        play='insights',
        playbook_path='playbooks/edx-east/insights.yml',
        app_repo='https://github.com/edx/edx-analytics-dashboard.git',
        service_name='insights',
        hipchat_room='Analytics',
        pipeline_group='Analytics',
        config=config,
        dry_run=dry_run,
        save_config_locally=save_config_locally,
        app_version='$GO_REVISION_INSIGHTS',
        INSIGHTS_VERSION='$GO_REVISION_INSIGHTS')
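
The $GO_REVISION_INSIGHTS value leans on a GoCD convention also visible elsewhere in these examples (e.g. $GO_REVISION_EDX_PLATFORM): each material exposes its checked-out revision to jobs as an environment variable derived from the material name. A small illustrative helper; the function is invented, not part of edx-gomatic:

def go_revision_var(material_name):
    # GoCD exposes each material's revision as GO_REVISION_<NAME>, with the
    # material name upper-cased and dashes mapped to underscores.
    return '$GO_REVISION_{}'.format(material_name.upper().replace('-', '_'))

assert go_revision_var('insights') == '$GO_REVISION_INSIGHTS'
assert go_revision_var('edx-platform') == '$GO_REVISION_EDX_PLATFORM'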
Example #3
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - configuration_internal_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    version_env_var = '$GO_REVISION_ECOMMERCE'
    pipelines.generate_basic_multistage_pipeline(
        play='ecommerce',
        pipeline_group='E-Commerce',
        playbook_path='playbooks/edx-east/ecommerce.yml',
        app_repo='https://github.com/edx/ecommerce.git',
        service_name='ecommerce',
        hipchat_room='release',
        config=config,
        save_config_locally=save_config_locally,
        dry_run=dry_run,
        app_version=version_env_var,
        ECOMMERCE_VERSION=version_env_var)
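
The docstring's variable list maps onto a YAML variable file passed via --variable_file. A hypothetical sketch in the same comment style the later examples use, with every value invented:

# Hypothetical variable file for this pipeline (all values invented):
#   gocd_url: "gocd.example.com"
#   gocd_username: "admin"
#   gocd_password: "secret"
#   configuration_secure_repo: "git@example.com:example/configuration-secure.git"
#   configuration_internal_repo: "git@example.com:example/configuration-internal.git"
#   hipchat_token: "0123456789abcdef"
#   github_private_key: "-----BEGIN RSA PRIVATE KEY-----..."
#   aws_access_key_id: "AKIAEXAMPLE"
#   aws_secret_access_key: "example-secret"
#   ec2_vpc_subnet_id: "subnet-0123abcd"
#   ec2_security_group_id: "sg-0123abcd"
#   ec2_instance_profile_name: "example-instance-profile"
#   base_ami_id: "ami-0123abcd"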
Example #4
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - pipeline_group
    - pipeline_name
    - asgard_api_endpoints
    - asgard_token
    - aws_access_key_id
    - aws_secret_access_key
    - cron_timer
    """

    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))

    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
                           .ensure_replacement_of_pipeline(config['pipeline_name'])\
                           .set_timer(config['cron_timer'])\
                           .set_git_material(GitMaterial("https://github.com/edx/tubular.git",
                                                         polling=True,
                                                         destination_directory="tubular",
                                                         ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                                         )
                                             )

    stages.generate_asg_cleanup(pipeline, config['asgard_api_endpoints'],
                                config['asgard_token'],
                                config['aws_access_key_id'],
                                config['aws_secret_access_key'])
    configurator.save_updated_config(save_config_locally=save_config_locally,
                                     dry_run=dry_run)
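
The cron_timer value feeds set_timer; GoCD timer specs are Quartz-style, with a leading seconds field, rather than classic five-field cron. A hypothetical entry:

# Hypothetical cron_timer value in a variable file: run daily at 02:30 UTC.
#   cron_timer: "0 30 2 * * ?"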
Example #5
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - pipeline_name
    - pipeline_group
    - asgard_api_endpoints
    - asgard_token
    - aws_access_key_id
    - aws_secret_access_key

    To run this script:
    python edxpipelines/pipelines/deploy_ami.py --variable_file ../gocd-pipelines/gocd/vars/tools/deploy_edge_ami.yml --variable_file ../gocd-pipelines/gocd/vars/tools/tools.yml
    python edxpipelines/pipelines/deploy_ami.py --variable_file ../gocd-pipelines/gocd/vars/tools/deploy_edx_ami.yml --variable_file ../gocd-pipelines/gocd/vars/tools/tools.yml
    python edxpipelines/pipelines/deploy_ami.py --variable_file ../gocd-pipelines/gocd/vars/tools/deploy-mckinsey-ami.yml --variable_file ../gocd-pipelines/gocd/vars/tools/tools.yml
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))
    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline_params = {
        "pipeline_name": config['pipeline_name'],
        "pipeline_group": config['pipeline_group'],
        "asgard_api_endpoints": config['asgard_api_endpoints'],
        "asgard_token": config['asgard_token'],
        "aws_access_key_id": config['aws_access_key_id'],
        "aws_secret_access_key": config['aws_secret_access_key']
    }
    configurator = pipelines.generate_deploy_pipeline(configurator,
                                                      **pipeline_params)
    configurator.save_updated_config(save_config_locally=save_config_locally,
                                     dry_run=dry_run)
    print "done"
Example #6
def install_pipelines(save_config_locally, dry_run, bmd_steps, variable_files,
                      cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - configuration_internal_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id

    Optional variables:
    - configuration_secure_version
    - configuration_internal_version
    """
    BMD_STAGES = {
        'b': generate_build_stages,
        'm': generate_migrate_stages,
        'd': generate_deploy_stages
    }

    # Sort the BMD steps by the custom 'bmd' alphabet
    bmd_steps = utils.sort_bmd(bmd_steps.lower())

    # validate the caller has requested a valid pipeline configuration
    utils.validate_pipeline_permutations(bmd_steps)

    # Merge the configuration files/variables together
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    # Create the pipeline
    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline_group = config['pipeline_group']

    # Some pipelines will need to know the name of the upstream pipeline that built the AMI.
    # Determine the build pipeline name and add it to the config.
    pipeline_name, pipeline_name_build = utils.determine_pipeline_names(
        config, bmd_steps)
    if 'pipeline_name_build' in config:
        raise Exception(
            "The config 'pipeline_name_build' value exists but should only be programmatically generated!"
        )
    config['pipeline_name_build'] = pipeline_name_build

    pipeline = gcc.ensure_pipeline_group(pipeline_group)\
                  .ensure_replacement_of_pipeline(pipeline_name)

    # Setup the materials
    # Example materials yaml
    # materials:
    #   - url: "https://github.com/edx/tubular"
    #     branch: "master"
    #     material_name: "tubular"
    #     polling: "True"
    #     destination_directory: "tubular"
    #     ignore_patterns:
    #     - '**/*'
    for material in config.get('materials', []):
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=material['ignore_patterns']))

    # Setup the upstream pipeline materials
    for material in config.get('upstream_pipelines', []):
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    # We always need to launch the instance used to build the AMI; independent deploys are handled by a different pipeline.
    launch_stage = stages.generate_launch_instance(
        pipeline,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['ec2_vpc_subnet_id'],
        config['ec2_security_group_id'],
        config['ec2_instance_profile_name'],
        config['base_ami_id'],
        manual_approval=not config.get('auto_run', False))

    # Generate all the requested stages
    for phase in bmd_steps:
        BMD_STAGES[phase](pipeline, config)

    # Add the cleanup stage
    generate_cleanup_stages(pipeline, config)

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
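
utils.sort_bmd orders the requested step letters by the custom 'bmd' alphabet so stages are always generated in build -> migrate -> deploy order. A minimal sketch of that idea; the real implementation may differ:

BMD_ORDER = 'bmd'

def sort_bmd(bmd_steps):
    # Reorder the caller-supplied step letters into build/migrate/deploy order.
    return ''.join(sorted(bmd_steps, key=BMD_ORDER.index))

assert sort_bmd('db') == 'bd'
assert sort_bmd('mdb') == 'bmd'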
Example #7
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))

    pipeline = configurator \
        .ensure_pipeline_group(DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline('rollback-prod-marketing-site') \
        .set_git_material(GitMaterial('https://github.com/edx/tubular',
                                      polling=False,
                                      destination_directory='tubular',
                                      ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                      )
                          ) \
        .ensure_material(PipelineMaterial(DEPLOY_MARKETING_PIPELINE_NAME, FETCH_TAG_STAGE_NAME))

    pipeline.ensure_environment_variables({
        'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
    })

    pipeline.ensure_encrypted_environment_variables({
        'PRIVATE_GITHUB_KEY': config['github_private_key'],
        'PRIVATE_MARKETING_REPOSITORY_URL': config['mktg_repository_url'],
        'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
        'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
        'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key'],
    })

    prod_tag_name_artifact_params = {
        'pipeline': DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': FETCH_TAG_STAGE_NAME,
        'job': FETCH_TAG_JOB_NAME,
        'src': FetchArtifactFile('{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME)),
        'dest': 'target',
    }

    # Stage to roll back prod to its last stable tag
    rollback_stage = pipeline.ensure_stage(ROLLBACK_STAGE_NAME)
    rollback_stage.set_has_manual_approval()
    rollback_job = rollback_stage.ensure_job(ROLLBACK_JOB_NAME)

    tasks.generate_requirements_install(rollback_job, 'tubular')
    tasks.generate_target_directory(rollback_job)
    rollback_job.add_task(FetchArtifactTask(**prod_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        rollback_job, PROD_ENV,
        '{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME))

    # Stage to clear caches in prod
    clear_prod_caches_stage = pipeline.ensure_stage(
        CLEAR_PROD_CACHES_STAGE_NAME)
    clear_prod_caches_job = clear_prod_caches_stage.ensure_job(
        CLEAR_PROD_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_prod_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_prod_caches_job, 'tubular')
    tasks.format_RSA_key(clear_prod_caches_job,
                         'edx-mktg/docroot/acquia_github_key.pem',
                         '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_prod_caches_job, PROD_ENV)
    tasks.generate_clear_varnish_cache(clear_prod_caches_job, PROD_ENV)

    configurator.save_updated_config(save_config_locally=save_config_locally,
                                     dry_run=dry_run)
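
The artifact-params dict built above recurs throughout these examples: describe the upstream pipeline/stage/job/file in one dict, then splat it into FetchArtifactTask. A reduced sketch; the helper is invented, while FetchArtifactTask and FetchArtifactFile come from gomatic:

def fetch_upstream_file(job, pipeline_name, stage_name, job_name, file_name):
    # Fetch a file produced by an upstream pipeline into this job's ./target.
    artifact_params = {
        'pipeline': pipeline_name,
        'stage': stage_name,
        'job': job_name,
        'src': FetchArtifactFile(file_name),
        'dest': 'target',
    }
    job.add_task(FetchArtifactTask(**artifact_params))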
Example #8
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))
    artifact_path = 'target/'

    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline = gcc.ensure_pipeline_group(config['pipeline_group'])\
                  .ensure_replacement_of_pipeline(config['pipeline_name'])\
                  .ensure_material(GitMaterial('https://github.com/edx/edx-gomatic',
                                               material_name='edx-gomatic',
                                               polling=True,
                                               destination_directory='edx-gomatic',
                                               branch='master'
                                               )
                                   ) \
                  .ensure_material(GitMaterial('[email protected]:edx-ops/gomatic-secure.git',
                                               material_name='gomatic-secure',
                                               polling=True,
                                               destination_directory='gomatic-secure',
                                               branch='master',
                                               ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                               )
                                   )

    pipeline.ensure_encrypted_environment_variables({
        'GOMATIC_USER': config['gomatic_user'],
        'GOMATIC_PASSWORD': config['gomatic_password'],
    })

    stage = pipeline.ensure_stage('deploy_gomatic_stage')
    job = stage.ensure_job('deploy_gomatic_scripts_job')
    tasks.generate_requirements_install(job, 'edx-gomatic')

    job.add_task(
        ExecTask([
            '/usr/bin/python', 'deploy_pipelines.py', '-v', 'tools', '-f',
            'config.yml'
        ],
                 working_dir='edx-gomatic'))

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
Example #9
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))

    pipeline = configurator \
        .ensure_pipeline_group(config['pipeline']['group']) \
        .ensure_replacement_of_pipeline(config['pipeline']['name']) \
        .set_label_template('${api-manager}') \
        .set_git_material(GitMaterial(config['github']['server_uri'] + '/' + config['github']['repository'],
                                      branch='#{GIT_BRANCH}',
                                      material_name='api-manager',
                                      destination_directory=API_MANAGER_WORKING_DIR)
                          )

    pipeline.ensure_parameters({'GIT_BRANCH': config['github']['branch']})

    pipeline.ensure_environment_variables({
        'SWAGGER_CODEGEN_JAR': config['swagger_codegen_jar'],
        'GITHUB_API_REPO': config['github']['repository'],
        'GITHUB_API_URI': config['github']['api_uri'],
        'GITHUB_API_POLL_WAIT_S': config['github']['api_poll_wait_s'],
        'GITHUB_API_POLL_RETRIES': config['github']['api_poll_retries'],
    })

    # Note: this GitHub polling hack should eventually be replaced with something less hacky.
    setup_stage = pipeline.ensure_stage(SETUP_STAGE_NAME)
    wait_for_travis_job = setup_stage.ensure_job(WAIT_FOR_TRAVIS_JOB_NAME)
    wait_for_travis_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'i=0; until python -c "import requests; assert(requests.get(\'${GITHUB_API_URI}/${GITHUB_API_REPO}/commits/{}/status\'.format(\'${GO_REVISION_API_MANAGER}\')).json()[\'state\'] == \'success\')"; do i=$((i+1)); if [ $i -gt ${GITHUB_API_POLL_RETRIES} ]; then exit 1; fi; sleep ${GITHUB_API_POLL_WAIT_S}; done'
        ]))

    download_stage = pipeline.ensure_stage(
        DOWNLOAD_STAGE_NAME).set_clean_working_dir()
    swagger_codegen_job = download_stage.ensure_job(
        SWAGGER_CODEGEN_JOB_NAME).ensure_artifacts(
            {BuildArtifact(SWAGGER_JAR)})
    swagger_codegen_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'wget ${{SWAGGER_CODEGEN_JAR}} -O {swagger_jar}'.format(
                swagger_jar=SWAGGER_JAR)
        ]))

    build_stage = pipeline.ensure_stage(
        BUILD_STAGE_NAME).set_clean_working_dir()
    swagger_flatten_job = build_stage.ensure_job(
        SWAGGER_FLATTEN_JOB_NAME).ensure_artifacts({
            BuildArtifact('api-manager/swagger-build-artifacts/swagger.json')
        })

    artifact_params = {
        'pipeline': pipeline.name,
        'stage': DOWNLOAD_STAGE_NAME,
        'job': SWAGGER_CODEGEN_JOB_NAME,
        'src': FetchArtifactFile(SWAGGER_JAR),
        'dest': API_MANAGER_WORKING_DIR
    }
    swagger_flatten_job.add_task(FetchArtifactTask(**artifact_params))
    swagger_flatten_job.add_task(
        ExecTask(['make', 'build'], working_dir=API_MANAGER_WORKING_DIR))

    package_source_job = build_stage.ensure_job(
        PACKAGE_SOURCE_JOB_NAME).ensure_artifacts(
            {BuildArtifact('api-manager')})
    package_source_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            '/usr/bin/pip install -t python-libs -r requirements/base.txt'
        ],
                 working_dir=API_MANAGER_WORKING_DIR))

    configurator.save_updated_config(save_config_locally=save_config_locally,
                                     dry_run=dry_run)
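
For readability, a rough Python equivalent of the bash "wait for Travis" polling hack above. The endpoint shape follows the GITHUB_API_* variables; this sketch is an explanatory aid, not the task the pipeline actually runs:

import time

import requests

def wait_for_combined_status(api_uri, repo, sha, retries, wait_s):
    # Poll GitHub's combined-status endpoint until it reports success,
    # giving up after the configured number of retries (mirrors the bash until-loop).
    for _ in range(int(retries) + 1):
        resp = requests.get('{}/{}/commits/{}/status'.format(api_uri, repo, sha))
        if resp.json()['state'] == 'success':
            return True
        time.sleep(float(wait_s))
    return False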
Example #10
def install_pipelines(save_config_locally, dry_run, variable_files,
                      cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id

    Optional variables:
    - configuration_secure_version
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline = gcc.ensure_pipeline_group(config['pipeline_group'])\
                  .ensure_replacement_of_pipeline(config['pipeline_name'])

    # Example materials yaml
    # materials:
    #   - url: "https://github.com/edx/tubular"
    #     branch: "master"
    #     material_name: "tubular"
    #     polling: "True"
    #     destination_directory: "tubular"
    #     ignore_patterns:
    #     - '**/*'

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=material['ignore_patterns']))

    # If no upstream pipelines exist, don't install them!
    for material in config.get('upstream_pipelines', []):
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    stages.generate_armed_stage(pipeline,
                                constants.PRERELEASE_MATERIALS_STAGE_NAME)

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
Example #11
def test_merge_files_and_dicts(self, expected, file_paths, *dicts):
    merged = util.merge_files_and_dicts(file_paths, list(dicts))
    print(merged)
    self.assertEqual(merged, expected)
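
A plausible reading of merge_files_and_dicts, assuming later sources override earlier ones on key conflicts; the test above pins only the merged result, so the real implementation may differ:

import yaml

def merge_files_and_dicts(file_paths, dicts):
    # Load each YAML variable file in order, then apply each override dict.
    merged = {}
    for path in file_paths:
        with open(path) as handle:
            merged.update(yaml.safe_load(handle) or {})
    for override in dicts:
        merged.update(override)
    return merged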
Example #12
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    materials: List of dictionaries of the materials used in this pipeline
    upstream_pipelines: List of dictionaries of the upstream pipelines that feed into the rollback pipeline.
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline = gcc.ensure_pipeline_group(config['pipeline_group'])\
                  .ensure_replacement_of_pipeline(config['pipeline_name'])\
                  .ensure_environment_variables({'WAIT_SLEEP_TIME': config['tubular_sleep_wait_time']})

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=material['ignore_patterns']))

    # Specify the upstream deploy pipeline material for this rollback pipeline.
    # Assumes there's only a single upstream pipeline material for this pipeline.
    rollback_material = config['upstream_pipeline']
    pipeline.ensure_material(
        PipelineMaterial(pipeline_name=rollback_material['pipeline_name'],
                         stage_name=rollback_material['stage_name'],
                         material_name=rollback_material['material_name']))

    # Specify the artifact that will be fetched containing the previous deployment information.
    # Assumes there's only a single upstream artifact used by this pipeline.
    artifact_config = config['upstream_deploy_artifact']
    deploy_file_location = utils.ArtifactLocation(
        artifact_config['pipeline_name'], artifact_config['stage_name'],
        artifact_config['job_name'], artifact_config['artifact_name'])

    # Create the armed stage as this pipeline needs to auto-execute
    stages.generate_armed_stage(pipeline, constants.ARMED_JOB_NAME)

    # Create a single stage in the pipeline which will rollback to the previous ASGs/AMI.
    rollback_stage = stages.generate_rollback_asg_stage(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['hipchat_token'],
        constants.HIPCHAT_ROOM,
        deploy_file_location,
    )
    # Since we only want this stage to roll back via manual approval, ensure that manual approval is set on it.
    rollback_stage.set_has_manual_approval()

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
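
utils.ArtifactLocation serves here, and in the AMI pipelines later, as a simple four-field record describing where an upstream artifact lives. A namedtuple like the following would be one plausible definition (illustrative, not the actual edx-gomatic code):

from collections import namedtuple

# The pipeline/stage/job that produced the artifact, plus the file name.
ArtifactLocation = namedtuple(
    'ArtifactLocation',
    ['pipeline_name', 'stage_name', 'job_name', 'file_name'])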
Example #13
def test_merge_files_and_dicts(self, expected, file_paths, *dicts):
    merged = util.merge_files_and_dicts(file_paths, list(dicts))
    print(merged)
    self.assertEqual(merged, expected)
Example #14
def install_pipeline(save_config_locally, dry_run, variable_files, cmd_line_vars):
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    configurator = GoCdConfigurator(
        HostRestClient(config['gocd_url'], config['gocd_username'], config['gocd_password'], ssl=True))

    pipeline = configurator \
        .ensure_pipeline_group(DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline(DEPLOY_MARKETING_PIPELINE_NAME) \
        .set_git_material(GitMaterial('https://github.com/edx/tubular',
                                      polling=True,
                                      destination_directory='tubular',
                                      ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
                                      )
                          )

    pipeline.ensure_environment_variables(
        {
            'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
        }
    )

    pipeline.ensure_encrypted_environment_variables(
        {
            'PRIVATE_GITHUB_KEY': config['github_private_key'],
            'PRIVATE_MARKETING_REPOSITORY_URL': config['mktg_repository_url'],
            'PRIVATE_ACQUIA_REMOTE': config['acquia_remote_url'],
            'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
            'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
            'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key']
        }
    )

    # Stage to fetch the current tag names from stage and prod
    fetch_tag_stage = pipeline.ensure_stage(FETCH_TAG_STAGE_NAME)
    fetch_tag_stage.set_has_manual_approval()
    fetch_tag_job = fetch_tag_stage.ensure_job(FETCH_TAG_JOB_NAME)
    tasks.generate_requirements_install(fetch_tag_job, 'tubular')
    tasks.generate_target_directory(fetch_tag_job)
    path_name = '../target/{env}_tag_name.txt'
    tasks.generate_fetch_tag(fetch_tag_job, STAGE_ENV, path_name)
    tasks.generate_fetch_tag(fetch_tag_job, PROD_ENV, path_name)

    fetch_tag_job.ensure_artifacts(
        set([BuildArtifact('target/{stage_tag}.txt'.format(stage_tag=STAGE_TAG_NAME)),
             BuildArtifact('target/{prod_tag}.txt'.format(prod_tag=PROD_TAG_NAME))])
    )

    # Stage to create and push a tag to Acquia.
    push_to_acquia_stage = pipeline.ensure_stage(PUSH_TO_ACQUIA_STAGE_NAME)
    push_to_acquia_job = push_to_acquia_stage.ensure_job(PUSH_TO_ACQUIA_JOB_NAME)
    # Ensures the tag name is accessible in future jobs.
    push_to_acquia_job.ensure_artifacts(
        set([BuildArtifact('target/{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))])
    )

    tasks.generate_requirements_install(push_to_acquia_job, 'tubular')
    tasks.generate_target_directory(push_to_acquia_job)
    tasks.fetch_edx_mktg(push_to_acquia_job, 'edx-mktg')

    # Create a tag from MARKETING_REPOSITORY_VERSION branch of marketing repo
    push_to_acquia_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                # Capture the timestamp in a file once so every later step uses
                # the same tag name, even if the job runs across midnight
                # (23:59:59 -> 00:00:00).
                # NOTE: uses UTC.
                'echo -n "release-$(date +%Y-%m-%d-%H.%M)" > ../target/{new_tag}.txt && '
                'TAG_NAME=$(cat ../target/{new_tag}.txt) && '
                '/usr/bin/git config user.email "*****@*****.**" && '
                '/usr/bin/git config user.name "edx-secure" && '
                '/usr/bin/git tag -a $TAG_NAME -m "Release for $(date +%B\ %d,\ %Y). Created by $GO_TRIGGER_USER." && '
                'GIT_SSH_COMMAND="/usr/bin/ssh -o StrictHostKeyChecking=no -i ../github_key.pem" '
                '/usr/bin/git push origin $TAG_NAME'.format(new_tag=NEW_TAG_NAME)
            ],
            working_dir='edx-mktg'
        )
    )

    # Set up Acquia Github key for use in pushing tag to Acquia
    tasks.format_RSA_key(push_to_acquia_job, 'acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')

    # Set up Acquia remote repo and push tag to Acquia. Change new tag file to contain "tags/" for deployment.
    push_to_acquia_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                '/usr/bin/git remote add acquia $PRIVATE_ACQUIA_REMOTE && '
                'GIT_SSH_COMMAND="/usr/bin/ssh -o StrictHostKeyChecking=no -i ../acquia_github_key.pem" '
                '/usr/bin/git push acquia $(cat ../target/{new_tag}.txt) && '
                'echo -n "tags/" | cat - ../target/{new_tag}.txt > temp && mv temp ../target/{new_tag}.txt'.format(new_tag=NEW_TAG_NAME)
            ],
            working_dir='edx-mktg'
        )
    )

    # Stage to backup database in stage
    backup_stage_database_stage = pipeline.ensure_stage(BACKUP_STAGE_DATABASE_STAGE_NAME)
    backup_stage_database_job = backup_stage_database_stage.ensure_job(BACKUP_STAGE_DATABASE_JOB_NAME)

    tasks.generate_requirements_install(backup_stage_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_stage_database_job, STAGE_ENV)

    # Stage to deploy to stage
    deploy_stage_for_stage = pipeline.ensure_stage(DEPLOY_STAGE_STAGE_NAME)
    deploy_job_for_stage = deploy_stage_for_stage.ensure_job(DEPLOY_STAGE_JOB_NAME)

    tasks.generate_requirements_install(deploy_job_for_stage, 'tubular')
    tasks.generate_target_directory(deploy_job_for_stage)

    # fetch the tag name
    new_tag_name_artifact_params = {
        'pipeline': DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': PUSH_TO_ACQUIA_STAGE_NAME,
        'job': PUSH_TO_ACQUIA_JOB_NAME,
        'src': FetchArtifactFile('{new_tag}.txt'.format(new_tag=NEW_TAG_NAME)),
        'dest': 'target'
    }
    deploy_job_for_stage.add_task(FetchArtifactTask(**new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(deploy_job_for_stage, STAGE_ENV, '{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))

    # Stage to clear caches in stage
    clear_stage_caches_stage = pipeline.ensure_stage(CLEAR_STAGE_CACHES_STAGE_NAME)
    clear_stage_caches_job = clear_stage_caches_stage.ensure_job(CLEAR_STAGE_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_stage_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_stage_caches_job, 'tubular')
    tasks.format_RSA_key(clear_stage_caches_job, 'edx-mktg/docroot/acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_stage_caches_job, STAGE_ENV)
    tasks.generate_clear_varnish_cache(clear_stage_caches_job, STAGE_ENV)

    # Stage to backup database in prod
    backup_prod_database_stage = pipeline.ensure_stage(BACKUP_PROD_DATABASE_STAGE_NAME)
    backup_prod_database_stage.set_has_manual_approval()
    backup_prod_database_job = backup_prod_database_stage.ensure_job(BACKUP_PROD_DATABASE_JOB_NAME)

    tasks.generate_requirements_install(backup_prod_database_job, 'tubular')
    tasks.generate_backup_drupal_database(backup_prod_database_job, PROD_ENV)

    # Stage to deploy to prod
    deploy_stage_for_prod = pipeline.ensure_stage(DEPLOY_PROD_STAGE_NAME)
    deploy_job_for_prod = deploy_stage_for_prod.ensure_job(DEPLOY_PROD_JOB_NAME)

    tasks.generate_requirements_install(deploy_job_for_prod, 'tubular')
    tasks.generate_target_directory(deploy_job_for_prod)
    deploy_job_for_prod.add_task(FetchArtifactTask(**new_tag_name_artifact_params))
    tasks.generate_drupal_deploy(deploy_job_for_prod, PROD_ENV, '{new_tag}.txt'.format(new_tag=NEW_TAG_NAME))

    # Stage to clear caches in prod
    clear_prod_caches_stage = pipeline.ensure_stage(CLEAR_PROD_CACHES_STAGE_NAME)
    clear_prod_caches_job = clear_prod_caches_stage.ensure_job(CLEAR_PROD_CACHES_JOB_NAME)

    tasks.fetch_edx_mktg(clear_prod_caches_job, 'edx-mktg')
    tasks.generate_requirements_install(clear_prod_caches_job, 'tubular')
    tasks.format_RSA_key(clear_prod_caches_job, 'edx-mktg/docroot/acquia_github_key.pem', '$PRIVATE_ACQUIA_GITHUB_KEY')
    tasks.generate_flush_drupal_caches(clear_prod_caches_job, PROD_ENV)
    tasks.generate_clear_varnish_cache(clear_prod_caches_job, PROD_ENV)

    configurator.save_updated_config(save_config_locally=save_config_locally, dry_run=dry_run)
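
The tag name written by the push_to_acquia job comes from date +%Y-%m-%d-%H.%M; the same computation in Python, for clarity, assuming the GoCD agents run in UTC as the inline comment notes:

from datetime import datetime

def release_tag_name(now=None):
    # e.g. datetime(2016, 1, 31, 23, 59) -> 'release-2016-01-31-23.59'
    now = now or datetime.utcnow()
    return now.strftime('release-%Y-%m-%d-%H.%M')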
Example #15
def install_pipelines(save_config_locally, dry_run, variable_files,
                      cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - configuration_internal_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id

    Optional variables:
    - configuration_secure_version
    - configuration_internal_version
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline = gcc.ensure_pipeline_group(config['pipeline_group'])\
                  .ensure_replacement_of_pipeline(config['pipeline_name'])

    # Example materials yaml
    # materials:
    #   - url: "https://github.com/edx/tubular"
    #     branch: "master"
    #     material_name: "tubular"
    #     polling: "True"
    #     destination_directory: "tubular"
    #     ignore_patterns:
    #     - '**/*'

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=material['ignore_patterns']))

    # If no upstream pipelines exist, don't install them!
    for material in config.get('upstream_pipelines', []):
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    #
    # Create the AMI-building stage.
    #
    launch_stage = stages.generate_launch_instance(
        pipeline,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['ec2_vpc_subnet_id'],
        config['ec2_security_group_id'],
        config['ec2_instance_profile_name'],
        config['base_ami_id'],
        manual_approval=not config.get('auto_run', False))

    stages.generate_run_play(
        pipeline,
        'playbooks/edx-east/edxapp.yml',
        play=config['play_name'],
        deployment=config['edx_deployment'],
        edx_environment=config['edx_environment'],
        private_github_key=config['github_private_key'],
        app_repo=config['app_repo'],
        configuration_secure_dir='{}-secure'.format(config['edx_deployment']),
        configuration_internal_dir='{}-internal'.format(
            config['edx_deployment']),
        hipchat_token=config['hipchat_token'],
        hipchat_room='release',
        edx_platform_version='$GO_REVISION_EDX_PLATFORM',
        edx_platform_repo='$APP_REPO',
        configuration_version='$GO_REVISION_CONFIGURATION',
        edxapp_theme_source_repo=config['theme_url'],
        edxapp_theme_version='$GO_REVISION_EDX_THEME',
        edxapp_theme_name='$EDXAPP_THEME_NAME',
        disable_edx_services='true',
        COMMON_TAG_EC2_INSTANCE='true',
        cache_id='$GO_PIPELINE_COUNTER')

    stages.generate_create_ami_from_instance(
        pipeline,
        play=config['play_name'],
        deployment=config['edx_deployment'],
        edx_environment=config['edx_environment'],
        app_repo=config['app_repo'],
        app_version='$GO_REVISION_EDX_PLATFORM',
        configuration_secure_repo=config['{}_configuration_secure_repo'.format(
            config['edx_deployment'])],
        configuration_internal_repo=config[
            '{}_configuration_internal_repo'.format(config['edx_deployment'])],
        configuration_repo=config['configuration_url'],
        hipchat_auth_token=config['hipchat_token'],
        hipchat_room='release pipeline',
        configuration_version='$GO_REVISION_CONFIGURATION',
        configuration_secure_version='$GO_REVISION_{}_SECURE'.format(
            config['edx_deployment'].upper()),
        configuration_internal_version='$GO_REVISION_{}_INTERNAL'.format(
            config['edx_deployment'].upper()),
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        edxapp_theme_source_repo=config['theme_url'],
        edxapp_theme_version='$GO_REVISION_EDX_MICROSITE',
    )

    #
    # Create the DB migration running stage.
    #
    ansible_inventory_location = utils.ArtifactLocation(
        pipeline.name, constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME, 'ansible_inventory')
    instance_ssh_key_location = utils.ArtifactLocation(
        pipeline.name, constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME, 'key.pem')
    launch_info_location = utils.ArtifactLocation(
        pipeline.name, constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME, 'launch_info.yml')
    for sub_app in config['edxapp_subapps']:
        stages.generate_run_migrations(
            pipeline,
            db_migration_pass=config['db_migration_pass'],
            inventory_location=ansible_inventory_location,
            instance_key_location=instance_ssh_key_location,
            launch_info_location=launch_info_location,
            application_user=config['db_migration_user'],
            application_name=config['play_name'],
            application_path=config['application_path'],
            sub_application_name=sub_app)

    #
    # Create the stage to deploy the AMI.
    #
    ami_file_location = utils.ArtifactLocation(pipeline.name,
                                               constants.BUILD_AMI_STAGE_NAME,
                                               constants.BUILD_AMI_JOB_NAME,
                                               'ami.yml')
    stages.generate_deploy_ami(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        ami_file_location,
        manual_approval=not config.get('auto_deploy_ami', False))

    #
    # Create the stage to terminate the EC2 instance used to both build the AMI and run DB migrations.
    #
    instance_info_location = utils.ArtifactLocation(
        pipeline.name, constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME, 'launch_info.yml')
    stages.generate_terminate_instance(
        pipeline,
        instance_info_location,
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        hipchat_auth_token=config['hipchat_token'],
        runif='any')

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
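
The migration loop above runs generate_run_migrations once per edxapp sub-application; for edxapp these are conventionally the LMS and Studio (CMS), so a variable file would carry something like the hypothetical entry below:

# Hypothetical variable file entry driving the edxapp_subapps migration loop:
#   edxapp_subapps:
#     - lms
#     - cms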