def install_pipelines(configurator, config):
    """
    Install a pipeline that rolls an ASG-based deployment back to the previous AMI.

    Variables needed for this pipeline:
        materials: list of dictionaries of the git materials used in this pipeline
        upstream_pipeline: dictionary describing the single upstream deploy
            pipeline that feeds in to this rollback pipeline
        upstream_deploy_artifact: dictionary locating the artifact (from the
            upstream deploy) that holds the previous deployment information
        pipeline_group, pipeline_name, tubular_sleep_wait_time,
        asgard_api_endpoints, asgard_token, aws_access_key_id,
        aws_secret_access_key, hipchat_token
    """
    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
        .ensure_replacement_of_pipeline(config['pipeline_name'])\
        .ensure_environment_variables({'WAIT_SLEEP_TIME': config['tubular_sleep_wait_time']})

    # Register every git material this pipeline tracks.
    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=set(material['ignore_patterns'])))

    # Specify the upstream deploy pipeline material for this rollback pipeline.
    # Assumes there's only a single upstream pipeline material for this pipeline.
    rollback_material = config['upstream_pipeline']
    pipeline.ensure_material(
        PipelineMaterial(pipeline_name=rollback_material['pipeline_name'],
                         stage_name=rollback_material['stage_name'],
                         material_name=rollback_material['material_name']))

    # Specify the artifact that will be fetched containing the previous deployment information.
    # Assumes there's only a single upstream artifact used by this pipeline.
    artifact_config = config['upstream_deploy_artifact']
    deploy_file_location = utils.ArtifactLocation(
        artifact_config['pipeline_name'],
        artifact_config['stage_name'],
        artifact_config['job_name'],
        artifact_config['artifact_name'])

    # Create the armed stage as this pipeline needs to auto-execute
    stages.generate_armed_stage(pipeline, constants.ARMED_JOB_NAME)

    # Create a single stage in the pipeline which will rollback to the previous ASGs/AMI.
    rollback_stage = stages.generate_rollback_asg_stage(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['hipchat_token'],
        constants.HIPCHAT_ROOM,
        deploy_file_location,
    )
    # Since we only want this stage to rollback via manual approval, ensure that it is set on this stage.
    rollback_stage.set_has_manual_approval()
def install_pipelines(configurator, config):
    """
    Install the full edxapp build/deploy/rollback workflow:
    release-candidate cut, prerelease materials, build pipelines (stage,
    prod-edx, prod-edge), migrate/deploy pipelines, manual verification,
    ASG and DB rollback pipelines, and the branch merge-back pipeline.

    Arguments:
        configurator (GoCdConfigurator)
        config (dict)

    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - configuration_internal_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id

    Optional variables:
    - configuration_secure_version
    - configuration_internal_version
    """
    # Rebuild both pipeline groups from scratch and set up group-level permissions.
    configurator.ensure_removal_of_pipeline_group('edxapp')
    configurator.ensure_removal_of_pipeline_group('edxapp_prod_deploys')
    edxapp_group = configurator.ensure_pipeline_group('edxapp')
    ensure_permissions(configurator, edxapp_group, Permission.OPERATE, ['edxapp-operator'])
    ensure_permissions(configurator, edxapp_group, Permission.VIEW, ['edxapp-operator'])
    edxapp_deploy_group = configurator.ensure_pipeline_group('edxapp_prod_deploys')
    ensure_permissions(configurator, edxapp_deploy_group, Permission.ADMINS, ['deploy'])
    ensure_permissions(configurator, edxapp_deploy_group, Permission.OPERATE, ['prod-deploy-operators'])
    ensure_permissions(configurator, edxapp_deploy_group, Permission.VIEW, ['prod-deploy-operators'])

    # Pipeline that cuts the release-candidate branch.
    cut_branch = edxapp.make_release_candidate(
        edxapp_group,
        config,
    )
    cut_branch.set_label_template('${edx-platform[:7]}')

    # Pipeline that assembles the prerelease materials; downstream pipelines
    # fetch the private-RC merge artifact and base-AMI selection from it.
    prerelease_materials = edxapp.prerelease_materials(
        edxapp_group,
        config
    )
    prerelease_merge_artifact = utils.ArtifactLocation(
        prerelease_materials.name,
        constants.PRERELEASE_MATERIALS_STAGE_NAME,
        constants.PRERELEASE_MATERIALS_JOB_NAME,
        constants.PRIVATE_RC_FILENAME,
    )

    # Build pipeline for the stage environment (AMI build only).
    stage_b = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_group,
        [
            edxapp.generate_build_stages(
                app_repo=EDX_PLATFORM().url,
                edp=STAGE_EDX_EDXAPP,
                theme_url=EDX_MICROSITE().url,
                configuration_secure_repo=EDX_SECURE().url,
                configuration_internal_repo=EDX_INTERNAL().url,
                configuration_url=CONFIGURATION().url,
                prerelease_merge_artifact=prerelease_merge_artifact,
            ),
        ],
        config=config[edxapp.STAGE_EDX_EDXAPP],
        pipeline_name="STAGE_edxapp_B",
        ami_artifact=utils.ArtifactLocation(
            prerelease_materials.name,
            constants.BASE_AMI_SELECTION_STAGE_NAME,
            constants.BASE_AMI_SELECTION_EDP_JOB_NAME(STAGE_EDX_EDXAPP),
            constants.BASE_AMI_OVERRIDE_FILENAME,
        ),
        auto_run=True,
    )
    stage_b.set_label_template('${prerelease}')

    # Build pipeline for the prod edx environment.
    prod_edx_b = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_deploy_group,
        [
            edxapp.generate_build_stages(
                app_repo=EDX_PLATFORM().url,
                edp=PROD_EDX_EDXAPP,
                theme_url=EDX_MICROSITE().url,
                configuration_secure_repo=EDX_SECURE().url,
                configuration_internal_repo=EDX_INTERNAL().url,
                configuration_url=CONFIGURATION().url,
                prerelease_merge_artifact=prerelease_merge_artifact,
            ),
        ],
        config=config[edxapp.PROD_EDX_EDXAPP],
        pipeline_name="PROD_edx_edxapp_B",
        ami_artifact=utils.ArtifactLocation(
            prerelease_materials.name,
            constants.BASE_AMI_SELECTION_STAGE_NAME,
            constants.BASE_AMI_SELECTION_EDP_JOB_NAME(PROD_EDX_EDXAPP),
            constants.BASE_AMI_OVERRIDE_FILENAME,
        ),
        auto_run=True,
    )
    prod_edx_b.set_label_template('${prerelease}')

    # Build pipeline for the prod edge environment (edge secure/internal config).
    prod_edge_b = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_deploy_group,
        [
            edxapp.generate_build_stages(
                app_repo=EDX_PLATFORM().url,
                edp=PROD_EDGE_EDXAPP,
                theme_url=EDX_MICROSITE().url,
                configuration_secure_repo=EDGE_SECURE().url,
                configuration_internal_repo=EDGE_INTERNAL().url,
                configuration_url=CONFIGURATION().url,
                prerelease_merge_artifact=prerelease_merge_artifact,
            ),
        ],
        config=config[edxapp.PROD_EDGE_EDXAPP],
        pipeline_name="PROD_edge_edxapp_B",
        ami_artifact=utils.ArtifactLocation(
            prerelease_materials.name,
            constants.BASE_AMI_SELECTION_STAGE_NAME,
            constants.BASE_AMI_SELECTION_EDP_JOB_NAME(PROD_EDGE_EDXAPP),
            constants.BASE_AMI_OVERRIDE_FILENAME,
        ),
        auto_run=True,
    )
    prod_edge_b.set_label_template('${prerelease}')

    # All build pipelines are downstream of the prerelease-materials pipeline.
    for pipeline in (stage_b, prod_edx_b, prod_edge_b):
        pipeline.ensure_material(
            PipelineMaterial(
                pipeline_name=prerelease_materials.name,
                stage_name=constants.BASE_AMI_SELECTION_STAGE_NAME,
                material_name="prerelease",
            )
        )

    # (base-AMI artifact, built-AMI artifact) pairs for the two prod builds;
    # consumed by the deploy and rollback pipelines below.
    deployed_ami_pairs = [
        (
            utils.ArtifactLocation(
                prerelease_materials.name,
                constants.BASE_AMI_SELECTION_STAGE_NAME,
                ami_selection_job_name,
                constants.BASE_AMI_OVERRIDE_FILENAME,
            ),
            utils.ArtifactLocation(
                build_pipeline.name,
                constants.BUILD_AMI_STAGE_NAME,
                constants.BUILD_AMI_JOB_NAME,
                constants.BUILD_AMI_FILENAME,
            )
        )
        for build_pipeline, ami_selection_job_name in [
            (prod_edx_b, constants.BASE_AMI_SELECTION_EDP_JOB_NAME(PROD_EDX_EDXAPP)),
            (prod_edge_b, constants.BASE_AMI_SELECTION_EDP_JOB_NAME(PROD_EDGE_EDXAPP))
        ]
    ]

    # Migrate/deploy pipeline for stage, with e2e tests after cleanup.
    stage_md = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_group,
        stage_builders=[
            edxapp.generate_migrate_stages,
            edxapp.generate_deploy_stages(
                pipeline_name_build=stage_b.name,
                ami_pairs=deployed_ami_pairs,
                stage_deploy_pipeline=None,
                base_ami_artifact=utils.ArtifactLocation(
                    prerelease_materials.name,
                    constants.BASE_AMI_SELECTION_STAGE_NAME,
                    constants.BASE_AMI_SELECTION_EDP_JOB_NAME(STAGE_EDX_EDXAPP),
                    constants.BASE_AMI_OVERRIDE_FILENAME,
                ),
                head_ami_artifact=utils.ArtifactLocation(
                    stage_b.name,
                    constants.BUILD_AMI_STAGE_NAME,
                    constants.BUILD_AMI_JOB_NAME,
                    constants.BUILD_AMI_FILENAME,
                ),
                auto_deploy_ami=True,
            ),
        ],
        post_cleanup_builders=[
            edxapp.generate_e2e_test_stage,
        ],
        config=config[edxapp.STAGE_EDX_EDXAPP],
        pipeline_name="STAGE_edxapp_M-D",
        ami_artifact=utils.ArtifactLocation(
            stage_b.name,
            constants.BUILD_AMI_STAGE_NAME,
            constants.BUILD_AMI_JOB_NAME,
            constants.BUILD_AMI_FILENAME,
        ),
        auto_run=True,
    )
    stage_md.set_automatic_pipeline_locking()
    stage_md.set_label_template('${STAGE_edxapp_B_build}')

    # Stage deploy waits on all three builds plus the prerelease materials.
    for build_stage in (stage_b, prod_edx_b, prod_edge_b):
        stage_md.ensure_material(
            PipelineMaterial(
                pipeline_name=build_stage.name,
                stage_name=constants.BUILD_AMI_STAGE_NAME,
                material_name="{}_build".format(build_stage.name),
            )
        )
    stage_md.ensure_material(
        PipelineMaterial(
            pipeline_name=prerelease_materials.name,
            stage_name=constants.BASE_AMI_SELECTION_STAGE_NAME,
            material_name="prerelease",
        )
    )

    # Manually-triggered DB migration rollback for the stage environment.
    rollback_stage_db = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_deploy_group,
        [
            edxapp.rollback_database(edxapp.STAGE_EDX_EDXAPP, stage_b, stage_md),
        ],
        config=config[edxapp.STAGE_EDX_EDXAPP],
        pipeline_name="stage_edxapp_Rollback_Migrations",
        ami_artifact=utils.ArtifactLocation(
            stage_b.name,
            constants.BUILD_AMI_STAGE_NAME,
            constants.BUILD_AMI_JOB_NAME,
            constants.BUILD_AMI_FILENAME
        ),
        auto_run=False,
        pre_launch_builders=[
            edxapp.armed_stage_builder,
        ],
    )
    rollback_stage_db.set_label_template('${deploy_pipeline}')

    # Manual-verification gate between stage deploy and prod deploys.
    manual_verification = edxapp.manual_verification(
        edxapp_deploy_group,
        config
    )
    manual_verification.set_label_template('${stage_ami_deploy}')
    manual_verification.ensure_material(
        PipelineMaterial(
            pipeline_name=stage_md.name,
            stage_name=constants.TERMINATE_INSTANCE_STAGE_NAME,
            material_name='stage_ami_deploy',
        )
    )
    manual_verification.ensure_material(
        PipelineMaterial(
            pipeline_name=prod_edx_b.name,
            stage_name=constants.BUILD_AMI_STAGE_NAME,
            material_name='PROD_edx_edxapp_ami_build',
        )
    )
    manual_verification.ensure_material(
        PipelineMaterial(
            pipeline_name=prod_edge_b.name,
            stage_name=constants.BUILD_AMI_STAGE_NAME,
            material_name='PROD_edge_edxapp_ami_build',
        )
    )

    release_advancer = edxapp.release_advancer(
        edxapp_deploy_group,
        config
    )
    release_advancer.set_label_template('${tubular[:7]}-${COUNT}')

    # When manually triggered in the pipeline above, the following two pipelines migrate/deploy
    # to the production EDX and EDGE environments.
    prod_edx_md = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_deploy_group,
        [
            edxapp.generate_migrate_stages,
            edxapp.generate_deploy_stages(
                pipeline_name_build=prod_edx_b.name,
                ami_pairs=deployed_ami_pairs,
                stage_deploy_pipeline=stage_md,
                base_ami_artifact=utils.ArtifactLocation(
                    prerelease_materials.name,
                    constants.BASE_AMI_SELECTION_STAGE_NAME,
                    constants.BASE_AMI_SELECTION_EDP_JOB_NAME(PROD_EDX_EDXAPP),
                    constants.BASE_AMI_OVERRIDE_FILENAME,
                ),
                head_ami_artifact=utils.ArtifactLocation(
                    prod_edx_b.name,
                    constants.BUILD_AMI_STAGE_NAME,
                    constants.BUILD_AMI_JOB_NAME,
                    constants.BUILD_AMI_FILENAME,
                ),
                auto_deploy_ami=True,
            )
        ],
        config=config[edxapp.PROD_EDX_EDXAPP],
        pipeline_name="PROD_edx_edxapp_M-D",
        ami_artifact=utils.ArtifactLocation(
            prod_edx_b.name,
            constants.BUILD_AMI_STAGE_NAME,
            constants.BUILD_AMI_JOB_NAME,
            constants.BUILD_AMI_FILENAME,
        ),
        auto_run=True,
    )
    prod_edx_md.set_label_template('${prod_release_gate}')

    prod_edge_md = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_deploy_group,
        [
            edxapp.generate_migrate_stages,
            edxapp.generate_deploy_stages(
                pipeline_name_build=prod_edge_b.name,
                ami_pairs=deployed_ami_pairs,
                stage_deploy_pipeline=stage_md,
                base_ami_artifact=utils.ArtifactLocation(
                    prerelease_materials.name,
                    constants.BASE_AMI_SELECTION_STAGE_NAME,
                    constants.BASE_AMI_SELECTION_EDP_JOB_NAME(PROD_EDGE_EDXAPP),
                    constants.BASE_AMI_OVERRIDE_FILENAME,
                ),
                head_ami_artifact=utils.ArtifactLocation(
                    prod_edge_b.name,
                    constants.BUILD_AMI_STAGE_NAME,
                    constants.BUILD_AMI_JOB_NAME,
                    constants.BUILD_AMI_FILENAME,
                ),
                auto_deploy_ami=True,
            )
        ],
        config=config[edxapp.PROD_EDGE_EDXAPP],
        pipeline_name="PROD_edge_edxapp_M-D",
        ami_artifact=utils.ArtifactLocation(
            prod_edge_b.name,
            constants.BUILD_AMI_STAGE_NAME,
            constants.BUILD_AMI_JOB_NAME,
            constants.BUILD_AMI_FILENAME,
        ),
        auto_run=True,
    )
    prod_edge_md.set_label_template('${prod_release_gate}')

    # Prod deploys wait on the manual-verification gate, both prod builds,
    # the stage deploy's terminate stage, and the prerelease materials.
    for deploy in (prod_edx_md, prod_edge_md):
        deploy.ensure_material(
            PipelineMaterial(
                pipeline_name=manual_verification.name,
                stage_name=constants.MANUAL_VERIFICATION_STAGE_NAME,
                material_name="prod_release_gate",
            )
        )
        for build in (prod_edx_b, prod_edge_b):
            deploy.ensure_material(
                PipelineMaterial(build.name, constants.BUILD_AMI_STAGE_NAME, "{}_build".format(build.name))
            )
        deploy.ensure_material(
            PipelineMaterial(stage_md.name, constants.TERMINATE_INSTANCE_STAGE_NAME, "terminate_instance_stage")
        )
        deploy.ensure_material(
            PipelineMaterial(
                pipeline_name=prerelease_materials.name,
                stage_name=constants.BASE_AMI_SELECTION_STAGE_NAME,
                material_name="prerelease",
            )
        )

    # Every build and deploy pipeline tracks the full set of git materials.
    for pipeline in (stage_b, stage_md, prod_edx_b, prod_edx_md, prod_edge_b, prod_edge_md):
        for material in (
                TUBULAR, CONFIGURATION, EDX_PLATFORM, EDX_SECURE, EDGE_SECURE,
                EDX_MICROSITE, EDX_INTERNAL, EDGE_INTERNAL
        ):
            pipeline.ensure_material(material())

    # ASG rollback pipelines for the two prod environments.
    rollback_edx = edxapp.rollback_asgs(
        edxapp_deploy_group=edxapp_deploy_group,
        pipeline_name='PROD_edx_edxapp_Rollback_latest',
        deploy_pipeline=prod_edx_md,
        config=config[edxapp.PROD_EDX_EDXAPP],
        ami_pairs=deployed_ami_pairs,
        stage_deploy_pipeline=stage_md,
        base_ami_artifact=utils.ArtifactLocation(
            prerelease_materials.name,
            constants.BASE_AMI_SELECTION_STAGE_NAME,
            constants.BASE_AMI_SELECTION_EDP_JOB_NAME(PROD_EDX_EDXAPP),
            constants.BASE_AMI_OVERRIDE_FILENAME,
        ),
        head_ami_artifact=utils.ArtifactLocation(
            prod_edx_b.name,
            constants.BUILD_AMI_STAGE_NAME,
            constants.BUILD_AMI_JOB_NAME,
            constants.BUILD_AMI_FILENAME,
        ),
    )
    rollback_edx.set_label_template('${deploy_ami}')

    rollback_edge = edxapp.rollback_asgs(
        edxapp_deploy_group=edxapp_deploy_group,
        pipeline_name='PROD_edge_edxapp_Rollback_latest',
        deploy_pipeline=prod_edge_md,
        config=config[edxapp.PROD_EDGE_EDXAPP],
        ami_pairs=deployed_ami_pairs,
        stage_deploy_pipeline=stage_md,
        base_ami_artifact=utils.ArtifactLocation(
            prerelease_materials.name,
            constants.BASE_AMI_SELECTION_STAGE_NAME,
            constants.BASE_AMI_SELECTION_EDP_JOB_NAME(PROD_EDGE_EDXAPP),
            constants.BASE_AMI_OVERRIDE_FILENAME,
        ),
        head_ami_artifact=utils.ArtifactLocation(
            prod_edge_b.name,
            constants.BUILD_AMI_STAGE_NAME,
            constants.BUILD_AMI_JOB_NAME,
            constants.BUILD_AMI_FILENAME,
        ),
    )
    rollback_edge.set_label_template('${deploy_ami}')

    for rollback_pipeline in (rollback_edx, rollback_edge):
        rollback_pipeline.ensure_material(
            PipelineMaterial(
                pipeline_name=stage_md.name,
                stage_name=constants.TERMINATE_INSTANCE_STAGE_NAME,
                material_name='terminate_instance_stage',
            )
        )
        rollback_pipeline.ensure_material(
            PipelineMaterial(
                pipeline_name=prerelease_materials.name,
                stage_name=constants.BASE_AMI_SELECTION_STAGE_NAME,
                material_name="prerelease",
            )
        )
        for build in (prod_edx_b, prod_edge_b):
            rollback_pipeline.ensure_material(
                PipelineMaterial(
                    pipeline_name=build.name,
                    stage_name=constants.BUILD_AMI_STAGE_NAME,
                    material_name='{}_build_ami'.format(build.name),
                )
            )
    # Each rollback is additionally pinned to its own environment's deploy.
    rollback_edx.ensure_material(
        PipelineMaterial(prod_edx_md.name, constants.DEPLOY_AMI_STAGE_NAME, "deploy_ami")
    )
    rollback_edge.ensure_material(
        PipelineMaterial(prod_edge_md.name, constants.DEPLOY_AMI_STAGE_NAME, "deploy_ami")
    )

    # Manually-triggered DB migration rollbacks for the two prod environments.
    rollback_edx_db = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_deploy_group,
        [
            edxapp.rollback_database(edxapp.PROD_EDX_EDXAPP, prod_edx_b, prod_edx_md),
        ],
        config=config[edxapp.PROD_EDX_EDXAPP],
        pipeline_name="PROD_edx_edxapp_Rollback_Migrations_latest",
        ami_artifact=utils.ArtifactLocation(
            prod_edx_b.name,
            constants.BUILD_AMI_STAGE_NAME,
            constants.BUILD_AMI_JOB_NAME,
            constants.BUILD_AMI_FILENAME
        ),
        auto_run=False,
        pre_launch_builders=[
            edxapp.armed_stage_builder,
        ],
    )
    rollback_edx_db.set_label_template('${deploy_pipeline}')

    rollback_edge_db = edxapp.launch_and_terminate_subset_pipeline(
        edxapp_deploy_group,
        [
            edxapp.rollback_database(edxapp.PROD_EDGE_EDXAPP, prod_edge_b, prod_edge_md),
        ],
        config=config[edxapp.PROD_EDGE_EDXAPP],
        pipeline_name="PROD_edge_edxapp_Rollback_Migrations_latest",
        ami_artifact=utils.ArtifactLocation(
            prod_edge_b.name,
            constants.BUILD_AMI_STAGE_NAME,
            constants.BUILD_AMI_JOB_NAME,
            constants.BUILD_AMI_FILENAME
        ),
        auto_run=False,
        pre_launch_builders=[
            edxapp.armed_stage_builder,
        ],
    )
    rollback_edge_db.set_label_template('${deploy_pipeline}')

    # Pipeline that merges release branches back after a successful prod deploy.
    deploy_artifact = utils.ArtifactLocation(
        prod_edx_md.name,
        constants.DEPLOY_AMI_STAGE_NAME,
        constants.DEPLOY_AMI_JOB_NAME,
        constants.DEPLOY_AMI_OUT_FILENAME,
    )
    merge_back = edxapp.merge_back_branches(
        edxapp_deploy_group,
        constants.BRANCH_CLEANUP_PIPELINE_NAME,
        deploy_artifact,
        prerelease_merge_artifact,
        config,
    )
    merge_back.set_label_template('${{deploy_pipeline_{}}}'.format(prod_edx_md.name))
    merge_back.ensure_material(
        PipelineMaterial(
            pipeline_name=prerelease_materials.name,
            stage_name=constants.PRERELEASE_MATERIALS_STAGE_NAME,
            material_name='prerelease_materials',
        )
    )
    # Specify the upstream deploy pipeline materials for this branch-merging pipeline.
    for deploy_pipeline in (prod_edx_md, prod_edge_md):
        merge_back.ensure_material(
            PipelineMaterial(
                pipeline_name=deploy_pipeline.name,
                stage_name=constants.DEPLOY_AMI_STAGE_NAME,
                material_name='deploy_pipeline_{}'.format(deploy_pipeline.name),
            )
        )
def install_pipelines(configurator, config):
    """
    Install a pipeline that launches an EC2 instance, builds an edxapp AMI
    from it, runs DB migrations, deploys the AMI, and terminates the instance.

    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - configuration_internal_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id

    Optional variables:
    - configuration_secure_version
    - configuration_internal_version
    """
    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
        .ensure_replacement_of_pipeline(config['pipeline_name'])

    # Example materials yaml
    # materials:
    #   - url: "https://github.com/edx/tubular"
    #     branch: "release"
    #     material_name: "tubular"
    #     polling: "True"
    #     destination_directory: "tubular"
    #     ignore_patterns:
    #     - '**/*'
    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=set(material['ignore_patterns'])))

    # If no upstream pipelines exist, don't install them!
    for material in config.get('upstream_pipelines', []):
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    #
    # Create the AMI-building stage.
    #
    # The launch stage requires manual approval unless 'auto_run' is set.
    stages.generate_launch_instance(
        pipeline,
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['ec2_vpc_subnet_id'],
        config['ec2_security_group_id'],
        config['ec2_instance_profile_name'],
        config['base_ami_id'],
        manual_approval=not config.get('auto_run', False))

    # Run the edxapp play on the launched instance; $GO_REVISION_* variables
    # are supplied by GoCD from the registered materials.
    stages.generate_run_play(
        pipeline,
        'playbooks/edx-east/edxapp.yml',
        edp=utils.EDP(config['edx_environment'], config['edx_deployment'], config['play_name']),
        private_github_key=config['github_private_key'],
        app_repo=config['app_repo'],
        configuration_secure_dir='{}-secure'.format(config['edx_deployment']),
        configuration_internal_dir='{}-internal'.format(config['edx_deployment']),
        hipchat_token=config['hipchat_token'],
        hipchat_room='release',
        edx_platform_version='$GO_REVISION_EDX_PLATFORM',
        edx_platform_repo='$APP_REPO',
        configuration_version='$GO_REVISION_CONFIGURATION',
        edxapp_theme_source_repo=config['theme_url'],
        edxapp_theme_version='$GO_REVISION_EDX_THEME',
        edxapp_theme_name='$EDXAPP_THEME_NAME',
        disable_edx_services='true',
        COMMON_TAG_EC2_INSTANCE='true',
        cache_id='$GO_PIPELINE_COUNTER')

    # Repo URLs and revision env-var names are keyed by the deployment name
    # (e.g. 'edx' -> 'edx_configuration_secure_repo' / '$GO_REVISION_EDX_SECURE').
    configuration_secure_repo = config['{}_configuration_secure_repo'.format(config['edx_deployment'])]
    configuration_internal_repo = config['{}_configuration_internal_repo'.format(config['edx_deployment'])]
    configuration_secure_version = '$GO_REVISION_{}_SECURE'.format(config['edx_deployment'].upper())
    configuration_internal_version = '$GO_REVISION_{}_INTERNAL'.format(config['edx_deployment'].upper())

    stages.generate_create_ami_from_instance(
        pipeline,
        edp=utils.EDP(config['edx_environment'], config['edx_deployment'], config['play_name']),
        app_repo=config['app_repo'],
        app_version='$GO_REVISION_EDX_PLATFORM',
        hipchat_token=config['hipchat_token'],
        hipchat_room='release pipeline',
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        version_tags={
            'configuration': (config['configuration_url'], '$GO_REVISION_CONFIGURATION'),
            'configuration_secure': (configuration_secure_repo, configuration_secure_version),
            'configuration_internal': (configuration_internal_repo, configuration_internal_version),
            'edxapp_theme': (config['theme_url'], '$GO_REVISION_EDX_MICROSITE'),
        })

    #
    # Create the DB migration running stage.
    #
    # All three artifacts come from the launch-instance stage above.
    ansible_inventory_location = utils.ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.ANSIBLE_INVENTORY_FILENAME)
    instance_ssh_key_location = utils.ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.KEY_PEM_FILENAME)
    launch_info_location = utils.ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.LAUNCH_INSTANCE_FILENAME)
    # Migrations run separately for each edxapp sub-application.
    for sub_app in ['cms', 'lms']:
        stages.generate_run_migrations(
            pipeline,
            db_migration_pass=config['db_migration_pass'],
            inventory_location=ansible_inventory_location,
            instance_key_location=instance_ssh_key_location,
            launch_info_location=launch_info_location,
            application_user=config['db_migration_user'],
            application_name=config['play_name'],
            application_path=config['application_path'],
            sub_application_name=sub_app)

    #
    # Create the stage to deploy the AMI.
    #
    # Deploy requires manual approval unless 'auto_deploy_ami' is set.
    ami_file_location = utils.ArtifactLocation(
        pipeline.name,
        constants.BUILD_AMI_STAGE_NAME,
        constants.BUILD_AMI_JOB_NAME,
        'ami.yml')
    stages.generate_deploy_ami(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        ami_file_location,
        manual_approval=not config.get('auto_deploy_ami', False))

    #
    # Create the stage to terminate the EC2 instance used to both build the AMI and run DB migrations.
    #
    instance_info_location = utils.ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.LAUNCH_INSTANCE_FILENAME)
    # runif='any' terminates the instance even when earlier stages failed.
    stages.generate_terminate_instance(
        pipeline,
        instance_info_location,
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        hipchat_token=config['hipchat_token'],
        runif='any')
def install_pipelines(configurator, config):
    """
    Install the pipelines that can deploy the edX api-gateway.

    Stages, in order: upload (push swagger to the gateway and record the next
    stage name), test (stub), deploy (flip to the uploaded stage), log (deploy
    the Splunk-forwarding lambda), forward_build (republish the build
    artifacts for the next pipeline in a multi-stage deployment chain).
    """
    pipeline = configurator \
        .ensure_pipeline_group(config['pipeline']['group']) \
        .ensure_replacement_of_pipeline(config['pipeline']['name']) \
        .set_label_template('${build}') \
        .set_automatic_pipeline_locking()

    # Allow for a multi-stage deployment
    # Preference order for the upstream build material:
    # previous_deployment's forward_build > upstream build pipeline > configured build pipeline.
    if 'previous_deployment' in config['upstream_pipelines']:
        build_material = {
            'pipeline': config['upstream_pipelines']['previous_deployment'],
            'stage': 'forward_build',
            'jobs': {
                'swagger': 'forward_build',
                'source': 'forward_build'
            }
        }
    elif 'build' in config['upstream_pipelines']:
        build_material = {
            'pipeline': config['upstream_pipelines']['build'],
            'stage': api_build.BUILD_STAGE_NAME,
            'jobs': {
                'swagger': 'swagger-flatten',
                'source': 'package-source'
            }
        }
    else:
        build_material = {
            'pipeline': config['pipeline']['build'],
            'stage': api_build.BUILD_STAGE_NAME,
            'jobs': {
                'swagger': 'swagger-flatten',
                'source': 'package-source'
            }
        }

    pipeline.ensure_material(PipelineMaterial(build_material['pipeline'], build_material['stage'], 'build'))

    pipeline.ensure_environment_variables(
        {
            'ROOT_REDIRECT': config['root_redirect'],
            'API_BASE': config['api_base'],
            'LOG_LEVEL': config['aws']['log_level'],
            'RATE_LIMIT': config['aws']['rate_limit'],
            'METRICS': config['aws']['metrics'],
            'ROTATION_ORDER': ' '.join(config['rotation_order']),
            'BURST_LIMIT': config['aws']['burst_limit'],
            'EDXAPP_HOST': config['upstream_origins']['edxapp'],
            'CATALOG_HOST': config['upstream_origins']['catalog'],
            'WAIT_SLEEP_TIME': config.get('tubular_sleep_wait_time', constants.TUBULAR_SLEEP_WAIT_TIME),
        }
    )

    pipeline.ensure_encrypted_environment_variables({
        'AWS_ACCESS_KEY_ID': config['aws']['access_key_id'],
        'AWS_SECRET_ACCESS_KEY': config['aws']['secret_access_key']
    })

    # Setup the Upload stage
    upload_stage = pipeline.ensure_stage('upload').set_clean_working_dir()
    upload_gateway_job = upload_stage.ensure_job('upload_gateway')
    upload_gateway_job.ensure_tab(Tab('output.txt', 'output.txt'))
    upload_gateway_job.ensure_tab(Tab('next_stage.txt', 'next_stage.txt'))
    # next_stage.txt records which gateway stage deploy.py uploaded to;
    # the deploy stage below fetches it to know what to flip to.
    upload_gateway_job.ensure_artifacts({BuildArtifact('next_stage.txt')})

    # Fetch-artifact parameters reused by several jobs below.
    swagger_flatten_artifact_params = {
        'pipeline': build_material['pipeline'],
        'stage': build_material['stage'],
        'job': build_material['jobs']['swagger'],
        'src': FetchArtifactFile('swagger.json')
    }
    upload_gateway_job.add_task(FetchArtifactTask(**swagger_flatten_artifact_params))

    api_manager_artifact_params = {
        'pipeline': build_material['pipeline'],
        'stage': build_material['stage'],
        'job': build_material['jobs']['source'],
        'src': FetchArtifactDir('api-manager')
    }
    upload_gateway_job.add_task(FetchArtifactTask(**api_manager_artifact_params))

    upload_gateway_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                'PYTHONPATH=python-libs python scripts/aws/deploy.py --api-base-domain ${API_BASE} '
                '--swagger-filename ../swagger.json --tag ${GO_PIPELINE_LABEL} --rotation-order '
                '${ROTATION_ORDER} --log-level ${LOG_LEVEL} --metrics ${METRICS} --rate-limit ${RATE_LIMIT} '
                '--burst-limit ${BURST_LIMIT} --edxapp-host ${EDXAPP_HOST} --catalog-host ${CATALOG_HOST} '
                '--landing-page ${ROOT_REDIRECT} > ../next_stage.txt'
            ],
            working_dir='api-manager'
        )
    )

    # Setup the test stage
    test_stage = pipeline.ensure_stage('test')
    test_job = test_stage.ensure_job('test_job')
    test_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                '/bin/echo "need to implement these tests, just stubbing out the stage for now"'
            ]
        )
    )

    # Setup the deploy stage
    deploy_stage = pipeline.ensure_stage('deploy')
    if config.get('manual_approval_required', False):
        deploy_stage.set_has_manual_approval()
    deploy_gateway_job = deploy_stage.ensure_job('deploy_gateway')
    deploy_gateway_job.ensure_tab(Tab('output.txt', 'output.txt'))
    deploy_gateway_job.add_task(FetchArtifactTask(
        pipeline.name, 'upload', 'upload_gateway', FetchArtifactFile('next_stage.txt')
    ))
    deploy_gateway_job.add_task(FetchArtifactTask(**api_manager_artifact_params))
    deploy_gateway_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                'PYTHONPATH=python-libs python scripts/aws/flip.py --api-base-domain ${API_BASE} '
                '--next-stage `cat ../next_stage.txt`'
            ],
            working_dir='api-manager'
        )
    )

    # Setup the Log stage
    log_stage = pipeline.ensure_stage('log')
    log_gateway_job = log_stage.ensure_job('deploy_lambda')
    log_gateway_job.ensure_environment_variables(
        {
            'splunk_host': config['log_lambda']['splunk_host'],
            'subnet_list': config['log_lambda']['subnet_list'],
            'sg_list': config['log_lambda']['sg_list'],
            'environment': config['log_lambda']['environment'],
            'deployment': config['log_lambda']['deployment'],
        }
    )
    log_gateway_job.ensure_encrypted_environment_variables(
        {
            'splunk_token': config['log_lambda']['splunk_token'],
            'acct_id': config['log_lambda']['acct_id'],
            'kms_key': config['log_lambda']['kms_key'],
        }
    )
    log_gateway_job.add_task(FetchArtifactTask(**api_manager_artifact_params))
    log_gateway_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                'PYTHONPATH=python-libs python scripts/aws/monitor.py --api-base-domain ${API_BASE} '
                '--splunk-host ${splunk_host} --splunk-token ${splunk_token} --acct-id ${acct_id} '
                '--kms-key ${kms_key} --subnet-list ${subnet_list} --sg-list ${sg_list} --environment '
                '${environment} --deployment ${deployment}'
            ],
            working_dir='api-manager'
        )
    )

    # Setup the forward_build stage (which makes the build available to the next pipeline)
    forward_build_stage = pipeline.ensure_stage('forward_build')
    forward_build_job = forward_build_stage.ensure_job('forward_build')
    forward_build_job.add_task(FetchArtifactTask(**api_manager_artifact_params))
    forward_build_job.add_task(FetchArtifactTask(**swagger_flatten_artifact_params))
    forward_build_job.ensure_artifacts(set([BuildArtifact("api-manager"), BuildArtifact("swagger.json")]))
def install_pipelines(configurator, config):
    """
    Install pipelines that can rollback the stage edx-mktg site.

    Arguments:
        configurator (GoCdConfigurator): configurator to install the pipeline into.
        config (dict): requires the keys mktg_repository_version,
            github_private_key, acquia_username, acquia_password,
            and acquia_github_key.
    """
    pipeline = configurator \
        .ensure_pipeline_group(constants.DRUPAL_PIPELINE_GROUP_NAME) \
        .ensure_replacement_of_pipeline('rollback-stage-marketing-site') \
        .ensure_material(TUBULAR()) \
        .ensure_material(EDX_MKTG()) \
        .ensure_material(ECOM_SECURE()) \
        .ensure_material(PipelineMaterial(constants.DEPLOY_MARKETING_PIPELINE_NAME, constants.FETCH_TAG_STAGE_NAME))

    pipeline.ensure_environment_variables(
        {
            'MARKETING_REPOSITORY_VERSION': config['mktg_repository_version'],
        }
    )

    pipeline.ensure_encrypted_environment_variables(
        {
            'PRIVATE_GITHUB_KEY': config['github_private_key'],
            'PRIVATE_ACQUIA_USERNAME': config['acquia_username'],
            'PRIVATE_ACQUIA_PASSWORD': config['acquia_password'],
            'PRIVATE_ACQUIA_GITHUB_KEY': config['acquia_github_key'],
        }
    )

    # Artifact (from the marketing deploy pipeline) naming the tag that the
    # stage site was last deployed from; fetched into the 'target' directory.
    stage_tag_name_artifact_params = {
        'pipeline': constants.DEPLOY_MARKETING_PIPELINE_NAME,
        'stage': constants.FETCH_TAG_STAGE_NAME,
        'job': constants.FETCH_TAG_JOB_NAME,
        'src': FetchArtifactFile('{stage_tag}.txt'.format(stage_tag=constants.STAGE_TAG_NAME)),
        'dest': 'target'
    }

    # Stage to rollback stage to its last stable tag
    rollback_stage = pipeline.ensure_stage(constants.ROLLBACK_STAGE_NAME)
    rollback_stage.set_has_manual_approval()
    rollback_job = rollback_stage.ensure_job(constants.ROLLBACK_JOB_NAME)

    tasks.generate_package_install(rollback_job, 'tubular')
    tasks.generate_target_directory(rollback_job)
    rollback_job.add_task(FetchArtifactTask(**stage_tag_name_artifact_params))
    tasks.generate_drupal_deploy(
        rollback_job,
        constants.STAGE_ENV,
        '{stage_tag}.txt'.format(stage_tag=constants.STAGE_TAG_NAME)
    )

    # Stage to clear the caches
    clear_stage_caches_stage = pipeline.ensure_stage(constants.CLEAR_STAGE_CACHES_STAGE_NAME)
    clear_stage_caches_job = clear_stage_caches_stage.ensure_job(constants.CLEAR_STAGE_CACHES_JOB_NAME)

    tasks.generate_package_install(clear_stage_caches_job, 'tubular')
    # Fix: the chmod previously hard-coded 'ecom-secure/' while the cp used the
    # {ecom_secure} parameter; both now use the material's destination directory
    # so the task stays correct if that directory is ever changed.
    clear_stage_caches_job.add_task(
        tasks.bash_task(
            """
            chmod 600 {ecom_secure}/acquia/acquia_github_key.pem &&
            cp {ecom_secure}/acquia/acquia_github_key.pem {edx_mktg}/docroot/
            """,
            ecom_secure=ECOM_SECURE().destination_directory,
            edx_mktg=EDX_MKTG().destination_directory
        )
    )
    tasks.generate_flush_drupal_caches(clear_stage_caches_job, constants.STAGE_ENV)
    tasks.generate_clear_varnish_cache(clear_stage_caches_job, constants.STAGE_ENV)
def install_pipelines(configurator, config):
    """
    Install the manual-verification pipeline.

    Variables needed for this pipeline:
    materials: A list of dictionaries of the materials used in this pipeline
    upstream_pipelines: a list of dictionaries of the upstream pipelines that feed in to the manual verification
    """
    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
        .ensure_replacement_of_pipeline(config['pipeline_name'])

    # Register every git material this pipeline tracks.
    for git_entry in config['materials']:
        git_material = GitMaterial(
            url=git_entry['url'],
            branch=git_entry['branch'],
            material_name=git_entry['material_name'],
            polling=git_entry['polling'],
            destination_directory=git_entry['destination_directory'],
            ignore_patterns=set(git_entry['ignore_patterns']),
        )
        pipeline.ensure_material(git_material)

    # Register the upstream pipelines that feed in to this one.
    for upstream in config['upstream_pipelines']:
        upstream_material = PipelineMaterial(
            pipeline_name=upstream['pipeline_name'],
            stage_name=upstream['stage_name'],
            material_name=upstream['material_name'],
        )
        pipeline.ensure_material(upstream_material)

    # Why an automatic "armed" first stage: when an upstream pipeline (e.g. edx
    # stage) runs, this downstream pipeline's first stage fires automatically,
    # carrying over and pinning the upstream git materials. The later stages
    # require manual approval, so the overall workflow pauses here while
    # verification happens. Once approved, downstream pipelines continue to run
    # against the same pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline, constants.INITIAL_VERIFICATION_STAGE_NAME)

    # Builds can currently be triggered on only one Jenkins server, since a
    # single username/token pair is defined and every triggered job must share
    # the same job token.
    # TODO: refactor when required so that each job can define their own user and job tokens
    pipeline.ensure_unencrypted_secure_environment_variables({
        'JENKINS_USER_TOKEN': config['jenkins_user_token'],
        'JENKINS_JOB_TOKEN': config['jenkins_job_token']
    })

    # Stage holding one job per Jenkins verification to trigger.
    jenkins_stage = pipeline.ensure_stage(
        constants.JENKINS_VERIFICATION_STAGE_NAME)
    jenkins_stage.set_has_manual_approval()
    user_name = config['jenkins_user_name']

    for verification in config['jenkins_verifications']:
        # 'param' holds a single "key value" string; split on the first space
        # to build the one-entry parameter dict for the Jenkins trigger.
        param_key, _, param_value = verification['param'].partition(' ')

        verification_job = jenkins_stage.ensure_job(verification['pipeline_job_name'])
        tasks.generate_package_install(verification_job, 'tubular')
        tasks.trigger_jenkins_build(
            verification_job,
            verification['url'],
            user_name,
            verification['job_name'],
            {param_key: param_value},
        )

    # Final manual sign-off stage: approving it records who verified the run.
    sign_off_stage = pipeline.ensure_stage(
        constants.MANUAL_VERIFICATION_STAGE_NAME)
    sign_off_stage.set_has_manual_approval()
    sign_off_job = sign_off_stage.ensure_job(
        constants.MANUAL_VERIFICATION_JOB_NAME)
    sign_off_job.add_task(
        ExecTask([
            '/bin/bash',
            '-c',
            'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
        ],
        ))
def builder(pipeline, config):
    """
    Add database rollback stages to ``pipeline``.

    NOTE(review): ``deploy_pipeline``, ``build_pipeline``, ``edp``, and
    ``EDXAPP_SUBAPPS`` are read as free variables here — this appears to be a
    closure defined inside an enclosing function that supplies them; confirm
    against the enclosing scope.
    """
    # Track the full set of git materials used by the edxapp pipelines.
    for material in (
        TUBULAR, CONFIGURATION, EDX_PLATFORM, EDX_SECURE, EDGE_SECURE,
        EDX_MICROSITE, EDX_INTERNAL, EDGE_INTERNAL,
    ):
        pipeline.ensure_material(material())

    # Artifacts produced by this pipeline's own launch-instance stage.
    ansible_inventory_location = utils.ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.ANSIBLE_INVENTORY_FILENAME)
    instance_ssh_key_location = utils.ArtifactLocation(
        pipeline.name,
        constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME,
        constants.KEY_PEM_FILENAME)

    # Specify the upstream deploy pipeline material for this rollback pipeline.
    # Assumes there's only a single upstream pipeline material for this pipeline.
    pipeline.ensure_material(
        PipelineMaterial(
            pipeline_name=deploy_pipeline.name,
            stage_name=constants.DEPLOY_AMI_STAGE_NAME,
            material_name='deploy_pipeline',
        ))

    # We need the build_pipeline upstream so that we can fetch the AMI selection artifact from it
    pipeline.ensure_material(
        PipelineMaterial(
            pipeline_name=build_pipeline.name,
            stage_name=constants.BUILD_AMI_STAGE_NAME,
            material_name='select_base_ami',
        ))

    # Create a stage for migration rollback, one per edxapp sub-application.
    for sub_app in EDXAPP_SUBAPPS:
        # Migration output recorded by the upstream deploy's apply-migrations
        # stage for this sub-application; it drives the rollback.
        migration_artifact = utils.ArtifactLocation(
            deploy_pipeline.name,
            constants.APPLY_MIGRATIONS_STAGE + "_" + sub_app,
            constants.APPLY_MIGRATIONS_JOB,
            constants.MIGRATION_OUTPUT_DIR_NAME,
            is_dir=True)
        stages.generate_rollback_migrations(
            pipeline,
            edp,
            db_migration_pass=config['db_migration_pass'],
            inventory_location=ansible_inventory_location,
            instance_key_location=instance_ssh_key_location,
            migration_info_location=migration_artifact,
            application_user=config['db_migration_user'],
            application_name=config['play_name'],
            application_path=config['application_path'],
            sub_application_name=sub_app)
    return pipeline