Example #1
def install_pipeline(save_config_locally, dry_run, variable_files,
                     cmd_line_vars):
    """
    Variables needed for this pipeline:
    materials: A list of dictionaries describing the materials used in this pipeline.
    upstream_pipelines: A list of dictionaries describing the upstream pipelines that feed into the manual verification.
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline = gcc.ensure_pipeline_group(config['pipeline_group'])\
                  .ensure_replacement_of_pipeline(config['pipeline_name'])

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=material['ignore_patterns']))

    for material in config['upstream_pipelines']:
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    # What this accomplishes:
    # When an upstream pipeline such as edx stage runs, this pipeline is triggered downstream. Since the first
    # stage is automatic, the git materials are carried over from the upstream pipeline.
    #
    # The second stage in this pipeline requires manual approval.
    #
    # This allows the overall workflow to remain paused while manual verification is completed, and allows the
    # git materials to stay pinned.
    #
    # Once the second stage is approved, the workflow continues and downstream pipelines execute with the same
    # pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline,
                                constants.INITIAL_VERIFICATION_STAGE_NAME)

    manual_verification_stage = pipeline.ensure_stage(
        constants.MANUAL_VERIFICATION_STAGE_NAME)
    manual_verification_stage.set_has_manual_approval()
    manual_verification_job = manual_verification_stage.ensure_job(
        constants.MANUAL_VERIFICATION_JOB_NAME)
    manual_verification_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
        ], ))

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
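
For reference, the merged config dictionary this function reads would need keys shaped roughly like the hypothetical sketch below; the key names follow the dictionary lookups in the code above, and all values are illustrative only.

# Hypothetical sketch of the merged config this function expects (values illustrative).
config = {
    'gocd_url': 'gocd.example.com',
    'gocd_username': 'admin',
    'gocd_password': 'secret',
    'pipeline_group': 'manual_verification',
    'pipeline_name': 'manual_verification_pipeline',
    'materials': [{
        'url': 'https://github.com/edx/tubular',
        'branch': 'master',
        'material_name': 'tubular',
        'polling': True,
        'destination_directory': 'tubular',
        'ignore_patterns': ['**/*'],
    }],
    'upstream_pipelines': [{
        'pipeline_name': 'prerelease_edxapp_materials_latest',
        'stage_name': 'select_base_ami',
        'material_name': 'prerelease_materials',
    }],
}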
Example #2
def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    materials: List of dictionaries describing the materials used in this pipeline.
    upstream_pipeline: Dictionary describing the upstream deploy pipeline that feeds into this rollback pipeline.
    upstream_deploy_artifact: Dictionary describing the upstream deploy artifact containing the previous deployment information.
    """
    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
                           .ensure_replacement_of_pipeline(config['pipeline_name'])\
                           .ensure_environment_variables({'WAIT_SLEEP_TIME': config['tubular_sleep_wait_time']})

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=utils.as_bool(material['polling']),
                destination_directory=material['destination_directory'],
                ignore_patterns=set(material['ignore_patterns'])
            )
        )

    # Specify the upstream deploy pipeline material for this rollback pipeline.
    # Assumes there's only a single upstream pipeline material for this pipeline.
    rollback_material = config['upstream_pipeline']
    pipeline.ensure_material(
        PipelineMaterial(
            pipeline_name=rollback_material['pipeline_name'],
            stage_name=rollback_material['stage_name'],
            material_name=rollback_material['material_name']
        )
    )

    # Specify the artifact that will be fetched containing the previous deployment information.
    # Assumes there's only a single upstream artifact used by this pipeline.
    artifact_config = config['upstream_deploy_artifact']
    deploy_file_location = utils.ArtifactLocation(
        artifact_config['pipeline_name'],
        artifact_config['stage_name'],
        artifact_config['job_name'],
        artifact_config['artifact_name']
    )

    # Create the armed stage as this pipeline needs to auto-execute
    stages.generate_armed_stage(pipeline, constants.ARMED_JOB_NAME)

    # Create a single stage in the pipeline which will rollback to the previous ASGs/AMI.
    rollback_stage = stages.generate_rollback_asg_stage(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['slack_token'],
        constants.HIPCHAT_ROOM,
        deploy_file_location,
    )
    # Since we only want this stage to rollback via manual approval, ensure that it is set on this stage.
    rollback_stage.set_has_manual_approval()
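
Besides materials, this rollback variant reads two additional keys, upstream_pipeline and upstream_deploy_artifact. A hypothetical sketch of their shape follows; the key names mirror the lookups in the code above and the values are illustrative only.

# Hypothetical shapes for the rollback-specific config keys (values illustrative).
config_fragment = {
    'upstream_pipeline': {
        'pipeline_name': 'PROD_edx_edxapp_B',
        'stage_name': 'deploy_ami',
        'material_name': 'PROD_edx_edxapp_B',
    },
    'upstream_deploy_artifact': {
        'pipeline_name': 'PROD_edx_edxapp_B',
        'stage_name': 'deploy_ami',
        'job_name': 'deploy_ami_job',
        'artifact_name': 'ami_deploy_info.yml',
    },
}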
Example #3
def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    materials: List of dictionaries describing the materials used in this pipeline.
    upstream_pipeline: Dictionary describing the upstream deploy pipeline that feeds into this rollback pipeline.
    upstream_deploy_artifact: Dictionary describing the upstream deploy artifact containing the previous deployment information.
    """
    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
                           .ensure_replacement_of_pipeline(config['pipeline_name'])\
                           .ensure_environment_variables({'WAIT_SLEEP_TIME': config['tubular_sleep_wait_time']})

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=set(material['ignore_patterns'])))

    # Specify the upstream deploy pipeline material for this rollback pipeline.
    # Assumes there's only a single upstream pipeline material for this pipeline.
    rollback_material = config['upstream_pipeline']
    pipeline.ensure_material(
        PipelineMaterial(pipeline_name=rollback_material['pipeline_name'],
                         stage_name=rollback_material['stage_name'],
                         material_name=rollback_material['material_name']))

    # Specify the artifact that will be fetched containing the previous deployment information.
    # Assumes there's only a single upstream artifact used by this pipeline.
    artifact_config = config['upstream_deploy_artifact']
    deploy_file_location = utils.ArtifactLocation(
        artifact_config['pipeline_name'], artifact_config['stage_name'],
        artifact_config['job_name'], artifact_config['artifact_name'])

    # Create the armed stage as this pipeline needs to auto-execute
    stages.generate_armed_stage(pipeline, constants.ARMED_JOB_NAME)

    # Create a single stage in the pipeline which will rollback to the previous ASGs/AMI.
    rollback_stage = stages.generate_rollback_asg_stage(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['hipchat_token'],
        constants.HIPCHAT_ROOM,
        deploy_file_location,
    )
    # Since we only want this stage to rollback via manual approval, ensure that it is set on this stage.
    rollback_stage.set_has_manual_approval()
Example #4
def manual_verification(edxapp_deploy_group, config):
    """
    Variables needed for this pipeline:
    materials: A list of dictionaries describing the materials used in this pipeline.
    upstream_pipelines: A list of dictionaries describing the upstream pipelines that feed into the manual verification.
    """
    pipeline = edxapp_deploy_group.ensure_replacement_of_pipeline(EDXAPP_MANUAL_PIPELINE_NAME)

    for material in (
            TUBULAR, CONFIGURATION, EDX_PLATFORM, EDX_SECURE, EDGE_SECURE,
            EDX_MICROSITE, EDX_INTERNAL, EDGE_INTERNAL
    ):
        pipeline.ensure_material(material())

    # What this accomplishes:
    # When an upstream pipeline such as edx stage runs, this pipeline is triggered downstream. Since the first
    # stage is automatic, the git materials are carried over from the upstream pipeline.
    #
    # The second stage checks the result of the CI testing for the commit to be released in the
    # primary code repository.
    #
    # The third stage in this pipeline requires manual approval.
    #
    # This allows the overall workflow to remain paused while manual verification is completed, and allows the
    # git materials to stay pinned.
    #
    # Once the third stage is approved, the workflow continues and downstream pipelines execute with the same
    # pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline, constants.INITIAL_VERIFICATION_STAGE_NAME)

    # Add all materials for which to check CI tests in this list.
    stages.generate_check_ci(
        pipeline,
        config['github_token'],
        [EDX_PLATFORM()]
    )

    manual_verification_stage = pipeline.ensure_stage(constants.MANUAL_VERIFICATION_STAGE_NAME)
    manual_verification_stage.set_has_manual_approval()
    manual_verification_job = manual_verification_stage.ensure_job(constants.MANUAL_VERIFICATION_JOB_NAME)
    manual_verification_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
            ],
        )
    )

    return pipeline
Example #5
def manual_verification(edxapp_deploy_group, config):
    """
    Variables needed for this pipeline:
    materials: A list of dictionaries describing the materials used in this pipeline.
    upstream_pipelines: A list of dictionaries describing the upstream pipelines that feed into the manual verification.
    """
    pipeline = edxapp_deploy_group.ensure_replacement_of_pipeline(
        EDXAPP_MANUAL_PIPELINE_NAME)

    for material in (TUBULAR, CONFIGURATION, EDX_PLATFORM, EDX_SECURE,
                     EDGE_SECURE, EDX_MICROSITE, EDX_INTERNAL, EDGE_INTERNAL):
        pipeline.ensure_material(material())

    # What this accomplishes:
    # When an upstream pipeline such as edx stage runs, this pipeline is triggered downstream. Since the first
    # stage is automatic, the git materials are carried over from the upstream pipeline.
    #
    # The second stage checks the result of the CI testing for the commit to be released in the
    # primary code repository.
    #
    # The third stage in this pipeline requires manual approval.
    #
    # This allows the overall workflow to remain paused while manual verification is completed, and allows the
    # git materials to stay pinned.
    #
    # Once the third stage is approved, the workflow continues and downstream pipelines execute with the same
    # pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline,
                                constants.INITIAL_VERIFICATION_STAGE_NAME)

    # Add all materials for which to check CI tests in this list.
    stages.generate_check_ci(pipeline, config['github_token'],
                             [EDX_PLATFORM()])

    manual_verification_stage = pipeline.ensure_stage(
        constants.MANUAL_VERIFICATION_STAGE_NAME)
    manual_verification_stage.set_has_manual_approval()
    manual_verification_job = manual_verification_stage.ensure_job(
        constants.MANUAL_VERIFICATION_JOB_NAME)
    manual_verification_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
        ], ))

    return pipeline
Example #6
def install_pipelines(save_config_locally, dry_run, variable_files,
                      cmd_line_vars):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id

    Optional variables:
    - configuration_secure_version
    """
    config = utils.merge_files_and_dicts(variable_files, list(cmd_line_vars))

    gcc = GoCdConfigurator(
        HostRestClient(config['gocd_url'],
                       config['gocd_username'],
                       config['gocd_password'],
                       ssl=True))
    pipeline = gcc.ensure_pipeline_group(config['pipeline_group'])\
                  .ensure_replacement_of_pipeline(config['pipeline_name'])

    # Example materials yaml
    # materials:
    #   - url: "https://github.com/edx/tubular"
    #     branch: "master"
    #     material_name: "tubular"
    #     polling: "True"
    #     destination_directory: "tubular"
    #     ignore_patterns:
    #     - '**/*'

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=material['ignore_patterns']))

    # If no upstream pipelines exist, don't install them!
    for material in config.get('upstream_pipelines', []):
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    stages.generate_armed_stage(pipeline,
                                constants.PRERELEASE_MATERIALS_STAGE_NAME)

    gcc.save_updated_config(save_config_locally=save_config_locally,
                            dry_run=dry_run)
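
A minimal sketch of how this entry point might be exercised; the argument names come from the signature above, while the variable file name is hypothetical and the exact format of cmd_line_vars depends on utils.merge_files_and_dicts.

# Hypothetical dry-run invocation: merge the variables from a YAML file and
# generate the GoCD config locally instead of pushing it to the server.
install_pipelines(
    save_config_locally=True,
    dry_run=True,
    variable_files=('gocd/vars/prerelease_materials.yml',),
    cmd_line_vars=(),
)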
Example #7
def generate_service_deployment_pipelines(
        pipeline_group,
        config,
        app_material,
        continuous_deployment_edps=(),
        manual_deployment_edps=(),
        configuration_branch=None,
        configuration_internal_branch=None,
        has_migrations=True,
        cd_pipeline_name=None,
        manual_pipeline_name=None,
        application_user=None,
        run_e2e_tests_after_deploy=False,
        additional_migrations=None,
        management_commands=None,
        playbook_path_tpl=constants.PLAYBOOK_PATH_TPL,
        extra_tags=None,
        extra_app_material=None,
        manual_approval_stage=False,
        manual_approval_prod=True,
        **kwargs
):
    """
    Generates pipelines used to build and deploy a service to multiple environments/deployments.

    Two pipelines are produced, one for continuous deployment, and one for deployment that
    needs manual approval. Both are placed in a group with the same name as the play.


    Args:
        pipeline_group (gomatic.PipelineGroup): The group to create new pipelines in
        config (dict): Environment-independent config.
        app_material (gomatic.gocd.materials.GitMaterial): Material representing
            the source of the app to be deployed.
        continuous_deployment_edps (list of EDP): A list of EDPs that should be deployed
            to after every change in app_material.
        manual_deployment_edps (list of EDP): A list of EDPs that should only be deployed
            to after waiting for manual approval.
        configuration_branch (str): The branch of the edx/configuration repos to
            use when building AMIs and running plays. Defaults to master.
        configuration_internal_branch (str): The branch of the edx-internal repo to
            use when building AMIs and running plays. Defaults to master.
        has_migrations (bool): Whether to generate Gomatic for applying and
            rolling back migrations.
        cd_pipeline_name (str): The name of the continuous-deployment pipeline.
            Defaults to constants.ENVIRONMENT_PIPELINE_NAME_TPL
        manual_pipeline_name (str): The name of the manual deployment pipeline.
            Defaults to constants.ENVIRONMENT_PIPELINE_NAME_TPL
        application_user (str): Name of the application user, if different from the play name.
        run_e2e_tests_after_deploy (bool): Indicates if end-to-end tests should be triggered after
            deploying a continuous deployment EDP.
        additional_migrations (list[edxpipelines.utils.MigrationAppInfo]): Additional applications to migrate.
            Will only run if has_migrations=True
        management_commands (list): Management commands to run; these only run if has_management_command=True.
        playbook_path_tpl (str): Path to the playbook to run. Defaults to constants.PLAYBOOK_PATH_TPL
        extra_tags (json): JSON defining tags that will be added to the AMI.
        extra_app_material (material): Additional material to be added to the pipeline.
        manual_approval_stage (bool): Set manual approval for the top continuous delivery
            pipeline (added to support MCKA)
        manual_approval_prod (bool): Set manual approval for the prod delivery pipeline
            (added to support Prospectus daily deploys)
        **kwargs: extra variables to be passed to ansible
    """
    if not additional_migrations:
        additional_migrations = []

    continuous_deployment_edps = tuple(continuous_deployment_edps)
    manual_deployment_edps = tuple(manual_deployment_edps)

    all_edps = continuous_deployment_edps + manual_deployment_edps

    plays = {edp.play for edp in all_edps}
    if not plays:
        raise ValueError("generate_service_deployment_pipelines needs at least one EDP to deploy")
    if len(plays) > 1:
        raise ValueError(
            "generate_service_deployment_pipelines expects to only deploy "
            "a single service, but was passed multiple plays: {}".format(plays)
        )

    play = plays.pop()

    if cd_pipeline_name is None:
        cd_envs = {edp.environment for edp in continuous_deployment_edps}
        if len(cd_envs) > 1:
            raise ValueError(
                "Only one environment is allowed in continuous_deployment_edps "
                "if no cd_pipeline_name is specified"
            )
        cd_pipeline_name = constants.ENVIRONMENT_PIPELINE_NAME_TPL(environment=cd_envs.pop(), play=play)

    # Frame out the continuous deployment pipeline
    cd_pipeline = pipeline_group.ensure_replacement_of_pipeline(cd_pipeline_name)
    cd_pipeline.set_label_template(constants.DEPLOYMENT_PIPELINE_LABEL_TPL(app_material))
    build_stage = cd_pipeline.ensure_stage(constants.BUILD_AMI_STAGE_NAME)
    if manual_approval_stage:
        build_stage.set_has_manual_approval()
    cd_deploy_stages = _generate_deployment_stages(cd_pipeline, has_migrations, run_e2e_tests_after_deploy)

    # Frame out the manual deployment pipeline (and wire it to the continuous deployment pipeline)
    if manual_deployment_edps:
        if manual_pipeline_name is None:
            manual_envs = {edp.environment for edp in manual_deployment_edps}
            if len(manual_envs) > 1:
                raise ValueError(
                    "Only one environment is allowed in manual_deployment_edps "
                    "if no manual_pipeline_name is specified"
                )
            manual_pipeline_name = constants.ENVIRONMENT_PIPELINE_NAME_TPL(environment=manual_envs.pop(), play=play)

        manual_pipeline = pipeline_group.ensure_replacement_of_pipeline(manual_pipeline_name)
        # The manual pipeline only requires successful completion of the continuous deployment
        # pipeline's AMI build stage, from which it will retrieve an AMI artifact.
        manual_pipeline.ensure_material(
            PipelineMaterial(
                cd_pipeline.name,
                constants.BUILD_AMI_STAGE_NAME,
                material_name=cd_pipeline.name
            )
        )

        # Pipelines return their label when referenced by name. We share the
        # label set for the CD pipeline with the manual pipeline.
        manual_pipeline.set_label_template('${{{}}}'.format(cd_pipeline.name))

        # The manual pipeline's first stage is a no-op 'armed stage' that will
        # be followed by a deploy stage requiring manual approval.
        stages.generate_armed_stage(manual_pipeline, constants.ARMED_STAGE_NAME)

        manual_deploy_stages = _generate_deployment_stages(manual_pipeline, has_migrations)
        if manual_approval_prod:
            manual_deploy_stages.deploy.set_has_manual_approval()

    else:
        manual_pipeline = None
        manual_deploy_stages = None

    # Set up the configuration/secure/internal materials
    configuration_material = materials.CONFIGURATION(branch=configuration_branch)
    secure_materials = {
        edp: materials.deployment_secure(edp.deployment)
        for edp in all_edps
    }
    internal_materials = {
        edp: materials.deployment_internal(edp.deployment, branch=configuration_internal_branch)
        for edp in all_edps
    }

    # Ensure the materials that are common across environments
    for material in [
            materials.TUBULAR(),
            configuration_material,
            materials.EDX_ANSIBLE_PRIVATE(),
            materials.MCKA_ANSIBLE_PRIVATE(),
    ] + list(secure_materials.values()) + list(internal_materials.values()):
        cd_pipeline.ensure_material(material)
        if manual_pipeline:
            manual_pipeline.ensure_material(material)

    for material in [
            app_material,
            extra_app_material,
    ]:
        if material:
            cd_pipeline.ensure_material(material)

    # Add jobs to build all required AMIs
    for edp in all_edps:
        app_version_var = material_envvar_bash(app_material)
        overrides = {
            'app_version': app_version_var,
            '{}_VERSION'.format(play.upper()): app_version_var,
        }
        overrides.update(kwargs)

        secure_material = secure_materials[edp]
        internal_material = internal_materials[edp]
        version_tags = {
            edp.play: (app_material.url, app_version_var),
            'configuration': (configuration_material.url, material_envvar_bash(configuration_material)),
            'configuration_secure': (secure_material.url, material_envvar_bash(secure_material)),
            'configuration_internal': (internal_material.url, material_envvar_bash(internal_material)),
        }
        if extra_tags:
            version_tags.update(extra_tags)

        jobs.generate_build_ami(
            build_stage,
            edp,
            app_material.url,
            secure_material,
            internal_material,
            playbook_path_tpl(edp.play),
            config[edp],
            version_tags=version_tags,
            **overrides
        )

    # Create e2e_tests job
    if run_e2e_tests_after_deploy:
        for edp in continuous_deployment_edps:
            stage = cd_deploy_stages.e2e_tests
            config_edp = config[edp]

            # This conditional allow us to opt-in to running the Jenkins job for each EDP.
            if config_edp.get('jenkins_job_token') and config_edp.get('jenkins_job_name'):
                jobs.generate_run_jenkins_job(stage, config_edp)

    # Add jobs for deploying all required AMIs
    for (pipeline, deploy_stages, edps) in (
            (cd_pipeline, cd_deploy_stages, continuous_deployment_edps),
            (manual_pipeline, manual_deploy_stages, manual_deployment_edps),
    ):
        for edp in edps:
            ami_artifact_location = ArtifactLocation(
                cd_pipeline.name,
                constants.BUILD_AMI_STAGE_NAME,
                constants.BUILD_AMI_JOB_NAME_TPL(edp),
                constants.BUILD_AMI_FILENAME
            )
            jobs.generate_deploy_ami(
                deploy_stages.deploy,
                ami_artifact_location,
                edp,
                config[edp],
                has_migrations=has_migrations,
                application_user=application_user,
                additional_migrations=additional_migrations,
                management_commands=management_commands
            )

            deployment_artifact_location = ArtifactLocation(
                pipeline.name,
                constants.DEPLOY_AMI_STAGE_NAME,
                constants.DEPLOY_AMI_JOB_NAME_TPL(edp),
                constants.DEPLOY_AMI_OUT_FILENAME
            )

            jobs.generate_rollback_asgs(
                deploy_stages.rollback_asgs,
                edp,
                deployment_artifact_location,
                config[edp],
            )

            if has_migrations:

                migration_info_location = ArtifactLocation(
                    pipeline.name,
                    constants.DEPLOY_AMI_STAGE_NAME,
                    constants.DEPLOY_AMI_JOB_NAME_TPL(edp),
                    constants.MIGRATION_OUTPUT_DIR_NAME_WITH_APP(application_user if application_user else edp.play),
                    is_dir=True
                )

                jobs.generate_rollback_migrations(
                    deploy_stages.rollback_migrations,
                    edp,
                    application_user if application_user else edp.play,
                    edp.play,
                    '/edx/app/{}'.format(edp.play),
                    constants.DB_MIGRATION_USER,
                    config[edp]['db_migration_pass'],
                    migration_info_location,
                    ami_artifact_location=ami_artifact_location,
                    config=config[edp],
                )
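
A minimal usage sketch for this helper, assuming an EDP-style namedtuple with environment/deployment/play fields (the real type lives in edxpipelines and may differ); the play and environment names are illustrative only.

from collections import namedtuple

# Hypothetical stand-in for the EDP tuples this helper expects; the code above
# only reads the environment, deployment and play attributes.
EDP = namedtuple('EDP', ['environment', 'deployment', 'play'])

stage_edp = EDP('stage', 'edx', 'insights')
prod_edp = EDP('prod', 'edx', 'insights')

# All EDPs must share a single play, otherwise the helper raises ValueError.
assert {edp.play for edp in (stage_edp, prod_edp)} == {'insights'}

# Sketch of a call; pipeline_group, config (a dict keyed by EDP) and app_material
# would come from the surrounding deployment script:
# generate_service_deployment_pipelines(
#     pipeline_group,
#     config,
#     app_material,
#     continuous_deployment_edps=(stage_edp,),
#     manual_deployment_edps=(prod_edp,),
# )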
Example #8
def armed_stage_builder(pipeline, config):  # pylint: disable=unused-argument
    """
    Add an armed stage to pipeline.
    """
    stages.generate_armed_stage(pipeline, constants.ARM_PRERELEASE_STAGE)
    return pipeline
Example #9
def rollback_asgs(
        edxapp_deploy_group,
        pipeline_name,
        config,
        ami_pairs,
        stage_deploy_pipeline_artifact,
        base_ami_artifact,
        head_ami_artifact,
        deploy_artifact,
):
    """
    Arguments:
        edxapp_deploy_group (gomatic.PipelineGroup): The group in which to create this pipeline
        pipeline_name (str): The name of this pipeline
        config (dict): The configuration dictionary
        ami_pairs (list<tuple>): A list of tuples. The first item in each tuple should be the ArtifactLocation of the
            base AMI ID that was running before deployment, and the second the ArtifactLocation of the newly deployed AMI ID,
            e.g. (ArtifactLocation
                    (pipeline='prerelease_edxapp_materials_latest',
                     stage='select_base_ami', job='select_base_ami_prod_edx_job',
                     file_name='ami_override.yml',
                     is_dir=False
                    ),
                  ArtifactLocation
                    (pipeline='PROD_edx_edxapp_B',
                     stage='build_ami',
                     job='build_ami_job',
                     file_name='ami.yml',
                     is_dir=False
                    )
                 )
        stage_deploy_pipeline_artifact (edxpipelines.utils.ArtifactLocation): The edxapp staging deployment
            pipeline artifact
        base_ami_artifact (edxpipelines.utils.ArtifactLocation): ArtifactLocation of the base AMI selection
        head_ami_artifact (edxpipelines.utils.ArtifactLocation): ArtifactLocation of the head AMI selection
        deploy_artifact (edxpipelines.utils.ArtifactLocation): ArtifactLocation of the last deployment

    Configuration Required:
        tubular_sleep_wait_time
        asgard_api_endpoints
        asgard_token
        aws_access_key_id
        aws_secret_access_key
        slack_token
    """
    pipeline = edxapp_deploy_group.ensure_replacement_of_pipeline(pipeline_name)\
                                  .ensure_environment_variables({'WAIT_SLEEP_TIME': config['tubular_sleep_wait_time']})

    for material in (
            TUBULAR, CONFIGURATION, EDX_PLATFORM, EDX_SECURE, EDGE_SECURE,
            EDX_MICROSITE, EDX_INTERNAL, EDGE_INTERNAL,
    ):
        pipeline.ensure_material(material())

    # Create the armed stage as this pipeline needs to auto-execute
    stages.generate_armed_stage(pipeline, constants.ARMED_JOB_NAME)

    # Create a single stage in the pipeline which will rollback to the previous ASGs/AMI.
    rollback_stage = stages.generate_rollback_asg_stage(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['slack_token'],
        constants.SLACK_ROOM,
        deploy_artifact,
    )
    # Since we only want this stage to rollback via manual approval, ensure that it is set on this stage.
    rollback_stage.set_has_manual_approval()

    # Message PRs being rolled back
    pipeline.ensure_unencrypted_secure_environment_variables({'GITHUB_TOKEN': config['github_token']})
    stages.generate_deployment_messages(
        pipeline=pipeline,
        ami_pairs=ami_pairs,
        stage_deploy_pipeline_artifact=stage_deploy_pipeline_artifact,
        base_ami_artifact=base_ami_artifact,
        head_ami_artifact=head_ami_artifact,
        message_tags=generate_message_tags(config),
        release_status=constants.ReleaseStatus.ROLLED_BACK,
        confluence_user=config['jira_user'],
        confluence_password=config['jira_password'],
        github_token=config['github_token'],
    )

    return pipeline
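
Following the docstring's e.g. block, the ami_pairs argument could be assembled from ArtifactLocation pairs as in the hypothetical sketch below; the pipeline, stage, job and file names mirror the docstring example and are illustrative only.

# Hypothetical assembly of ami_pairs, mirroring the docstring example above
# (arguments are pipeline, stage, job and file name, as used elsewhere in this module).
base_ami_location = utils.ArtifactLocation(
    'prerelease_edxapp_materials_latest',
    'select_base_ami',
    'select_base_ami_prod_edx_job',
    'ami_override.yml',
)
new_ami_location = utils.ArtifactLocation(
    'PROD_edx_edxapp_B',
    'build_ami',
    'build_ami_job',
    'ami.yml',
)
ami_pairs = [(base_ami_location, new_ami_location)]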
Example #10
def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    materials: A list of dictionaries describing the materials used in this pipeline.
    upstream_pipelines: A list of dictionaries describing the upstream pipelines that feed into the manual verification.
    """
    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
                           .ensure_replacement_of_pipeline(config['pipeline_name'])

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=set(material['ignore_patterns'])))

    for material in config['upstream_pipelines']:
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    # What this accomplishes:
    # When an upstream pipeline such as edx stage runs, this pipeline is triggered downstream. Since the first
    # stage is automatic, the git materials are carried over from the upstream pipeline.
    #
    # The second stage in this pipeline requires manual approval.
    #
    # This allows the overall workflow to remain paused while manual verification is completed, and allows the
    # git materials to stay pinned.
    #
    # Once the second stage is approved, the workflow continues and downstream pipelines execute with the same
    # pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline,
                                constants.INITIAL_VERIFICATION_STAGE_NAME)

    # For now, builds can only be triggered on a single Jenkins server, because only a single
    # username/token can be defined, and all of the jobs to be triggered need the same job token defined.
    # TODO: refactor when required so that each job can define its own user and job tokens.
    pipeline.ensure_unencrypted_secure_environment_variables({
        'JENKINS_USER_TOKEN': config['jenkins_user_token'],
        'JENKINS_JOB_TOKEN': config['jenkins_job_token'],
    })

    # Create the stage with the Jenkins jobs
    jenkins_stage = pipeline.ensure_stage(
        constants.JENKINS_VERIFICATION_STAGE_NAME)
    jenkins_stage.set_has_manual_approval()
    jenkins_user_name = config['jenkins_user_name']

    for jenkins in config['jenkins_verifications']:
        pipeline_job_name = jenkins['pipeline_job_name']
        jenkins_url = jenkins['url']
        jenkins_job_name = jenkins['job_name']
        key, _, param = jenkins['param'].partition(' ')
        jenkins_param = {key: param}

        job = jenkins_stage.ensure_job(pipeline_job_name)
        tasks.generate_package_install(job, 'tubular')
        tasks.trigger_jenkins_build(job, jenkins_url, jenkins_user_name,
                                    jenkins_job_name, jenkins_param)

    manual_verification_stage = pipeline.ensure_stage(
        constants.MANUAL_VERIFICATION_STAGE_NAME)
    manual_verification_stage.set_has_manual_approval()
    manual_verification_job = manual_verification_stage.ensure_job(
        constants.MANUAL_VERIFICATION_JOB_NAME)
    manual_verification_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
        ], ))
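
The Jenkins loop above expects each jenkins_verifications entry to provide a pipeline job name, a Jenkins URL, a job name, and a single 'KEY value' parameter string that is split on its first space. A hypothetical sketch of that shape (all values illustrative):

# Hypothetical shape of one jenkins_verifications config entry (values illustrative).
# 'param' is a single "KEY value" string; the loop above partitions it on the
# first space into a one-item parameter dict passed to the Jenkins build.
jenkins_verifications = [{
    'pipeline_job_name': 'trigger_quality_check',
    'url': 'https://jenkins.example.com',
    'job_name': 'edx-platform-quality-check',
    'param': 'GIT_SHA deadbeef0123456789',
}]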
Example #12
def rollback_asgs(
    edxapp_deploy_group,
    pipeline_name,
    deploy_pipeline,
    config,
    ami_pairs,
    stage_deploy_pipeline,
    base_ami_artifact,
    head_ami_artifact,
):
    """
    Arguments:
        edxapp_deploy_group (gomatic.PipelineGroup): The group in which to create this pipeline
        pipeline_name (str): The name of this pipeline
        deploy_pipeline (gomatic.Pipeline): The pipeline to retrieve the ami_deploy_info.yml artifact from
        config (dict): The configuration dictionary
        ami_pairs (list<tuple>): A list of tuples. The first item in each tuple should be the ArtifactLocation of the
            base AMI ID that was running before deployment, and the second the ArtifactLocation of the newly deployed AMI ID,
            e.g. (ArtifactLocation
                    (pipeline='prerelease_edxapp_materials_latest',
                     stage='select_base_ami', job='select_base_ami_prod_edx_job',
                     file_name='ami_override.yml',
                     is_dir=False
                    ),
                  ArtifactLocation
                    (pipeline='PROD_edx_edxapp_B',
                     stage='build_ami',
                     job='build_ami_job',
                     file_name='ami.yml',
                     is_dir=False
                    )
                 )
        stage_deploy_pipeline (gomatic.Pipeline): The edxapp staging deployment pipeline
        base_ami_artifact (edxpipelines.utils.ArtifactLocation): ArtifactLocation of the base AMI selection
        head_ami_artifact (edxpipelines.utils.ArtifactLocation): ArtifactLocation of the head AMI selection

    Configuration Required:
        tubular_sleep_wait_time
        asgard_api_endpoints
        asgard_token
        aws_access_key_id
        aws_secret_access_key
        hipchat_token
    """
    pipeline = edxapp_deploy_group.ensure_replacement_of_pipeline(pipeline_name)\
                                  .ensure_environment_variables({'WAIT_SLEEP_TIME': config['tubular_sleep_wait_time']})

    for material in (
            TUBULAR,
            CONFIGURATION,
            EDX_PLATFORM,
            EDX_SECURE,
            EDGE_SECURE,
            EDX_MICROSITE,
            EDX_INTERNAL,
            EDGE_INTERNAL,
    ):
        pipeline.ensure_material(material())

    # Specify the artifact that will be fetched containing the previous deployment information.
    deploy_file_location = utils.ArtifactLocation(
        deploy_pipeline.name,
        constants.DEPLOY_AMI_STAGE_NAME,
        constants.DEPLOY_AMI_JOB_NAME,
        constants.DEPLOY_AMI_OUT_FILENAME,
    )

    # Create the armed stage as this pipeline needs to auto-execute
    stages.generate_armed_stage(pipeline, constants.ARMED_JOB_NAME)

    # Create a single stage in the pipeline which will rollback to the previous ASGs/AMI.
    rollback_stage = stages.generate_rollback_asg_stage(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        config['hipchat_token'],
        constants.HIPCHAT_ROOM,
        deploy_file_location,
    )
    # Since we only want this stage to rollback via manual approval, ensure that it is set on this stage.
    rollback_stage.set_has_manual_approval()

    # Message PRs being rolled back
    pipeline.ensure_unencrypted_secure_environment_variables(
        {'GITHUB_TOKEN': config['github_token']})
    stages.generate_deployment_messages(
        pipeline=pipeline,
        ami_pairs=ami_pairs,
        stage_deploy_pipeline=stage_deploy_pipeline,
        base_ami_artifact=base_ami_artifact,
        head_ami_artifact=head_ami_artifact,
        message_tags=[('edx', 'edx-platform', 'edxapp-from-pipeline'),
                      ('edx', 'edx-platform-private', 'edx_platform')],
        release_status=constants.ReleaseStatus.ROLLED_BACK,
        confluence_user=config['jira_user'],
        confluence_password=config['jira_password'],
        github_token=config['github_token'],
    )

    return pipeline