merge_back = edxapp.merge_back_branches(
        edxapp_deploy_group,
        constants.BRANCH_CLEANUP_PIPELINE_NAME,
        deploy_artifact,
        prerelease_merge_artifact,
        config,
    )
    # GoCD label template: '${deploy_pipeline_<name>}' expands to the revision of
    # the material with that name, so each run of this branch-merging pipeline is
    # labelled after the prod-edx deploy pipeline run that triggered it (the
    # matching material is registered in the loop below).
    merge_back.set_label_template('${{deploy_pipeline_{}}}'.format(prod_edx_md.name))

    # Watch the prerelease-materials pipeline's stage as an upstream material.
    merge_back.ensure_material(
        PipelineMaterial(
            pipeline_name=prerelease_materials.name,
            stage_name=constants.PRERELEASE_MATERIALS_STAGE_NAME,
            material_name='prerelease_materials',
        )
    )
    # Specify the upstream deploy pipeline materials for this branch-merging pipeline.
    # One material per deployment (edx and edge), keyed by the deploy pipeline's name.
    for deploy_pipeline in (prod_edx_md, prod_edge_md):
        merge_back.ensure_material(
            PipelineMaterial(
                pipeline_name=deploy_pipeline.name,
                stage_name=constants.DEPLOY_AMI_STAGE_NAME,
                material_name='deploy_pipeline_{}'.format(deploy_pipeline.name),
            )
        )

if __name__ == "__main__":
    # Generate the pipelines once per target environment.
    environments = ('stage-edx', 'prod-edx', 'prod-edge')
    pipeline_script(install_pipelines, environments=environments)
Exemplo n.º 2
0
    - pipeline_group
    - asgard_api_endpoints
    - asgard_token
    - aws_access_key_id
    - aws_secret_access_key

    To run this script:
    python edxpipelines/pipelines/deploy_ami.py \
        --variable_file ../gocd-pipelines/gocd/vars/tools/deploy_edge_ami.yml \
        --variable_file ../gocd-pipelines/gocd/vars/tools/tools.yml
    python edxpipelines/pipelines/deploy_ami.py \
        --variable_file ../gocd-pipelines/gocd/vars/tools/deploy_edx_ami.yml \
        --variable_file ../gocd-pipelines/gocd/vars/tools/tools.yml
    python edxpipelines/pipelines/deploy_ami.py \
        --variable_file ../gocd-pipelines/gocd/vars/tools/deploy-yonkers-ami.yml \
        --variable_file ../gocd-pipelines/gocd/vars/tools/tools.yml
    """
    # Collect the required configuration values (listed in the docstring above)
    # into keyword arguments for the AMI-deployment pipeline generator.
    pipeline_params = {
        "pipeline_name": config['pipeline_name'],
        "pipeline_group": config['pipeline_group'],
        "asgard_api_endpoints": config['asgard_api_endpoints'],
        "asgard_token": config['asgard_token'],
        "aws_access_key_id": config['aws_access_key_id'],
        "aws_secret_access_key": config['aws_secret_access_key']
    }
    configurator = pipelines.generate_ami_deployment_pipeline(configurator, **pipeline_params)
    # Simple progress marker for the operator running the script.
    print("done")

# Entry point: hand install_pipelines to the shared pipeline_script driver,
# which (per the usage examples above) is invoked with --variable_file options.
if __name__ == "__main__":
    pipeline_script(install_pipelines)
Exemplo n.º 3
0
#!/usr/bin/env python
"""
Script for installing pipelines used to deploy the registrar service.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from os import path
import sys

# Used to import edxpipelines files - since the module is not installed.
sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))

# pylint: disable=wrong-import-position

from edxpipelines.patterns.pipelines import generate_single_deployment_service_pipelines
from edxpipelines.pipelines.script import pipeline_script


def install_pipelines(configurator, config):
    """
    Build the deployment pipelines (stage and prod) for the registrar service.
    """
    service_name = 'registrar'
    generate_single_deployment_service_pipelines(configurator, config, service_name)


if __name__ == '__main__':
    # Registrar is deployed to the stage and prod edX environments only.
    environments = ('stage-edx', 'prod-edx')
    pipeline_script(install_pipelines, environments=environments)
Exemplo n.º 4
0
    # Artifact published by this pipeline's build stage containing the AMI id.
    ami_file_location = utils.ArtifactLocation(pipeline.name,
                                               constants.BUILD_AMI_STAGE_NAME,
                                               constants.BUILD_AMI_JOB_NAME,
                                               'ami.yml')
    # Deploy the built AMI via Asgard. The stage requires manual approval
    # unless 'auto_deploy_ami' is set truthy in config.
    stages.generate_deploy_ami(
        pipeline,
        config['asgard_api_endpoints'],
        config['asgard_token'],
        config['aws_access_key_id'],
        config['aws_secret_access_key'],
        ami_file_location,
        manual_approval=not config.get('auto_deploy_ami', False))

    #
    # Create the stage to terminate the EC2 instance used to both build the AMI and run DB migrations.
    #
    instance_info_location = utils.ArtifactLocation(
        pipeline.name, constants.LAUNCH_INSTANCE_STAGE_NAME,
        constants.LAUNCH_INSTANCE_JOB_NAME, constants.LAUNCH_INSTANCE_FILENAME)
    # runif='any': run the termination stage even if an earlier stage failed,
    # so the launched instance is not leaked.
    stages.generate_terminate_instance(
        pipeline,
        instance_info_location,
        aws_access_key_id=config['aws_access_key_id'],
        aws_secret_access_key=config['aws_secret_access_key'],
        hipchat_token=config['hipchat_token'],
        runif='any')


# Entry point: hand install_pipelines to the shared pipeline_script driver.
if __name__ == "__main__":
    pipeline_script(install_pipelines)
Exemplo n.º 5
0
from edxpipelines.pipelines.script import pipeline_script


def install_pipelines(configurator, config):
    """
    Install one dangling-instance cleanup pipeline per deployment.

    Variables needed for this pipeline:
    - aws_access_key_id
    - aws_secret_access_key
    - edx_deployment
    """
    for env_config in config.by_environments():
        deployment = env_config['edx_deployment']
        pipeline_name = 'Instance-Cleanup-{}'.format(deployment)

        # Build the pipeline step by step instead of one chained expression.
        janitors_group = configurator.ensure_pipeline_group('Janitors')
        pipeline = janitors_group.ensure_replacement_of_pipeline(pipeline_name)
        pipeline = pipeline.set_timer('0 0,30 * * * ?')
        pipeline = pipeline.set_git_material(materials.TUBULAR())

        stages.generate_cleanup_dangling_instances(
            pipeline,
            env_config['aws_access_key_id'],
            env_config['aws_secret_access_key'],
            name_match_pattern='gocd automation run*',
            max_run_hours=24,
            skip_if_tag='do_not_delete')


if __name__ == "__main__":
    # One cleanup pipeline is generated for each of these deployments.
    environments = ('edx', 'edge', 'mckinsey')
    pipeline_script(install_pipelines, environments=environments)
Exemplo n.º 6
0
    """
    Simple pipeline that runs a single task to run an ansible playbook
    against our neo4j box running coursegraph
    """

    pipeline = configurator.ensure_pipeline_group(
        config['pipeline_group']
    ).ensure_replacement_of_pipeline(
        config['pipeline_name']
    ).ensure_material(
        EDX_SECURE()
    ).ensure_material(
        CONFIGURATION()
    )

    stage = pipeline.ensure_stage('build_coursegraph')
    job = stage.ensure_job('run_coursegraph_ansible_job')

    job.add_task(
        ansible_task(
            variables=["edx-secure/ansible/vars/neo4j.yml"],
            playbook='playbooks/neo4j.yml',
            inventory="'{host},'".format(host=config['coursegraph_host']),
            extra_options=["-u", "ubuntu"],
        )
    )


# Entry point: hand update_coursegraph_configuration to the shared
# pipeline_script driver.
if __name__ == '__main__':
    pipeline_script(update_coursegraph_configuration)
Exemplo n.º 7
0
from edxpipelines import materials
from edxpipelines.patterns import stages
from edxpipelines.pipelines.script import pipeline_script


def install_pipelines(configurator, config):
    """
    Install one dangling-instance cleanup pipeline per deployment.

    Variables needed for this pipeline:
    - aws_access_key_id
    - aws_secret_access_key
    - edx_deployment
    """
    for env_config in config.by_environments():
        _install_cleanup_pipeline(configurator, env_config)


def _install_cleanup_pipeline(configurator, env_config):
    """Build the Instance-Cleanup pipeline for a single deployment."""
    pipeline = (
        configurator
        .ensure_pipeline_group('Janitors')
        .ensure_replacement_of_pipeline(
            'Instance-Cleanup-{}'.format(env_config['edx_deployment']))
        .set_timer('0 0,30 * * * ?')
        .set_git_material(materials.TUBULAR())
    )
    stages.generate_cleanup_dangling_instances(
        pipeline,
        env_config['aws_access_key_id'],
        env_config['aws_secret_access_key'],
        name_match_pattern='gocd automation run*',
        max_run_hours=24,
        skip_if_tag='do_not_delete'
    )


if __name__ == "__main__":
    # One cleanup pipeline per deployment.
    deployments = ('edx', 'edge', 'mckinsey')
    pipeline_script(install_pipelines, environments=deployments)
Exemplo n.º 8
0
# pylint: disable=wrong-import-position

from edxpipelines.patterns.pipelines import generate_single_deployment_service_pipelines
from edxpipelines.pipelines.script import pipeline_script


def install_pipelines(configurator, config):
    """
    Generate the deployment pipelines for the forum (cs_comments_service)
    service, covering stage, prod-edx, and prod-edge.
    """
    # Service-specific overrides, gathered before the generator call.
    forum_options = dict(
        app_repo='https://github.com/edx-solutions/cs_comments_service.git',
        deployment='mckinsey',
        has_migrations=False,
        pipeline_group="mckinseyforums",
        cd_pipeline_name='stage-mckinsey-forum',
        manual_pipeline_name='prod-mckinsey-forum',
    )
    generate_single_deployment_service_pipelines(
        configurator, config, 'forum', **forum_options)


if __name__ == '__main__':
    # Forum pipelines target the mckinsey stage and prod environments.
    environments = ('stage-mckinsey', 'prod-mckinsey')
    pipeline_script(install_pipelines, environments=environments)