Example #1
    def __enter__(self):
        try:
            start_go_server(self.gocd_version,
                            self.gocd_download_version_string)

            configurator = GoCdConfigurator(HostRestClient('localhost:8153'))
            pipeline = configurator \
                .ensure_pipeline_group("P.Group") \
                .ensure_replacement_of_pipeline("more-options") \
                .set_timer("0 15 22 * * ?") \
                .set_git_material(GitMaterial("https://github.com/SpringerSBM/gomatic.git", material_name="some-material-name", polling=False)) \
                .ensure_environment_variables({'JAVA_HOME': '/opt/java/jdk-1.7'}) \
                .ensure_parameters({'environment': 'qa'})
            stage = pipeline.ensure_stage("earlyStage")
            job = stage.ensure_job("earlyWorm").ensure_artifacts({
                Artifact.get_build_artifact("scripts/*", "files"),
                Artifact.get_build_artifact("target/universal/myapp*.zip",
                                            "artifacts"),
                Artifact.get_test_artifact("from", "to")
            }).set_runs_on_all_agents()
            job.add_task(ExecTask(['ls']))

            configurator.save_updated_config(save_config_locally=True)
            return GoCdConfigurator(HostRestClient('localhost:8153'))
        except:
            # Swallow the exception only if __exit__ returns a true value
            # (mirroring the "with" statement); otherwise re-raise.
            # Assumes "import sys" at module level.
            if not self.__exit__(*sys.exc_info()):
                raise
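Examples #2, #3, #5, and #7 consume this __enter__ through a populated_go_server context manager. A minimal usage sketch, assuming the wrapper simply drives the class above (only the call shape is taken from the later examples):

# Assumed usage; gocd_version and gocd_download_version_string come from
# self.gocd_versions in the tests below.
with populated_go_server(gocd_version, gocd_download_version_string) as configurator:
    print([group.name for group in configurator.pipeline_groups])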
Example #2
    def ignore_test_can_save_pipeline_with_package_ref(self):
        gocd_version, gocd_download_version_string = self.gocd_versions[-1]
        print('test_can_save_pipeline_with_package_ref', "*" * 60,
              gocd_version)
        with populated_go_server(gocd_version,
                                 gocd_download_version_string) as configurator:
            pipeline = configurator \
                    .ensure_pipeline_group("Test") \
                    .ensure_replacement_of_pipeline("new-package")

            repo = configurator.ensure_repository("repo_one")
            repo.ensure_type('yum', '1')
            repo.ensure_property('REPO_URL', 'test/repo')
            package = repo.ensure_package('xxx')
            package.ensure_property('PACKAGE_SPEC', 'spec.*')

            pipeline.set_package_material(PackageMaterial(package.id))
            job = pipeline.ensure_stage("build").ensure_job("build")
            job.ensure_task(ExecTask(["ls"]))

            configurator.save_updated_config(save_config_locally=True,
                                             dry_run=False)
            self.assertEqual(
                1,
                len(
                    configurator.ensure_pipeline_group('Test').find_pipeline(
                        'new-package').materials))
            self.assertEqual(
                package.id,
                configurator.ensure_pipeline_group('Test').find_pipeline(
                    'new-package').package_material.ref)
Example #3
    def test_all_versions(self):
        for gocd_version, gocd_download_version_string in self.gocd_versions:
            print('test_all_versions', "*" * 60, gocd_version)
            with populated_go_server(
                    gocd_version,
                    gocd_download_version_string) as configurator:
                self.assertEqual(
                    ["P.Group"],
                    [p.name for p in configurator.pipeline_groups])
                self.assertEqual(["more-options"], [
                    p.name for p in configurator.pipeline_groups[0].pipelines
                ])
                pipeline = configurator.pipeline_groups[0].pipelines[0]
                self.assertEqual("0 15 22 * * ?", pipeline.timer)
                self.assertEqual(
                    GitMaterial("https://github.com/SpringerSBM/gomatic.git",
                                material_name="some-material-name",
                                polling=False), pipeline.git_material)
                self.assertEqual({'JAVA_HOME': '/opt/java/jdk-1.7'},
                                 pipeline.environment_variables)
                self.assertEqual({'environment': 'qa'}, pipeline.parameters)
                self.assertEqual(['earlyStage'],
                                 [s.name for s in pipeline.stages])
                self.assertEqual(['earlyWorm'],
                                 [j.name for j in pipeline.stages[0].jobs])
                job = pipeline.stages[0].jobs[0]
                self.assertEqual(
                    {
                        Artifact.get_build_artifact("scripts/*", "files"),
                        Artifact.get_build_artifact(
                            "target/universal/myapp*.zip", "artifacts"),
                        Artifact.get_test_artifact("from", "to")
                    }, job.artifacts)
                self.assertEqual(True, job.runs_on_all_agents)
                self.assertEqual([ExecTask(['ls'])], job.tasks)
Example #4
def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    - gocd_username
    - gocd_password
    - gocd_url
    - configuration_secure_repo
    - hipchat_token
    - github_private_key
    - aws_access_key_id
    - aws_secret_access_key
    - ec2_vpc_subnet_id
    - ec2_security_group_id
    - ec2_instance_profile_name
    - base_ami_id
    """
    pipeline = configurator.ensure_pipeline_group(
        config['pipeline_group']
    ).ensure_replacement_of_pipeline(
        config['pipeline_name']
    ).ensure_material(
        GitMaterial(
            'https://github.com/edx/edx-gomatic',
            polling=True,
            material_name='edx-gomatic',
            destination_directory='edx-gomatic',
        )
    ).ensure_material(
        GitMaterial(
            'git@github.com:edx-ops/gomatic-secure.git',
            polling=True,
            destination_directory='gomatic-secure',
            ignore_patterns=constants.MATERIAL_IGNORE_ALL_REGEX
        )
    ).set_label_template('${edx-gomatic[:7]}')

    pipeline.ensure_encrypted_environment_variables(
        {
            'GOMATIC_USER': config['gomatic_user'],
            'GOMATIC_PASSWORD': config['gomatic_password']
        }
    )

    stage = pipeline.ensure_stage('deploy_gomatic_stage')
    job = stage.ensure_job('deploy_gomatic_scripts_job')
    tasks.generate_requirements_install(job, 'edx-gomatic')

    job.add_task(
        ExecTask(
            [
                '/usr/bin/python',
                './deploy_pipelines.py',
                '-v',
                'tools',
                '-f',
                'config.yml'
            ],
            working_dir='edx-gomatic'
        )
    )
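For reference, a minimal shape for the config mapping that install_pipelines reads above. The key names are taken directly from the function body; the values are illustrative placeholders:

# Illustrative placeholders only; key names mirror the lookups above.
config = {
    'pipeline_group': 'tools',
    'pipeline_name': 'deploy_gomatic',
    'gomatic_user': 'gocd-user',        # exposed as GOMATIC_USER
    'gomatic_password': 'changeme',     # exposed (encrypted) as GOMATIC_PASSWORD
}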
Example #5
    def ignore_test_can_save_multiple_times_using_same_configurator(self):
        gocd_version, gocd_download_version_string = self.gocd_versions[-1]
        print('test_can_save_multiple_times_using_same_configurator', "*" * 60,
              gocd_version)
        with populated_go_server(gocd_version,
                                 gocd_download_version_string) as configurator:
            pipeline = configurator \
                    .ensure_pipeline_group("Test") \
                    .ensure_replacement_of_pipeline("new-one")
            pipeline.set_git_material(
                GitMaterial("https://github.com/SpringerSBM/gomatic.git",
                            polling=False))
            job = pipeline.ensure_stage("build").ensure_job("build")
            job.ensure_task(ExecTask(["ls"]))

            configurator.save_updated_config(save_config_locally=True,
                                             dry_run=False)

            pipeline = configurator \
                    .ensure_pipeline_group("Test") \
                    .ensure_replacement_of_pipeline("new-two")
            pipeline.set_git_material(
                GitMaterial("https://github.com/SpringerSBM/gomatic.git",
                            polling=False))
            job = pipeline.ensure_stage("build").ensure_job("build")
            job.ensure_task(ExecTask(["ls"]))

            configurator.save_updated_config(save_config_locally=True,
                                             dry_run=False)

            self.assertEqual(
                1,
                len(
                    configurator.ensure_pipeline_group('Test').find_pipeline(
                        'new-one').stages))
            self.assertEqual(
                1,
                len(
                    configurator.ensure_pipeline_group('Test').find_pipeline(
                        'new-two').stages))
Example #6
def manual_verification(edxapp_deploy_group, config):
    """
    Variables needed for this pipeline:
    materials: A list of dictionaries of the materials used in this pipeline
    upstream_pipelines: A list of dictionaries of the upstream pipelines that feed into the manual verification
    """
    pipeline = edxapp_deploy_group.ensure_replacement_of_pipeline(
        EDXAPP_MANUAL_PIPELINE_NAME)

    for material in (TUBULAR, CONFIGURATION, EDX_PLATFORM, EDX_SECURE,
                     EDGE_SECURE, EDX_MICROSITE, EDX_INTERNAL, EDGE_INTERNAL):
        pipeline.ensure_material(material())

    # What this accomplishes:
    # When a pipeline such as the edX stage pipeline runs, this pipeline is
    # downstream. Since the first stage is automatic, the git materials will
    # be carried over from the first pipeline.
    #
    # The second pipeline stage checks the result of the CI testing for the commit to release in the
    # primary code repository.
    # The third stage in this pipeline requires manual approval.
    #
    # This allows the overall workflow to remain paused while manual verification is completed and allows the git
    # materials to stay pinned.
    #
    # Once the third phase is approved, the workflow will continue and pipelines downstream will continue to execute
    # with the same pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline,
                                constants.INITIAL_VERIFICATION_STAGE_NAME)

    # Add all materials for which to check CI tests in this list.
    stages.generate_check_ci(pipeline, config['github_token'],
                             [EDX_PLATFORM()])

    manual_verification_stage = pipeline.ensure_stage(
        constants.MANUAL_VERIFICATION_STAGE_NAME)
    manual_verification_stage.set_has_manual_approval()
    manual_verification_job = manual_verification_stage.ensure_job(
        constants.MANUAL_VERIFICATION_JOB_NAME)
    manual_verification_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
        ]))

    return pipeline
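Only github_token is read from config in the body above; the materials and upstream_pipelines named in the docstring are consumed elsewhere. A placeholder sketch:

# Illustrative placeholder; the token feeds stages.generate_check_ci above.
config = {'github_token': '<github-api-token>'}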
Example #7
    def test_can_save_and_read_repositories(self):
        gocd_version, gocd_download_version_string = self.gocd_versions[-1]
        print('test_can_save_and_read_repositories', "*" * 60, gocd_version)
        with populated_go_server(gocd_version, gocd_download_version_string) as configurator:
            repo = configurator.ensure_repository("repo_one")
            repo.ensure_type('yum', '1')
            repo.ensure_property('REPO_URL', 'test/repo')
            package = repo.ensure_package('xxx')
            package.ensure_property('PACKAGE_SPEC', 'spec.*')

            pipeline = configurator.ensure_pipeline_group('repo-pipeline').ensure_pipeline('pone')
            pipeline.set_package_ref(package.id)
            job = pipeline.ensure_stage("build").ensure_job("build")
            job.ensure_task(ExecTask(["ls"]))

            configurator.save_updated_config(save_config_locally=True, dry_run=False)

            self.assertIsNotNone(configurator.ensure_repository("repo_one"))
Example #8
def install_pipelines(configurator, config):
    """
    Install the pipelines that can deploy the edX api-gateway.
    """
    pipeline = configurator \
        .ensure_pipeline_group(config['pipeline']['group']) \
        .ensure_replacement_of_pipeline(config['pipeline']['name']) \
        .set_label_template('${build}') \
        .set_automatic_pipeline_locking()

    # Allow for a multi-stage deployment
    if 'previous_deployment' in config['upstream_pipelines']:
        build_material = {
            'pipeline': config['upstream_pipelines']['previous_deployment'],
            'stage': 'forward_build',
            'jobs': {
                'swagger': 'forward_build',
                'source': 'forward_build'
            }
        }

    elif 'build' in config['upstream_pipelines']:
        build_material = {
            'pipeline': config['upstream_pipelines']['build'],
            'stage': api_build.BUILD_STAGE_NAME,
            'jobs': {
                'swagger': 'swagger-flatten',
                'source': 'package-source'
            }
        }

    else:
        build_material = {
            'pipeline': config['pipeline']['build'],
            'stage': api_build.BUILD_STAGE_NAME,
            'jobs': {
                'swagger': 'swagger-flatten',
                'source': 'package-source'
            }
        }

    pipeline.ensure_material(PipelineMaterial(build_material['pipeline'], build_material['stage'], 'build'))

    pipeline.ensure_environment_variables(
        {
            'ROOT_REDIRECT': config['root_redirect'],
            'API_BASE': config['api_base'],
            'LOG_LEVEL': config['aws']['log_level'],
            'RATE_LIMIT': config['aws']['rate_limit'],
            'METRICS': config['aws']['metrics'],
            'ROTATION_ORDER': ' '.join(config['rotation_order']),
            'BURST_LIMIT': config['aws']['burst_limit'],
            'EDXAPP_HOST': config['upstream_origins']['edxapp'],
            'CATALOG_HOST': config['upstream_origins']['catalog'],
            'WAIT_SLEEP_TIME': config.get('tubular_sleep_wait_time', constants.TUBULAR_SLEEP_WAIT_TIME),
        }
    )

    pipeline.ensure_encrypted_environment_variables({
        'AWS_ACCESS_KEY_ID': config['aws']['access_key_id'],
        'AWS_SECRET_ACCESS_KEY': config['aws']['secret_access_key']
    })

    # Setup the Upload stage
    upload_stage = pipeline.ensure_stage('upload').set_clean_working_dir()

    upload_gateway_job = upload_stage.ensure_job('upload_gateway')
    upload_gateway_job.ensure_tab(Tab('output.txt', 'output.txt'))
    upload_gateway_job.ensure_tab(Tab('next_stage.txt', 'next_stage.txt'))

    upload_gateway_job.ensure_artifacts({BuildArtifact('next_stage.txt')})

    swagger_flatten_artifact_params = {
        'pipeline': build_material['pipeline'],
        'stage': build_material['stage'],
        'job': build_material['jobs']['swagger'],
        'src': FetchArtifactFile('swagger.json')
    }
    upload_gateway_job.add_task(FetchArtifactTask(**swagger_flatten_artifact_params))

    api_manager_artifact_params = {
        'pipeline': build_material['pipeline'],
        'stage': build_material['stage'],
        'job': build_material['jobs']['source'],
        'src': FetchArtifactDir('api-manager')
    }
    upload_gateway_job.add_task(FetchArtifactTask(**api_manager_artifact_params))

    upload_gateway_job.add_task(
        ExecTask(
            [
                '/bin/bash', '-c',
                'PYTHONPATH=python-libs python scripts/aws/deploy.py --api-base-domain ${API_BASE} '
                '--swagger-filename ../swagger.json --tag ${GO_PIPELINE_LABEL} --rotation-order '
                '${ROTATION_ORDER} --log-level ${LOG_LEVEL} --metrics ${METRICS} --rate-limit ${RATE_LIMIT} '
                '--burst-limit ${BURST_LIMIT} --edxapp-host ${EDXAPP_HOST} --catalog-host ${CATALOG_HOST} '
                '--landing-page ${ROOT_REDIRECT} > ../next_stage.txt'
            ],
            working_dir='api-manager'
        )
    )

    # Setup the test stage
    test_stage = pipeline.ensure_stage('test')
    test_job = test_stage.ensure_job('test_job')
    test_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                '/bin/echo "need to implement these tests, just stubbing out the stage for now"'
            ]
        )
    )

    # Setup the deploy stage
    deploy_stage = pipeline.ensure_stage('deploy')

    if config.get('manual_approval_required', False):
        deploy_stage.set_has_manual_approval()

    deploy_gateway_job = deploy_stage.ensure_job('deploy_gateway')
    deploy_gateway_job.ensure_tab(Tab('output.txt', 'output.txt'))

    deploy_gateway_job.add_task(FetchArtifactTask(
        pipeline.name, 'upload', 'upload_gateway', FetchArtifactFile('next_stage.txt')
    ))
    deploy_gateway_job.add_task(FetchArtifactTask(**api_manager_artifact_params))
    deploy_gateway_job.add_task(
        ExecTask(
            [
                '/bin/bash',
                '-c',
                'PYTHONPATH=python-libs python scripts/aws/flip.py --api-base-domain ${API_BASE} '
                '--next-stage `cat ../next_stage.txt`'
            ],
            working_dir='api-manager'
        )
    )

    # Setup the Log stage
    log_stage = pipeline.ensure_stage('log')
    log_gateway_job = log_stage.ensure_job('deploy_lambda')

    log_gateway_job.ensure_environment_variables(
        {
            'splunk_host': config['log_lambda']['splunk_host'],
            'subnet_list': config['log_lambda']['subnet_list'],
            'sg_list': config['log_lambda']['sg_list'],
            'environment': config['log_lambda']['environment'],
            'deployment': config['log_lambda']['deployment'],
        }
    )
    log_gateway_job.ensure_encrypted_environment_variables(
        {
            'splunk_token': config['log_lambda']['splunk_token'],
            'acct_id': config['log_lambda']['acct_id'],
            'kms_key': config['log_lambda']['kms_key'],
        }
    )

    log_gateway_job.add_task(FetchArtifactTask(**api_manager_artifact_params))

    log_gateway_job.add_task(
        ExecTask(
            [
                '/bin/bash', '-c',
                'PYTHONPATH=python-libs python scripts/aws/monitor.py --api-base-domain ${API_BASE} '
                '--splunk-host ${splunk_host} --splunk-token ${splunk_token} --acct-id ${acct_id} '
                '--kms-key ${kms_key} --subnet-list ${subnet_list} --sg-list ${sg_list} --environment '
                '${environment} --deployment ${deployment}'
            ],
            working_dir='api-manager'
        )
    )

    # Setup the forward_build stage (which makes the build available to the next pipeline)
    forward_build_stage = pipeline.ensure_stage('forward_build')
    forward_build_job = forward_build_stage.ensure_job('forward_build')
    forward_build_job.add_task(FetchArtifactTask(**api_manager_artifact_params))
    forward_build_job.add_task(FetchArtifactTask(**swagger_flatten_artifact_params))
    forward_build_job.ensure_artifacts(set([BuildArtifact("api-manager"), BuildArtifact("swagger.json")]))
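A sketch of the config shape this deploy function expects; key names mirror the lookups in the function above and all values are placeholders:

# Illustrative placeholders only.
config = {
    'pipeline': {'group': 'api', 'name': 'api_gateway_deploy', 'build': 'api_gateway_build'},
    'upstream_pipelines': {'build': 'api_gateway_build'},   # or 'previous_deployment': '...'
    'root_redirect': 'https://example.com',
    'api_base': 'api.example.com',
    'rotation_order': ['blue', 'green'],
    'aws': {'log_level': 'INFO', 'rate_limit': '100', 'burst_limit': '200',
            'metrics': 'true', 'access_key_id': '<key>', 'secret_access_key': '<secret>'},
    'upstream_origins': {'edxapp': 'edxapp.example.com', 'catalog': 'catalog.example.com'},
    'log_lambda': {'splunk_host': '<host>', 'splunk_token': '<token>', 'acct_id': '<id>',
                   'kms_key': '<key>', 'subnet_list': '<subnets>', 'sg_list': '<sgs>',
                   'environment': 'prod', 'deployment': 'edx'},
    'manual_approval_required': True,
}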
Example #9
def install_pipelines(configurator, config):
    """
    Variables needed for this pipeline:
    materials: A list of dictionaries of the materials used in this pipeline
    upstream_pipelines: A list of dictionaries of the upstream pipelines that feed into the manual verification
    """
    pipeline = configurator.ensure_pipeline_group(config['pipeline_group'])\
                           .ensure_replacement_of_pipeline(config['pipeline_name'])

    for material in config['materials']:
        pipeline.ensure_material(
            GitMaterial(
                url=material['url'],
                branch=material['branch'],
                material_name=material['material_name'],
                polling=material['polling'],
                destination_directory=material['destination_directory'],
                ignore_patterns=set(material['ignore_patterns'])))

    for material in config['upstream_pipelines']:
        pipeline.ensure_material(
            PipelineMaterial(pipeline_name=material['pipeline_name'],
                             stage_name=material['stage_name'],
                             material_name=material['material_name']))

    # What this accomplishes:
    # When a pipeline such as the edX stage pipeline runs, this pipeline is
    # downstream. Since the first stage is automatic, the git materials will
    # be carried over from the first pipeline.
    #
    # The second stage in this pipeline requires manual approval.
    #
    # This allows the overall workflow to remain paused while manual verification is completed and allows the git
    # materials to stay pinned.
    #
    # Once the second phase is approved, the workflow will continue and pipelines downstream will continue to execute
    # with the same pinned materials from the upstream pipeline.
    stages.generate_armed_stage(pipeline,
                                constants.INITIAL_VERIFICATION_STAGE_NAME)

    # For now, you can only trigger builds on a single Jenkins server, because
    # you can only define a single username/token, and all the jobs that you
    # want to trigger need the same job token defined.
    # TODO: refactor when required so that each job can define its own user and job tokens
    pipeline.ensure_unencrypted_secure_environment_variables({
        'JENKINS_USER_TOKEN': config['jenkins_user_token'],
        'JENKINS_JOB_TOKEN': config['jenkins_job_token']
    })

    # Create the stage with the Jenkins jobs
    jenkins_stage = pipeline.ensure_stage(
        constants.JENKINS_VERIFICATION_STAGE_NAME)
    jenkins_stage.set_has_manual_approval()
    jenkins_user_name = config['jenkins_user_name']

    for jenkins in config['jenkins_verifications']:
        pipeline_job_name = jenkins['pipeline_job_name']
        jenkins_url = jenkins['url']
        jenkins_job_name = jenkins['job_name']
        key, _, param = jenkins['param'].partition(' ')
        jenkins_param = {key: param}

        job = jenkins_stage.ensure_job(pipeline_job_name)
        tasks.generate_package_install(job, 'tubular')
        tasks.trigger_jenkins_build(job, jenkins_url, jenkins_user_name,
                                    jenkins_job_name, jenkins_param)

    manual_verification_stage = pipeline.ensure_stage(
        constants.MANUAL_VERIFICATION_STAGE_NAME)
    manual_verification_stage.set_has_manual_approval()
    manual_verification_job = manual_verification_stage.ensure_job(
        constants.MANUAL_VERIFICATION_JOB_NAME)
    manual_verification_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'echo Manual Verification run number $GO_PIPELINE_COUNTER completed by $GO_TRIGGER_USER'
        ]))
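Each entry in config['jenkins_verifications'] is read with the keys below, and param is partitioned on its first space into a single name/value pair:

# Illustrative entry; key names come from the loop above.
config['jenkins_verifications'] = [{
    'pipeline_job_name': 'verify_service',
    'url': 'https://jenkins.example.com',
    'job_name': 'service-smoke-test',
    'param': 'ENVIRONMENT stage',   # becomes {'ENVIRONMENT': 'stage'}
}]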
Example #10
def install_pipelines(configurator, config):
    """
    Install pipelines that can build the edX api-gateway.
    """
    pipeline = configurator \
        .ensure_pipeline_group(config['pipeline']['group']) \
        .ensure_replacement_of_pipeline(config['pipeline']['name']) \
        .set_label_template('${api-manager}') \
        .set_git_material(GitMaterial(
            config['github']['server_uri'] + '/' + config['github']['repository'],
            branch='#{GIT_BRANCH}',
            material_name='api-manager',
            destination_directory=API_MANAGER_WORKING_DIR
        ))

    pipeline.ensure_parameters({'GIT_BRANCH': config['github']['branch']})

    pipeline.ensure_environment_variables({
        'SWAGGER_CODEGEN_JAR': config['swagger_codegen_jar'],
        'GITHUB_API_REPO': config['github']['repository'],
        'GITHUB_API_URI': config['github']['api_uri'],
        'GITHUB_API_POLL_WAIT_S': config['github']['api_poll_wait_s'],
        'GITHUB_API_POLL_RETRIES': config['github']['api_poll_retries']
    })

    # Note: this GitHub status-poll hack should eventually be replaced with something more robust.
    setup_stage = pipeline.ensure_stage(SETUP_STAGE_NAME)
    wait_for_travis_job = setup_stage.ensure_job(WAIT_FOR_TRAVIS_JOB_NAME)
    # pylint: disable=bad-continuation
    wait_for_travis_job.add_task(
        ExecTask([
            '/bin/bash', '-c', 'i=0; until python -c "'
            'import requests; '
            'assert(requests.get('
            '\'${GITHUB_API_URI}/${GITHUB_API_REPO}/commits/{}/status\'.format('
            '\'${GO_REVISION_API_MANAGER}\''
            ')'
            ').json()[\'state\'] == \'success\')'
            '"; '
            'do i=$((i+1)); '
            'if [ $i -gt ${GITHUB_API_POLL_RETRIES} ]; '
            'then exit 1; '
            'fi; '
            'sleep ${GITHUB_API_POLL_WAIT_S}; '
            'done'
        ]))
    # pylint: enable=bad-continuation

    download_stage = pipeline.ensure_stage(
        DOWNLOAD_STAGE_NAME).set_clean_working_dir()
    swagger_codegen_job = download_stage.ensure_job(
        SWAGGER_CODEGEN_JOB_NAME).ensure_artifacts(
            {BuildArtifact(SWAGGER_JAR)})
    swagger_codegen_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'curl ${{SWAGGER_CODEGEN_JAR}} -o {swagger_jar}'.format(
                swagger_jar=SWAGGER_JAR)
        ]))

    build_stage = pipeline.ensure_stage(
        BUILD_STAGE_NAME).set_clean_working_dir()
    swagger_flatten_job = build_stage.ensure_job(
        SWAGGER_FLATTEN_JOB_NAME).ensure_artifacts({
            BuildArtifact('api-manager/swagger-build-artifacts/swagger.json')
        })

    artifact_params = {
        'pipeline': pipeline.name,
        'stage': DOWNLOAD_STAGE_NAME,
        'job': SWAGGER_CODEGEN_JOB_NAME,
        'src': FetchArtifactFile(SWAGGER_JAR),
        'dest': API_MANAGER_WORKING_DIR
    }
    swagger_flatten_job.add_task(FetchArtifactTask(**artifact_params))
    swagger_flatten_job.add_task(
        ExecTask(['make', 'build'], working_dir=API_MANAGER_WORKING_DIR))

    package_source_job = build_stage.ensure_job(
        PACKAGE_SOURCE_JOB_NAME).ensure_artifacts(
            {BuildArtifact('api-manager')})
    package_source_job.add_task(
        ExecTask([
            '/bin/bash', '-c',
            'pip install -t python-libs -r requirements/base.txt'
        ],
                 working_dir=API_MANAGER_WORKING_DIR))
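A sketch of the config shape this build pipeline reads; key names mirror the lookups above and the values are placeholders:

# Illustrative placeholders only.
config = {
    'pipeline': {'group': 'api', 'name': 'api_gateway_build'},
    'github': {
        'server_uri': 'https://github.com',
        'repository': 'edx/api-manager',
        'branch': 'master',
        'api_uri': 'https://api.github.com/repos',
        'api_poll_wait_s': '30',
        'api_poll_retries': '120',
    },
    'swagger_codegen_jar': 'https://example.com/swagger-codegen-cli.jar',
}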