Example #1
def test_verify_required_attributes_missing_endpoint(monkeypatch):
    monkeypatch.setenv('DEPLOYMENT_USER', 'DUMMY')
    monkeypatch.setenv('DEPLOYMENT_PWD', 'DUMMY')

    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            _b = MagicMock(BuildConfig)
            _b.build_env_info = mock_build_config_missing_apiEndpoint_dict[
                'environments']['unittest']
            _b.json_config = mock_build_config_missing_apiEndpoint_dict
            _b.version_number = None

            _cf = CloudFoundry(_b)
            _cf._verify_required_attributes()

    mock_printmsg_fn.assert_called_with(
        'CloudFoundry', '_verify_required_attributes',
        "The build config associated with cloudfoundry is missing key 'apiEndpoint'",
        'ERROR')
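
The fixture mock_build_config_missing_apiEndpoint_dict is defined elsewhere in the test module and is not shown here. A minimal sketch of what such a fixture might look like, assuming only that its 'cf' block omits the 'apiEndpoint' key (every other key and value below is an illustrative assumption):

# Hypothetical fixture sketch: only the missing 'apiEndpoint' key matters to the test;
# all other keys and values are illustrative assumptions, not the real fixture.
mock_build_config_missing_apiEndpoint_dict = {
    'projectInfo': {'name': 'CI-HelloWorld'},
    'environments': {
        'unittest': {
            'cf': {
                'org': 'example-org',
                'space': 'unittest'
                # 'apiEndpoint' intentionally omitted
            }
        }
    }
}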
Example #2
def test_get_started_apps_already_started_failed_cmd():

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            with patch.object(subprocess, 'Popen') as mocked_popen:
                mocked_popen.return_value.returncode = 1
                mocked_popen.return_value.communicate.return_value = (
                    mock_started_apps_already_started.encode(),
                    'FAKE_ERR_OUTPUT')
                _b = MagicMock(BuildConfig)
                _b.project_name = 'CI-HelloWorld'
                _b.version_number = 'v2.9.0+1'
                _cf = CloudFoundry(_b)

                with patch.object(_cf, '_cf_logout'):
                    _cf._get_started_apps('true')
    mock_printmsg_fn.assert_any_call(
        'CloudFoundry', '_get_started_apps',
        "Failed calling cf apps | grep CI-HelloWorld*-v\\d*\\.\\d*\\.\\d* | grep started | awk '{print $1}'. Return code of 1",
        'ERROR')
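
This test and the next both use a module-level fixture mock_started_apps_already_started that stands in for the stdout of the piped cf apps command. Since subprocess.Popen is mocked, the grep/awk pipeline never actually runs; the code only sees the decoded string. A minimal sketch, assuming the fixture is simply a line whose first column is the already-running app name (the real fixture may carry fuller cf apps output):

# Hypothetical fixture sketch: the test only needs the started app's name to appear
# in the mocked stdout; the real fixture may include more columns or rows.
mock_started_apps_already_started = 'CI-HelloWorld-v2.9.0+1'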
Example #3
def test_get_started_apps_already_started():

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:

        with patch.object(subprocess, 'Popen') as mocked_popen:
            with pytest.raises(SystemExit):
                mocked_popen.return_value.returncode = 0
                mocked_popen.return_value.communicate.return_value = (
                    mock_started_apps_already_started.encode(),
                    'FAKE_ERR_OUTPUT')
                _b = MagicMock(BuildConfig)
                _b.project_name = 'CI-HelloWorld'
                _b.version_number = 'v2.9.0+1'
                _cf = CloudFoundry(_b)

                with patch.object(_cf, '_cf_logout'):
                    _cf._get_started_apps()

    mock_printmsg_fn.assert_any_call(
        'CloudFoundry', '_get_started_apps',
        "App version CI-HelloWorld-v2.9.0+1 already exists and is running. "
        "Cannot perform zero-downtime deployment.  To override, set force "
        "flag = 'true'",
        'ERROR')
Example #4
def main():
    clazz = 'aggregator'
    method = 'main'
    tasks_requiring_github = []

    try:
        version = pkg_resources.require("THD-Flow")[0].version
    except Exception:
        version = 'UNKNOWN'

    parser = ArgumentParser(prog='version {} \n flow'.format(version))

    subparsers = parser.add_subparsers(help='Task types', dest='task')

    parser.add_argument(
        'env',
        help=
        "An environment that is defined in buildConfig.json environments section."
    )

    parser.add_argument('-q',
                        '--quiet',
                        help="Silence the logging to stdout",
                        default='False')

    load_task_parsers(subparsers)

    plugins = []

    for i in pluginloader.get_plugins():
        plugin = pluginloader.load_plugin(i)

        new_parser = subparsers.add_parser(
            plugin.parser, formatter_class=RawTextHelpFormatter)
        plugin.register_parser(new_parser)
        plugins.append(plugin)

        if hasattr(plugin,
                   'require_version') and plugin.require_version is True:
            tasks_requiring_github.append(plugin.parser)

    args = parser.parse_args()

    task = args.task.lower()

    if 'quiet' in args and args.quiet.lower() in ['yes', 'true', 'off', 'y']:
        Commons.quiet = True
    elif task == 'github' and args.action == 'getversion' and args.output is None:
        Commons.quiet = True
    # elif task == 'github' and args.action == 'version' and args.output is None:
    #     Commons.quiet = True

    commons.printMSG(clazz, method, "THD-Flow Version: {}".format(version))

    BuildConfig(args)

    if 'deploy_directory' in args and args.deploy_directory is not None:
        commons.printMSG(
            clazz, method,
            "Setting deployment directory to {}".format(args.deploy_directory))
        BuildConfig.push_location = args.deploy_directory

    connect_error_dispatcher()

    github = None

    # TODO check if there are any registered metrics endpoints defined in settings.ini. This is optional.
    metrics = Graphite()

    commons.printMSG(clazz, method, "Task {}".format(task))

    tasks_requiring_github.extend([
        'sonar', 'tracker', 'slack', 'artifactory', 'cf', 'zipit',
        'gcappengine'
    ])

    if task != 'github' and task in tasks_requiring_github:
        github = GitHub()

        if 'version' in args and args.version is not None and len(
                args.version.strip()) > 0 and args.version.strip().lower(
                ) != 'latest':
            # The only time a user should be targeting a snapshot environment and specifying a version
            # number without a "+" should be if they were manually versioning and passing in a base
            # version number.  Although technically this could be used outside of the manually versioned
            # experience.
            #
            # i.e. flow cf deploy -v 1.0.1 development
            #      this would deploy the latest snapshot version of 1.0.1, 1.0.1+3
            # if however, they supplied the "+" it would target that specific snapshot version and not the latest
            # i.e. flow cf deploy -v 1.0.1+2
            #      this would deploy the version 1.0.1+2 even though there is a snapshot available with +3
            if BuildConfig.artifact_category == 'snapshot' and '+' not in args.version:
                commons.printMSG(
                    clazz, method,
                    'Base version passed in.  Looking for latest snapshot version '
                    'determined by base {}'.format(args.version))
                # TODO it doesn't appear that this is actually returning the latest snapshot, but instead returning
                #      what was passed in.  even in the older version of code.
                BuildConfig.version_number = github.get_git_last_tag(
                    args.version.strip())
            else:
                BuildConfig.version_number = github.get_git_last_tag(
                    args.version.strip())
            # validate after processing what the version_number is set to.
            commons.printMSG(
                clazz, method, "Setting version number based on "
                "argument {}".format(BuildConfig.version_number))

        else:
            BuildConfig.version_number = github.get_git_last_tag()

    if task == 'github':
        github = GitHub()
        if args.action == 'version':
            if 'tracker' in BuildConfig.json_config:
                _tracker = Tracker()
                call_github_version(github,
                                    _tracker,
                                    file_path=args.output,
                                    args=args)
            else:
                call_github_version(github,
                                    None,
                                    file_path=args.output,
                                    args=args)
            metrics.write_metric(task, args.action)
        elif args.action == 'getversion':
            if 'output' in args:
                call_github_getversion(github, file_path=args.output)
            else:
                call_github_getversion(github)
            metrics.write_metric(task, args.action)
    elif task == 'tracker':
        tracker = Tracker()

        commits = get_git_commit_history(github, args)

        story_list = commons.extract_story_id_from_commit_messages(commits)

        tracker.tag_stories_in_commit(story_list)
        metrics.write_metric(task, args.action)
    elif task == 'slack':
        slack = Slack()

        if args.action == 'release':
            # TODO Check to see if they are using tracker first.
            tracker = Tracker()

            commits = get_git_commit_history(github, args)

            story_list = commons.extract_story_id_from_commit_messages(commits)
            story_details = tracker.get_details_for_all_stories(story_list)

            slack.publish_deployment(story_details)
        elif args.action == 'message':
            channel = args.channel if args.channel else None
            user = args.user if args.user else None
            icon = args.icon if args.icon else None
            emoji = args.emoji if args.emoji else None
            attachment_color = args.attachment_color if args.attachment_color else None
            slack_url = args.slack_url

            slack.publish_custom_message(message=args.message,
                                         channel=channel,
                                         user=user,
                                         icon=icon,
                                         emoji=emoji,
                                         attachment_color=attachment_color,
                                         slack_url=slack_url)
        metrics.write_metric(task, args.action)
    elif task == 'sonar':
        sonar = SonarQube()

        sonar.scan_code()
        metrics.write_metric(task, args.action)
    elif task == 'artifactory':
        artifactory = ArtiFactory()

        if args.action == 'upload':
            artifactory.publish_build_artifact()
            metrics.write_metric(task, args.action)
        elif args.action == 'download':
            create_deployment_directory()
            artifactory.download_and_extract_artifacts_locally(
                BuildConfig.push_location + '/',
                extract=args.extract in ['y', 'yes', 'true']
                or args.extract is None)
    elif task == 'cf':
        if BuildConfig.build_env_info['cf']:
            if 'version' not in args:
                commons.printMSG(
                    clazz, method,
                    'Version number not passed in for deployment. Format is: v{'
                    'major}.{minor}.{bug}+{buildnumber} ', 'ERROR')
                exit(1)

        cf = CloudFoundry()

        is_script_run_successful = True

        if 'script' in args and args.script is not None:
            commons.printMSG(clazz, method, 'Custom deploy script detected')
            cf.download_cf_cli()
            cf.download_custom_deployment_script(args.script)
            is_script_run_successful = cf.run_deployment_script(args.script)
        else:
            commons.printMSG(
                clazz, method,
                'No custom deploy script passed in.  Cloud Foundry detected in '
                'buildConfig.  Calling standard CloudFoundry deployment.')

            # TODO make this configurable in case they are using
            create_deployment_directory()

            if BuildConfig.artifact_extension is None and BuildConfig.artifact_extensions is None:
                commons.printMSG(
                    clazz, method,
                    'Attempting to retrieve and deploy from GitHub.')

                github.download_code_at_version()
            else:
                commons.printMSG(
                    clazz, method,
                    'Attempting to retrieve and deploy from Artifactory.')
                artifactory = ArtiFactory()

                artifactory.download_and_extract_artifacts_locally(
                    BuildConfig.push_location + '/')

            force = False

            if 'force' in args and args.force is not None and args.force.strip(
            ).lower() != 'false':
                force = True

            manifest = None

            if 'manifest' in args and args.manifest is not None:
                commons.printMSG(
                    clazz, method,
                    "Setting manifest to {}".format(args.manifest))
                manifest = args.manifest

            cf.deploy(force_deploy=force, manifest=manifest)

        commons.printMSG(clazz, method,
                         'Checking if we can attach the output to the CR')

        SIGNAL = 'publish-deploy-complete'
        sender = {}
        dispatcher.send(signal=SIGNAL, sender=sender)

        if is_script_run_successful is False:
            exit(1)

        metrics.write_metric(task, args.action)
    elif task == 'gcappengine':
        app_engine = GCAppEngine()

        is_script_run_successful = True

        if 'script' in args and args.script is not None:
            commons.printMSG(clazz, method, 'Custom deploy detected')
            app_engine.download_custom_deployment_script(args.script)
            is_script_run_successful = app_engine.run_deployment_script(
                args.script)
        else:
            commons.printMSG(
                clazz, method,
                'No custom deploy script passed in. Calling standard AppEngine deployment.'
            )

            artifactory = ArtiFactory()

            create_deployment_directory()

            if BuildConfig.artifact_extension is None and BuildConfig.artifact_extensions is None:
                commons.printMSG(
                    clazz, method,
                    'Attempting to retrieve and deploy from GitHub.')

                github.download_code_at_version()
            else:
                commons.printMSG(
                    clazz, method,
                    'Attempting to retrieve and deploy from Artifactory.')
                artifactory = ArtiFactory()

                artifactory.download_and_extract_artifacts_locally(
                    BuildConfig.push_location + '/')

            app_yaml = None

            if 'app_yaml' in args and args.app_yaml is not None:
                commons.printMSG(
                    clazz, method,
                    "Setting app yaml to {}".format(args.app_yaml))
                app_yaml = args.app_yaml

            if 'promote' in args and args.promote != 'true':
                app_engine.deploy(app_yaml=app_yaml, promote=False)

        SIGNAL = 'publish-deploy-complete'
        sender = {}
        dispatcher.send(signal=SIGNAL, sender=sender)

        if is_script_run_successful is False:
            exit(1)

        metrics.write_metric(task, args.action)

    elif task == 'zipit':
        ZipIt('artifactory', args.zipfile, args.contents)

    else:
        for i in plugins:
            if i.parser == task:
                i.run_action(args)
                metrics.write_metric(task, args.action)
                break
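
main() discovers plugins through pluginloader and relies only on a small duck-typed surface: a parser attribute naming the sub-command, register_parser() and run_action() callables, and an optional require_version flag. A minimal module-style sketch of that interface, assuming plugins load as modules (the sub-command and option names below are illustrative, not part of flow):

# example_plugin.py -- hypothetical plugin sketch. The attribute and function names
# mirror the accesses made in main(); everything else is an assumption.
parser = 'exampletask'       # sub-command name passed to subparsers.add_parser()
require_version = False      # optional; True adds the task to tasks_requiring_github

def register_parser(new_parser):
    # register task-specific arguments; main() later reads args.action for metrics
    new_parser.add_argument('action', help='Action to perform for exampletask')

def run_action(args):
    # called from main() when args.task matches this plugin's parser name
    print('Running {} action {}'.format(parser, args.action))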