def test_story_bump_bug(monkeypatch):
    monkeypatch.setenv('TRACKER_TOKEN', 'fake_token')

    _b = MagicMock(BuildConfig)
    _b.build_env_info = mock_build_config_dict['environments']['unittest']
    _b.json_config = mock_build_config_dict

    _tracker = Tracker(config_override=_b)

    current_test_directory = os.path.dirname(os.path.realpath(__file__))
    with open(current_test_directory + "/tracker_stories_bug.json", 'r') as myfile:
        tracker_json_data = json.loads(myfile.read())

    bump_type = _tracker.determine_semantic_version_bump(story_details=tracker_json_data["stories"])

    assert bump_type == "bug"
def test_get_details_for_all_stories_for_multiple_projects(monkeypatch):
    monkeypatch.setenv('TRACKER_TOKEN', 'fake_token')

    _b = MagicMock(BuildConfig)
    _b.build_env_info = mock_build_config_dict_multi_projects['environments']['unittest']
    _b.json_config = mock_build_config_dict_multi_projects

    parser = configparser.ConfigParser()
    parser.add_section('tracker')
    parser.set('tracker', 'url', 'http://happy.happy.joy.joy')
    _b.settings = parser

    with patch('requests.get') as mock_request:
        headers = {'Content-type': 'application/json',
                   'Accept': 'application/json',
                   'X-TrackerToken': 'fake_token'}
        timeout = 30

        current_test_directory = os.path.dirname(os.path.realpath(__file__))
        with open(current_test_directory + "/tracker_stories_bug.json", 'r') as myfile:
            tracker_data = myfile.read()

        _tracker = Tracker(config_override=_b)
        mock_request.side_effect = mock_get_multiple_project_story_details_response

        story_details = _tracker.get_details_for_all_stories(story_list=["134082057", "134082058", "264082058"])

        # assert mock_request.call_count == 4
        mock_request.assert_any_call('http://happy.happy.joy.joy/services/v5/projects/123456/stories/134082057',
                                     headers=headers,
                                     timeout=timeout)
        mock_request.assert_any_call('http://happy.happy.joy.joy/services/v5/projects/123456/stories/134082058',
                                     headers=headers,
                                     timeout=timeout)
        mock_request.assert_any_call('http://happy.happy.joy.joy/services/v5/projects/123456/stories/264082058',
                                     headers=headers,
                                     timeout=timeout)
        mock_request.assert_any_call('http://happy.happy.joy.joy/services/v5/projects/423476/stories/264082058',
                                     headers=headers,
                                     timeout=timeout)

        assert story_details[0] == json.loads(tracker_data)
def test_no_initialize_object(monkeypatch):
    monkeypatch.setenv('TRACKER_TOKEN', 'fake_token')

    _b = MagicMock(BuildConfig)
    _b.build_env_info = mock_build_config_dict['environments']['unittest']
    _b.json_config = mock_build_config_dict

    _tracker = Tracker(config_override=_b)

    assert _tracker is not None
def test_init_missing_env_variable(monkeypatch):
    if os.getenv('TRACKER_TOKEN'):
        monkeypatch.delenv('TRACKER_TOKEN')

    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        with pytest.raises(SystemExit) as cm:
            Tracker()

        mock_printmsg_fn.assert_called_with('Tracker', '__init__',
                                            "No tracker token found in environment. Did you define environment variable 'TRACKER_TOKEN'?",
                                            'ERROR')
def test_tag_stories_in_commit_for_multiple_projects(monkeypatch):
    monkeypatch.setenv('TRACKER_TOKEN', 'fake_token')

    _b = MagicMock(BuildConfig)
    _b.build_env_info = mock_build_config_dict_multi_projects['environments']['unittest']
    _b.project_name = mock_build_config_dict_multi_projects['projectInfo']['name']
    _b.version_number = 'v1.0'
    _b.json_config = mock_build_config_dict_multi_projects

    parser = configparser.ConfigParser()
    parser.add_section('tracker')
    parser.set('tracker', 'url', 'http://happy.happy.joy.joy')
    _b.settings = parser

    with patch('requests.post') as mock_request:
        headers = {'Content-type': 'application/json',
                   'Accept': 'application/json',
                   'X-TrackerToken': 'fake_token'}
        timeout = 30
        label = {"name": "testproject-v1.0"}
        json_label = json.dumps(label, default=lambda o: o.__dict__, sort_keys=False, indent=4)

        mock_request.return_value.text = ''
        mock_request.return_value.status_code = 200
        mock_request.side_effect = mock_get_multiple_project_labels_response

        _tracker = Tracker(config_override=_b)
        _tracker.tag_stories_in_commit(story_list=['134082057', '222082037'])

        mock_request.assert_any_call('http://happy.happy.joy.joy/services/v5/projects/123456/stories/134082057/labels',
                                     json_label,
                                     headers=headers,
                                     timeout=timeout)
        mock_request.assert_any_call('http://happy.happy.joy.joy/services/v5/projects/423476/stories/222082037/labels',
                                     json_label,
                                     headers=headers,
                                     timeout=timeout)
def test_init_missing_tracker_project_id(monkeypatch):
    monkeypatch.setenv('TRACKER_TOKEN', 'fake_token')

    _b = MagicMock(BuildConfig)
    _b.json_config = mock_build_config_missing_projectid_dict

    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        with pytest.raises(SystemExit) as cm:
            Tracker(config_override=_b)

        mock_printmsg_fn.assert_called_with('Tracker', '__init__',
                                            "The build config associated with projectTracking is missing key 'projectId'",
                                            'ERROR')
def test_init_missing_tracker_url(monkeypatch):
    monkeypatch.setenv('TRACKER_TOKEN', 'fake_token')

    _b = MagicMock(BuildConfig)
    _b.json_config = mock_build_config_dict

    parser = configparser.ConfigParser()
    _b.settings = parser

    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        with pytest.raises(SystemExit) as cm:
            Tracker(config_override=_b)

        mock_printmsg_fn.assert_called_with('Tracker', '__init__',
                                            'No tracker url found in buildConfig or settings.ini.',
                                            'ERROR')
def test_init_for_multiple_projects_too_many_project_id_keys(monkeypatch):
    monkeypatch.setenv('TRACKER_TOKEN', 'fake_token')

    _b = MagicMock(BuildConfig)
    _b.build_env_info = mock_build_config_dict_both_project_ids['environments']['unittest']
    _b.json_config = mock_build_config_dict_both_project_ids

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            Tracker(config_override=_b)

        mock_printmsg_fn.assert_called_with('Tracker', '__init__',
                                            "The build config may only contain 'projectId' for single project id"
                                            "or 'projectIds' containing an array of project ids",
                                            'ERROR')
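# A minimal sketch (an assumption, not this test module's actual fixture) of how a
# requests side_effect helper such as mock_get_multiple_project_story_details_response
# can dispatch on the requested URL so the multi-project tests above receive a
# different canned response per Tracker project. Names and payloads are illustrative.
def _example_multi_project_side_effect(url, headers=None, timeout=None):
    response = MagicMock()
    if '/projects/123456/' in url:
        # First project: pretend the story lookup succeeded.
        response.status_code = 200
        response.text = '{"id": 134082057, "story_type": "bug"}'
    elif '/projects/423476/' in url:
        # Second project: a different canned payload.
        response.status_code = 200
        response.text = '{"id": 264082058, "story_type": "feature"}'
    else:
        # Unknown project: simulate a Tracker 404.
        response.status_code = 404
        response.text = ''
    return response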
def main():
    clazz = 'aggregator'
    method = 'main'

    tasks_requiring_github = []

    try:
        version = pkg_resources.require("THD-Flow")[0].version
    except Exception:
        version = 'UNKNOWN'

    parser = ArgumentParser(prog='version {} \n flow'.format(version))
    subparsers = parser.add_subparsers(help='Task types', dest='task')
    parser.add_argument('env', help="An environment that is defined in buildConfig.json environments section.")
    parser.add_argument('-q', '--quiet', help="Silence the logging to stdout", default='False')

    load_task_parsers(subparsers)

    plugins = []
    for i in pluginloader.get_plugins():
        plugin = pluginloader.load_plugin(i)
        new_parser = subparsers.add_parser(plugin.parser, formatter_class=RawTextHelpFormatter)
        plugin.register_parser(new_parser)
        plugins.append(plugin)
        if hasattr(plugin, 'require_version') and plugin.require_version is True:
            tasks_requiring_github.append(plugin.parser)

    args = parser.parse_args()
    task = args.task.lower()

    if 'quiet' in args and args.quiet.lower() in ['yes', 'true', 'off', 'y']:
        Commons.quiet = True
    elif task == 'github' and args.action == 'getversion' and args.output is None:
        Commons.quiet = True
    # elif task == 'github' and args.action == 'version' and args.output is None:
    #     Commons.quiet = True

    commons.printMSG(clazz, method, "THD-Flow Version: {}".format(version))

    BuildConfig(args)

    if 'deploy_directory' in args and args.deploy_directory is not None:
        commons.printMSG(clazz, method, "Setting deployment directory to {}".format(args.deploy_directory))
        BuildConfig.push_location = args.deploy_directory

    connect_error_dispatcher()

    github = None

    # TODO check if there are any registered metrics endpoints defined in settings.ini. This is optional.
    metrics = Graphite()

    commons.printMSG(clazz, method, "Task {}".format(task))

    tasks_requiring_github.extend(['sonar', 'tracker', 'slack', 'artifactory', 'cf', 'zipit', 'gcappengine'])

    if task != 'github' and task in tasks_requiring_github:
        github = GitHub()

        if 'version' in args and args.version is not None and len(args.version.strip()) > 0 and \
                args.version.strip().lower() != 'latest':
            # The only time a user should be targeting a snapshot environment and specifying a version
            # number without a "+" should be if they were manually versioning and passing in a base
            # version number. Although technically this could be used outside of the manually versioned
            # experience.
            #
            # i.e. flow cf deploy -v 1.0.1 development
            #      this would deploy the latest snapshot version of 1.0.1, 1.0.1+3
            # if however, they supplied the "+" it would target that specific snapshot version and not the latest
            # i.e. flow cf deploy -v 1.0.1+2
            #      this would deploy the version 1.0.1+2 even though there is a snapshot available with +3

            if BuildConfig.artifact_category == 'snapshot' and '+' not in args.version:
                commons.printMSG(clazz, method, ('Base version passed in. Looking for latest snapshot version '
                                                 'determined by base {}').format(args.version))
                # TODO it doesn't appear that this is actually returning the latest snapshot, but instead returning
                #      what was passed in. even in the older version of code.
                BuildConfig.version_number = github.get_git_last_tag(args.version.strip())
            else:
                BuildConfig.version_number = github.get_git_last_tag(args.version.strip())

            # validate after processing what the version_number is set to.
            commons.printMSG(clazz, method, "Setting version number based on argument {}".format(
                BuildConfig.version_number))
        else:
            BuildConfig.version_number = github.get_git_last_tag()

    if task == 'github':
        github = GitHub()

        if args.action == 'version':
            if 'tracker' in BuildConfig.json_config:
                _tracker = Tracker()
                call_github_version(github, _tracker, file_path=args.output, args=args)
            else:
                call_github_version(github, None, file_path=args.output, args=args)

            metrics.write_metric(task, args.action)
        elif args.action == 'getversion':
            if 'output' in args:
                call_github_getversion(github, file_path=args.output)
            else:
                call_github_getversion(github)

            metrics.write_metric(task, args.action)

    elif task == 'tracker':
        tracker = Tracker()
        commits = get_git_commit_history(github, args)
        story_list = commons.extract_story_id_from_commit_messages(commits)
        tracker.tag_stories_in_commit(story_list)
        metrics.write_metric(task, args.action)

    elif task == 'slack':
        slack = Slack()

        if args.action == 'release':
            # TODO Check to see if they are using tracker first.
            tracker = Tracker()
            commits = get_git_commit_history(github, args)
            story_list = commons.extract_story_id_from_commit_messages(commits)
            story_details = tracker.get_details_for_all_stories(story_list)
            slack.publish_deployment(story_details)
        elif args.action == 'message':
            channel = args.channel if args.channel else None
            user = args.user if args.user else None
            icon = args.icon if args.icon else None
            emoji = args.emoji if args.emoji else None
            attachment_color = args.attachment_color if args.attachment_color else None
            slack_url = args.slack_url

            slack.publish_custom_message(message=args.message,
                                         channel=channel,
                                         user=user,
                                         icon=icon,
                                         emoji=emoji,
                                         attachment_color=attachment_color,
                                         slack_url=slack_url)

        metrics.write_metric(task, args.action)

    elif task == 'sonar':
        sonar = SonarQube()
        sonar.scan_code()
        metrics.write_metric(task, args.action)

    elif task == 'artifactory':
        artifactory = ArtiFactory()

        if args.action == 'upload':
            artifactory.publish_build_artifact()
            metrics.write_metric(task, args.action)
        elif args.action == 'download':
            create_deployment_directory()
            artifactory.download_and_extract_artifacts_locally(
                BuildConfig.push_location + '/',
                extract=args.extract in ['y', 'yes', 'true'] or args.extract is None)

    elif task == 'cf':
        if BuildConfig.build_env_info['cf']:
            if 'version' not in args:
                commons.printMSG(clazz, method, 'Version number not passed in for deployment. Format is: '
                                                'v{major}.{minor}.{bug}+{buildnumber} ', 'ERROR')
                exit(1)

        cf = CloudFoundry()

        is_script_run_successful = True

        if 'script' in args and args.script is not None:
            commons.printMSG(clazz, method, 'Custom deploy script detected')
            cf.download_cf_cli()
            cf.download_custom_deployment_script(args.script)
            is_script_run_successful = cf.run_deployment_script(args.script)
        else:
            commons.printMSG(clazz, method, 'No custom deploy script passed in. Cloud Foundry detected in '
                                            'buildConfig. Calling standard CloudFoundry deployment.')

            # TODO make this configurable in case they are using
            create_deployment_directory()

            if BuildConfig.artifact_extension is None and BuildConfig.artifact_extensions is None:
                commons.printMSG(clazz, method, 'Attempting to retrieve and deploy from GitHub.')
                github.download_code_at_version()
            else:
                commons.printMSG(clazz, method, 'Attempting to retrieve and deploy from Artifactory.')
                artifactory = ArtiFactory()
                artifactory.download_and_extract_artifacts_locally(BuildConfig.push_location + '/')

            force = False
            if 'force' in args and args.force is not None and args.force.strip().lower() != 'false':
                force = True

            manifest = None
            if 'manifest' in args and args.manifest is not None:
                commons.printMSG(clazz, method, "Setting manifest to {}".format(args.manifest))
                manifest = args.manifest

            cf.deploy(force_deploy=force, manifest=manifest)

        commons.printMSG(clazz, method, 'Checking if we can attach the output to the CR')
        SIGNAL = 'publish-deploy-complete'
        sender = {}
        dispatcher.send(signal=SIGNAL, sender=sender)

        if is_script_run_successful is False:
            exit(1)

        metrics.write_metric(task, args.action)

    elif task == 'gcappengine':
        app_engine = GCAppEngine()

        is_script_run_successful = True

        if 'script' in args and args.script is not None:
            commons.printMSG(clazz, method, 'Custom deploy detected')
            app_engine.download_custom_deployment_script(args.script)
            is_script_run_successful = app_engine.run_deployment_script(args.script)
        else:
            commons.printMSG(clazz, method, 'No custom deploy script passed in. Calling standard AppEngine deployment.')
            artifactory = ArtiFactory()
            create_deployment_directory()

            if BuildConfig.artifact_extension is None and BuildConfig.artifact_extensions is None:
                commons.printMSG(clazz, method, 'Attempting to retrieve and deploy from GitHub.')
                github.download_code_at_version()
            else:
                commons.printMSG(clazz, method, 'Attempting to retrieve and deploy from Artifactory.')
                artifactory = ArtiFactory()
                artifactory.download_and_extract_artifacts_locally(BuildConfig.push_location + '/')

            app_yaml = None
            if 'app_yaml' in args and args.app_yaml is not None:
                commons.printMSG(clazz, method, "Setting app yaml to {}".format(args.app_yaml))
                app_yaml = args.app_yaml

            if 'promote' in args and args.promote != 'true':
                app_engine.deploy(app_yaml=app_yaml, promote=False)

        SIGNAL = 'publish-deploy-complete'
        sender = {}
        dispatcher.send(signal=SIGNAL, sender=sender)

        if is_script_run_successful is False:
            exit(1)

        metrics.write_metric(task, args.action)

    elif task == 'zipit':
        ZipIt('artifactory', args.zipfile, args.contents)

    else:
        for i in plugins:
            if i.parser == task:
                i.run_action(args)
                metrics.write_metric(task, args.action)
                continue
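# Entry-point guard, included here as an assumption: it allows the aggregator to be run
# directly (e.g. `python aggregator.py <task> <env>`) in addition to being invoked
# through whatever console-script entry point the THD-Flow package defines.
if __name__ == '__main__':
    main()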