def test_scan_retry_logic(monkeypatch):
    """Verify scan_code() exhausts its retries and exits when every scan
    submission raises, logging the max-retries error message."""
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    def _submit_scan_failure():
        # Stand-in for SonarQube._submit_scan that always blows up,
        # forcing the retry loop to run until it gives up.
        raise Exception

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with patch('flow.staticqualityanalysis.sonar.sonarmodule.SonarQube._submit_scan',
                   new=_submit_scan_failure):
            with patch('os.path.isfile', return_value=True):
                with pytest.raises(SystemExit):
                    # Minimal config: a runner jar plus a zero sleep interval
                    # so the retry loop finishes instantly.
                    _b = MagicMock(BuildConfig)
                    parser = configparser.ConfigParser()
                    parser.add_section('sonar')
                    parser.set('sonar', 'sonar_runner', 'sonar-runner-dist-2.4.jar')
                    parser.add_section('project')
                    parser.set('project', 'retry_sleep_interval', '0')
                    _b.settings = parser
                    _sonar = SonarQube(config_override=_b)
                    _sonar.scan_code()

    mock_printmsg_fn.assert_called_with(
        'SonarQube', 'scan_code',
        'Could not connect to Sonar. Maximum number of retries reached.',
        'ERROR')
def test_scan_code_missing_sonar_project_properties(monkeypatch):
    """Verify scan_code() exits with an explanatory error when no
    sonar-project.properties file exists in the project root."""
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with patch('flow.utils.commons.get_files_of_type_from_directory') as mock_getfiletypefromdir_fn:
            # isfile -> False simulates the missing properties file.
            with patch('os.path.isfile', return_value=False):
                with pytest.raises(SystemExit):
                    mock_getfiletypefromdir_fn.return_value = ['sonar-scanner.jar']
                    _b = MagicMock(BuildConfig)
                    parser = configparser.ConfigParser()
                    parser.add_section('sonar')
                    parser.set('sonar', 'sonar_runner', 'sonar-runner-dist-2.4.jar')
                    _b.settings = parser
                    _sonar = SonarQube(config_override=_b)
                    _sonar.scan_code()

    mock_printmsg_fn.assert_called_with(
        'SonarQube', '_submit_scan',
        'No sonar-project.properties file was found. Please include in the root of your project with a valid value for \'sonar.host.url\'',
        'ERROR')
def test_scan_code_single_jar_executable_path(monkeypatch):
    """Verify that, with no sonar_runner configured, scan_code() searches
    SONAR_HOME for jar files before bailing out."""
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with patch('flow.utils.commons.get_files_of_type_from_directory') as mock_getfiletypefromdir_fn:
            with pytest.raises(SystemExit):
                mock_getfiletypefromdir_fn.return_value = ['sonar-scanner.jar']
                # Note: no 'sonar_runner' key is set here on purpose.
                _b = MagicMock(BuildConfig)
                parser = configparser.ConfigParser()
                parser.add_section('sonar')
                _b.settings = parser
                _sonar = SonarQube(config_override=_b)
                _sonar.scan_code()

    mock_getfiletypefromdir_fn.assert_called_with('jar', 'FAKEHOME')
def test_scan_code_missing_executable_path(monkeypatch):
    """Verify scan_code() exits with an error when settings.ini defines
    no path to the sonar runner."""
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            # Config deliberately omits the 'sonar_runner' option.
            _b = MagicMock(BuildConfig)
            parser = configparser.ConfigParser()
            parser.add_section('sonar')
            _b.settings = parser
            _sonar = SonarQube(config_override=_b)
            _sonar.scan_code()

    mock_printmsg_fn.assert_called_with(
        'SonarQube', '_submit_scan',
        'Sonar runner undefined. Please define path to sonar runner in settings.ini.',
        'ERROR')
def test_scan_code_missing_sonar_home(monkeypatch):
    """Verify scan_code() exits with an error when the SONAR_HOME
    environment variable is not defined."""
    # Clear SONAR_HOME if the surrounding environment happens to set it.
    if os.getenv('SONAR_HOME'):
        monkeypatch.delenv('SONAR_HOME')

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            _b = MagicMock(BuildConfig)
            parser = configparser.ConfigParser()
            parser.add_section('sonar')
            parser.set('sonar', 'sonar_runner', 'sonar-runner-dist-2.4.jar')
            _b.settings = parser
            _sonar = SonarQube(config_override=_b)
            _sonar.scan_code()

    mock_printmsg_fn.assert_called_with(
        'SonarQube', '_submit_scan',
        '\'SONAR_HOME\' environment variable must be defined',
        'ERROR')
def main():
    """CLI entry point: parse arguments, resolve the build version, and
    dispatch to the task handler (github / tracker / slack / sonar /
    artifactory / cf / gcappengine / zipit / plugin).

    Side effects: configures BuildConfig, logs via commons.printMSG,
    writes task metrics to Graphite, and may call exit(1) on failure.
    """
    clazz = 'aggregator'
    method = 'main'
    tasks_requiring_github = []

    try:
        version = pkg_resources.require("THD-Flow")[0].version
    except Exception:  # was a bare except:; narrow to Exception so SystemExit/KeyboardInterrupt propagate
        version = 'UNKNOWN'

    parser = ArgumentParser(prog='version {} \n flow'.format(version))
    subparsers = parser.add_subparsers(help='Task types', dest='task')
    parser.add_argument(
        'env',
        help="An environment that is defined in buildConfig.json environments section.")
    parser.add_argument('-q', '--quiet',
                        help="Silence the logging to stdout",
                        default='False')
    load_task_parsers(subparsers)

    # Register external plugins; plugins that declare require_version=True
    # join the list of tasks that need a GitHub-resolved version number.
    plugins = []
    for i in pluginloader.get_plugins():
        plugin = pluginloader.load_plugin(i)
        new_parser = subparsers.add_parser(plugin.parser,
                                           formatter_class=RawTextHelpFormatter)
        plugin.register_parser(new_parser)
        plugins.append(plugin)
        if hasattr(plugin, 'require_version') and plugin.require_version is True:
            tasks_requiring_github.append(plugin.parser)

    args = parser.parse_args()
    task = args.task.lower()

    if 'quiet' in args and args.quiet.lower() in ['yes', 'true', 'off', 'y']:
        Commons.quiet = True
    elif task == 'github' and args.action == 'getversion' and args.output is None:
        # getversion with no output file prints the version to stdout, so
        # suppress normal logging to keep the output machine-readable.
        Commons.quiet = True

    commons.printMSG(clazz, method, "THD-Flow Version: {}".format(version))
    BuildConfig(args)

    if 'deploy_directory' in args and args.deploy_directory is not None:
        commons.printMSG(clazz, method,
                         "Setting deployment directory to {}".format(args.deploy_directory))
        BuildConfig.push_location = args.deploy_directory

    connect_error_dispatcher()
    github = None
    # TODO check if there are any registered metrics endpoints defined in settings.ini. This is optional.
    metrics = Graphite()
    commons.printMSG(clazz, method, "Task {}".format(task))

    tasks_requiring_github.extend(
        ['sonar', 'tracker', 'slack', 'artifactory', 'cf', 'zipit', 'gcappengine'])

    if task != 'github' and task in tasks_requiring_github:
        github = GitHub()
        if 'version' in args and args.version is not None and \
                len(args.version.strip()) > 0 and \
                args.version.strip().lower() != 'latest':
            # The only time a user should be targeting a snapshot environment and specifying a version
            # number without a "+" should be if they were manually versioning and passing in a base
            # version number. Although technically this could be used outside of the manually versioned
            # experience.
            #
            # i.e. flow cf deploy -v 1.0.1 development
            #      this would deploy the latest snapshot version of 1.0.1, 1.0.1+3
            # if however, they supplied the "+" it would target that specific snapshot version and not
            # the latest
            # i.e. flow cf deploy -v 1.0.1+2
            #      this would deploy the version 1.0.1+2 even though there is a snapshot available with +3
            if BuildConfig.artifact_category == 'snapshot' and '+' not in args.version:
                # Fixed: a tuple was previously passed as the message argument;
                # format the version into the message string instead.
                commons.printMSG(clazz, method,
                                 'Base version passed in. Looking for latest snapshot version '
                                 'determined by base {}'.format(args.version))
                # TODO it doesn't appear that this is actually returning the latest snapshot, but
                # instead returning what was passed in. even in the older version of code.
                BuildConfig.version_number = github.get_git_last_tag(args.version.strip())
            else:
                # Fixed: removed duplicated "BuildConfig.version_number =" assignment.
                BuildConfig.version_number = github.get_git_last_tag(args.version.strip())
            # validate after processing what the version_number is set to.
            commons.printMSG(clazz, method,
                             "Setting version number based on argument {}".format(
                                 BuildConfig.version_number))
        else:
            BuildConfig.version_number = github.get_git_last_tag()

    if task == 'github':
        github = GitHub()
        if args.action == 'version':
            if 'tracker' in BuildConfig.json_config:
                _tracker = Tracker()
                call_github_version(github, _tracker, file_path=args.output, args=args)
            else:
                call_github_version(github, None, file_path=args.output, args=args)
            metrics.write_metric(task, args.action)
        elif args.action == 'getversion':
            if 'output' in args:
                call_github_getversion(github, file_path=args.output)
            else:
                call_github_getversion(github)
            metrics.write_metric(task, args.action)
    elif task == 'tracker':
        tracker = Tracker()
        commits = get_git_commit_history(github, args)
        story_list = commons.extract_story_id_from_commit_messages(commits)
        tracker.tag_stories_in_commit(story_list)
        metrics.write_metric(task, args.action)
    elif task == 'slack':
        slack = Slack()
        if args.action == 'release':
            # TODO Check to see if they are using tracker first.
            tracker = Tracker()
            commits = get_git_commit_history(github, args)
            story_list = commons.extract_story_id_from_commit_messages(commits)
            story_details = tracker.get_details_for_all_stories(story_list)
            slack.publish_deployment(story_details)
        elif args.action == 'message':
            channel = args.channel if args.channel else None
            user = args.user if args.user else None
            icon = args.icon if args.icon else None
            emoji = args.emoji if args.emoji else None
            attachment_color = args.attachment_color if args.attachment_color else None
            slack_url = args.slack_url
            slack.publish_custom_message(message=args.message,
                                         channel=channel,
                                         user=user,
                                         icon=icon,
                                         emoji=emoji,
                                         attachment_color=attachment_color,
                                         slack_url=slack_url)
        metrics.write_metric(task, args.action)
    elif task == 'sonar':
        sonar = SonarQube()
        sonar.scan_code()
        metrics.write_metric(task, args.action)
    elif task == 'artifactory':
        artifactory = ArtiFactory()
        if args.action == 'upload':
            artifactory.publish_build_artifact()
            metrics.write_metric(task, args.action)
        elif args.action == 'download':
            create_deployment_directory()
            artifactory.download_and_extract_artifacts_locally(
                BuildConfig.push_location + '/',
                extract=args.extract in ['y', 'yes', 'true'] or args.extract is None)
    elif task == 'cf':
        if BuildConfig.build_env_info['cf']:
            if 'version' not in args:
                commons.printMSG(clazz, method,
                                 'Version number not passed in for deployment. Format is: v{'
                                 'major}.{minor}.{bug}+{buildnumber} ',
                                 'ERROR')
                exit(1)
        cf = CloudFoundry()
        is_script_run_successful = True
        if 'script' in args and args.script is not None:
            commons.printMSG(clazz, method, 'Custom deploy script detected')
            cf.download_cf_cli()
            cf.download_custom_deployment_script(args.script)
            is_script_run_successful = cf.run_deployment_script(args.script)
        else:
            commons.printMSG(clazz, method,
                             'No custom deploy script passed in. Cloud Foundry detected in '
                             'buildConfig. Calling standard CloudFoundry deployment.')
            # TODO make this configurable in case they are using
            create_deployment_directory()
            if BuildConfig.artifact_extension is None and BuildConfig.artifact_extensions is None:
                commons.printMSG(clazz, method,
                                 'Attempting to retrieve and deploy from GitHub.')
                github.download_code_at_version()
            else:
                commons.printMSG(clazz, method,
                                 'Attempting to retrieve and deploy from Artifactory.')
                artifactory = ArtiFactory()
                artifactory.download_and_extract_artifacts_locally(
                    BuildConfig.push_location + '/')
            force = False
            if 'force' in args and args.force is not None and \
                    args.force.strip().lower() != 'false':
                force = True
            manifest = None
            if 'manifest' in args and args.manifest is not None:
                commons.printMSG(clazz, method,
                                 "Setting manifest to {}".format(args.manifest))
                manifest = args.manifest
            cf.deploy(force_deploy=force, manifest=manifest)
        commons.printMSG(clazz, method, 'Checking if we can attach the output to the CR')
        SIGNAL = 'publish-deploy-complete'
        sender = {}
        dispatcher.send(signal=SIGNAL, sender=sender)
        if is_script_run_successful is False:
            exit(1)
        metrics.write_metric(task, args.action)
    elif task == 'gcappengine':
        app_engine = GCAppEngine()
        is_script_run_successful = True
        if 'script' in args and args.script is not None:
            commons.printMSG(clazz, method, 'Custom deploy detected')
            app_engine.download_custom_deployment_script(args.script)
            is_script_run_successful = app_engine.run_deployment_script(args.script)
        else:
            commons.printMSG(clazz, method,
                             'No custom deploy script passed in. Calling standard AppEngine deployment.')
            artifactory = ArtiFactory()
            create_deployment_directory()
            if BuildConfig.artifact_extension is None and BuildConfig.artifact_extensions is None:
                commons.printMSG(clazz, method,
                                 'Attempting to retrieve and deploy from GitHub.')
                github.download_code_at_version()
            else:
                commons.printMSG(clazz, method,
                                 'Attempting to retrieve and deploy from Artifactory.')
                artifactory = ArtiFactory()
                artifactory.download_and_extract_artifacts_locally(
                    BuildConfig.push_location + '/')
            app_yaml = None
            if 'app_yaml' in args and args.app_yaml is not None:
                commons.printMSG(clazz, method,
                                 "Setting app yaml to {}".format(args.app_yaml))
                app_yaml = args.app_yaml
            # Fixed: `args.promote is not 'true'` compared identity against a
            # string literal (unreliable across interpreters); use != instead.
            # NOTE(review): there is no else-branch deploying with promote=True
            # when args.promote == 'true' — looks like a gap; confirm intent.
            if 'promote' in args and args.promote != 'true':
                app_engine.deploy(app_yaml=app_yaml, promote=False)
        SIGNAL = 'publish-deploy-complete'
        sender = {}
        dispatcher.send(signal=SIGNAL, sender=sender)
        if is_script_run_successful is False:
            exit(1)
        metrics.write_metric(task, args.action)
    elif task == 'zipit':
        ZipIt('artifactory', args.zipfile, args.contents)
    else:
        # Fall through to any registered plugin matching the task name.
        for i in plugins:
            if i.parser == task:
                i.run_action(args)
                metrics.write_metric(task, args.action)