def test__get_artifactory_files_name_no_artifact_found(monkeypatch):
    """When the build directory yields no artifact of the configured
    extension, the lookup must exit and log an ERROR message."""
    build_config = MagicMock(BuildConfig)
    build_config.build_env_info = mock_build_config_dict['environments']['unittest']
    build_config.json_config = mock_build_config_dict
    build_config.project_name = mock_build_config_dict['projectInfo']['name']
    build_config.version_number = 'v1.0.0'
    build_config.artifact_extension = 'bob'
    build_config.artifact_extensions = None

    art = ArtiFactory(config_override=build_config)

    # Stand-in for commons.get_files_of_type_from_directory that finds
    # nothing (implicitly returns None), simulating an empty build directory.
    def _get_files_of_type_from_directory(type, directory):
        print(type)
        print(directory)

    with patch('flow.utils.commons.get_files_of_type_from_directory',
               new=_get_files_of_type_from_directory):
        with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
            monkeypatch.setenv('ARTIFACT_BUILD_DIRECTORY', 'mydir')

            with pytest.raises(SystemExit):
                art._get_artifactory_files_name_from_build_dir()

            print(str(mock_printmsg_fn.mock_calls))

        mock_printmsg_fn.assert_called_with(
            'ArtiFactory',
            '_get_artifactory_files_name_from_build_dir',
            'Failed to find artifact of type bob in mydir',
            'ERROR')
def test_get_artifact_url_specified_path_has_no_children():
    """An Artifactory listing with no children should raise
    ArtifactException and log that the artifact could not be located."""
    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        build_config = MagicMock(BuildConfig)
        build_config.build_env_info = mock_build_config_dict['environments']['unittest']
        build_config.json_config = mock_build_config_dict
        build_config.project_name = mock_build_config_dict['projectInfo']['name']
        build_config.version_number = 'v1.0.0'
        build_config.artifact_extension = 'bob'
        build_config.artifact_extensions = None

        art = ArtiFactory(config_override=build_config)

        test_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
        # Serve a storage listing whose "children" array is empty.
        responses.add(responses.GET,
                      test_url,
                      body=response_body_artifactory_no_children,
                      status=200,
                      content_type="application/json")

        with pytest.raises(ArtifactException):
            art.get_artifact_url()

        print(str(mock_printmsg_fn.mock_calls))

        mock_printmsg_fn.assert_called_with('ArtiFactory',
                                            'get_artifact_url',
                                            'Could not locate artifact bob',
                                            'ERROR')
def test_get_artifact_url_not_found():
    """A 404 from the Artifactory storage API should raise
    ArtifactException and log the failing path plus the raw response."""
    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        build_config = MagicMock(BuildConfig)
        build_config.build_env_info = mock_build_config_dict['environments']['unittest']
        build_config.json_config = mock_build_config_dict
        build_config.project_name = mock_build_config_dict['projectInfo']['name']
        build_config.version_number = 'v1.0.0'

        art = ArtiFactory(config_override=build_config)

        test_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
        responses.add(responses.GET,
                      test_url,
                      body=response_body_artifactory_not_found,
                      status=404,
                      content_type="application/json")

        with pytest.raises(ArtifactException):
            art.get_artifact_url()

        print(str(mock_printmsg_fn.mock_calls))

        # The logged message embeds the URL and the verbatim 404 body.
        mock_printmsg_fn.assert_called_with(
            'ArtiFactory',
            'get_artifact_url',
            'Unable to locate artifactory path https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0\r\n Response: \n{\n "errors" : [ {\n "status" : 404,\n "message" : "Unable to find item"\n } ]\n}\n',
            'ERROR')
def test_get_artifact_with_includePom():
    """With `includePom` enabled in the build config the artifact URL is
    still resolved to the matching child with the configured extension."""
    build_config = MagicMock(BuildConfig)
    build_config.build_env_info = mock_build_config_artifactoryConfig_include_POM['environments']['unittest']
    build_config.json_config = mock_build_config_artifactoryConfig_include_POM
    build_config.project_name = mock_build_config_artifactoryConfig_include_POM['projectInfo']['name']
    build_config.include_pom = mock_build_config_artifactoryConfig_include_POM['artifactoryConfig']['includePom']
    build_config.version_number = 'v1.0.0'
    build_config.artifact_extension = 'bob'
    build_config.artifact_extensions = None

    art = ArtiFactory(config_override=build_config)

    test_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
    responses.add(responses.GET,
                  test_url,
                  body=response_body_artifactory,
                  status=200,
                  content_type="application/json")

    resolved_url = art.get_artifact_url()

    assert resolved_url == "https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.bob"
def _ship_it_artifactory(self, name):
    """Publish the given file to Artifactory.

    `name` may carry a path; only the final path component is used as the
    artifact file name while the full value is passed as the source.
    """
    method = '_ship_it_artifactory'
    commons.printMSG(ZipIt.clazz, method, 'begin')

    artifact_file_name = name.split('/')[-1]
    ArtiFactory().publish(artifact_file_name, name)

    commons.printMSG(ZipIt.clazz, method, 'end')
def test_get_artifact_home_url_no_defined_version():
    """Resolving the artifact home URL without a version number should
    exit and log the version-verification ERROR."""
    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            build_config = MagicMock(BuildConfig)
            build_config.build_env_info = mock_build_config_dict['environments']['unittest']
            build_config.json_config = mock_build_config_dict
            build_config.project_name = mock_build_config_dict['projectInfo']['name']
            # No version number defined — this is the condition under test.
            build_config.version_number = None

            art = ArtiFactory(config_override=build_config)
            art.get_artifact_home_url()

        print(str(mock_printmsg_fn.mock_calls))

        mock_printmsg_fn.assert_called_with(
            'commons',
            'verify_version',
            'Version not defined. Is your repo tagged with a version number?',
            'ERROR')
def test_init_missing_artifactory(monkeypatch):
    """Constructing ArtiFactory from a config missing the 'artifact' key
    should exit and log a descriptive ERROR."""
    build_config = MagicMock(BuildConfig)
    build_config.json_config = mock_build_config_missing_artifact_dict

    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            ArtiFactory(config_override=build_config)

        mock_printmsg_fn.assert_called_with(
            'ArtiFactory',
            '__init__',
            "The build config associated with artifactory is missing key 'artifact'",
            'ERROR')
def test__get_artifactory_file_name_directory_not_defined(monkeypatch):
    """Without the ARTIFACT_BUILD_DIRECTORY environment variable the
    build-dir lookup should exit and log an ERROR explaining what is missing."""
    build_config = MagicMock(BuildConfig)
    build_config.build_env_info = mock_build_config_dict['environments']['unittest']
    build_config.json_config = mock_build_config_dict
    build_config.project_name = mock_build_config_dict['projectInfo']['name']
    build_config.version_number = 'v1.0.0'

    art = ArtiFactory(config_override=build_config)

    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        # Make sure the variable is absent for this test run.
        if os.getenv('ARTIFACT_BUILD_DIRECTORY'):
            monkeypatch.delenv('ARTIFACT_BUILD_DIRECTORY')

        with pytest.raises(SystemExit):
            art._get_artifactory_files_name_from_build_dir()

        print(str(mock_printmsg_fn.mock_calls))

        mock_printmsg_fn.assert_called_with(
            'ArtiFactory',
            '_get_artifactory_files_name_from_build_dir',
            "Missing artifact build path. Did you forget to define the environment variable 'ARTIFACT_BUILD_DIRECTORY'? ",
            'ERROR')
def test_get_artifact_url_failure():
    """A transport-level failure (HTTPError) while querying Artifactory
    should raise ArtifactException and log the failing path."""
    with patch('flow.utils.commons.printMSG') as mock_printmsg_fn:
        build_config = MagicMock(BuildConfig)
        build_config.build_env_info = mock_build_config_dict['environments']['unittest']
        build_config.json_config = mock_build_config_dict
        build_config.project_name = mock_build_config_dict['projectInfo']['name']
        build_config.version_number = 'v1.0.0'

        art = ArtiFactory(config_override=build_config)

        test_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
        # `responses` raises the given exception instead of returning a body.
        responses.add(responses.GET, test_url,
                      body=HTTPError('Something went wrong'))

        with pytest.raises(ArtifactException):
            art.get_artifact_url()

        print(str(mock_printmsg_fn.mock_calls))

        mock_printmsg_fn.assert_called_with(
            'ArtiFactory',
            'get_artifact_url',
            'Unable to locate artifactory path https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0',
            'ERROR')
def test_get_urls_of_artifacts():
    """When multiple artifact extensions are configured, one download URL
    per extension is returned, in configuration order."""
    build_config = MagicMock(BuildConfig)
    build_config.build_env_info = mock_build_config_dict['environments']['unittest']
    build_config.json_config = mock_build_config_dict
    build_config.project_name = mock_build_config_dict['projectInfo']['name']
    build_config.version_number = 'v1.0.0'
    build_config.artifact_extension = None
    build_config.artifact_extensions = ["bob", "vcl"]

    art = ArtiFactory(config_override=build_config)

    test_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
    responses.add(responses.GET,
                  test_url,
                  body=response_body_artifactory,
                  status=200,
                  content_type="application/json")

    expected_urls = [
        "https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.bob",
        "https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.vcl",
    ]
    assert art.get_urls_of_artifacts() == expected_urls
def main():
    """Entry point for the flow CLI.

    Parses arguments, initializes the build configuration, resolves the
    version number (from GitHub tags when the task needs it), then
    dispatches to the handler for the requested task.

    Fixes applied in review:
      * bare ``except:`` narrowed to ``except Exception:``
      * duplicated ``BuildConfig.version_number = BuildConfig.version_number = ...``
        collapsed to a single assignment
      * ``args.promote is not 'true'`` replaced with ``!=`` (identity
        comparison against a str literal is unreliable and warns)
      * snapshot log message was accidentally passed as a tuple; it is now
        formatted into a single string
    """
    clazz = 'aggregator'
    method = 'main'

    tasks_requiring_github = []

    # The version string is cosmetic; any failure to read the package
    # metadata falls back to a placeholder instead of aborting.
    try:
        version = pkg_resources.require("THD-Flow")[0].version
    except Exception:
        version = 'UNKNOWN'

    parser = ArgumentParser(prog='version {} \n flow'.format(version))
    subparsers = parser.add_subparsers(help='Task types', dest='task')
    parser.add_argument(
        'env',
        help="An environment that is defined in buildConfig.json environments section.")
    parser.add_argument('-q', '--quiet',
                        help="Silence the logging to stdout",
                        default='False')
    load_task_parsers(subparsers)

    # Register external plugins; remember which of them declare that they
    # need a resolved version number (and therefore a GitHub client).
    plugins = []
    for i in pluginloader.get_plugins():
        plugin = pluginloader.load_plugin(i)
        new_parser = subparsers.add_parser(plugin.parser,
                                           formatter_class=RawTextHelpFormatter)
        plugin.register_parser(new_parser)
        plugins.append(plugin)
        if hasattr(plugin, 'require_version') and plugin.require_version is True:
            tasks_requiring_github.append(plugin.parser)

    args = parser.parse_args()
    task = args.task.lower()

    if 'quiet' in args and args.quiet.lower() in ['yes', 'true', 'off', 'y']:
        Commons.quiet = True
    elif task == 'github' and args.action == 'getversion' and args.output is None:
        # getversion with no output file writes the version to stdout, so
        # silence all other logging.
        Commons.quiet = True
    # elif task == 'github' and args.action == 'version' and args.output is None:
    #     Commons.quiet = True

    commons.printMSG(clazz, method, "THD-Flow Version: {}".format(version))

    BuildConfig(args)

    if 'deploy_directory' in args and args.deploy_directory is not None:
        commons.printMSG(clazz, method,
                         "Setting deployment directory to {}".format(args.deploy_directory))
        BuildConfig.push_location = args.deploy_directory

    connect_error_dispatcher()

    github = None

    # TODO check if there are any registered metrics endpoints defined in settings.ini. This is optional.
    metrics = Graphite()

    commons.printMSG(clazz, method, "Task {}".format(task))

    tasks_requiring_github.extend(['sonar', 'tracker', 'slack', 'artifactory',
                                   'cf', 'zipit', 'gcappengine'])

    if task != 'github' and task in tasks_requiring_github:
        github = GitHub()

        if 'version' in args and args.version is not None and \
                len(args.version.strip()) > 0 and \
                args.version.strip().lower() != 'latest':
            # The only time a user should be targeting a snapshot environment and specifying a version
            # number without a "+" should be if they were manually versioning and passing in a base
            # version number. Although technically this could be used outside of the manually versioned
            # experience.
            #
            # i.e. flow cf deploy -v 1.0.1 development
            #      this would deploy the latest snapshot version of 1.0.1, 1.0.1+3
            # if however, they supplied the "+" it would target that specific snapshot version and not
            # the latest
            # i.e. flow cf deploy -v 1.0.1+2
            #      this would deploy the version 1.0.1+2 even though there is a snapshot available with +3
            if BuildConfig.artifact_category == 'snapshot' and '+' not in args.version:
                commons.printMSG(clazz, method,
                                 'Base version passed in. Looking for latest snapshot version '
                                 'determined by base {}'.format(args.version))
                # TODO it doesn't appear that this is actually returning the latest snapshot, but
                # instead returning what was passed in. even in the older version of code.
                BuildConfig.version_number = github.get_git_last_tag(args.version.strip())
            else:
                BuildConfig.version_number = github.get_git_last_tag(args.version.strip())

            # validate after processing what the version_number is set to.
            commons.printMSG(clazz, method,
                             "Setting version number based on "
                             "argument {}".format(BuildConfig.version_number))
        else:
            BuildConfig.version_number = github.get_git_last_tag()

    if task == 'github':
        github = GitHub()
        if args.action == 'version':
            if 'tracker' in BuildConfig.json_config:
                _tracker = Tracker()
                call_github_version(github, _tracker, file_path=args.output, args=args)
            else:
                call_github_version(github, None, file_path=args.output, args=args)
            metrics.write_metric(task, args.action)
        elif args.action == 'getversion':
            if 'output' in args:
                call_github_getversion(github, file_path=args.output)
            else:
                call_github_getversion(github)
            metrics.write_metric(task, args.action)
    elif task == 'tracker':
        tracker = Tracker()
        commits = get_git_commit_history(github, args)
        story_list = commons.extract_story_id_from_commit_messages(commits)
        tracker.tag_stories_in_commit(story_list)
        metrics.write_metric(task, args.action)
    elif task == 'slack':
        slack = Slack()
        if args.action == 'release':
            # TODO Check to see if they are using tracker first.
            tracker = Tracker()
            commits = get_git_commit_history(github, args)
            story_list = commons.extract_story_id_from_commit_messages(commits)
            story_details = tracker.get_details_for_all_stories(story_list)
            slack.publish_deployment(story_details)
        elif args.action == 'message':
            channel = args.channel if args.channel else None
            user = args.user if args.user else None
            icon = args.icon if args.icon else None
            emoji = args.emoji if args.emoji else None
            attachment_color = args.attachment_color if args.attachment_color else None
            slack_url = args.slack_url
            slack.publish_custom_message(message=args.message,
                                         channel=channel,
                                         user=user,
                                         icon=icon,
                                         emoji=emoji,
                                         attachment_color=attachment_color,
                                         slack_url=slack_url)
        metrics.write_metric(task, args.action)
    elif task == 'sonar':
        sonar = SonarQube()
        sonar.scan_code()
        metrics.write_metric(task, args.action)
    elif task == 'artifactory':
        artifactory = ArtiFactory()
        if args.action == 'upload':
            artifactory.publish_build_artifact()
            metrics.write_metric(task, args.action)
        elif args.action == 'download':
            create_deployment_directory()
            # Extraction defaults to on when --extract was not supplied.
            artifactory.download_and_extract_artifacts_locally(
                BuildConfig.push_location + '/',
                extract=args.extract in ['y', 'yes', 'true'] or args.extract is None)
    elif task == 'cf':
        if BuildConfig.build_env_info['cf']:
            if 'version' not in args:
                commons.printMSG(clazz, method,
                                 'Version number not passed in for deployment. Format is: v{'
                                 'major}.{minor}.{bug}+{buildnumber} ',
                                 'ERROR')
                exit(1)

            cf = CloudFoundry()
            is_script_run_successful = True

            if 'script' in args and args.script is not None:
                commons.printMSG(clazz, method, 'Custom deploy script detected')
                cf.download_cf_cli()
                cf.download_custom_deployment_script(args.script)
                is_script_run_successful = cf.run_deployment_script(args.script)
            else:
                commons.printMSG(clazz, method,
                                 'No custom deploy script passed in. Cloud Foundry detected in '
                                 'buildConfig. Calling standard CloudFoundry deployment.')
                # TODO make this configurable in case they are using
                create_deployment_directory()

                if BuildConfig.artifact_extension is None and BuildConfig.artifact_extensions is None:
                    commons.printMSG(clazz, method,
                                     'Attempting to retrieve and deploy from GitHub.')
                    github.download_code_at_version()
                else:
                    commons.printMSG(clazz, method,
                                     'Attempting to retrieve and deploy from Artifactory.')
                    artifactory = ArtiFactory()
                    artifactory.download_and_extract_artifacts_locally(
                        BuildConfig.push_location + '/')

                force = False
                if 'force' in args and args.force is not None and \
                        args.force.strip().lower() != 'false':
                    force = True

                manifest = None
                if 'manifest' in args and args.manifest is not None:
                    commons.printMSG(clazz, method,
                                     "Setting manifest to {}".format(args.manifest))
                    manifest = args.manifest

                cf.deploy(force_deploy=force, manifest=manifest)

                commons.printMSG(clazz, method,
                                 'Checking if we can attach the output to the CR')
                SIGNAL = 'publish-deploy-complete'
                sender = {}
                dispatcher.send(signal=SIGNAL, sender=sender)

            if is_script_run_successful is False:
                exit(1)

            metrics.write_metric(task, args.action)
    elif task == 'gcappengine':
        app_engine = GCAppEngine()
        is_script_run_successful = True

        if 'script' in args and args.script is not None:
            commons.printMSG(clazz, method, 'Custom deploy detected')
            app_engine.download_custom_deployment_script(args.script)
            is_script_run_successful = app_engine.run_deployment_script(args.script)
        else:
            commons.printMSG(clazz, method,
                             'No custom deploy script passed in. Calling standard AppEngine deployment.')
            artifactory = ArtiFactory()
            create_deployment_directory()

            if BuildConfig.artifact_extension is None and BuildConfig.artifact_extensions is None:
                commons.printMSG(clazz, method,
                                 'Attempting to retrieve and deploy from GitHub.')
                github.download_code_at_version()
            else:
                commons.printMSG(clazz, method,
                                 'Attempting to retrieve and deploy from Artifactory.')
                artifactory = ArtiFactory()
                artifactory.download_and_extract_artifacts_locally(
                    BuildConfig.push_location + '/')

            app_yaml = None
            if 'app_yaml' in args and args.app_yaml is not None:
                commons.printMSG(clazz, method,
                                 "Setting app yaml to {}".format(args.app_yaml))
                app_yaml = args.app_yaml

            # NOTE(review): when --promote is 'true' no deploy call is made on
            # this path — confirm whether a promoting deploy is missing here.
            if 'promote' in args and args.promote != 'true':
                app_engine.deploy(app_yaml=app_yaml, promote=False)

            SIGNAL = 'publish-deploy-complete'
            sender = {}
            dispatcher.send(signal=SIGNAL, sender=sender)

        if is_script_run_successful is False:
            exit(1)

        metrics.write_metric(task, args.action)
    elif task == 'zipit':
        ZipIt('artifactory', args.zipfile, args.contents)
    else:
        # Not a built-in task: hand off to whichever plugin registered it.
        for i in plugins:
            if i.parser == task:
                i.run_action(args)
                metrics.write_metric(task, args.action)