def test_creates_output(self):
    """The task should generate the documentation file and log its creation."""
    source = os.path.join(self.datadir, "TestLibrary.py")
    destination = os.path.join(self.tmpdir, "index.html")
    task = create_task(RobotLibDoc, {"path": source, "output": destination})
    task()
    assert f"created {destination}" in self.task_log["info"]
    assert os.path.exists(destination)
def test_pageobject(self):
    """Verify that we can parse a page object file"""
    source = os.path.join(self.datadir, "TestPageObjects.py")
    destination = os.path.join(self.tmpdir, "index.html")
    task = create_task(RobotLibDoc, {"path": source, "output": destination})
    task()
    assert f"created {destination}" in self.task_log["info"]
    assert os.path.exists(destination)
def test_error_option(self):
    """Verify that error option is properly translated to rflint options"""
    task = create_task(RobotLint, {
        "path": self.tmpdir,
        "error": "LineTooLong,FileTooLong"
    })
    # Each comma-separated rule name should become its own "--error <rule>" pair.
    expected = ["--error", "LineTooLong", "--error", "FileTooLong"]
    self.assertEqual(task._get_args(), expected)
def test_explicit_path(self):
    """Verify an explicit path is used when given

    This also verifies that the path is converted to a proper
    list if it is a comma-separated string.
    """
    task = create_task(RobotLint, {"path": "/tmp/tests,/tmp/resources"})
    expected_paths = ["/tmp/tests", "/tmp/resources"]
    assert task.options["path"] == expected_paths
def test_recursive_folder(self):
    """Verify that subdirectories are included when finding files"""
    nested_dir = tempfile.mkdtemp(dir=self.tmpdir)
    expected_file = self.make_test_file("", dir=nested_dir)
    task = create_task(RobotLint, {"path": self.tmpdir})
    self.assertEqual(sorted(task._get_files()), [expected_file])
def test_init_options__defaults(self):
    """Default output paths should be derived from the project name."""
    config = create_project_config()
    config.project__name = "Project"
    task = create_task(GenerateDataDictionary, {}, config)
    assert task.options["object_path"] == "Project Objects.csv"
    assert task.options["field_path"] == "Project Fields.csv"
def test_task_log(self):
    """Verify that the task prints out the name of the output file"""
    source = os.path.join(self.datadir, "TestLibrary.py")
    destination = os.path.join(self.tmpdir, "index.html")
    task = create_task(RobotLibDoc, {"path": source, "output": destination})
    task()
    assert f"created {destination}" in self.task_log["info"]
    assert os.path.exists(destination)
def test_task_log(self):
    """Verify that the task prints out the name of the output file"""
    libdoc_output = os.path.join(self.tmpdir, "index.html")
    libdoc_input = os.path.join(self.datadir, "TestLibrary.py")
    task = create_task(
        RobotLibDoc, {"path": libdoc_input, "output": libdoc_output}
    )
    task()
    assert os.path.exists(libdoc_output)
    assert f"created {libdoc_output}" in self.task_log["info"]
def test_init_options(self):
    """Options passed to the task should be preserved on task.options."""
    task = create_task(
        MetadataETLTask, {"api_version": "47.0", "managed": True}
    )
    assert task.options["api_version"] == "47.0"
    assert task.options["managed"]
def test_run_task__no_changes(self):
    """An empty query result should be reported as 'Found no changes.'"""
    task = create_task(ListChanges)
    task._init_task()
    task.logger = mock.Mock()
    task.tooling = mock.Mock()
    task.tooling.query_all.return_value = {"records": [], "totalSize": 0}
    task._run_task()
    self.assertIn("Found no changes.", task.logger.info.call_args[0][0])
def test_run_task__no_changes(self):
    """With no changed components, the task logs 'Found no changes.'"""
    task = create_task(ListChanges)
    task._init_task()
    for attr in ("tooling", "logger"):
        setattr(task, attr, mock.Mock())
    task.tooling.query_all.return_value = {"totalSize": 0, "records": []}
    task._run_task()
    logged_message = task.logger.info.call_args[0][0]
    self.assertIn("Found no changes.", logged_message)
def test_create_permset__alias(self):
    """Assign permission sets to a user looked up by alias.

    Mocks three API calls: the user query (by alias, including current
    assignments), the permission-set lookup by name, and the POST that
    creates the missing assignment.
    """
    task = create_task(
        AssignPermissionSets,
        {
            "api_names": "PermSet1,PermSet2",
            "user_alias": "test",
        },
    )
    # Call 1: user query filtered on Alias, with existing assignments subquery.
    responses.add(
        method="GET",
        url=f"{task.org_config.instance_url}/services/data/v50.0/query/?q=SELECT+Id%2C%28SELECT+PermissionSetId+FROM+PermissionSetAssignments%29+FROM+User+WHERE+Alias+%3D+%27test%27",
        status=200,
        json={
            "done": True,
            "totalSize": 1,
            "records": [
                {
                    "Id": "005000000000000",
                    "PermissionSetAssignments": {
                        "done": True,
                        "totalSize": 1,
                        "records": [{"PermissionSetId": "0PS000000000000"}],
                    },
                }
            ],
        },
    )
    # Call 2: resolve the requested permission set names to Ids.
    responses.add(
        method="GET",
        url=f"{task.org_config.instance_url}/services/data/v50.0/query/?q=SELECT+Id%2CName+FROM+PermissionSet+WHERE+Name+IN+%28%27PermSet1%27%2C+%27PermSet2%27%29",
        status=200,
        json={
            "done": True,
            "totalSize": 1,
            "records": [
                {
                    "Id": "0PS000000000000",
                    "Name": "PermSet1",
                },
                {
                    "Id": "0PS000000000001",
                    "Name": "PermSet2",
                },
            ],
        },
    )
    # Call 3: POST creating the assignment.
    responses.add(
        method="POST",
        url=f"{task.org_config.instance_url}/services/data/v50.0/sobjects/PermissionSetAssignment/",
        status=200,
        json={"id": "0Pa000000000001", "success": True, "errors": []},
    )
    task()
    assert len(responses.calls) == 3
    # Only PermSet2 (the one the user lacks) should be in the POST body.
    assert "0PS000000000001" in responses.calls[2].request.body
def test_load_settings_bad_yaml(self, isfile):
    """Loading settings from malformed YAML should raise CumulusCIException."""
    isfile.return_value = True
    task = create_task(LoadCustomSettings, {"settings_path": "test.yml"})
    task.sf = Mock()
    task.settings = {"Test__c": "Test"}
    with self.assertRaises(CumulusCIException):
        task._load_settings()
def test_init_options(self):
    """Namespace tokens in api_names should be injected during option init."""
    task = create_task(ConcreteMetadataSingleEntityTransformTask, {})
    option_values = {
        "managed": True,
        "api_version": "47.0",
        "namespace_inject": "test",
        "api_names": "%%%NAMESPACE%%%bar,foo",
    }
    task._init_options(option_values)
    assert task.api_names == {"test__bar", "foo"}
def test_folder_for_path(self):
    """Verify that if the path is a folder, we process all files in the folder"""
    expected_files = [
        self.make_test_file("", name="a", suffix=".resource"),
        self.make_test_file("", name="b", suffix=".robot"),
        self.make_test_file("", name="c", suffix=".robot"),
    ]
    task = create_task(RobotLint, {"path": self.tmpdir})
    self.assertEqual(sorted(task._get_files()), expected_files)
def test_create_permsetgroup(self):
    """Assign permission set groups, skipping ones the user already has.

    Mocks the user query, the group-name lookup, and the POST that
    creates the single missing assignment.
    """
    task = create_task(
        AssignPermissionSetGroups,
        {
            "api_names": "PermSetGroup1,PermSetGroup2",
        },
    )
    # Call 1: user query (by username) with current group assignments subquery.
    responses.add(
        method="GET",
        url=f"{task.org_config.instance_url}/services/data/v50.0/query/?q=SELECT+Id%2C%28SELECT+PermissionSetGroupId+FROM+PermissionSetAssignments%29+FROM+User+WHERE+Username+%3D+%27test-cci%40example.com%27",
        status=200,
        json={
            "done": True,
            "totalSize": 1,
            "records": [
                {
                    "Id": "005000000000000",
                    "PermissionSetAssignments": {
                        "done": True,
                        "totalSize": 1,
                        "records": [{"PermissionSetGroupId": "0PG000000000000"}],
                    },
                }
            ],
        },
    )
    # Call 2: resolve group developer names to Ids.
    responses.add(
        method="GET",
        url=f"{task.org_config.instance_url}/services/data/v50.0/query/?q=SELECT+Id%2CDeveloperName+FROM+PermissionSetGroup+WHERE+DeveloperName+IN+%28%27PermSetGroup1%27%2C+%27PermSetGroup2%27%29",
        status=200,
        json={
            "done": True,
            "totalSize": 1,
            "records": [
                {
                    "Id": "0PG000000000000",
                    "DeveloperName": "PermSetGroup1",
                },
                {
                    "Id": "0PG000000000001",
                    "DeveloperName": "PermSetGroup2",
                },
            ],
        },
    )
    # Call 3: POST creating the assignment.
    responses.add(
        method="POST",
        url=f"{task.org_config.instance_url}/services/data/v50.0/sobjects/PermissionSetAssignment/",
        status=200,
        json={"id": "0Pa000000000001", "success": True, "errors": []},
    )
    task()
    assert len(responses.calls) == 3
    # Only the group the user lacks should appear in the POST body.
    assert "0PG000000000001" in responses.calls[2].request.body
def test_sets_status(self):
    """The transform should flip the rule's isActive flag from true to false."""
    task = create_task(
        SetDuplicateRuleStatus, {"active": False, "api_version": "47.0"}
    )
    tree = metadata_tree.fromstring(DUPERULE_XML)
    # Sanity-check the fixture before transforming.
    assert tree.find("isActive").text == "true"
    transformed = task._transform_entity(tree, "DupeRule")
    assert transformed.find("isActive").text == "false"
def test_invalid_picklist_field(self):
    """The task should raise when the target field is not a picklist.

    Mocks the EntityDefinition and CustomField tooling queries; the field
    metadata reports a non-picklist type, so running the task must fail
    with a descriptive message before any update is attempted.
    """
    add_picklist_values_task = create_task(
        add_picklist_values.AddPicklistValues, task_options)
    base_tooling_url = self._get_base_tooling_url(
        add_picklist_values_task.org_config.instance_url)
    customobject_query_url = (
        base_tooling_url +
        "query/?q=SELECT+DeveloperName%2C+DurableId+FROM+EntityDefinition+"
        "WHERE+NamespacePrefix+%3D+%27hed%27+AND+DeveloperName+%3D+%27Attribute%27"
    )
    customfield_query_url = (
        base_tooling_url +
        "query/?q=SELECT+Id%2C+DeveloperName%2C+Metadata+FROM+CustomField+" +
        "WHERE+NamespacePrefix+%3D+%27hed%27+AND+DeveloperName+%3D+%27Attribute_Type%27+AND+TableEnumOrId+%3D+%27123%27"
    )
    # Field metadata deliberately reports a type other than "Picklist".
    expected_customfield_query_response_with_invalid_field = {
        "done": True,
        "records": [{
            "Id": 1,
            "Metadata": {
                "description": "Test",
                "inlineHelpText": "Test",
                "label": "Attribute Type",
                "type": "NotAPicklist",
            }
        }],
        "size": 1,
    }
    responses.add(method=responses.GET,
                  url=customobject_query_url,
                  match_querystring=True,
                  json=expected_customobject_query_response)
    responses.add(
        method=responses.GET,
        url=customfield_query_url,
        match_querystring=True,
        json=expected_customfield_query_response_with_invalid_field)
    with self.assertRaises(Exception) as ex:
        add_picklist_values_task()
    # Only the two queries should have been made -- no update call.
    self.assertEqual(2, len(responses.calls))
    self.assertEqual(customobject_query_url, responses.calls[0].request.url)
    self.assertEqual(customfield_query_url, responses.calls[1].request.url)
    self.assertEqual(
        'hed__Attribute_Type__c field is not a picklist field',
        str(ex.exception))
def test_glob_patterns(self):
    """Glob patterns in the path option should expand to the matching files."""
    destination = os.path.join(self.tmpdir, "index.html")
    pattern = os.path.join(self.datadir, "*Library.py")
    task = create_task(RobotLibDoc, {"path": pattern, "output": destination})
    task()
    assert os.path.exists(destination)
    matched = task.result["files"]
    assert len(matched) == 1
    assert matched[0] == os.path.join(self.datadir, "TestLibrary.py")
def test_raises_exception_bad_sharing_model(self):
    """An unrecognized sharing model value should fail option validation."""
    bad_options = {
        "managed": True,
        "api_version": "47.0",
        "api_names": "bar,foo",
        "org_wide_defaults": [
            {
                "api_name": "Account",
                "internal_sharing_model": "Nonsense",
                "external_sharing_model": "Private",
            },
            {"api_name": "Test__c"},
        ],
    }
    with pytest.raises(TaskOptionsError):
        create_task(SetOrgWideDefaults, bad_options)
def test_raises_for_empty_fields(self):
    """An empty fields list should raise TaskOptionsError."""
    with pytest.raises(TaskOptionsError):
        task = create_task(
            SetFieldHelpText, {"api_version": "47.0", "fields": []}
        )
        tree = metadata_tree.fromstring(OBJECT_XML)
        task._transform_entity(tree, "MyObject")
def test_process_sfdx_release(self, fromstring):
    """Only object/field metadata files in the sfdx layout should be parsed.

    The zip also contains a .gitignore and a stray top-level object file;
    neither should be read or processed.
    """
    task = create_task(
        GenerateDataDictionary,
        {
            "object_path": "object.csv",
            "field_path": "fields.csv",
            "release_prefix": "rel/",
        },
    )
    zip_file = Mock()
    zip_file.read.return_value = "<test></test>"
    zip_file.namelist.return_value = [
        "force-app/main/default/objects/Child__c.object-meta.xml",
        "force-app/main/default/objects/Child__c/fields/Lookup__c.field-meta.xml",
        "force-app/main/default/objects/Parent__c.object-meta.xml",
        ".gitignore",
        "test__c.object-meta.xml",
    ]
    task._process_object_element = Mock()
    task._process_field_element = Mock()
    task._process_sfdx_release(zip_file, LooseVersion("1.1"))
    # Only the three files under force-app/.../objects/ should be read.
    zip_file.read.assert_has_calls(
        [
            call("force-app/main/default/objects/Child__c.object-meta.xml"),
            call(
                "force-app/main/default/objects/Child__c/fields/Lookup__c.field-meta.xml"
            ),
            call("force-app/main/default/objects/Parent__c.object-meta.xml"),
        ]
    )
    # Object files go through _process_object_element...
    task._process_object_element.assert_has_calls(
        [
            call(
                "Child__c",
                metadata_tree.fromstring("<test></test>"),
                LooseVersion("1.1"),
            ),
            call(
                "Parent__c",
                metadata_tree.fromstring("<test></test>"),
                LooseVersion("1.1"),
            ),
        ]
    )
    # ...while the field file goes through _process_field_element.
    task._process_field_element.assert_has_calls(
        [
            call(
                "Child__c",
                metadata_tree.fromstring("<test></test>"),
                LooseVersion("1.1"),
            )
        ]
    )
def test_run_task__no_results(self):
    """With no failure records returned, no result file should be written."""
    task = create_task(ReportPushFailures, options={"request_id": "123"})

    def _init_class():
        task.sf = mock.Mock()
        task.sf.query.return_value = {
            "done": True,
            "records": [],
            "totalSize": 0,
        }

    task._init_class = _init_class
    task()
    self.assertFalse(os.path.isfile(task.options["result_file"]))
def test_process_arg_eq_zero(self, mock_subprocess_run, mock_robot_run):
    """Verify that setting the process option to 0 runs robot rather than pabot"""
    mock_robot_run.return_value = 0
    task = create_task(Robot, {"suites": "tests", "process": 0})
    task()
    # pabot (via subprocess) must not be used; robot is invoked in-process.
    mock_subprocess_run.assert_not_called()
    mock_robot_run.assert_called_once_with(
        "tests", listener=[], outputdir=".", variable=["org:test"])
def test_run_task__no_results(self):
    """An empty query result should leave no result file behind."""
    task = create_task(ReportPushFailures, options={"request_id": "123"})
    task.sf = mock.Mock()
    empty_result = {"done": True, "records": [], "totalSize": 0}
    task.sf.query.return_value = empty_result
    task()
    result_file = task.options["result_file"]
    self.assertFalse(os.path.isfile(result_file))
def test_suites(self, mock_robot_run):
    """Verify that passing a list of suites is handled properly"""
    mock_robot_run.return_value = 0
    task = create_task(Robot, {"process": 0, "suites": "tests,more_tests"})
    task()
    # Each comma-separated suite becomes its own positional argument.
    mock_robot_run.assert_called_once_with(
        "tests",
        "more_tests",
        listener=[],
        outputdir=".",
        variable=["org:test"],
    )
def test_create_permset_raises(self):
    """If any requested permission set name cannot be resolved, raise.

    Three names are requested but the lookup returns only two records,
    so the task must raise CumulusCIException before making any POST.
    """
    task = create_task(
        AssignPermissionSets,
        {
            "api_names": "PermSet1,PermSet2,PermSet3",
        },
    )
    # Call 1: user query with current assignments subquery.
    responses.add(
        method="GET",
        url=f"{task.org_config.instance_url}/services/data/v50.0/query/?q=SELECT+Id%2C%0A++++++++++++++++++++++++++++%28SELECT+PermissionSetId%0A+++++++++++++++++++++++++++++FROM+PermissionSetAssignments%29%0A++++++++++++++++++++++++FROM+User%0A++++++++++++++++++++++++WHERE+Username+%3D+%27test-cci%40example.com%27",
        status=200,
        json={
            "done": True,
            "totalSize": 1,
            "records": [{
                "Id": "005000000000000",
                "PermissionSetAssignments": {
                    "done": True,
                    "totalSize": 1,
                    "records": [{
                        "PermissionSetId": "0PS000000000000"
                    }],
                },
            }],
        },
    )
    # Call 2: lookup returns only two of the three requested names.
    responses.add(
        method="GET",
        url=f"{task.org_config.instance_url}/services/data/v50.0/query/?q=SELECT+Id%2C+Name+FROM+PermissionSet+WHERE+Name+IN+%28%27PermSet1%27%2C+%27PermSet2%27%2C+%27PermSet3%27%29",
        status=200,
        json={
            "done": True,
            "totalSize": 1,
            "records": [
                {
                    "Id": "0PS000000000000",
                    "Name": "PermSet1",
                },
                {
                    "Id": "0PS000000000001",
                    "Name": "PermSet2",
                },
            ],
        },
    )
    with pytest.raises(CumulusCIException):
        task()
def test_comma_separated_list_of_files(self):
    """Verify that we properly parse a comma-separated list of files"""
    first = os.path.join(self.datadir, "TestLibrary.py")
    second = os.path.join(self.datadir, "TestResource.robot")
    destination = os.path.join(self.tmpdir, "index.html")
    task = create_task(
        RobotLibDoc, {"path": f"{first},{second}", "output": destination}
    )
    task()
    assert os.path.exists(destination)
    assert len(task.result["files"]) == 2
def test_remove_duplicates(self):
    """Listing the same glob twice should yield each matching file only once."""
    destination = os.path.join(self.tmpdir, "index.html")
    pattern = os.path.join(self.datadir, "*Library.py")
    task = create_task(
        RobotLibDoc, {"path": [pattern, pattern], "output": destination}
    )
    task()
    matched = task.result["files"]
    assert len(matched) == 1
    assert matched[0] == os.path.join(self.datadir, "TestLibrary.py")
def test_comma_separated_list_of_files(self):
    """Verify that we properly parse a comma-separated list of files"""
    sources = [
        os.path.join(self.datadir, "TestLibrary.py"),
        os.path.join(self.datadir, "TestResource.robot"),
    ]
    destination = os.path.join(self.tmpdir, "index.html")
    task = create_task(
        RobotLibDoc, {"path": ",".join(sources), "output": destination}
    )
    task()
    assert os.path.exists(destination)
    assert len(task.result["files"]) == 2
def test_run_task__no_changes(self):
    """With no retrievable changes, the task logs 'No changes to retrieve'."""
    with temporary_dir() as path:
        task = create_task(RetrieveChanges, {"path": path})
        task._init_task()
        logged = []
        task.tooling = mock.Mock()
        task.tooling.query_all.return_value = {"records": [], "totalSize": 0}
        task.logger = mock.Mock()
        task.logger.info = logged.append
        task._run_task()
        self.assertIn("No changes to retrieve", logged)
def test_filter_changes__include(self):
    """include keeps matching records; exclude wins when both patterns match."""
    def change(name):
        return {
            "MemberType": "CustomObject",
            "MemberName": name,
            "RevisionNum": 1,
        }

    foo = change("foo__c")
    bar = change("bar__c")
    foobar = change("foobar__c")
    task = create_task(ListChanges, {"include": "foo", "exclude": "bar"})
    filtered = task._filter_changes({"records": [foo, bar, foobar]})
    self.assertEqual([foo], filtered)
def test_run_task(self, ):
    """End-to-end run: ignored errors are filtered, failures are joined to
    org data, and the remaining rows are written to a CSV result file."""
    task = create_task(
        ReportPushFailures,
        options={
            "request_id": "123",
            "ignore_errors": "IgnoreMe"
        },
    )

    def _init_class():
        task.sf = mock.Mock()
        # First query returns the push job errors; second returns org details.
        task.sf.query.side_effect = [
            {
                "done": True,
                "totalSize": 2,
                "records": [
                    error_record(ErrorTitle="IgnoreMe"),
                    error_record(gack=True),
                    {
                        "attributes": {
                            "type": "job"
                        },
                        "SubscriberOrganizationKey": "00Dxxx000000001",
                    },
                ],
            },
            {
                "done": True,
                "totalSize": 1,
                "records": [{
                    "OrgKey": "00Dxxx000000001",
                    "OrgName": "Test Org",
                    "OrgType": "Sandbox",
                    "OrgStatus": "Demo",
                    "InstanceName": "CSxx",
                }],
            },
        ]

    task._init_class = _init_class
    with temporary_dir():
        task()
        self.assertEqual(2, task.sf.query.call_count)
        self.assertTrue(
            os.path.isfile(task.result), "the result file does not exist")
        with open(task.result, "r") as f:
            reader = csv.DictReader(f)
            rows = list(reader)
        self.assertEqual(len(rows), 2)
        # The gack record should carry the stacktrace id extracted from it.
        self.assertEqual(rows[1]["Stacktrace Id"], "-4532")
def test_run_task__no_changes(self):
    """An empty change set should log 'No changes to retrieve'."""
    with temporary_dir() as workdir:
        task = create_task(RetrieveChanges, {"path": workdir})
        task._init_task()
        task.tooling = mock.Mock()
        task.tooling.query_all.return_value = {"totalSize": 0, "records": []}
        captured_messages = []
        task.logger = mock.Mock()
        task.logger.info = captured_messages.append
        task._run_task()
        self.assertIn("No changes to retrieve", captured_messages)
def test_output_directory_not_exist(self):
    """Verify we catch an error if the output directory doesn't exist"""
    path = os.path.join(self.datadir, "TestLibrary.py")
    # "bogus" does not exist under tmpdir, so the output file can't be created.
    output = os.path.join(self.tmpdir, "bogus", "index.html")
    task = create_task(RobotLibDoc, {"path": path, "output": output})
    # on windows, the output path may have backslashes which needs
    # to be protected in the expected regex
    expected = r"Unable to create output file '{}' (.*)".format(
        re.escape(output))
    with pytest.raises(TaskOptionsError, match=expected):
        task()
def test_filter_changes__include(self):
    """Only records matching include and not matching exclude survive."""
    records = [
        {"MemberType": "CustomObject", "MemberName": name, "RevisionNum": 1}
        for name in ("foo__c", "bar__c", "foobar__c")
    ]
    task = create_task(ListChanges, {"include": "foo", "exclude": "bar"})
    filtered = task._filter_changes({"records": records})
    # foobar__c matches both patterns; exclude takes precedence.
    self.assertEqual([records[0]], filtered)
def test_pdb_arg(self, patch_statusreporter):
    """The pdb option controls whether the status reporter gets patched."""
    # "suites" is required, or the task will raise an exception.
    create_task(Robot, {"suites": "test", "pdb": "False"})
    patch_statusreporter.assert_not_called()
    create_task(Robot, {"suites": "test", "pdb": "True"})
    patch_statusreporter.assert_called_once()
def test_run_task__snapshot(self):
    """With snapshot=True, changes are logged and max revision is persisted.

    A second run after the snapshot should then report no changes.
    """
    with temporary_dir():
        task = create_task(ListChanges, {"snapshot": True})
        task._init_task()
        task.tooling = mock.Mock()
        messages = []
        task.logger = mock.Mock()
        task.logger.info = messages.append
        task.tooling.query_all.return_value = {
            "totalSize": 1,
            "records": [
                {
                    "MemberType": "CustomObject",
                    "MemberName": "Test__c",
                    "RevisionNum": 1,
                }
            ],
        }
        task._run_task()
        # Snapshotting should write the org's maxrevision file.
        self.assertTrue(
            os.path.exists(
                os.path.join(
                    ".sfdx", "orgs", "*****@*****.**", "maxrevision.json"
                )
            )
        )
        self.assertIn("CustomObject: Test__c", messages)
        # Second run: no records returned, so no changes are reported.
        task = create_task(ListChanges)
        task._init_task()
        task.tooling = mock.Mock()
        task.logger = mock.Mock()
        task.logger.info = messages.append
        task.tooling.query_all.return_value = {"totalSize": 0, "records": []}
        task._run_task()
        self.assertIn("Found no changes.", messages)
def setUp(self):
    """Generate keyword docs once and parse the HTML for the tests to inspect."""
    self.tmpdir = tempfile.mkdtemp(dir=".")
    self.datadir = os.path.dirname(__file__)
    sources = [
        os.path.join(self.datadir, "TestLibrary.py"),
        os.path.join(self.datadir, "TestResource.robot"),
    ]
    output = os.path.join(self.tmpdir, "index.html")
    options = {
        "path": sources,
        "output": output,
        "title": "Keyword Documentation, yo.",
    }
    self.task = create_task(RobotLibDoc, options)
    self.task()
    # Parse the generated HTML once; tests inspect the <body> element.
    docroot = ET.parse(output).getroot()
    self.html_body = docroot.find("body")
def test_run_task(self,):
    """Ignored errors are filtered out, failures are joined to org data,
    and the remaining rows land in the CSV result file."""
    task = create_task(
        ReportPushFailures,
        options={"request_id": "123", "ignore_errors": "IgnoreMe"},
    )

    def _init_class():
        task.sf = mock.Mock()
        # First query: push job errors. Second query: org details.
        task.sf.query.side_effect = [
            {
                "done": True,
                "totalSize": 2,
                "records": [
                    error_record(ErrorTitle="IgnoreMe"),
                    error_record(gack=True),
                    {
                        "attributes": {"type": "job"},
                        "SubscriberOrganizationKey": "00Dxxx000000001",
                    },
                ],
            },
            {
                "done": True,
                "totalSize": 1,
                "records": [
                    {
                        "OrgKey": "00Dxxx000000001",
                        "OrgName": "Test Org",
                        "OrgType": "Sandbox",
                        "OrgStatus": "Demo",
                        "InstanceName": "CSxx",
                    }
                ],
            },
        ]

    task._init_class = _init_class
    with temporary_dir():
        task()
        self.assertEqual(2, task.sf.query.call_count)
        self.assertTrue(
            os.path.isfile(task.result), "the result file does not exist"
        )
        with open(task.result, "r") as f:
            reader = csv.DictReader(f)
            rows = list(reader)
        self.assertEqual(len(rows), 2)
        # The gack record carries the stacktrace id extracted from it.
        self.assertEqual(rows[1]["Stacktrace Id"], "-4532")
def test_run_task__merge_changes(self):
    # If there is already an existing package,
    # we should add to it rather than overwriting it.
    with temporary_dir() as path:
        # Seed the directory with a package.xml already listing Object1.
        with open("package.xml", "w") as f:
            f.write(
                """<?xml version="1.0" encoding="UTF-8"?>
<Package xmlns="http://soap.sforce.com/2006/04/metadata">
    <types>
        <members>Object1</members>
        <name>CustomObject</name>
    </types>
</Package>
"""
            )
        task = create_task(RetrieveChanges, {"path": path})
        task._init_task()
        task.tooling = mock.Mock()
        # The only pending change is Object2.
        task.tooling.query_all.return_value = {
            "totalSize": 1,
            "records": [
                {
                    "MemberType": "CustomObject",
                    "MemberName": "Object2",
                    "RevisionNum": 1,
                }
            ],
        }
        task.api_class = mock.Mock()
        task._run_task()
        # The package.xml passed to the retrieve API is its second positional arg.
        package_xml = task.api_class.call_args[0][1]
        self.maxDiff = None
        # Object1 (pre-existing) and Object2 (new) should both be present.
        self.assertEqual(
            """<?xml version="1.0" encoding="UTF-8"?>
<Package xmlns="http://soap.sforce.com/2006/04/metadata">
    <types>
        <members>Object1</members>
        <members>Object2</members>
        <name>CustomObject</name>
    </types>
    <version>45.0</version>
</Package>""",
            package_xml,
        )
def test_run_task(self):
    """Excluded members should not appear in the logged change list."""
    task = create_task(ListChanges, {"exclude": "Ignore"})
    task._init_task()
    task.tooling = mock.Mock()
    task.logger = mock.Mock()
    records = [
        {
            "MemberType": "CustomObject",
            "MemberName": "Test__c",
            "RevisionNum": 1,
        },
        {
            "MemberType": "CustomObject",
            "MemberName": "Ignored__c",
            "RevisionNum": 2,
        },
    ]
    task.tooling.query_all.return_value = {"totalSize": 1, "records": records}
    task._run_task()
    self.assertIn("CustomObject: Test__c", task.logger.info.call_args[0][0])
def test_run_task(self):
    """Retrieved changes should be extracted and listed in package.xml."""
    with temporary_dir() as path:
        task = create_task(RetrieveChanges, {"path": path, "include": "Test"})
        task._init_task()
        task.tooling = mock.Mock()
        task.tooling.query_all.return_value = {
            "totalSize": 1,
            "records": [
                {
                    "MemberType": "CustomObject",
                    "MemberName": "Test__c",
                    "RevisionNum": 1,
                }
            ],
        }
        # Fake the metadata API: it "returns" a zip containing one object file.
        zf = zipfile.ZipFile(io.BytesIO(), "w")
        zf.writestr("objects/Test__c.object", "<root />")
        task.api_class = mock.Mock(return_value=mock.Mock(return_value=zf))
        task._run_task()
        # The generated package.xml should list the retrieved member.
        with open(os.path.join(path, "package.xml"), "r") as f:
            package_xml = f.read()
        self.assertIn("<members>Test__c</members>", package_xml)
def test_run_task(self):
    """Running SnapshotChanges should persist the org's max revision file."""
    with temporary_dir():
        org_config = OrgConfig(
            {
                "username": "******",
                "scratch": True,
                "instance_url": "https://test.salesforce.com",
                "access_token": "TOKEN",
            },
            "test",
        )
        task = create_task(SnapshotChanges, org_config=org_config)
        task._init_task()
        task.tooling.query = mock.Mock(return_value={"records": [{"num": 1}]})
        task._run_task()
        # The snapshot is written under .sfdx/orgs/<username>/maxrevision.json.
        self.assertTrue(
            os.path.exists(
                os.path.join(
                    ".sfdx", "orgs", "*****@*****.**", "maxrevision.json"
                )
            )
        )
def test_run_task(self, robot_run):
    """A nonzero return code from robot should raise RobotTestFailure."""
    robot_run.return_value = 1
    task = create_task(Robot, {"pdb": True, "suites": "tests"})
    with self.assertRaises(RobotTestFailure):
        task()
def test_process_output__json_parse_error(self):
    """Unparseable JSON output should be reported via logger.error."""
    task = create_task(SFDXJsonTask)
    fake_logger = mock.Mock()
    task.logger = fake_logger
    task._process_output("{")
    fake_logger.error.assert_called_once()
def test_process_output(self):
    """Valid JSON output should be logged at info level."""
    task = create_task(SFDXJsonTask)
    fake_logger = mock.Mock()
    task.logger = fake_logger
    task._process_output("{}")
    fake_logger.info.assert_called_once_with("JSON = {}")
def test_run_task(self, testdoc):
    """The task should delegate to testdoc with the path and output options."""
    task = create_task(RobotTestDoc, {"output": "out", "path": "."})
    task()
    testdoc.assert_called_once_with(".", "out")
def test_validate_filenames(self):
    """Verify that we catch bad filenames early"""
    destination = os.path.join(self.tmpdir, "index.html")
    expected = "Unable to find the following input files: 'bogus.py', 'bogus.robot'"
    with pytest.raises(TaskOptionsError, match=expected):
        create_task(
            RobotLibDoc, {"path": "bogus.py,bogus.robot", "output": destination}
        )
def test_freeze(self):
    """freeze() should produce no steps for SnapshotChanges."""
    task = create_task(SnapshotChanges)
    frozen_steps = task.freeze(None)
    self.assertEqual([], frozen_steps)
def test_get_command(self):
    """The default sfdx command line should include the --json flag."""
    task = create_task(SFDXJsonTask)
    self.assertEqual("sfdx force:mdapi:deploy --json", task._get_command())