def test_basic_run(tmpdir):
    """End-to-end check of the watdo CLI: a task can be created through the
    configured EDITOR, and targeting a non-existent calendar aborts."""
    tasks_dir = tmpdir.mkdir('tasks')
    default_cal = tasks_dir.mkdir('default')
    tmp_dir = tmpdir.mkdir('tmp')
    config = tmpdir.join('config')
    # Minimal config; confirmations disabled so the CLI runs non-interactively.
    config.write(
        '[watdo]\n'
        'confirmation = False\n'
        'path = {path}\n'
        'tmppath = {tmppath}'.format(
            path=str(tasks_dir),
            tmppath=str(tmp_dir)
        )
    )
    runner = CliRunner()
    # EDITOR is a shell command that appends a task line to the temp file
    # watdo opens, simulating the user typing a task into their editor.
    result = runner.invoke(cli.main, env={
        'WATDO_CONFIG': str(config),
        'EDITOR': 'echo "My cool task @default" >> '
    }, catch_exceptions=False)
    assert not result.exception
    # Exactly one task file should now exist in the default calendar.
    task, = default_cal.listdir()
    assert 'My cool task' in task.read()
    # Referencing a calendar whose directory does not exist should fail;
    # input 'n' declines the confirmation prompt.
    result = runner.invoke(cli.main, env={
        'WATDO_CONFIG': str(config),
        'EDITOR': 'echo "Invalid task @wrongcalendar" >> '
    }, catch_exceptions=False, input='n\n')
    assert result.exception
    assert ('Calendars are not explicitly created. '
            'Please create the directory {} yourself.'
            .format(str(tasks_dir.join('wrongcalendar')))) \
        in result.output.splitlines()
def test_init_without_cloud_server(
        config, push_design_documents, get_or_create, generate_config):
    """`init` bootstraps the local server (creating DBs and pushing design
    documents), `show` only works once a server is selected, and re-running
    `init` against a different db_url must fail."""
    runner = CliRunner()
    generate_config.return_value = {"test": {"test": "test"}}
    # Fake the CouchDB _config endpoint so init can read/write settings
    # without a real server.
    httpretty.register_uri(
        httpretty.GET,
        "http://localhost:5984/_config/test/test",
        body='"test_val"'
    )
    httpretty.register_uri(
        httpretty.PUT,
        "http://localhost:5984/_config/test/test"
    )
    # Show -- Should throw an error because no local server is selected
    res = runner.invoke(show)
    assert res.exit_code, res.output
    # Init -- Should work and push the design documents but not replicate
    # anything
    res = runner.invoke(init)
    assert res.exit_code == 0, res.exception or res.output
    assert get_or_create.call_count == len(all_dbs)
    assert push_design_documents.call_count == 1
    push_design_documents.reset_mock()
    # Show -- Should work
    res = runner.invoke(show)
    assert res.exit_code == 0, res.exception or res.output
    # Init -- Should throw an error because a different database is already
    # selected
    res = runner.invoke(init, ["--db_url", "http://test.test:5984"])
    assert res.exit_code, res.output
def test_returned_filesize():
    """The search summary line must report scene count and total size."""
    runner = CliRunner()

    def search_args(start, end):
        # Build the common invocation, varying only the sensing period.
        return ['search',
                environ.get('SENTINEL_USER'),
                environ.get('SENTINEL_PASSWORD'),
                'tests/map.geojson',
                '--url', 'https://scihub.copernicus.eu/dhus/',
                '-s', start, '-e', end,
                '-q', 'producttype=GRD']

    result = runner.invoke(cli, search_args('20141205', '20141208'))
    summary = result.output.split("\n")[-2]
    assert summary == "1 scenes found with a total size of 0.50 GB"

    result = runner.invoke(cli, search_args('20140101', '20141231'))
    summary = result.output.split("\n")[-2]
    assert summary == "20 scenes found with a total size of 11.06 GB"
def test_instance_create_created_instance():
    """Test ``instance create`` command checking the resulting instance."""
    runner = CliRunner()

    # Missing arg: creating an instance without a site name must fail.
    result = runner.invoke(instance, ['create'])
    assert result.exit_code != 0

    # With arg
    with runner.isolated_filesystem():
        site_name = 'mysite2'
        result = runner.invoke(instance, ['create', site_name])
        assert result.exit_code == 0

        cwd = os.getcwd()
        path_to_folder = os.path.join(cwd, site_name)
        os.chdir(path_to_folder)
        try:
            # Install the generated package (plus devel requirements when
            # the test environment requests them).
            if os.getenv('REQUIREMENTS') == 'devel':
                assert call(
                    ['pip', 'install', '-r', 'requirements-devel.txt']
                ) == 0
            assert call(['pip', 'install', '-e', '.']) == 0
            # The instance must be an installed distribution whose factory
            # builds a Flask app named after the site, with a working CLI.
            assert pkg_resources.get_distribution(site_name)
            app = importlib.import_module(site_name + '.factory').create_app()
            with app.app_context():
                assert app.name == site_name
            assert call([site_name, '--help']) == 0
            assert call(['pip', 'uninstall', site_name, '-y']) == 0
        finally:
            # Restore the working directory even when an assertion fails,
            # so subsequent tests are not affected by a dangling chdir
            # (the original only restored it on full success).
            os.chdir(cwd)
def test_cli():
    """Smoke-test the search CLI with several argument combinations."""
    runner = CliRunner()
    common = ['search',
              environ.get('SENTINEL_USER'),
              environ.get('SENTINEL_PASSWORD'),
              'tests/map.geojson']
    variants = [
        [],
        ['--url', 'https://scihub.copernicus.eu/dhus/'],
        ['-q', 'producttype=GRD,polarisationmode=HH'],
    ]
    # Each variant appends its extra options to the common invocation and
    # must exit successfully.
    for extra in variants:
        result = runner.invoke(cli, common + extra)
        assert result.exit_code == 0
def test_console(monkeypatch):
    """The `console` command prints EC2 console output whether the instance
    is addressed via definition file + version, IP address or instance id."""
    stack = MagicMock(stack_name='test-1')
    inst = MagicMock()
    inst.tags = {'aws:cloudformation:stack-name': 'test-1'}
    ec2 = MagicMock()
    ec2.get_only_instances.return_value = [inst]
    ec2.get_console_output.return_value.output = b'**MAGIC-CONSOLE-OUTPUT**'
    # Stub all boto region connections so no AWS calls are made.
    monkeypatch.setattr('boto.ec2.connect_to_region', lambda x: ec2)
    monkeypatch.setattr('boto.cloudformation.connect_to_region',
                        lambda x: MagicMock(list_stacks=lambda stack_status_filters: [stack]))
    monkeypatch.setattr('boto.iam.connect_to_region', lambda x: MagicMock())
    runner = CliRunner()
    data = {'SenzaInfo': {'StackName': 'test'}}
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump(data, fd)
        # Lookup via definition file + stack version.
        result = runner.invoke(cli, ['console', 'myapp.yaml',
                                     '--region=myregion', '1'],
                               catch_exceptions=False)
        assert '**MAGIC-CONSOLE-OUTPUT**' in result.output
        # Unknown stack reference: no output at all.
        result = runner.invoke(cli, ['console', 'foobar', '--region=myregion'],
                               catch_exceptions=False)
        assert '' == result.output
        # Lookup by (private) IP address.
        result = runner.invoke(cli, ['console', '172.31.1.2',
                                     '--region=myregion'],
                               catch_exceptions=False)
        assert '**MAGIC-CONSOLE-OUTPUT**' in result.output
        # Lookup by instance id.
        result = runner.invoke(cli, ['console', 'i-123', '--region=myregion'],
                               catch_exceptions=False)
        assert '**MAGIC-CONSOLE-OUTPUT**' in result.output
def test_delete(monkeypatch):
    """`delete` removes a single matching stack, refuses to delete multiple
    matching stacks, and proceeds on multiple matches with --force."""
    cf = MagicMock()
    stack = MagicMock(stack_name='test-1')
    cf.list_stacks.return_value = [stack]
    # Stub boto so no AWS calls happen.
    monkeypatch.setattr('boto.cloudformation.connect_to_region', lambda x: cf)
    monkeypatch.setattr('boto.iam.connect_to_region', lambda x: MagicMock())
    runner = CliRunner()
    data = {'SenzaInfo': {'StackName': 'test'}}
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump(data, fd)
        # One matching stack: deletion proceeds.
        result = runner.invoke(cli, ['delete', 'myapp.yaml',
                                     '--region=myregion', '1'],
                               catch_exceptions=False)
        assert 'OK' in result.output
        # Two matching stacks: refuse without --force.
        cf.list_stacks.return_value = [stack, stack]
        result = runner.invoke(cli, ['delete', 'myapp.yaml',
                                     '--region=myregion'],
                               catch_exceptions=False)
        assert 'Please use the "--force" flag if you really want to delete multiple stacks' in result.output
        # --force overrides the multiple-stack safety check.
        result = runner.invoke(cli, ['delete', 'myapp.yaml',
                                     '--region=myregion', '--force'],
                               catch_exceptions=False)
        assert 'OK' in result.output
class TestDeployReqs(unittest.TestCase):
    """Tests for the deploy-reqs command, which builds and deploys an egg
    for every requirement listed in a requirements file."""

    def setUp(self):
        self.runner = CliRunner()
        self.conf = mock_conf(self)

    @unittest.skip('flaky')
    def test_can_decompress_downloaded_packages_and_call_deploy_reqs(self):
        requirements_file = self._write_tmp_requirements_file()
        with mock.patch('shub.utils.build_and_deploy_egg') as m:
            self.runner.invoke(
                deploy_reqs.cli, ('-r', requirements_file),
            )
        # One build-and-deploy call per requirement written below.
        self.assertEqual(m.call_count, 2)
        for args, kwargs in m.call_args_list:
            project, endpoint, apikey = args
            self.assertEqual(project, 1)
            self.assertIn('https://app.scrapinghub.com', endpoint)
            self.assertEqual(apikey, self.conf.apikeys['default'])

    def _write_tmp_requirements_file(self):
        # Write a requirements.txt that points at two local sample archives.
        basepath = 'tests/samples/deploy_reqs_sample_project/'
        eggs = ['other-egg-0.2.1.zip', 'inflect-0.2.5.tar.gz']
        tmp_dir = tempfile.mkdtemp(prefix="shub-test-deploy-reqs")
        requirements_file = os.path.join(tmp_dir, 'requirements.txt')
        with open(requirements_file, 'w') as f:
            for egg in eggs:
                f.write(os.path.abspath(os.path.join(basepath, egg)) + "\n")
        return requirements_file
class VersionTests(unittest.TestCase):
    """A set of tests to ensure that the version command runs in the way
    that we expect.
    """
    def setUp(self):
        self.runner = CliRunner()

    def test_version_command(self):
        """Establish that the version command returns the output we
        expect.
        """
        # Set up output from the /config/ endpoint in Tower and
        # invoke the command.
        with client.test_mode as t:
            t.register_json('/config/', {'version': '4.21'})
            result = self.runner.invoke(version)

            # Verify that we got the output we expected.
            self.assertEqual(result.exit_code, 0)
            self.assertEqual(
                result.output.strip(),
                'Ansible Tower 4.21\nTower CLI %s' % tower_cli.__version__,
            )

    def test_cannot_connect(self):
        """Establish that the version command gives a nice error in cases
        where it cannot connect to Tower.
        """
        # Simulate any network-level failure from the HTTP client.
        with mock.patch.object(client, 'get') as get:
            get.side_effect = requests.exceptions.RequestException
            result = self.runner.invoke(version)
            self.assertEqual(result.exit_code, 1)
            self.assertIn('Could not connect to Ansible Tower.',
                          result.output)
def test_appgroup():
    """Test of with_appcontext."""
    @click.group(cls=AppGroup)
    def cli():
        pass

    # Commands declared with with_appcontext=True run inside the app context
    # built by the ScriptInfo's create_app factory.
    @cli.command(with_appcontext=True)
    def test():
        click.echo(current_app.name)

    @cli.group()
    def subgroup():
        pass

    # with_appcontext must also work for commands of nested groups.
    @subgroup.command(with_appcontext=True)
    def test2():
        click.echo(current_app.name)

    obj = ScriptInfo(create_app=lambda info: Flask("testappgroup"))

    runner = CliRunner()
    result = runner.invoke(cli, ['test'], obj=obj)
    assert result.exit_code == 0
    assert result.output == 'testappgroup\n'

    result = runner.invoke(cli, ['subgroup', 'test2'], obj=obj)
    assert result.exit_code == 0
    assert result.output == 'testappgroup\n'
class ScheduleTest(unittest.TestCase):
    """Tests for the schedule command: target parsing, error propagation and
    forwarding of spider arguments/settings."""

    def setUp(self):
        self.runner = CliRunner()
        self.conf = mock_conf(self)

    @mock.patch('shub.schedule.schedule_spider', autospec=True)
    def test_schedules_job_if_input_is_ok(self, mock_schedule):
        proj, endpoint, apikey = self.conf.get_target('default')
        # Default
        self.runner.invoke(schedule.cli, ['spider'])
        mock_schedule.assert_called_with(
            proj, endpoint, apikey, 'spider', (), ())
        # Other project
        self.runner.invoke(schedule.cli, ['123/spider'])
        mock_schedule.assert_called_with(
            123, endpoint, apikey, 'spider', (), ())
        # Other endpoint
        proj, endpoint, apikey = self.conf.get_target('vagrant')
        self.runner.invoke(schedule.cli, ['vagrant/spider'])
        mock_schedule.assert_called_with(
            proj, endpoint, apikey, 'spider', (), ())
        # Other project at other endpoint
        self.runner.invoke(schedule.cli, ['vagrant/456/spider'])
        mock_schedule.assert_called_with(
            456, endpoint, apikey, 'spider', (), ())

    @mock.patch('shub.schedule.Connection', autospec=True)
    def test_schedule_invalid_spider(self, mock_conn):
        # An APIError from Scrapinghub surfaces as RemoteErrorException.
        mock_proj = mock_conn.return_value.__getitem__.return_value
        mock_proj.schedule.side_effect = APIError('')
        with self.assertRaises(RemoteErrorException):
            schedule.schedule_spider(1, 'https://endpoint/api/scrapyd',
                                     'FAKE_API_KEY', 'fake_spider')

    @mock.patch('shub.schedule.Connection', autospec=True)
    def test_schedule_spider_calls_project_schedule(self, mock_conn):
        mock_proj = mock_conn.return_value.__getitem__.return_value
        schedule.schedule_spider(1, 'https://endpoint/api/scrapyd',
                                 'FAKE_API_KEY', 'fake_spider')
        self.assertTrue(mock_proj.schedule.called)

    @mock.patch('shub.schedule.Connection', autospec=True)
    def test_forwards_args_and_settings(self, mock_conn):
        # -a/--argument values pass through verbatim (including '=' signs);
        # -s/--set values end up JSON-encoded in 'job_settings'.
        mock_proj = mock_conn.return_value.__getitem__.return_value
        self.runner.invoke(
            schedule.cli,
            "testspider -s SETT=99 -a ARG=val1 --set SETTWITHEQUAL=10=10 "
            "--argument ARGWITHEQUAL=val2=val2".split(' '),
        )
        call_kwargs = mock_proj.schedule.call_args[1]
        self.assertDictContainsSubset(
            {'ARG': 'val1',
             'ARGWITHEQUAL': 'val2=val2'},
            call_kwargs,
        )
        # SH API expects settings as json-encoded string named 'job_settings'
        self.assertEqual(
            {'SETT': '99', 'SETTWITHEQUAL': '10=10'},
            json.loads(call_kwargs['job_settings']),
        )
def test_cli_remove_delete_credential_found_by_database(mock_db):
    """Removing a credential makes it disappear from the listing."""
    runner = CliRunner()
    # Confirm the removal prompt with 'y', then list what remains.
    removal = runner.invoke(cli.remove, ['foo@bar'], input='y')
    listing = runner.invoke(cli.cli)
    assert removal.exit_code == 0
    assert 'foo' not in listing.output
def test_cli():
    """Test cds-dojson CLI."""
    runner = CliRunner()

    def _assert_compiles(src_schema, expected_schema):
        # Compile the source schema through the CLI and compare the result
        # against the pre-compiled schema shipped with the package.
        # (Extracted helper: the original duplicated this whole sequence.)
        result = runner.invoke(
            compile_schema,
            [pkg_resources.resource_filename('cds_dojson.schemas',
                                             src_schema), ]
        )
        assert 0 == result.exit_code
        compiled_schema_result = json.loads(result.output)
        with open(pkg_resources.resource_filename(
                'cds_dojson.schemas', expected_schema), 'r') as f:
            compile_schema_expected = json.load(f)
        assert compile_schema_expected == compiled_schema_result

    _assert_compiles('records/video_src-v1.0.0.json',
                     'records/video-v1.0.0.json')
    _assert_compiles('records/project_src-v1.0.0.json',
                     'records/project-v1.0.0.json')
def test_single_notice_one_agency_meta(self, notice_xmls_for_url):
    """ Verify that we get agency info from the metadata. """
    cli = CliRunner()
    # Agency metadata in the shape returned by the Federal Register API.
    agencies_info = [{
        u'name': u'Environmental Protection Agency',
        u'parent_id': None,
        u'raw_name': u'ENVIRONMENTAL PROTECTION AGENCY',
        u'url': u'%s%s' % (u'https://www.federalregister.gov/',
                           u'agencies/environmental-protection-agency'),
        u'json_url': u'%s%s' % (u'https://www.federalregister.gov/',
                                u'api/v1/agencies/145.json'),
        u'id': 145
    }]
    self.expect_common_json(agencies=agencies_info)
    notice_xmls_for_url.return_value = [self.example_xml()]
    with cli.isolated_filesystem():
        cli.invoke(preprocess_notice, ['1234-5678'])
        # Exactly one notice entry should have been written.
        self.assertEqual(1, len(list(entry.Notice().sub_entries())))
        written = entry.Notice('1234-5678').read()
        # The agency metadata must be serialized as EREGS_* elements with
        # name / raw-name / agency-id attributes.
        self.assertEqual(len(written.xpath("//EREGS_AGENCIES")), 1)
        self.assertEqual(len(written.xpath("//EREGS_AGENCY")), 1)
        epa = written.xpath("//EREGS_AGENCY")[0]
        self.assertEqual(epa.attrib["name"],
                         "Environmental Protection Agency")
        self.assertEqual(epa.attrib["raw-name"],
                         "ENVIRONMENTAL PROTECTION AGENCY")
        self.assertEqual(epa.attrib["agency-id"], "145")
def test_cli(self, mocked):
    """Deploy-status lookups: latest deploy by default, a specific deploy
    via --id, NotFoundException exit code when nothing matches."""
    # the test creates .releases file locally
    # this context manager cleans it in the end
    with FakeProjectDirectory():
        runner = CliRunner()
        # No stored deploys yet: the command must fail.
        result = runner.invoke(cli, [])
        assert result.exit_code == \
            shub_exceptions.NotFoundException.exit_code

        deploy_id1 = utils.store_status_url('http://linkA', 2)
        deploy_id2 = utils.store_status_url('http://linkB', 2)
        utils.store_status_url('http://linkC', 2)
        # get latest (deploy 3)
        result = runner.invoke(cli, [])
        assert result.exit_code == 0
        mocked.assert_called_with('http://linkC', timeout=300)
        # get deploy by id
        result = runner.invoke(cli, ["--id", deploy_id2])
        assert result.exit_code == 0
        mocked.assert_called_with('http://linkB', timeout=300)
        # get non-existing deploy
        # NOTE(review): deploy_id1 appears to have been evicted by the
        # limit argument (2) passed to store_status_url — confirm against
        # utils.store_status_url.
        result = runner.invoke(cli, ["--id", deploy_id1])
        assert result.exit_code == \
            shub_exceptions.NotFoundException.exit_code
def test_invalid_url_for_login(monkeypatch, tmpdir):
    """Login must fail cleanly (exit code 1, no docker config written) when
    the given URL is not a reachable, valid Pier One registry."""
    runner = CliRunner()
    response = MagicMock()

    monkeypatch.setattr('stups_cli.config.load_config', lambda x: {})
    monkeypatch.setattr('pierone.api.get_token',
                        MagicMock(return_value='tok123'))
    # Redirect '~' so the docker config path lands inside tmpdir.
    monkeypatch.setattr('os.path.expanduser',
                        lambda x: x.replace('~', str(tmpdir)))

    # Missing Pier One header
    response.text = 'Not valid API'
    monkeypatch.setattr('requests.get', lambda *args, **kw: response)

    with runner.isolated_filesystem():
        result = runner.invoke(cli, ['login'], catch_exceptions=False,
                               input='pieroneurl\n')
        assert 'ERROR: Did not find a valid Pier One registry at https://pieroneurl' in result.output
        assert result.exit_code == 1
        # No credentials are persisted on failure.
        assert not os.path.exists(
            os.path.join(str(tmpdir), '.docker/config.json'))

    # Not a valid header
    response.raise_for_status = MagicMock(side_effect=RequestException)
    monkeypatch.setattr('requests.get', lambda *args, **kw: response)

    with runner.isolated_filesystem():
        result = runner.invoke(cli, ['login'], catch_exceptions=False,
                               input='pieroneurl\n')
        assert 'ERROR: Could not reach https://pieroneurl' in result.output
        assert result.exit_code == 1
        assert not os.path.exists(
            os.path.join(str(tmpdir), '.docker/config.json'))
def test_sandbox_56():
    """Create a 5.6 sandbox, re-create it with --force, then restore a
    tarball backup of its datadir into a fresh sandbox."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        sbdir = os.path.abspath('test_sandbox')
        result = runner.invoke(sandbox_cli,
                               ['-d', sbdir, '-m', '5.6.35'],
                               auto_envvar_prefix='DBSAKE',
                               obj={})
        assert result.exit_code == 0
        # Re-running against an existing directory requires --force.
        result = runner.invoke(sandbox_cli,
                               ['--force', '-d', sbdir, '-m', '5.6.35'],
                               auto_envvar_prefix='DBSAKE',
                               obj={})
        assert result.exit_code == 0
        # Archive the sandbox datadir so we can test restoring from backup.
        # Use a context manager so the archive is closed even on error, and
        # try/finally so the cwd is always restored (the original leaked
        # both if tar.add raised).
        cwd = os.getcwd()
        backup_path = os.path.join(cwd, "backup.tar.gz")
        os.chdir(os.path.join(sbdir, 'data'))
        try:
            with tarfile.open(backup_path, "w:gz") as tar:
                tar.add('.')
        finally:
            os.chdir(cwd)
        # Restore only the mysql.user table from the backup.
        result = runner.invoke(sandbox_cli,
                               ['--force', '-d', sbdir, '-m', '5.6.35',
                                '-s', 'backup.tar.gz',
                                '-t', 'mysql.user'],
                               auto_envvar_prefix='DBSAKE',
                               obj={})
        assert result.exit_code == 0
def test_validator():
    """The schema validator CLI accepts the helloworld workflow, both with
    and without the extra '-s' flag."""
    runner = CliRunner()
    base_args = ['workflow.yml', '-t', 'tests/testspecs/local-helloworld']
    for extra in ([], ['-s']):
        result = runner.invoke(yadageschemas.validatecli.main,
                               base_args + extra)
        assert result.exit_code == 0
def test_clis(tmpdir):
    """Run the helloworld workflow, then exercise the utility CLIs
    (selection testing and visualization) against its snapshot."""
    runner = CliRunner()
    # Execute the workflow; it should produce hello_world.txt in workdir.
    result = runner.invoke(yadage.steering.main,
                           [str(tmpdir.join('workdir')), 'workflow.yml',
                            '-t', 'tests/testspecs/local-helloworld',
                            '-p', 'par=value',
                            '-b', 'foregroundasync'
                            ]
                           )
    assert tmpdir.join('workdir/hello_world/hello_world.txt').check()

    # testsel with a matching stage/output selector.
    result = runner.invoke(yadage.utilcli.testsel, [
        str(tmpdir.join('workdir/_yadage/yadage_snapshot_workflow.json')),
        '{stages: hello_world, output: outputfile}'
    ])
    assert result.exit_code == 0

    # testsel with a non-matching selector still exits cleanly.
    result = runner.invoke(yadage.utilcli.testsel, [
        str(tmpdir.join('workdir/_yadage/yadage_snapshot_workflow.json')),
        '{stages: nonexistent, output: nonexistent}'
    ])
    assert result.exit_code == 0

    # viz renders the workflow snapshot to a PDF.
    result = runner.invoke(yadage.utilcli.viz, [
        str(tmpdir.join('workdir/_yadage/yadage_snapshot_workflow.json')),
        str(tmpdir.join('viz.pdf'))
    ])
    # The original discarded the .check() result, making it a no-op;
    # assert it so a missing PDF actually fails the test.
    assert tmpdir.join('viz.pdf').check()
    assert result.exit_code == 0
class CommandsParseRuleChangesTests(XMLBuilderMixin, TestCase):
    """Tests for the parse_rule_changes command."""

    def setUp(self):
        super(CommandsParseRuleChangesTests, self).setUp()
        self.cli = CliRunner()
        # Minimal notice XML fixture.
        with self.tree.builder("ROOT") as root:
            root.PRTPAGE(P="1234")
        self.notice_xml = NoticeXML(self.tree.render_xml())

    def test_missing_notice(self):
        """If the necessary notice XML is not present, we should expect a
        dependency error"""
        with self.cli.isolated_filesystem():
            result = self.cli.invoke(parse_rule_changes, ['1111'])
            self.assertTrue(isinstance(result.exception, dependency.Missing))

    @patch('regparser.commands.parse_rule_changes.process_amendments')
    def test_writes(self, process_amendments):
        """If the notice XML is present, we write the parsed version to
        disk, even if that version's already present"""
        process_amendments.return_value = {'filled': 'values'}
        with self.cli.isolated_filesystem():
            entry.Notice('1111').write(self.notice_xml)
            self.cli.invoke(parse_rule_changes, ['1111'])
            self.assertTrue(process_amendments.called)
            args = process_amendments.call_args[0]
            self.assertTrue(isinstance(args[0], dict))
            self.assertTrue(isinstance(args[1], etree._Element))

            process_amendments.reset_mock()
            # Even with an existing output entry the command re-parses.
            entry.Entry('rule_changes', '1111').write('content')
            self.cli.invoke(parse_rule_changes, ['1111'])
            self.assertTrue(process_amendments.called)
def test_bashhub_save():
    """`save` must skip commands tagged with #ignore and commands matching
    the BH_FILTER regex, producing no output in either case."""

    def print_failed(command):
        # Any save attempt is a failure for these cases; make it visible
        # in the captured output (asserted to be empty below).
        print("Failed")

    rest_client.save_command = print_failed

    runner = CliRunner()
    args = ['save', 'echo "Running bashhub tests"', '/tmp', '1', '100000',
            '1']
    ignored_command = ['save', 'echo "Running bashhub tests" #ignore',
                       '/tmp', '1', '100000', '1']

    # Should omit saving if #ignore is set
    result = runner.invoke(bashhub, ignored_command)
    assert '' == result.output

    # Should omit saving a command if BH_FILTER regex is set
    bashhub_globals.BH_FILTER = 'echo'
    result = runner.invoke(bashhub, args)
    assert '' == result.output
def test_set_meta(tmpdir):
    """set_meta writes metadata to an index.md for directory targets and a
    sidecar .md for file targets, and validates its arguments."""
    testdir = tmpdir.mkdir("test")
    testfile = tmpdir.join("test.jpg")
    testfile.write("")
    runner = CliRunner()

    # Directory target -> index.md inside the directory.
    res = runner.invoke(set_meta, [str(testdir), "title", "testing"])
    assert res.exit_code == 0
    assert res.output.startswith("1 metadata key(s) written to")
    index_md = testdir.join("index.md")
    assert os.path.isfile(str(index_md))
    assert index_md.read() == "Title: testing\n"

    # Run again, should give file exists error
    res = runner.invoke(set_meta, [str(testdir), "title", "testing"])
    assert res.exit_code == 2

    # A missing target path is an error.
    res = runner.invoke(
        set_meta,
        [str(testdir.join("non-existant.jpg")), "title", "testing"])
    assert res.exit_code == 1

    # File target -> sidecar test.md next to the file.
    res = runner.invoke(set_meta, [str(testfile), "title", "testing"])
    assert res.exit_code == 0
    assert res.output.startswith("1 metadata key(s) written to")
    sidecar = tmpdir.join("test.md")
    assert os.path.isfile(str(sidecar))
    assert sidecar.read() == "Title: testing\n"

    # An odd number of key/value arguments is rejected.
    res = runner.invoke(set_meta, [str(testfile), "title"])
    assert res.exit_code == 1
    assert res.output.startswith("Need an even number of arguments")
def test_returned_filesize():
    """The summary line must report scene count and total download size."""
    runner = CliRunner()

    def run_query(start, end):
        # Shared invocation; only the sensing period varies per case.
        return runner.invoke(
            cli,
            ['--user', _api_auth[0], '--password', _api_auth[1],
             '--geometry', path.join(FIXTURES_DIR, 'map.geojson'),
             '-s', start, '-e', end,
             '-q', 'producttype=GRD'],
            catch_exceptions=False
        )

    result = run_query('20141205', '20141208')
    assert result.output.split("\n")[-2] == \
        "1 scenes found with a total size of 0.50 GB"

    result = run_query('20170101', '20170105')
    assert result.output.split("\n")[-2] == \
        "18 scenes found with a total size of 27.81 GB"
def test_create_list_delete(monkeypatch):
    """Token lifecycle: create a named token, list it, delete it, and check
    that deleting an already-deleted token still succeeds."""
    token = 'abc-123'

    # Stub the OAuth implicit flow so no browser / network is needed.
    perform_implicit_flow = MagicMock()
    perform_implicit_flow.return_value = {'access_token': token,
                                          'expires_in': 1,
                                          'token_type': 'test'}
    monkeypatch.setattr('zign.api.perform_implicit_flow',
                        perform_implicit_flow)

    load_config = MagicMock()
    load_config.return_value = {'authorize_url': 'https://localhost/authorize',
                                'token_url': 'https://localhost/token',
                                'client_id': 'foobar',
                                'business_partner_id': '123'}
    monkeypatch.setattr('stups_cli.config.load_config', load_config)

    runner = CliRunner()

    with runner.isolated_filesystem():
        result = runner.invoke(cli_zign,
                               ['token', '-n', 'mytok',
                                '--password', 'mypass'],
                               catch_exceptions=False)
        # The last output line is the token itself.
        assert token == result.output.rstrip().split('\n')[-1]

        result = runner.invoke(cli_zign, ['list', '-o', 'json'],
                               catch_exceptions=False)
        data = json.loads(result.output)
        assert len(data) >= 1
        assert 'mytok' in [r['name'] for r in data]

        result = runner.invoke(cli_zign, ['delete', 'mytok'],
                               catch_exceptions=False)
        result = runner.invoke(cli_zign, ['list', '-o', 'json'],
                               catch_exceptions=False)
        data = json.loads(result.output)
        assert 'mytok' not in [r['name'] for r in data]

        # should work again for already deleted tokens
        result = runner.invoke(cli_zign, ['delete', 'mytok'],
                               catch_exceptions=False)
def test_cli():
    """Invocations with geometry alone, plus URL, plus query all succeed."""
    runner = CliRunner()
    base = ['--user', _api_auth[0], '--password', _api_auth[1],
            '--geometry', path.join(FIXTURES_DIR, 'map.geojson')]
    option_sets = [
        [],
        ['--url', 'https://scihub.copernicus.eu/dhus/'],
        ['-q', 'producttype=GRD,polarisationmode=HH'],
    ]
    for extras in option_sets:
        result = runner.invoke(cli, base + extras, catch_exceptions=False)
        assert result.exit_code == 0
def test_cli_do_cds_marc21_from_xml(app):
    """Test MARC21 loading from XML."""
    from dojson import cli
    runner = CliRunner()
    with app.app_context(), runner.isolated_filesystem():
        with open('record.xml', 'wb') as f:
            f.write(CDS_ALBUM.encode('utf-8'))

        # 'missing' reports fields without a translation rule; expect none.
        result = runner.invoke(
            cli.cli,
            ['-i', 'record.xml', '-l', 'cds_marcxml', 'missing',
             'cds_marc21']
        )
        assert '' == result.output
        assert 0 == result.exit_code

        # 'do' performs the actual MARC21 -> JSON conversion.
        result = runner.invoke(
            cli.cli,
            ['-i', 'record.xml', '-l', 'cds_marcxml', 'do', 'cds_marc21']
        )
        data = json.loads(result.output)[0]

        # Check the control number (doJSON)
        assert data['physical_medium'][1][
            'material_base_and_configuration'] == ['Neg NB 6 x 6']
        # Check the parent album (CDSImage)
        assert data['images'][3]['$ref'] == 'http://cds.cern.ch/record/1782448'
        assert data['images'][3]['relation'] == 'Cover'
        # Check the imprint (CDSMarc21)
        assert data['imprint'][0]['_complete_date'] == 'Sep 1970'
        assert data['imprint'][0]['complete_date'] == '1970-09-01'
class CommandsFetchSxSTests(TestCase):
    """Tests for the fetch_sxs command."""

    def setUp(self):
        super(CommandsFetchSxSTests, self).setUp()
        self.cli = CliRunner()
        # Minimal notice XML fixture.
        with XMLBuilder("ROOT") as ctx:
            ctx.PRTPAGE(P="1234")
            ctx.CFR('12 CFR 1000')
        self.notice_xml = NoticeXML(ctx.xml)

    def test_missing_notice(self):
        """If the necessary notice XML is not present, we should expect a
        dependency error"""
        with self.cli.isolated_filesystem():
            result = self.cli.invoke(fetch_sxs, ['1111'])
            self.assertTrue(isinstance(result.exception, dependency.Missing))

    @patch('regparser.commands.fetch_sxs.build_notice')
    @patch('regparser.commands.fetch_sxs.meta_data')
    def test_writes(self, meta_data, build_notice):
        """If the notice XML is present, we write the parsed version to
        disk, even if that version's already present"""
        with self.cli.isolated_filesystem():
            entry.Notice('1111').write(self.notice_xml)
            # Configure the stub *before* invoking so build_notice actually
            # receives it (the original set return_value after the invoke,
            # so the stubbed value was never used).
            meta_data.return_value = {'example': 1}
            self.cli.invoke(fetch_sxs, ['1111'])
            self.assertTrue(build_notice.called)
            args, kwargs = build_notice.call_args
            # assertEqual, not assertTrue: the original passed the expected
            # dict as assertTrue's msg argument, which never compared it.
            self.assertEqual(args[2], {'example': 1})
            self.assertTrue(
                isinstance(kwargs['xml_to_process'], etree._Element))

            build_notice.reset_mock()
            # Even with an existing output entry the command re-runs.
            entry.Entry('rule_changes', '1111').write('content')
            self.cli.invoke(fetch_sxs, ['1111'])
            self.assertTrue(build_notice.called)
def test_cloud_flag_url():
    """The cloud-cover flag (-c) filters Sentinel-2 products, and raises
    ValueError when combined with Sentinel-1."""
    command = ['--user', _api_auth[0], '--password', _api_auth[1],
               '--geometry', path.join(FIXTURES_DIR, 'map.geojson'),
               '--url', 'https://scihub.copernicus.eu/apihub/',
               '-s', '20151219', '-e', '20151228',
               '-c', '10']
    runner = CliRunner()
    result = runner.invoke(
        cli,
        command + ['--sentinel', '2'],
        catch_exceptions=False
    )
    expected = "Product 6ed0b7de-3435-43df-98bf-ad63c8d077ef - Date: 2015-12-27T14:22:29Z, Instrument: MSI, Mode: , Satellite: Sentinel-2, Size: 5.47 GB"
    assert re.findall("^Product .+$", result.output, re.M)[0] == expected
    # For order-by test
    assert '0848f6b8-5730-4759-850e-fc9945d42296' not in re.findall(
        "^Product .+$", result.output, re.M)[1]
    # Cloud-cover filtering is not supported for Sentinel-1.
    with pytest.raises(ValueError) as excinfo:
        result = runner.invoke(
            cli,
            command + ['--sentinel', '1'],
            catch_exceptions=False
        )
def test_cli():
    """Test cli."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        shutil.copytree(DATA, abspath(join(getcwd(), "data/")))

        # "min" pins exact versions.
        result = runner.invoke(cli, ["-l", "min", "data/setup.py"])
        assert result.exit_code == 0
        assert result.output == "click==5.0.0\nmock==1.3.0\n"

        # "pypi" emits lower-bound specifiers.
        result = runner.invoke(cli, ["-l", "pypi", "data/setup.py"])
        assert result.exit_code == 0
        assert result.output == "click>=5.0.0\nmock>=1.3.0\n"

        # "dev" without a requirements file is a usage error (exit code 2).
        result = runner.invoke(cli, ["-l", "dev", "data/setup.py"])
        assert result.exit_code == 2

        result = runner.invoke(cli, ["-l", "dev", "-r", "data/req.txt",
                                     "data/setup.py"])
        assert result.exit_code == 0
        # NOTE(review): the two adjacent string literals concatenate into a
        # single expected value here; confirm this was intended rather than
        # a broken multi-line assert that only checked the first part.
        assert result.output == "-e git+https://github.com/mitsuhiko/click.git#egg=click\n" "mock>=1.3.0\n"

        # -o writes to a file instead of stdout.
        result = runner.invoke(cli, ["-l", "min", "-o", "requirements.txt",
                                     "data/setup.py"])
        assert result.exit_code == 0
        assert result.output == ""
        with open(join(getcwd(), "requirements.txt")) as f:
            assert f.read() == "click==5.0.0\nmock==1.3.0\n"
class BasicTestCase(unittest.TestCase):
    """Shared fixture for tldr CLI tests: points TLDR_CONFIG_DIR at a mock
    repository and provides helpers that invoke each subcommand."""

    def setUp(self):
        self.repo_dir = path.join(ROOT, 'mock_tldr')
        self.config_path = path.join(self.repo_dir, '.tldrrc')
        os.environ['TLDR_CONFIG_DIR'] = self.repo_dir
        self.runner = CliRunner()
        self.call_init_command()

    def tearDown(self):
        # Drop the generated config so every test starts from a clean slate.
        if path.exists(self.config_path):
            os.remove(self.config_path)

    def call_init_command(self, repo_dir=path.join(ROOT, 'mock_tldr'),
                          platform='linux'):
        # Answer the two interactive prompts (repo path, then platform).
        with mock.patch('click.prompt', side_effect=[repo_dir, platform]):
            return self.runner.invoke(cli.init)

    def call_update_command(self):
        # Skip the expensive index build during update.
        with mock.patch('tldr.cli.build_index', return_value=None):
            return self.runner.invoke(cli.update)

    def call_find_command(self, command_name):
        return self.runner.invoke(cli.find, [command_name])

    def call_reindex_command(self):
        return self.runner.invoke(cli.reindex)

    def call_locate_command(self, command_name):
        return self.runner.invoke(cli.locate, [command_name])
def test_cli_command_entrance(): runner = CliRunner() result = runner.invoke(cli) assert result.exit_code == 0 assert result.output == """Usage: cli [OPTIONS] COMMAND [ARGS]...
def test_create_scope(secrets_api_mock):
    """create_scope forwards the scope name and initial manage principal."""
    runner = CliRunner()
    runner.invoke(cli.create_scope,
                  ['--scope', SCOPE, '--initial-manage-principal', 'users'])
    scope_arg, principal_arg = secrets_api_mock.create_scope.call_args[0][:2]
    assert scope_arg == SCOPE
    assert principal_arg == 'users'
def test_delete_acl(secrets_api_mock):
    """delete_acl forwards scope and principal to the API client."""
    runner = CliRunner()
    runner.invoke(cli.delete_acl, ['--scope', SCOPE,
                                   '--principal', PRINCIPAL])
    positional = secrets_api_mock.delete_acl.call_args[0]
    assert positional[0] == SCOPE
    assert positional[1] == PRINCIPAL
def test_delete_secret(secrets_api_mock):
    """delete_secret forwards scope and key to the API client."""
    runner = CliRunner()
    runner.invoke(cli.delete_secret, ['--scope', SCOPE, '--key', KEY])
    positional = secrets_api_mock.delete_secret.call_args[0]
    assert positional[0] == SCOPE
    assert positional[1] == KEY
class TestCli(object):
    """Unit tests for the sceptre CLI.

    The environment/stack layer is patched out via ``sceptre.cli.get_env``;
    most tests invoke a sub-command through Click's ``CliRunner`` and then
    assert that the corresponding Environment/Stack method was called with
    the expected arguments.
    """

    def setup_method(self, test_method):
        # Fresh CliRunner for every test.
        self.runner = CliRunner()

    @patch("sys.exit")
    def test_catch_excecptions(self, mock_exit):  # (sic: typo in name kept)
        # The decorator turns a SceptreException into sys.exit(1).
        @sceptre.cli.catch_exceptions
        def raises_exception():
            raise SceptreException
        raises_exception()
        mock_exit.assert_called_once_with(1)

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_validate_template(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["validate-template", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].validate_template\
            .assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_generate_template(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        result = self.runner.invoke(cli, ["generate-template", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        # The command echoes the template body followed by a newline.
        assert result.output == "{0}\n".format(
            mock_get_env.return_value.stacks["vpc"].template.body)

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_lock_stack(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["lock-stack", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].lock\
            .assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_unlock_stack(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["unlock-stack", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].unlock\
            .assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_describe_env_resources(self, mock_get_env, mock_getcwd):
        mock_get_env.return_value.describe_resources.return_value = {
            "stack-name-1": {
                "StackResources": [{
                    "LogicalResourceId": "logical-resource-id",
                    "PhysicalResourceId": "physical-resource-id"
                }]
            },
            "stack-name-2": {
                "StackResources": [{
                    "LogicalResourceId": "logical-resource-id",
                    "PhysicalResourceId": "physical-resource-id"
                }]
            }
        }
        mock_getcwd.return_value = sentinel.cwd
        result = self.runner.invoke(cli, ["describe-env-resources", "dev"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.describe_resources\
            .assert_called_with()
        # Assert that there is output
        assert result.output

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_describe_stack_resources(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        mock_get_env.return_value.stacks["vpc"].describe_resources\
            .return_value = {
                "StackResources": [
                    {
                        "LogicalResourceId": "logical-resource-id",
                        "PhysicalResourceId": "physical-resource-id"
                    }
                ]
            }
        result = self.runner.invoke(cli,
                                    ["describe-stack-resources", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].describe_resources\
            .assert_called_with()
        # Assert that there is output.
        assert result.output

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_create_stack(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["create-stack", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].create\
            .assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_delete_stack(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["delete-stack", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].delete\
            .assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_update_stack(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["update-stack", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].update\
            .assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_launch_stack(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["launch-stack", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].launch\
            .assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_launch_env(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["launch-env", "dev"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.launch.assert_called_with()

    @patch("sceptre.cli.get_env")
    def test_launch_env_returns_zero_correctly(self, mock_get_env):
        # All stacks COMPLETE -> CLI exits 0.
        mock_get_env.return_value.launch.return_value = dict(
            (sentinel.stack_name, StackStatus.COMPLETE) for _ in range(5))
        result = self.runner.invoke(cli, ["launch-env", "environment"])
        assert result.exit_code == 0

    @patch("sceptre.cli.get_env")
    def test_launch_env_returns_non_zero_correctly(self, mock_get_env):
        # Any FAILED stack -> CLI exits 1.
        mock_get_env.return_value.launch.return_value = dict(
            (sentinel.stack_name, StackStatus.FAILED) for _ in range(5))
        result = self.runner.invoke(cli, ["launch-env", "environment"])
        assert result.exit_code == 1

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_delete_env(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        mock_get_env.return_value.delete.return_value = \
            sentinel.response
        self.runner.invoke(cli, ["delete-env", "dev"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.delete.assert_called_with()

    @patch("sceptre.cli.get_env")
    def test_delete_env_returns_zero_correctly(self, mock_get_env):
        mock_get_env.return_value.delete.return_value = dict(
            (sentinel.stack_name, StackStatus.COMPLETE) for _ in range(5))
        result = self.runner.invoke(cli, ["delete-env", "environment"])
        assert result.exit_code == 0

    @patch("sceptre.cli.get_env")
    def test_delete_env_returns_non_zero_correctly(self, mock_get_env):
        mock_get_env.return_value.delete.return_value = dict(
            (sentinel.stack_name, StackStatus.FAILED) for _ in range(5))
        result = self.runner.invoke(cli, ["delete-env", "environment"])
        assert result.exit_code == 1

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_continue_update_rollback(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["continue-update-rollback", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].\
            continue_update_rollback.assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_create_change_set(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["create-change-set", "dev", "vpc", "cs1"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].create_change_set\
            .assert_called_with("cs1")

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_delete_change_set(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["delete-change-set", "dev", "vpc", "cs1"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].delete_change_set\
            .assert_called_with("cs1")

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_describe_change_set(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        mock_get_env.return_value.stacks["vpc"].describe_change_set\
            .return_value = {
                "ChangeSetName": "change-set-1",
                "Changes": [
                    {
                        "ResourceChange": {
                            "ResourceType": "AWS::EC2::InternetGateway",
                            "Replacement": "True",
                            "PhysicalResourceId": "igw-04a59561",
                            "Details": [],
                            "Action": "Remove",
                            "Scope": [],
                            "LogicalResourceId": "InternetGateway"
                        }
                    }
                ],
                "CreationTime": "2017-01-20 14:10:25.239000+00:00",
                "ExecutionStatus": "AVAILABLE",
                "StackName": "example-dev-vpc",
                "Status": "CREATE_COMPLETE"
            }
        result = self.runner.invoke(
            cli, ["describe-change-set", "dev", "vpc", "cs1"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].describe_change_set\
            .assert_called_with("cs1")
        # Without --verbose, the "Details" entry is stripped from the output.
        assert yaml.safe_load(result.output) == {
            "ChangeSetName": "change-set-1",
            "Changes": [{
                "ResourceChange": {
                    "ResourceType": "AWS::EC2::InternetGateway",
                    "Replacement": "True",
                    "PhysicalResourceId": "igw-04a59561",
                    "Action": "Remove",
                    "LogicalResourceId": "InternetGateway",
                    "Scope": []
                }
            }],
            "CreationTime": "2017-01-20 14:10:25.239000+00:00",
            "ExecutionStatus": "AVAILABLE",
            "StackName": "example-dev-vpc",
            "Status": "CREATE_COMPLETE"
        }

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_describe_change_set_with_verbose_flag(self, mock_get_env,
                                                   mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        mock_get_env.return_value.stacks["vpc"].describe_change_set\
            .return_value = {
                "Changes": [
                    {
                        "ResourceChange": {
                            "ResourceType": "AWS::EC2::InternetGateway",
                            "PhysicalResourceId": "igw-04a59561",
                            "Details": [],
                            "Action": "Remove",
                            "Scope": [],
                            "LogicalResourceId": "InternetGateway"
                        }
                    }
                ]
            }
        result = self.runner.invoke(
            cli, ["describe-change-set", "--verbose", "dev", "vpc", "cs1"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].describe_change_set\
            .assert_called_with("cs1")
        # With --verbose, the description is passed through unmodified.
        assert yaml.safe_load(result.output) == {
            "Changes": [{
                "ResourceChange": {
                    "ResourceType": "AWS::EC2::InternetGateway",
                    "PhysicalResourceId": "igw-04a59561",
                    "Details": [],
                    "Action": "Remove",
                    "Scope": [],
                    "LogicalResourceId": "InternetGateway"
                }
            }]
        }

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_execute_change_set(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["execute-change-set", "dev", "vpc", "cs1"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].execute_change_set\
            .assert_called_with("cs1")

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_list_change_sets(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["list-change-sets", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].list_change_sets\
            .assert_called_with()

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.uuid1")
    @patch("sceptre.cli.get_env")
    def test_update_with_change_set_with_input_yes(self, mock_get_env,
                                                   mock_uuid1, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        mock_get_env.return_value.stacks["vpc"].wait_for_cs_completion\
            .return_value = StackChangeSetStatus.READY
        mock_get_env.return_value.stacks["vpc"].describe_change_set\
            .return_value = "description"
        # uuid1 is patched so the generated name is "change-set-1".
        mock_uuid1().hex = "1"
        self.runner.invoke(cli,
                           ["update-stack-cs", "dev", "vpc", "--verbose"],
                           input="y")
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].create_change_set\
            .assert_called_with("change-set-1")
        mock_get_env.return_value.stacks["vpc"].wait_for_cs_completion\
            .assert_called_with("change-set-1")
        # Answering "y" executes the change set.
        mock_get_env.return_value.stacks["vpc"].execute_change_set\
            .assert_called_with("change-set-1")

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli._simplify_change_set_description")
    @patch("sceptre.cli.uuid1")
    @patch("sceptre.cli.get_env")
    def test_update_with_change_set_without_verbose_flag(
            self, mock_get_environment, mock_uuid1,
            mock_simplify_change_set_description, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        mock_get_environment.return_value.stacks["vpc"].wait_for_cs_completion\
            .return_value = StackChangeSetStatus.READY
        mock_get_environment.return_value.stacks["vpc"].describe_change_set\
            .return_value = "description"
        mock_simplify_change_set_description.return_value = \
            "simplified_description"
        mock_uuid1().hex = "1"
        response = self.runner.invoke(cli, ["update-stack-cs", "dev", "vpc"],
                                      input="y")
        # Without --verbose, the simplified description is shown.
        assert "simplified_description" in response.output

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.uuid1")
    @patch("sceptre.cli.get_env")
    def test_update_with_change_set_with_input_no(self, mock_get_env,
                                                  mock_uuid1, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        mock_get_env.return_value.stacks["vpc"].wait_for_cs_completion\
            .return_value = StackChangeSetStatus.READY
        mock_get_env.return_value.stacks["vpc"].describe_change_set\
            .return_value = "description"
        mock_uuid1().hex = "1"
        self.runner.invoke(cli,
                           ["update-stack-cs", "dev", "vpc", "--verbose"],
                           input="n")
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].create_change_set\
            .assert_called_with("change-set-1")
        mock_get_env.return_value.stacks["vpc"].wait_for_cs_completion\
            .assert_called_with("change-set-1")
        # Answering "n" deletes the change set instead of executing it.
        mock_get_env.return_value.stacks["vpc"].delete_change_set\
            .assert_called_with("change-set-1")

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.uuid1")
    @patch("sceptre.cli.get_env")
    def test_update_with_change_set_with_status_defunct(
            self, mock_get_env, mock_uuid1, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        mock_get_env.return_value.stacks["vpc"].wait_for_cs_completion\
            .return_value = StackChangeSetStatus.DEFUNCT
        mock_get_env.return_value.stacks["vpc"].describe_change_set\
            .return_value = "description"
        mock_uuid1().hex = "1"
        result = self.runner.invoke(
            cli, ["update-stack-cs", "dev", "vpc", "--verbose"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].create_change_set\
            .assert_called_with("change-set-1")
        mock_get_env.return_value.stacks["vpc"].wait_for_cs_completion\
            .assert_called_with("change-set-1")
        # A DEFUNCT change set aborts the command with exit code 1.
        assert result.exit_code == 1

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_describe_stack_outputs(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, ["describe-stack-outputs", "dev", "vpc"])
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value.stacks["vpc"].describe_outputs\
            .assert_called_with()

    @patch("sceptre.cli.get_env")
    def test_describe_stack_outputs_handles_envvar_flag(self, mock_get_env):
        mock_get_env.return_value.stacks["vpc"].describe_outputs\
            .return_value = [
                {
                    "OutputKey": "key",
                    "OutputValue": "value"
                }
            ]
        result = self.runner.invoke(
            cli, ["describe-stack-outputs", "--export=envvar", "dev", "vpc"])
        # Outputs are rendered as shell export statements.
        assert result.output == "export SCEPTRE_key=value\n"

    @patch("sceptre.cli.get_env")
    def test_describe_env(self, mock_get_env):
        mock_Environment = Mock()
        mock_Environment.describe.return_value = {"stack": "status"}
        mock_get_env.return_value = mock_Environment
        result = self.runner.invoke(cli, ["describe-env", "dev"])
        assert result.output == "stack: status\n\n"

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.get_env")
    def test_set_stack_policy_with_file_flag(self, mock_get_env, mock_getcwd):
        mock_getcwd.return_value = sentinel.cwd
        self.runner.invoke(cli, [
            "set-stack-policy", "dev", "vpc",
            "--policy-file=tests/fixtures/stack_policies/lock.json"
        ])
        # NOTE(review): the Mock below is created AFTER the invoke and the
        # return_value assignment at the end has no observable effect —
        # looks like leftover/dead code; confirm intent.
        mock_Environment = Mock()
        mock_get_env.assert_called_with(sentinel.cwd, "dev", {})
        mock_get_env.return_value = mock_Environment

    @patch("sceptre.cli.get_env")
    def test_get_stack_policy_with_existing_policy(self, mock_get_env):
        mock_get_env.return_value.stacks["vpc"].get_policy\
            .return_value = {
                "StackPolicyBody": "policy"
            }
        result = self.runner.invoke(cli, ["get-stack-policy", "dev", "vpc"])
        assert result.output == "policy\n"

    @patch("sceptre.cli.get_env")
    def test_get_stack_policy_without_existing_policy(self, mock_get_env):
        mock_get_env.return_value.stacks["vpc"].get_policy\
            .return_value = {}
        result = self.runner.invoke(cli, ["get-stack-policy", "dev", "vpc"])
        # No policy -> literal "{}" is printed.
        assert result.output == "{}\n"

    @patch("sceptre.cli.os.getcwd")
    @patch("sceptre.cli.Environment")
    def test_get_env(self, mock_Environment, mock_getcwd):
        mock_Environment.return_value = sentinel.environment
        mock_getcwd.return_value = sentinel.cwd
        response = sceptre.cli.get_env(sentinel.cwd,
                                       sentinel.environment_path,
                                       sentinel.options)
        mock_Environment.assert_called_once_with(
            sceptre_dir=sentinel.cwd,
            environment_path=sentinel.environment_path,
            options=sentinel.options)
        assert response == sentinel.environment

    def test_setup_logging_with_debug(self):
        logger = sceptre.cli.setup_logging(True, False)
        assert logger.getEffectiveLevel() == logging.DEBUG
        assert logging.getLogger("botocore").getEffectiveLevel() == \
            logging.INFO
        # Silence logging for the rest of the tests
        logger.setLevel(logging.CRITICAL)

    def test_setup_logging_without_debug(self):
        logger = sceptre.cli.setup_logging(False, False)
        assert logger.getEffectiveLevel() == logging.INFO
        assert logging.getLogger("botocore").getEffectiveLevel() == \
            logging.CRITICAL
        # Silence logging for the rest of the tests
        logger.setLevel(logging.CRITICAL)

    @patch("sceptre.cli.click.echo")
    def test_write_with_yaml_format(self, mock_echo):
        sceptre.cli.write({"key": "value"}, "yaml")
        mock_echo.assert_called_once_with("key: value\n")

    @patch("sceptre.cli.click.echo")
    def test_write_with_json_format(self, mock_echo):
        sceptre.cli.write({"key": "value"}, "json")
        mock_echo.assert_called_once_with('{"key": "value"}')

    @patch("sceptre.cli.click.echo")
    def test_write_status_with_colour(self, mock_echo):
        sceptre.cli.write("stack: CREATE_COMPLETE", no_colour=False)
        # \x1b[32m ... \x1b[0m is the green ANSI colour wrapper.
        mock_echo.assert_called_once_with(
            "stack: \x1b[32mCREATE_COMPLETE\x1b[0m")

    @patch("sceptre.cli.click.echo")
    def test_write_status_without_colour(self, mock_echo):
        sceptre.cli.write("stack: CREATE_COMPLETE", no_colour=True)
        mock_echo.assert_called_once_with("stack: CREATE_COMPLETE")

    @patch("sceptre.cli.StackStatusColourer.colour")
    @patch("sceptre.cli.Formatter.format")
    def test_ColouredFormatter_format_with_string(self, mock_format,
                                                  mock_colour):
        mock_format.return_value = sentinel.response
        mock_colour.return_value = sentinel.coloured_response
        coloured_formatter = sceptre.cli.ColouredFormatter()
        response = coloured_formatter.format("string")
        mock_format.assert_called_once_with("string")
        mock_colour.assert_called_once_with(sentinel.response)
        assert response == sentinel.coloured_response

    def test_CustomJsonEncoder_with_non_json_serialisable_object(self):
        # datetime is not JSON-serialisable; the encoder stringifies it.
        encoder = sceptre.cli.CustomJsonEncoder()
        response = encoder.encode(datetime.datetime(2016, 5, 3))
        assert response == '"2016-05-03 00:00:00"'
class TestCtl(unittest.TestCase): @patch('socket.getaddrinfo', socket_getaddrinfo) def setUp(self): with patch.object(Client, 'machines') as mock_machines: mock_machines.__get__ = Mock( return_value=['http://*****:*****@patch('psycopg2.connect', psycopg2_connect) def test_get_cursor(self): self.assertIsNone( get_cursor(get_cluster_initialized_without_leader(), {}, role='master')) self.assertIsNotNone( get_cursor(get_cluster_initialized_with_leader(), {}, role='master')) # MockCursor returns pg_is_in_recovery as false self.assertIsNone( get_cursor(get_cluster_initialized_with_leader(), {}, role='replica')) self.assertIsNotNone( get_cursor(get_cluster_initialized_with_leader(), {'database': 'foo'}, role='any')) def test_parse_dcs(self): assert parse_dcs(None) is None assert parse_dcs('localhost') == {'etcd': {'host': 'localhost:2379'}} assert parse_dcs('') == {'etcd': {'host': 'localhost:2379'}} assert parse_dcs('localhost:8500') == { 'consul': { 'host': 'localhost:8500' } } assert parse_dcs('zookeeper://localhost') == { 'zookeeper': { 'hosts': ['localhost:2181'] } } assert parse_dcs('exhibitor://dummy') == { 'exhibitor': { 'hosts': ['dummy'], 'port': 8181 } } assert parse_dcs('consul://localhost') == { 'consul': { 'host': 'localhost:8500' } } self.assertRaises(PatroniCtlException, parse_dcs, 'invalid://test') def test_output_members(self): cluster = get_cluster_initialized_with_leader() self.assertIsNone(output_members(cluster, name='abc', fmt='pretty')) self.assertIsNone(output_members(cluster, name='abc', fmt='json')) self.assertIsNone(output_members(cluster, name='abc', fmt='tsv')) @patch('patroni.ctl.get_dcs') @patch('patroni.ctl.request_patroni', Mock(return_value=MockResponse())) def test_failover(self, mock_get_dcs): mock_get_dcs.return_value = self.e mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader mock_get_dcs.return_value.set_failover_value = Mock() result = self.runner.invoke(ctl, ['failover', 'dummy'], 
input='leader\nother\n\ny') assert 'leader' in result.output result = self.runner.invoke( ctl, ['failover', 'dummy'], input='leader\nother\n2300-01-01T12:23:00\ny') assert result.exit_code == 0 with patch('patroni.dcs.Cluster.is_paused', Mock(return_value=True)): result = self.runner.invoke(ctl, [ 'failover', 'dummy', '--force', '--scheduled', '2015-01-01T12:00:00' ]) assert result.exit_code == 1 # Aborting failover,as we anser NO to the confirmation result = self.runner.invoke(ctl, ['failover', 'dummy'], input='leader\nother\n\nN') assert result.exit_code == 1 # Target and source are equal result = self.runner.invoke(ctl, ['failover', 'dummy'], input='leader\nleader\n\ny') assert result.exit_code == 1 # Reality is not part of this cluster result = self.runner.invoke(ctl, ['failover', 'dummy'], input='leader\nReality\n\ny') assert result.exit_code == 1 result = self.runner.invoke(ctl, ['failover', 'dummy', '--force']) assert 'Member' in result.output result = self.runner.invoke(ctl, [ 'failover', 'dummy', '--force', '--scheduled', '2015-01-01T12:00:00+01:00' ]) assert result.exit_code == 0 # Invalid timestamp result = self.runner.invoke( ctl, ['failover', 'dummy', '--force', '--scheduled', 'invalid']) assert result.exit_code != 0 # Invalid timestamp result = self.runner.invoke(ctl, [ 'failover', 'dummy', '--force', '--scheduled', '2115-02-30T12:00:00+01:00' ]) assert result.exit_code != 0 # Specifying wrong leader result = self.runner.invoke(ctl, ['failover', 'dummy'], input='dummy') assert result.exit_code == 1 with patch('patroni.ctl.request_patroni', Mock(side_effect=Exception)): # Non-responding patroni result = self.runner.invoke( ctl, ['failover', 'dummy'], input='leader\nother\n2300-01-01T12:23:00\ny') assert 'falling back to DCS' in result.output with patch('patroni.ctl.request_patroni') as mocked: mocked.return_value.status_code = 500 result = self.runner.invoke(ctl, ['failover', 'dummy'], input='leader\nother\n\ny') assert 'Failover failed' in 
result.output # No members available mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_only_leader result = self.runner.invoke(ctl, ['failover', 'dummy'], input='leader\nother\n\ny') assert result.exit_code == 1 # No master available mock_get_dcs.return_value.get_cluster = get_cluster_initialized_without_leader result = self.runner.invoke(ctl, ['failover', 'dummy'], input='leader\nother\n\ny') assert result.exit_code == 1 def test_get_dcs(self): self.assertRaises(PatroniCtlException, get_dcs, {'dummy': {}}, 'dummy') @patch('psycopg2.connect', psycopg2_connect) @patch('patroni.ctl.query_member', Mock(return_value=([['mock column']], None))) @patch('patroni.ctl.get_dcs') @patch.object(etcd.Client, 'read', etcd_read) def test_query(self, mock_get_dcs): mock_get_dcs.return_value = self.e # Mutually exclusive result = self.runner.invoke( ctl, ['query', 'alpha', '--member', 'abc', '--role', 'master']) assert result.exit_code == 1 with self.runner.isolated_filesystem(): with open('dummy', 'w') as dummy_file: dummy_file.write('SELECT 1') # Mutually exclusive result = self.runner.invoke( ctl, ['query', 'alpha', '--file', 'dummy', '--command', 'dummy']) assert result.exit_code == 1 result = self.runner.invoke(ctl, ['query', 'alpha', '--file', 'dummy']) assert result.exit_code == 0 os.remove('dummy') result = self.runner.invoke( ctl, ['query', 'alpha', '--command', 'SELECT 1']) assert 'mock column' in result.output # --command or --file is mandatory result = self.runner.invoke(ctl, ['query', 'alpha']) assert result.exit_code == 1 result = self.runner.invoke(ctl, [ 'query', 'alpha', '--command', 'SELECT 1', '--username', 'root', '--password', '--dbname', 'postgres' ], input='ab\nab') assert 'mock column' in result.output def test_query_member(self): with patch('patroni.ctl.get_cursor', Mock(return_value=MockConnect().cursor())): rows = query_member(None, None, None, 'master', 'SELECT pg_is_in_recovery()', {}) self.assertTrue('False' in str(rows)) rows = 
query_member(None, None, None, 'replica', 'SELECT pg_is_in_recovery()', {}) self.assertEquals(rows, (None, None)) with patch('test_postgresql.MockCursor.execute', Mock(side_effect=OperationalError('bla'))): rows = query_member(None, None, None, 'replica', 'SELECT pg_is_in_recovery()', {}) with patch('patroni.ctl.get_cursor', Mock(return_value=None)): rows = query_member(None, None, None, None, 'SELECT pg_is_in_recovery()', {}) self.assertTrue('No connection to' in str(rows)) rows = query_member(None, None, None, 'replica', 'SELECT pg_is_in_recovery()', {}) self.assertTrue('No connection to' in str(rows)) with patch('patroni.ctl.get_cursor', Mock(side_effect=OperationalError('bla'))): rows = query_member(None, None, None, 'replica', 'SELECT pg_is_in_recovery()', {}) @patch('patroni.ctl.get_dcs') def test_dsn(self, mock_get_dcs): mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader result = self.runner.invoke(ctl, ['dsn', 'alpha']) assert 'host=127.0.0.1 port=5435' in result.output # Mutually exclusive options result = self.runner.invoke( ctl, ['dsn', 'alpha', '--role', 'master', '--member', 'dummy']) assert result.exit_code == 1 # Non-existing member result = self.runner.invoke(ctl, ['dsn', 'alpha', '--member', 'dummy']) assert result.exit_code == 1 @patch('requests.post', requests_get) @patch('patroni.ctl.get_dcs') def test_restart_reinit(self, mock_get_dcs): mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader result = self.runner.invoke(ctl, ['restart', 'alpha'], input='y\n\nnow') assert 'Failed: restart for' in result.output assert result.exit_code == 0 result = self.runner.invoke(ctl, ['reinit', 'alpha'], input='y') assert result.exit_code == 1 # successful reinit result = self.runner.invoke(ctl, ['reinit', 'alpha', 'other'], input='y') assert result.exit_code == 0 # Aborted restart result = self.runner.invoke(ctl, ['restart', 'alpha'], input='N') assert result.exit_code == 1 result = self.runner.invoke( ctl, 
['restart', 'alpha', '--pending', '--force']) assert result.exit_code == 0 # Not a member result = self.runner.invoke(ctl, ['restart', 'alpha', 'dummy', '--any'], input='y') assert result.exit_code == 1 # Wrong pg version result = self.runner.invoke( ctl, ['restart', 'alpha', '--any', '--pg-version', '9.1'], input='y') assert 'Error: PostgreSQL version' in result.output assert result.exit_code == 1 result = self.runner.invoke( ctl, ['restart', 'alpha', '--pending', '--force', '--timeout', '10min']) assert result.exit_code == 0 with patch('requests.delete', Mock(return_value=MockResponse(500))): # normal restart, the schedule is actually parsed, but not validated in patronictl result = self.runner.invoke(ctl, [ 'restart', 'alpha', 'other', '--force', '--scheduled', '2300-10-01T14:30' ]) assert 'Failed: flush scheduled restart' in result.output with patch('patroni.dcs.Cluster.is_paused', Mock(return_value=True)): result = self.runner.invoke(ctl, [ 'restart', 'alpha', 'other', '--force', '--scheduled', '2300-10-01T14:30' ]) assert result.exit_code == 1 with patch('requests.post', Mock(return_value=MockResponse())): # normal restart, the schedule is actually parsed, but not validated in patronictl result = self.runner.invoke(ctl, [ 'restart', 'alpha', '--pg-version', '42.0.0', '--scheduled', '2300-10-01T14:30' ], input='y') assert result.exit_code == 0 with patch('requests.post', Mock(return_value=MockResponse(204))): # get restart with the non-200 return code # normal restart, the schedule is actually parsed, but not validated in patronictl result = self.runner.invoke(ctl, [ 'restart', 'alpha', '--pg-version', '42.0.0', '--scheduled', '2300-10-01T14:30' ], input='y') assert result.exit_code == 0 # force restart with restart already present with patch('patroni.ctl.request_patroni', Mock(return_value=MockResponse(204))): result = self.runner.invoke(ctl, [ 'restart', 'alpha', 'other', '--force', '--scheduled', '2300-10-01T14:30' ]) assert result.exit_code == 0 with 
patch('requests.post', Mock(return_value=MockResponse(202))): # get restart with the non-200 return code # normal restart, the schedule is actually parsed, but not validated in patronictl result = self.runner.invoke(ctl, [ 'restart', 'alpha', '--pg-version', '99.0.0', '--scheduled', '2300-10-01T14:30' ], input='y') assert 'Success: restart scheduled' in result.output assert result.exit_code == 0 with patch('requests.post', Mock(return_value=MockResponse(409))): # get restart with the non-200 return code # normal restart, the schedule is actually parsed, but not validated in patronictl result = self.runner.invoke(ctl, [ 'restart', 'alpha', '--pg-version', '99.0.0', '--scheduled', '2300-10-01T14:30' ], input='y') assert 'Failed: another restart is already' in result.output assert result.exit_code == 0 @patch('patroni.ctl.get_dcs') def test_remove(self, mock_get_dcs): mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader result = self.runner.invoke(ctl, ['remove', 'alpha'], input='alpha\nslave') assert 'Please confirm' in result.output assert 'You are about to remove all' in result.output # Not typing an exact confirmation assert result.exit_code == 1 # master specified does not match master of cluster result = self.runner.invoke(ctl, ['remove', 'alpha'], input='alpha\nYes I am aware\nslave') assert result.exit_code == 1 # cluster specified on cmdline does not match verification prompt result = self.runner.invoke(ctl, ['remove', 'alpha'], input='beta\nleader') assert result.exit_code == 1 result = self.runner.invoke(ctl, ['remove', 'alpha'], input='alpha\nYes I am aware\nleader') assert result.exit_code == 0 @patch('requests.post', Mock(side_effect=requests.exceptions.ConnectionError('foo'))) def test_request_patroni(self): member = get_cluster_initialized_with_leader().leader.member self.assertRaises(requests.exceptions.ConnectionError, request_patroni, member, 'post', 'dummy', {}) def test_ctl(self): self.runner.invoke(ctl, ['list']) result = 
self.runner.invoke(ctl, ['--help']) assert 'Usage:' in result.output def test_get_any_member(self): self.assertIsNone( get_any_member(get_cluster_initialized_without_leader(), role='master')) m = get_any_member(get_cluster_initialized_with_leader(), role='master') self.assertEquals(m.name, 'leader') def test_get_all_members(self): self.assertEquals( list( get_all_members(get_cluster_initialized_without_leader(), role='master')), []) r = list( get_all_members(get_cluster_initialized_with_leader(), role='master')) self.assertEquals(len(r), 1) self.assertEquals(r[0].name, 'leader') r = list( get_all_members(get_cluster_initialized_with_leader(), role='replica')) self.assertEquals(len(r), 1) self.assertEquals(r[0].name, 'other') self.assertEquals( len( list( get_all_members(get_cluster_initialized_without_leader(), role='replica'))), 2) @patch('patroni.ctl.get_dcs') def test_members(self, mock_get_dcs): mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader result = self.runner.invoke(members, ['alpha']) assert '127.0.0.1' in result.output assert result.exit_code == 0 def test_configure(self): result = self.runner.invoke( configure, ['--dcs', 'abc', '-c', 'dummy', '-n', 'bla']) assert result.exit_code == 0 @patch('patroni.ctl.get_dcs') def test_scaffold(self, mock_get_dcs): mock_get_dcs.return_value = self.e mock_get_dcs.return_value.get_cluster = get_cluster_not_initialized_without_leader mock_get_dcs.return_value.initialize = Mock(return_value=True) mock_get_dcs.return_value.touch_member = Mock(return_value=True) mock_get_dcs.return_value.attempt_to_acquire_leader = Mock( return_value=True) mock_get_dcs.return_value.delete_cluster = Mock() with patch.object(self.e, 'initialize', return_value=False): result = self.runner.invoke(ctl, ['scaffold', 'alpha']) assert result.exception with patch.object(mock_get_dcs.return_value, 'touch_member', Mock(return_value=False)): result = self.runner.invoke(ctl, ['scaffold', 'alpha']) assert result.exception 
result = self.runner.invoke(ctl, ['scaffold', 'alpha']) assert result.exit_code == 0 mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader result = self.runner.invoke(ctl, ['scaffold', 'alpha']) assert result.exception @patch('patroni.ctl.get_dcs') def test_list_extended(self, mock_get_dcs): mock_get_dcs.return_value = self.e cluster = get_cluster_initialized_with_leader(sync=('leader', 'other')) mock_get_dcs.return_value.get_cluster = Mock(return_value=cluster) result = self.runner.invoke(ctl, ['list', 'dummy', '--extended']) assert '2100' in result.output assert 'Scheduled restart' in result.output @patch('patroni.ctl.get_dcs') @patch('requests.delete', Mock(return_value=MockResponse())) def test_flush(self, mock_get_dcs): mock_get_dcs.return_value = self.e mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader result = self.runner.invoke( ctl, ['flush', 'dummy', 'restart', '-r', 'master'], input='y') assert 'No scheduled restart' in result.output result = self.runner.invoke(ctl, ['flush', 'dummy', 'restart', '--force']) assert 'Success: flush scheduled restart' in result.output with patch.object(requests, 'delete', return_value=MockResponse(404)): result = self.runner.invoke( ctl, ['flush', 'dummy', 'restart', '--force']) assert 'Failed: flush scheduled restart' in result.output @patch('patroni.ctl.get_dcs') def test_pause_cluster(self, mock_get_dcs): mock_get_dcs.return_value = self.e mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader with patch('requests.patch', Mock(return_value=MockResponse(200))): result = self.runner.invoke(ctl, ['pause', 'dummy']) assert 'Success' in result.output with patch('requests.patch', Mock(return_value=MockResponse(500))): result = self.runner.invoke(ctl, ['pause', 'dummy']) assert 'Failed' in result.output with patch('requests.patch', Mock(return_value=MockResponse(200))),\ patch('patroni.dcs.Cluster.is_paused', Mock(return_value=True)): result = 
self.runner.invoke(ctl, ['pause', 'dummy']) assert 'Cluster is already paused' in result.output @patch('patroni.ctl.get_dcs') def test_resume_cluster(self, mock_get_dcs): mock_get_dcs.return_value = self.e mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader with patch('patroni.dcs.Cluster.is_paused', Mock(return_value=True)): with patch('requests.patch', Mock(return_value=MockResponse(200))): result = self.runner.invoke(ctl, ['resume', 'dummy']) assert 'Success' in result.output with patch('requests.patch', Mock(return_value=MockResponse(500))): result = self.runner.invoke(ctl, ['resume', 'dummy']) assert 'Failed' in result.output with patch('requests.patch', Mock(return_value=MockResponse(200))),\ patch('patroni.dcs.Cluster.is_paused', Mock(return_value=False)): result = self.runner.invoke(ctl, ['resume', 'dummy']) assert 'Cluster is not paused' in result.output with patch('requests.patch', Mock(side_effect=Exception)): result = self.runner.invoke(ctl, ['resume', 'dummy']) assert 'Can not find accessible cluster member' in result.output def test_apply_config_changes(self): config = { "postgresql": { "parameters": { "work_mem": "4MB" }, "use_pg_rewind": True }, "ttl": 30 } before_editing = format_config_for_editing(config) # Spaces are allowed and stripped, numbers and booleans are interpreted after_editing, changed_config = apply_config_changes( before_editing, config, [ "postgresql.parameters.work_mem = 5MB", "ttl=15", "postgresql.use_pg_rewind=off" ]) self.assertEquals( changed_config, { "postgresql": { "parameters": { "work_mem": "5MB" }, "use_pg_rewind": False }, "ttl": 15 }) # postgresql.parameters namespace is flattened after_editing, changed_config = apply_config_changes( before_editing, config, ["postgresql.parameters.work_mem.sub = x"]) self.assertEquals( changed_config, { "postgresql": { "parameters": { "work_mem": "4MB", "work_mem.sub": "x" }, "use_pg_rewind": True }, "ttl": 30 }) # Setting to null deletes after_editing, 
changed_config = apply_config_changes( before_editing, config, ["postgresql.parameters.work_mem=null"]) self.assertEquals(changed_config, { "postgresql": { "use_pg_rewind": True }, "ttl": 30 }) after_editing, changed_config = apply_config_changes( before_editing, config, [ "postgresql.use_pg_rewind=null", "postgresql.parameters.work_mem=null" ]) self.assertEquals(changed_config, {"ttl": 30}) @patch('sys.stdout.isatty', return_value=False) @patch('cdiff.markup_to_pager') def test_show_diff(self, mock_markup_to_pager, mock_isatty): show_diff("foo:\n bar: 1\n", "foo:\n bar: 2\n") mock_markup_to_pager.assert_not_called() mock_isatty.return_value = True show_diff("foo:\n bar: 1\n", "foo:\n bar: 2\n") mock_markup_to_pager.assert_called_once() # Test that unicode handling doesn't fail with an exception show_diff(b"foo:\n bar: \xc3\xb6\xc3\xb6\n".decode('utf-8'), b"foo:\n bar: \xc3\xbc\xc3\xbc\n".decode('utf-8'))
def test_cli_command_bad_command(): runner = CliRunner() result = runner.invoke(cli, [u"blarg"]) assert result.exit_code == 2 assert result.output == """Usage: cli [OPTIONS] COMMAND [ARGS]...
def test_cli_version():
    """`--version` output must mention the package version."""
    res = CliRunner().invoke(cli, ["--version"])
    assert ge_version in str(res.output)
def _call_colin(fnc, parameters=None):
    """Invoke the click command *fnc* through a fresh ``CliRunner``.

    Parameters
    ----------
    fnc : click.BaseCommand
        The command (or group) under test.
    parameters : sequence of str, optional
        CLI arguments; falsy values (``None``, ``[]``) mean "no arguments".

    Returns
    -------
    click.testing.Result
    """
    # CliRunner.invoke treats args=None exactly like an empty argument list,
    # so the previous if/else branching on `parameters` was redundant.
    return CliRunner().invoke(fnc, parameters or None)
def test_cli_init_on_new_project(tmp_path_factory, filesystem_csv_2):
    """End-to-end `init` on a fresh project: scaffolding, config, profiling docs.

    Fixes over the previous version:
    - ``curdir`` is captured *before* any fallible step, so the ``finally``
      clause can never raise NameError and mask the real failure.
    - the no-op ``except: raise`` clause is gone; ``try/finally`` alone
      already propagates exceptions while restoring the working directory.
    """
    curdir = os.path.abspath(os.getcwd())  # captured first: used unconditionally in finally
    try:
        basedir = str(tmp_path_factory.mktemp("test_cli_init_diff"))
        os.makedirs(os.path.join(basedir, "data"))
        shutil.copy(
            "./tests/test_sets/Titanic.csv",
            str(os.path.join(basedir, "data/Titanic.csv"))
        )
        os.chdir(basedir)

        runner = CliRunner()
        result = runner.invoke(
            cli, ["init"],
            input="Y\n1\n%s\n\nn\n\n" % str(os.path.join(basedir, "data")))
        print(result.output)
        print("result.output length:", len(result.output))

        # Guard against runaway or garbled CLI output.
        assert len(result.output) < 10000, "CLI output is unreasonably long."
        assert len(re.findall(
            "{", result.output)) < 100, "CLI contains way more '{' than we would reasonably expect."

        assert """Always know what to expect from your data""" in result.output
        assert """Let's add Great Expectations to your project""" in result.output
        assert """open a tutorial notebook""" in result.output

        assert os.path.isdir(os.path.join(basedir, "great_expectations"))
        assert os.path.isfile(os.path.join(
            basedir, "great_expectations/great_expectations.yml"))
        config = yaml.load(
            open(os.path.join(basedir, "great_expectations/great_expectations.yml"), "r"))
        assert config["datasources"]["data__dir"]["class_name"] == "PandasDatasource"

        print(gen_directory_tree_str(os.path.join(basedir, "great_expectations")))
        # NOTE(review): the expected tree literal below was whitespace-mangled
        # upstream (original newlines/indentation lost); preserved verbatim.
        assert gen_directory_tree_str(os.path.join(basedir, "great_expectations")) == """\ great_expectations/ .gitignore great_expectations.yml datasources/ expectations/ data__dir/ default/ Titanic/ BasicDatasetProfiler.json notebooks/ create_expectations.ipynb integrate_validation_into_pipeline.ipynb plugins/ uncommitted/ config_variables.yml data_docs/ local_site/ index.html expectations/ data__dir/ default/ Titanic/ BasicDatasetProfiler.html validations/ profiling/ data__dir/ default/ Titanic/ BasicDatasetProfiler.html samples/ validations/ profiling/ data__dir/ default/ Titanic/ BasicDatasetProfiler.json """

        assert os.path.isfile(
            os.path.join(
                basedir,
                "great_expectations/expectations/data__dir/default/Titanic/BasicDatasetProfiler.json"
            )
        )

        # Only the profiling result should exist under uncommitted/validations.
        fnames = []
        path = os.path.join(
            basedir,
            "great_expectations/uncommitted/validations/profiling/data__dir/default/Titanic")
        for (dirpath, dirnames, filenames) in os.walk(path):
            for filename in filenames:
                fnames.append(filename)
        assert fnames == ["BasicDatasetProfiler.json"]

        # The rendered data-docs page must exist and be non-empty.
        assert os.path.isfile(
            os.path.join(
                basedir,
                "great_expectations/uncommitted/data_docs/local_site/validations/profiling/data__dir/default/Titanic/BasicDatasetProfiler.html")
        )
        assert os.path.getsize(
            os.path.join(
                basedir,
                "great_expectations/uncommitted/data_docs/local_site/validations/profiling/data__dir/default/Titanic/BasicDatasetProfiler.html"
            )
        ) > 0
        print(result)
    finally:
        os.chdir(curdir)
def test_pocket_feed(mock_pocket):
    """`pocket feed` must list entries tagged 'feed'."""
    CliRunner().invoke(cli, ['pocket', 'feed'])
    mock_pocket.pocket_list.assert_called_with(tag='feed')
def test_cli_validate_missing_positional_arguments():
    """Running `validate` without DATASET must report the missing argument."""
    res = CliRunner().invoke(cli, ["validate"])
    assert 'Error: Missing argument "DATASET".' in str(res.output)
def test_feed_parse_two_url(mock_feed):
    """Two feed URLs are forwarded to parse_all as a tuple."""
    urls = ('http://feed1', 'http://feed2')
    CliRunner().invoke(cli, ['feed', 'parse', *urls])
    mock_feed.parse_all.assert_called_with(urls)
def test_pocket_add_no_arg(mock_pocket):
    """`pocket add` with no URLs calls pocket_add with an empty list."""
    CliRunner().invoke(cli, ['pocket', 'add'])
    mock_pocket.pocket_add.assert_called_with([])
def test_remote_execution_fail(mock_run, tmp_workspace):
    """A failing remote command still runs the trailing fetch rsync, and
    its exit code is propagated to the CLI result."""
    # push OK, remote command fails with 123, fetch OK
    mock_run.side_effect = [
        Mock(returncode=0),
        Mock(returncode=123),
        Mock(returncode=0),
    ]

    with cwd(tmp_workspace):
        outcome = CliRunner().invoke(entrypoints.remote, ["echo", "test >> .file"])

    assert outcome.exit_code == 123
    assert mock_run.call_count == 3

    # Common prefix shared by the push and fetch rsync invocations.
    rsync_base = [
        "rsync", "-arlpmchz", "--copy-unsafe-links",
        "-e", "ssh -Kq -o BatchMode=yes", "--force",
    ]
    mock_run.assert_has_calls([
        call(
            rsync_base + [
                "--delete",
                "--rsync-path", "mkdir -p .remotes/myproject && rsync",
                "--include-from", ANY,
                "--exclude-from", ANY,
                f"{tmp_workspace}/",
                f"{TEST_HOST}:{TEST_DIR}",
            ],
            stdout=ANY, stderr=ANY,
        ),
        call(
            [
                "ssh", "-tKq", "-o", "BatchMode=yes", TEST_HOST,
                # NOTE(review): this script literal was whitespace-mangled
                # upstream; preserved verbatim.
                """\ cd .remotes/myproject if [ -f .remoteenv ]; then source .remoteenv fi cd . echo 'test >> .file' """,
            ],
            stdout=ANY, stdin=ANY, stderr=ANY,
        ),
        call(
            rsync_base + [
                "--exclude-from", ANY,
                f"{TEST_HOST}:{TEST_DIR}/",
                f"{tmp_workspace}",
            ],
            stdout=ANY, stderr=ANY,
        ),
    ])
def test_pocket_add_one_arg(mock_pocket):
    """A single URL is forwarded to pocket_add as a one-element tuple."""
    CliRunner().invoke(cli, ['pocket', 'add', 'http://url1.com'])
    mock_pocket.pocket_add.assert_called_with(('http://url1.com', ))
def test_cli_loads():
    """The CLI entry point imports and runs without raising."""
    outcome = CliRunner().invoke(hscli.cli)
    assert outcome.exception is None
    assert outcome.exit_code == 0
def test_feed_parse_no_url(mock_feed):
    """`feed parse` with no URLs calls parse_all with an empty list."""
    CliRunner().invoke(cli, ['feed', 'parse'])
    mock_feed.parse_all.assert_called_with([])
def test_handle_session_error_during_photo_iteration(self):
    """A session error while iterating photos triggers one re-auth attempt
    and a bounded number of retries before giving up."""
    if os.path.exists("tests/fixtures/Photos"):
        shutil.rmtree("tests/fixtures/Photos")
    os.makedirs("tests/fixtures/Photos")

    with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
        # Pass fixed client ID via environment variable
        os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"

        def mock_raise_response_error(offset):
            raise PyiCloudAPIResponseError("Invalid global session", 100)

        with mock.patch("time.sleep") as sleep_mock:
            with mock.patch.object(PhotoAlbum, "photos_request") as pa_photos_request:
                pa_photos_request.side_effect = mock_raise_response_error

                # Let the initial authenticate() call succeed,
                # but do nothing on the second try.
                orig_authenticate = PyiCloudService.authenticate

                def mocked_authenticate(self):
                    if not hasattr(self, "already_authenticated"):
                        orig_authenticate(self)
                        setattr(self, "already_authenticated", True)

                with mock.patch.object(
                    PyiCloudService, "authenticate", new=mocked_authenticate
                ):
                    result = CliRunner().invoke(
                        main,
                        [
                            "--username", "*****@*****.**",
                            "--password", "password1",
                            "--recent", "1",
                            "--skip-videos",
                            "--skip-live-photos",
                            "--no-progress-bar",
                            "tests/fixtures/Photos",
                        ],
                    )
                    print_result_exception(result)

                    # Error msg should be repeated 5 times
                    assert (
                        self._caplog.text.count("Session error, re-authenticating...")
                        == 5
                    )
                    self.assertIn(
                        "INFO iCloud re-authentication failed! Please try again later.",
                        self._caplog.text,
                    )
                    # Make sure we only call sleep 4 times (skip the first retry)
                    self.assertEquals(sleep_mock.call_count, 4)
                    assert result.exit_code == -1
def test_remote_labeling_works(mock_run, tmp_path, label, host):
    """`-l <label>` selects the configured host carrying that label."""
    mock_run.return_value = Mock(returncode=0)
    runner = CliRunner()
    # NOTE(review): this TOML literal was whitespace-mangled upstream
    # (original newlines lost); preserved verbatim.
    (tmp_path / WORKSPACE_CONFIG).write_text(f"""\ [[hosts]] host = "host1" directory = "{TEST_DIR}" default = true label = "usual" [[hosts]] host = "host2" directory = "{TEST_DIR}" label = "unusual" [[hosts]] host = "host3" directory = "{TEST_DIR}" """)

    with cwd(tmp_path):
        outcome = runner.invoke(entrypoints.remote, ["-l", label, "echo test >> .file"])

    assert outcome.exit_code == 0
    assert mock_run.call_count == 3

    # Common prefix shared by the push and fetch rsync invocations.
    rsync_base = [
        "rsync", "-arlpmchz", "--copy-unsafe-links",
        "-e", "ssh -Kq -o BatchMode=yes", "--force",
    ]
    mock_run.assert_has_calls([
        call(
            rsync_base + [
                "--delete",
                "--rsync-path", "mkdir -p .remotes/myproject && rsync",
                "--include-from", ANY,
                "--exclude-from", ANY,
                f"{tmp_path}/",
                f"{host}:{TEST_DIR}",
            ],
            stdout=ANY, stderr=ANY,
        ),
        call(
            [
                "ssh", "-tKq", "-o", "BatchMode=yes", host,
                # NOTE(review): mangled upstream; preserved verbatim.
                """\ cd .remotes/myproject if [ -f .remoteenv ]; then source .remoteenv fi cd . echo test >> .file """,
            ],
            stdout=ANY, stdin=ANY, stderr=ANY,
        ),
        call(
            rsync_base + [
                "--exclude-from", ANY,
                f"{host}:{TEST_DIR}/",
                f"{tmp_path}",
            ],
            stdout=ANY, stderr=ANY,
        ),
    ])
def test_until_found(self):
    """--until-found stops after N consecutive already-downloaded items."""
    base_dir = "tests/fixtures/Photos"
    if os.path.exists("tests/fixtures/Photos"):
        shutil.rmtree("tests/fixtures/Photos")
    os.makedirs("tests/fixtures/Photos/2018/07/30/")
    os.makedirs("tests/fixtures/Photos/2018/07/31/")

    # (relative path, asset kind) — listing order matters for assert_has_calls.
    files_to_download = [
        ("2018/07/31/IMG_7409.JPG", "photo"),
        ("2018/07/31/IMG_7409-medium.MOV", "photo"),
        ("2018/07/30/IMG_7407.JPG", "photo"),
        ("2018/07/30/IMG_7407-medium.MOV", "photo"),
        ("2018/07/30/IMG_7403.MOV", "video"),
        ("2018/07/30/IMG_7402.MOV", "video"),
        ("2018/07/30/IMG_7399-medium.MOV", "photo"),
    ]
    files_to_skip = [
        ("2018/07/30/IMG_7408.JPG", "photo"),
        ("2018/07/30/IMG_7408-medium.MOV", "photo"),
        ("2018/07/30/IMG_7405.MOV", "video"),
        ("2018/07/30/IMG_7404.MOV", "video"),
        ("2018/07/30/IMG_7401.MOV", "photo"),
        ("2018/07/30/IMG_7400.JPG", "photo"),
        ("2018/07/30/IMG_7400-medium.MOV", "photo"),
        ("2018/07/30/IMG_7399.JPG", "photo"),
    ]
    # Pre-create the "already downloaded" files on disk.
    for path, _ in files_to_skip:
        open("%s/%s" % (base_dir, path), "a").close()

    with mock.patch("icloudpd.download.download_media") as dp_patched:
        dp_patched.return_value = True
        with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
            # Pass fixed client ID via environment variable
            os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
            result = CliRunner().invoke(
                main,
                [
                    "--username", "*****@*****.**",
                    "--password", "password1",
                    "--live-photo-size", "medium",
                    "--until-found", "3",
                    "--recent", "20",
                    "--no-progress-bar",
                    base_dir,
                ],
            )
            print_result_exception(result)

            # Live-photo .MOV companions are fetched as "mediumVideo".
            expected_calls = [
                call(
                    ANY, ANY, "%s/%s" % (base_dir, path),
                    "mediumVideo"
                    if (kind == 'photo' and path.endswith('.MOV'))
                    else "original")
                for path, kind in files_to_download
            ]
            dp_patched.assert_has_calls(expected_calls)

            self.assertIn(
                "DEBUG Looking up all photos and videos...", self._caplog.text
            )
            self.assertIn(
                "INFO Downloading ??? original photos and videos to tests/fixtures/Photos/ ...",
                self._caplog.text,
            )
            for path, _ in files_to_skip:
                self.assertIn(
                    "INFO %s/%s already exists." % (base_dir, path),
                    self._caplog.text)
            self.assertIn(
                "INFO Found 3 consecutive previously downloaded photos. Exiting",
                self._caplog.text,
            )
            assert result.exit_code == 0
def test_version():
    """`--version` output must end with the package version string."""
    out = CliRunner().invoke(cli.cli, ['--version']).output
    assert out.rstrip('\n').endswith(__version__)
def test_apt_group():
    """The apt command group must be invocable and exit cleanly."""
    res = CliRunner().invoke(voithos.cli.service.apt.get_apt_group())
    assert res.exit_code == 0
def test_download_and_skip_existing_photos(self):
    """Photos already on disk are skipped; missing ones are downloaded and
    their mtime is set to the photo creation date."""
    if os.path.exists("tests/fixtures/Photos"):
        shutil.rmtree("tests/fixtures/Photos")
    os.makedirs("tests/fixtures/Photos")
    os.makedirs("tests/fixtures/Photos/2018/07/30/")
    # Pre-create two files that must be reported as "already exists".
    for existing in (
        "tests/fixtures/Photos/2018/07/30/IMG_7408.JPG",
        "tests/fixtures/Photos/2018/07/30/IMG_7407.JPG",
    ):
        open(existing, "a").close()

    with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
        # Pass fixed client ID via environment variable
        os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
        result = CliRunner().invoke(
            main,
            [
                "--username", "*****@*****.**",
                "--password", "password1",
                "--recent", "5",
                "--skip-videos",
                "--skip-live-photos",
                "--set-exif-datetime",
                "--no-progress-bar",
                "tests/fixtures/Photos",
            ],
        )
        print_result_exception(result)

        self.assertIn("DEBUG Looking up all photos...", self._caplog.text)
        self.assertIn(
            "INFO Downloading 5 original photos to tests/fixtures/Photos/ ...",
            self._caplog.text,
        )
        self.assertIn(
            "INFO Downloading tests/fixtures/Photos/2018/07/31/IMG_7409.JPG",
            self._caplog.text,
        )
        self.assertNotIn(
            "IMG_7409.MOV",
            self._caplog.text,
        )
        self.assertIn(
            "INFO tests/fixtures/Photos/2018/07/30/IMG_7408.JPG already exists.",
            self._caplog.text,
        )
        self.assertIn(
            "INFO tests/fixtures/Photos/2018/07/30/IMG_7407.JPG already exists.",
            self._caplog.text,
        )
        self.assertIn(
            "INFO Skipping IMG_7405.MOV, only downloading photos.",
            self._caplog.text,
        )
        self.assertIn(
            "INFO Skipping IMG_7404.MOV, only downloading photos.",
            self._caplog.text,
        )
        self.assertIn(
            "INFO All photos have been downloaded!", self._caplog.text
        )

        # Check that file was downloaded
        self.assertTrue(
            os.path.exists("tests/fixtures/Photos/2018/07/31/IMG_7409.JPG"))
        # Check that mtime was updated to the photo creation date
        photo_mtime = os.path.getmtime("tests/fixtures/Photos/2018/07/31/IMG_7409.JPG")
        photo_modified_time = datetime.datetime.fromtimestamp(photo_mtime)
        self.assertEquals(
            "2018-07-31 14:22:24",
            photo_modified_time.strftime('%Y-%m-%d %H:%M:%S'))

    assert result.exit_code == 0
def test_normal_append(self):
    """`--append` adds rows; a plain rerun overwrites the table."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        files = [valid_json_multi_file_2_1()]
        table_name = "multij2"
        expected_tables = [table_name, SourceInfo.get_table_name()]
        base_rows = [(1, 4.0, "a"), (2, 2.1, "bb"), (3, 120.9, "ccc")]

        def run_and_verify(extra_args, expected_rows):
            # Run the command, then verify table names and row content.
            result = runner.invoke(
                cmd, ["-o", db_path] + extra_args + ["file"] + files)
            print_traceback(result)
            assert result.exit_code == ExitCode.SUCCESS

            con = SimpleSQLite(db_path, "r")
            actual_tables = con.fetch_table_names()
            print_test_result(expected=expected_tables, actual=actual_tables)
            assert set(actual_tables) == set(expected_tables)

            actual_rows = con.select("*", table_name=table_name).fetchall()
            print_test_result(expected=expected_rows, actual=actual_rows)
            assert expected_rows == actual_rows

        # first execution without --append option (new) ---
        run_and_verify([], base_rows)

        # second execution with --append option ---
        run_and_verify(["--append"], base_rows * 2)

        # third execution without --append option (overwrite) ---
        run_and_verify([], base_rows)
def test_download_photos_and_set_exif(self):
    """The first photo is downloaded for real (subsequent downloads are
    mocked) and gets an EXIF timestamp when none is present."""
    if os.path.exists("tests/fixtures/Photos"):
        shutil.rmtree("tests/fixtures/Photos")
    os.makedirs("tests/fixtures/Photos")
    os.makedirs("tests/fixtures/Photos/2018/07/30/")
    open("tests/fixtures/Photos/2018/07/30/IMG_7408.JPG", "a").close()
    open("tests/fixtures/Photos/2018/07/30/IMG_7407.JPG", "a").close()

    # Download the first photo, but mock the video download
    orig_download = PhotoAsset.download

    def mocked_download(self, size):
        if not hasattr(PhotoAsset, "already_downloaded"):
            response = orig_download(self, size)
            setattr(PhotoAsset, "already_downloaded", True)
            return response
        return mock.MagicMock()

    with mock.patch.object(PhotoAsset, "download", new=mocked_download):
        with mock.patch(
            "icloudpd.exif_datetime.get_photo_exif"
        ) as get_exif_patched:
            get_exif_patched.return_value = False
            with vcr.use_cassette("tests/vcr_cassettes/listing_photos.yml"):
                # Pass fixed client ID via environment variable
                os.environ["CLIENT_ID"] = "DE309E26-942E-11E8-92F5-14109FE0B321"
                result = CliRunner().invoke(
                    main,
                    [
                        "--username", "*****@*****.**",
                        "--password", "password1",
                        "--recent", "4",
                        "--set-exif-datetime",
                        # '--skip-videos',
                        # "--skip-live-photos",
                        "--no-progress-bar",
                        "tests/fixtures/Photos",
                    ],
                )
                print_result_exception(result)

                self.assertIn(
                    "DEBUG Looking up all photos and videos...",
                    self._caplog.text,
                )
                self.assertIn(
                    "INFO Downloading 4 original photos and videos to tests/fixtures/Photos/ ...",
                    self._caplog.text,
                )
                self.assertIn(
                    "INFO Downloading tests/fixtures/Photos/2018/07/31/IMG_7409.JPG",
                    self._caplog.text,
                )
                # YYYY:MM:DD is the correct format.
                self.assertIn(
                    "DEBUG Setting EXIF timestamp for tests/fixtures/Photos/2018/07/31/IMG_7409.JPG: 2018:07:31",
                    self._caplog.text,
                )
                self.assertIn(
                    "INFO All photos have been downloaded!", self._caplog.text
                )
                assert result.exit_code == 0
def test_normal_multi_file_different_table(self):
    """Loading many heterogeneous inputs creates one table per valid file;
    invalid/unsupported files are ignored without failing the run."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        files = [
            valid_json_single_file(),
            invalid_json_single_file(),
            valid_json_multi_file_1(),
            valid_json_kv_file(),
            valid_csv_file_1_1(),
            valid_csv_file_2_1(),
            invalid_csv_file(),
            valid_tsv_file(),
            invalid_tsv_file(),
            valid_excel_file(),
            invalid_excel_file_1(),
            invalid_excel_file_2(),
            valid_html_file(),
            invalid_html_file(),
            valid_ltsv_file(),
            invalid_ltsv_file(),
            valid_markdown_file(),
            not_supported_format_file(),
        ]
        result = runner.invoke(cmd, ["-o", db_path, "file"] + files)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        expected_tables = [
            "singlejson", "multij1", "multij2", "valid_kv", "csv_a",
            "rename_insert", "excel_sheet_a", "excel_sheet_c", "excel_sheet_d",
            "valid_ltsv_a", "testtitle_tablename", "testtitle_html2", "tsv_a",
            "valid_mdtable_markdown1",
            SourceInfo.get_table_name(),
        ]
        actual_tables = con.fetch_table_names()
        print_test_result(expected=expected_tables, actual=actual_tables)
        assert set(actual_tables) == set(expected_tables)

        expected_data_table = {
            "singlejson": [(1, 4.0, "a"), (2, 2.1, "bb"), (3, 120.9, "ccc")],
            "multij1": [(1, 4.0, "a"), (2, 2.1, "bb"), (3, 120.9, "ccc")],
            "multij2": [(1, 4.0), (2, None), (3, 120.9)],
            "valid_kv": [("json_b", "hoge"), ("json_c", "bar")],
            "csv_a": [(1, 4.0, "a"), (2, 2.1, "bb"), (3, 120.9, "ccc")],
            "rename_insert": [
                (1, 55, "D Sam", 31, "Raven"),
                (2, 36, "J Ifdgg", 30, "Raven"),
                (3, 91, "K Wedfb", 28, "Raven"),
            ],
            "excel_sheet_a": [(1.0, 1.1, "a"), (2.0, 2.2, "bb"), (3.0, 3.3, "cc")],
            "excel_sheet_c": [(1, 1.1, "a"), (2, "", "bb"), (3, 3.3, "")],
            "excel_sheet_d": [(1, 1.1, "a"), (2, "", "bb"), (3, 3.3, "")],
            "testtitle_tablename": [(1, 123.1, "a"), (2, 2.2, "bb"), (3, 3.3, "ccc")],
            "valid_ltsv_a": [
                (1, 123.1, u'"ltsv0"', 1.0, u'"1"'),
                (2, 2.2, u'"ltsv1"', 2.2, u'"2.2"'),
                (3, 3.3, u'"ltsv2"', 3.0, u'"cccc"'),
            ],
            "testtitle_html2": [(1, 123.1), (2, 2.2), (3, 3.3)],
            "tsv_a": [(1, 4.0, "tsv0"), (2, 2.1, "tsv1"), (3, 120.9, "tsv2")],
            "valid_mdtable_markdown1": [(1, 123.1, "a"), (2, 2.2, "bb"), (3, 3.3, "ccc")],
        }

        for table in con.fetch_table_names():
            if table == SourceInfo.get_table_name():
                continue  # bookkeeping table; its content is not under test
            actual_data = con.select("*", table_name=table).fetchall()
            expected_data = expected_data_table.get(table)
            message = "table={}, expected={}, actual={}".format(
                table, expected_data, actual_data)
            print("--- table: {} ---".format(table))
            print_test_result(expected=expected_data, actual=actual_data)
            # row order is not guaranteed across input formats
            assert sorted(expected_data) == sorted(actual_data), message
class CliIntegrationTestCase(unittest.TestCase):
    """Exercise the changelog CLI end-to-end inside isolated filesystems."""

    MISSING_PROMPT = 'No CHANGELOG.md found, do you want to create one? [y/N]: y'

    def setUp(self):
        self.runner = CliRunner()
        # click needs a UTF-8 locale; only set one if the env lacks it
        os.environ.setdefault('LC_ALL', 'en_US.utf-8')
        os.environ.setdefault('LANG', 'en_US.utf-8')

    def test_cli_init(self):
        with self.runner.isolated_filesystem():
            res = self.runner.invoke(cli, ['init'])
            self.assertTrue(os.path.isfile('CHANGELOG.md'))
            self.assertTrue(res)

    def test_cli_current(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            res = self.runner.invoke(cli, ['current'])
            self.assertEqual(res.output.strip(), '0.0.0')

    def test_cli_current_missing(self):
        with self.runner.isolated_filesystem():
            res = self.runner.invoke(cli, ['current'])
            self.assertEqual(res.output.strip(), '')

    def test_cli_suggest(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            res = self.runner.invoke(cli, ['suggest'])
            self.assertEqual(res.output.strip(), '0.0.1')

    def test_cli_suggest_missing(self):
        with self.runner.isolated_filesystem():
            res = self.runner.invoke(cli, ['suggest'])
            self.assertEqual(res.output.strip(), '')

    def test_cli_version_flag(self):
        res = self.runner.invoke(cli, ['--version'])
        self.assertTrue(res)

    def test_cli_added(self):
        # an "added" entry bumps the minor version
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            res = self.runner.invoke(cli, ['added', 'Adding a new feature'])
            self.assertTrue(res)
            suggest = self.runner.invoke(cli, ['suggest'])
            self.assertEqual(suggest.output.strip(), '0.1.0')

    def test_cli_added_missing(self):
        with self.runner.isolated_filesystem():
            res = self.runner.invoke(
                cli, ['added', 'Adding a new feature'], input='y\n')
            self.assertEqual(res.output.strip(), self.MISSING_PROMPT)

    def test_cli_changed(self):
        # a "changed" entry bumps the patch version
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            res = self.runner.invoke(cli, ['changed', 'Changing a feature'])
            self.assertTrue(res)
            suggest = self.runner.invoke(cli, ['suggest'])
            self.assertEqual(suggest.output.strip(), '0.0.1')

    def test_cli_changed_missing(self):
        with self.runner.isolated_filesystem():
            res = self.runner.invoke(
                cli, ['changed', 'changing a feature'], input='y\n')
            self.assertEqual(res.output.strip(), self.MISSING_PROMPT)

    def test_cli_fixed(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            res = self.runner.invoke(cli, ['fixed', 'Fix a Bug'])
            self.assertTrue(res)
            suggest = self.runner.invoke(cli, ['suggest'])
            self.assertEqual(suggest.output.strip(), '0.0.1')

    def test_cli_suggest_type_fixed(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            res = self.runner.invoke(cli, ['fixed', 'Fix a Bug'])
            self.assertTrue(res)
            suggest = self.runner.invoke(cli, ['suggest', '--type'])
            self.assertEqual(suggest.output.strip(), 'patch')

    def test_cli_fixed_missing(self):
        with self.runner.isolated_filesystem():
            res = self.runner.invoke(cli, ['fixed', 'Fix a Bug'], input='y\n')
            self.assertEqual(res.output.strip(), self.MISSING_PROMPT)

    def test_cli_removed(self):
        # a "removed" entry is breaking -> major bump
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            res = self.runner.invoke(cli, ['removed', 'Breaking Change'])
            self.assertTrue(res)
            suggest = self.runner.invoke(cli, ['suggest'])
            self.assertEqual(suggest.output.strip(), '1.0.0')

    def test_cli_suggest_type_removed(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            res = self.runner.invoke(cli, ['removed', 'Breaking Change'])
            self.assertTrue(res)
            suggest = self.runner.invoke(cli, ['suggest', '--type'])
            self.assertEqual(suggest.output.strip(), 'major')

    def test_cli_removed_missing(self):
        with self.runner.isolated_filesystem():
            res = self.runner.invoke(
                cli, ['removed', 'Breaking Change'], input='y\n')
            self.assertEqual(res.output.strip(), self.MISSING_PROMPT)

    def test_cli_release(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            self.runner.invoke(cli, ['added', 'Adding a new feature'])
            res = self.runner.invoke(cli, ['release'])
            self.assertEqual(
                res.output.strip(),
                'Planning on releasing version 0.1.0. Proceed? [y/N]:')

    def test_cli_release_y(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            self.runner.invoke(cli, ['added', 'Adding a new feature'])
            res = self.runner.invoke(cli, ['release', '--yes'])
            self.assertTrue(res)
            current = self.runner.invoke(cli, ['current'])
            self.assertEqual(current.output.strip(), '0.1.0')

    def test_cli_release_y_specify_type(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            self.runner.invoke(cli, ['added', 'Adding a new feature'])
            res = self.runner.invoke(cli, ['release', '--major', '--yes'])
            self.assertTrue(res)
            current = self.runner.invoke(cli, ['current'])
            self.assertEqual(current.output.strip(), '1.0.0')

    def test_cli_release_missing(self):
        with self.runner.isolated_filesystem():
            res = self.runner.invoke(cli, ['release'])
            self.assertEqual(
                res.output.strip(),
                'No CHANGELOG.md found, do you want to create one? [y/N]:')

    def test_cli_view(self):
        with self.runner.isolated_filesystem():
            self.runner.invoke(cli, ['init'])
            self.runner.invoke(cli, ['added', 'Adding a new feature'])
            res = self.runner.invoke(cli, ['view'])
            self.assertTrue(res)
class TestKfkConsole(TestCase):
    """Tests for the `kfk console-consumer` / `kfk console-producer` commands.

    `os.system` inside the command module is mocked, so each test only
    verifies that the expected kafka shell command is executed inside the
    `{cluster}-kafka-0` broker pod via kubectl.
    """

    def setUp(self):
        self.runner = CliRunner()
        self.cluster = "my-cluster"
        self.namespace = "kafka"
        self.topic = "my-topic"

    @mock.patch('kfk.console_command.os')
    def test_console_consumer(self, mock_os):
        """Plain consumer: bootstrap server on the plaintext port 9092."""
        result = self.runner.invoke(kfk, ['console-consumer', '--topic', self.topic, '-c', self.cluster, '-n',
                                          self.namespace])
        assert result.exit_code == 0
        # Use the {cluster} placeholder rather than a hard-coded "my-cluster"
        # so the template is consistent with the format() call below; the
        # rendered command is byte-identical.
        native_command = "bin/kafka-console-consumer.sh --bootstrap-server {cluster}-kafka-bootstrap:9092 --topic {" \
                         "topic} "
        mock_os.system.assert_called_with(
            Kubectl().exec("-it", "{cluster}-kafka-0").container("kafka").namespace(self.namespace).exec_command(
                native_command).build().format(cluster=self.cluster, topic=self.topic))

    @mock.patch('kfk.console_command.transfer_file_to_container')
    @mock.patch('kfk.console_command.os')
    def test_console_consumer_with_consumer_config(self, mock_os, mock_transfer_file_to_container):
        """Consumer with --consumer.config: TLS port 9093 plus SSL properties,
        and the temporary keystore files are cleaned up afterwards."""
        result = self.runner.invoke(kfk,
                                    ['console-consumer', '--topic', self.topic, '--consumer.config',
                                     'files/client.properties', '-c', self.cluster, '-n', self.namespace])
        assert result.exit_code == 0
        native_command = "bin/kafka-console-consumer.sh --bootstrap-server {cluster}-kafka-bootstrap:9093 --topic {" \
                         "topic} --consumer-property security.protocol=SSL --consumer-property " \
                         "ssl.truststore.location=/tmp/truststore.jks --consumer-property " \
                         "ssl.truststore.password=123456 --consumer-property ssl.keystore.location=/tmp/user.p12 " \
                         "--consumer-property ssl.keystore.password=123456;rm -rf /tmp/truststore.jks;rm -rf " \
                         "/tmp/user.p12;"
        mock_os.system.assert_called_with(
            Kubectl().exec("-it", "{cluster}-kafka-0").container("kafka").namespace(self.namespace).exec_command(
                native_command).build().format(cluster=self.cluster, topic=self.topic))

    @mock.patch('kfk.console_command.os')
    def test_console_consumer_with_from_beginning(self, mock_os):
        """Consumer with --from-beginning: flag is appended to the command."""
        from_beginning = True
        result = self.runner.invoke(kfk, ['console-consumer', '--topic', self.topic, '-c', self.cluster, '-n',
                                          self.namespace, '--from-beginning'])
        assert result.exit_code == 0
        # {cluster} placeholder for consistency with the format() call below.
        native_command = "bin/kafka-console-consumer.sh --bootstrap-server {cluster}-kafka-bootstrap:9092 --topic {" \
                         "topic} {from_beginning}"
        mock_os.system.assert_called_with(
            Kubectl().exec("-it", "{cluster}-kafka-0").container("kafka").namespace(self.namespace).exec_command(
                native_command).build().format(cluster=self.cluster, topic=self.topic,
                                               # conditional expression instead of the
                                               # legacy `cond and a or b` idiom
                                               from_beginning='--from-beginning' if from_beginning else ''))

    @mock.patch('kfk.console_command.os')
    def test_console_producer(self, mock_os):
        """Plain producer: broker list on the plaintext port 9092."""
        result = self.runner.invoke(kfk, ['console-producer', '--topic', self.topic, '-c', self.cluster, '-n',
                                          self.namespace])
        assert result.exit_code == 0
        # {cluster} placeholder for consistency with the format() call below.
        native_command = "bin/kafka-console-producer.sh --broker-list {cluster}-kafka-brokers:9092 --topic {topic}"
        mock_os.system.assert_called_with(
            Kubectl().exec("-it", "{cluster}-kafka-0").container("kafka").namespace(self.namespace).exec_command(
                native_command).build().format(cluster=self.cluster, topic=self.topic))

    @mock.patch('kfk.console_command.transfer_file_to_container')
    @mock.patch('kfk.console_command.os')
    def test_console_producer_with_producer_config(self, mock_os, mock_transfer_file_to_container):
        """Producer with --producer.config: TLS port 9093 plus SSL properties,
        and the temporary keystore files are cleaned up afterwards."""
        result = self.runner.invoke(kfk,
                                    ['console-producer', '--topic', self.topic, '--producer.config',
                                     'files/client.properties', '-c', self.cluster, '-n', self.namespace])
        assert result.exit_code == 0
        native_command = "bin/kafka-console-producer.sh --broker-list {cluster}-kafka-brokers:9093 --topic {topic} " \
                         "--producer-property security.protocol=SSL --producer-property " \
                         "ssl.truststore.location=/tmp/truststore.jks --producer-property " \
                         "ssl.truststore.password=123456 --producer-property ssl.keystore.location=/tmp/user.p12 " \
                         "--producer-property ssl.keystore.password=123456;rm -rf /tmp/truststore.jks;rm -rf " \
                         "/tmp/user.p12;"
        mock_os.system.assert_called_with(
            Kubectl().exec("-it", "{cluster}-kafka-0").container("kafka").namespace(self.namespace).exec_command(
                native_command).build().format(cluster=self.cluster, topic=self.topic))
def test_help(self, options, expected): runner = CliRunner() result = runner.invoke(cmd, options) assert result.exit_code == expected