def test_delete_repository_success(self):
    self.create_repository()
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.repomgrcli,
        [
            '--logfile', os.devnull,
            '--host', host,
            '--port', str(port),
            'delete',
            '--yes',  # This ensures no prompting will happen
            '--repository', self.args['repository']
        ],
        obj={"filters": []})
    self.assertFalse(
        curator.get_repository(self.client, self.args['repository']))
def test_merge(self):
    count = 1
    idx = 'my_index'
    self.create_index(idx)
    self.add_docs(idx)
    ilo1 = curator.IndexList(self.client)
    ilo1._get_segmentcounts()
    self.assertEqual(3, ilo1.index_info[idx]['segments'])
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(self.args['actionfile'],
                      testvars.forcemerge_test.format(count, 0.20))
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    ilo2 = curator.IndexList(self.client)
    ilo2._get_segmentcounts()
    self.assertEqual(count, ilo2.index_info[idx]['segments'])
def test_cli_open_indices_only(self):
    self.create_index('open-one')
    self.create_index('closed-one')
    self.close_index('closed-one')
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        [
            '--logfile', os.devnull,
            '--host', host,
            '--port', str(port),
            'show', 'indices',
            '--open-only',
            '--suffix', 'one',
        ],
        obj={"filters": []})
    self.assertEqual(['open-one'], result.output.splitlines()[:2])
def test_clodius_aggregate_bigwig():
    runner = clt.CliRunner()
    input_file = op.join(testdir, 'sample_data', 'test1.bw')
    print("input_file:", input_file)

    result = runner.invoke(cca.bigwig, [
        input_file,
        '--chromsizes-filename', 'test/sample_data/test.mr.chromSizes',
        '--output-file', '/tmp/test.mr.bw'
    ])

    import traceback
    print("exc_info:", result.exc_info)
    a, b, tb = result.exc_info
    print("result:", result)
    print("result.output", result.output)
    print("result.error", traceback.print_tb(tb))
    print("Exception:", a, b)

    assert result.exit_code == 0
def test_default_plugin_file_success(mock_build, plugin_config_filename,
                                     plugin_config_content, artifact_file):
    runner = click_testing.CliRunner()
    # Change the runner's working dir to test the default works
    with runner.isolated_filesystem():
        with open(plugin_config_filename, 'w') as f:
            f.write(
                yaml.dump(plugin_config_content, default_flow_style=False))
            plugin_config_file = os.path.realpath(f.name)

        result = runner.invoke(cli.delphix_sdk, ['build', '-a', artifact_file])

    assert result.exit_code == 0, 'Output: {}'.format(result.output)
    mock_build.assert_called_once_with(plugin_config_file,
                                       artifact_file,
                                       False,
                                       False,
                                       local_vsdk_root=None)
def test_filter_by_alias_bad_aliases(self):
    alias = 'testalias'
    self.write_config(
        self.args['configfile'], testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.filter_by_alias.format('{"this":"isadict"}', False))
    self.create_index('my_index')
    self.create_index('dummy')
    self.client.indices.put_alias(index='dummy', name=alias)
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEquals(
        type(curator.ConfigurationError()), type(result.exception))
    self.assertEquals(2, len(curator.get_indices(self.client)))
def test_cli_show_indices_older_than_zero(self):
    self.create_indices(10)
    indices = curator.get_indices(self.client)
    expected = sorted(indices, reverse=True)
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        [
            '--logfile', os.devnull,
            '--host', host,
            '--port', str(port),
            'show', 'indices',
            '--older-than', '0',
            '--timestring', '%Y.%m.%d',
            '--time-unit', 'days'
        ],
        obj={"filters": []})
    output = sorted(result.output.splitlines(), reverse=True)
    self.assertEqual(expected, output)
def test_ignore_empty_list(self):
    self.create_indices(10)
    self.write_config(
        self.args['configfile'], testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.delete_ignore_proto.format(
            'age', 'creation_date', 'older',
            ' ', 'days', 90, ' ', ' ', int(time.time())
        )
    )
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEquals(10, len(curator.get_indices(self.client)))
    self.assertEqual(0, result.exit_code)
def test_retention_from_name_illegal_regex_with_fallback(self):
    # Test extraction of unit_count from index name when pattern contains an
    # illegal regular expression.
    # Create indices for 10 months with retention time of 2 months in index name.
    # Expected: fallback value of 3 is used and the 3 most recent indices remain in place.
    self.args['prefix'] = 'logstash_2_'
    self.args['time_unit'] = 'months'
    self.create_indices(10)
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.delete_pattern_proto.format(
            'age', 'name', 'older', '\'%Y.%m\'', 'months', 3,
            '_[0-9+_', ' ', ' ', ' '))
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEquals(3, len(curator.get_indices(self.client)))
def test_archived_release_updates(self):
    """Assert that updates for archived releases aren't considered by the script."""
    # Archive the F17 release
    rel = self.db.query(models.Release).filter_by(name='F17').one()
    rel.state = models.ReleaseState.archived
    self.db.commit()

    runner = testing.CliRunner()
    update = self.db.query(models.Update).all()[0]
    update.status = models.UpdateStatus.testing
    self.db.commit()
    with patch('bodhi.server.models.util.greenwave_api_post') as mock_greenwave:
        mock_greenwave.side_effect = Exception(
            'Greenwave should not be accessed for archived releases.')
        result = runner.invoke(check_policies.check, [])
        self.assertEqual(result.exit_code, 0)
        self.assertEqual(mock_greenwave.call_count, 0)
def test_clodius_aggregate_bedpe():
    input_file = op.join(testdir, 'sample_data',
                         'Rao_RepA_GM12878_Arrowhead.txt')
    output_file = '/tmp/bedpe.db'

    runner = clt.CliRunner()
    result = runner.invoke(cca.bedpe, [
        input_file,
        '--output-file', output_file,
        '--assembly', 'hg19',
        '--chr1-col', '1',
        '--from1-col', '2',
        '--to1-col', '3',
        '--chr2-col', '1',
        '--from2-col', '2',
        '--to2-col', '3'
    ])
    print("result:", result)
    print("result.output", result.output)
    assert result.exit_code == 0

    tiles = cdt.get_2d_tiles(output_file, 0, 0, 0, numx=1, numy=1)
    print("tiles:", tiles)
    assert '\n' not in tiles[(0, 0)][0]['fields'][2]
def test_remove_with_empty_list(self):
    alias = 'testalias'
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.alias_add_remove_empty.format(alias, 'rickroll', 'my'))
    self.create_index('my_index')
    self.create_index('dummy')
    self.client.indices.put_alias(index='dummy', name=alias)
    test = clicktest.CliRunner()
    _ = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEqual(
        {'dummy': {'aliases': {alias: {}}}},
        self.client.indices.get_alias(alias))
def test_reindex_into_alias(self):
    wait_interval = 1
    max_wait = 3
    source = 'my_source'
    dest = 'my_dest'
    expected = 3
    alias_body = {'aliases': {dest: {}}}
    self.client.indices.create(index='dummy', body=alias_body)
    self.add_docs(source)
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.reindex.format(wait_interval, max_wait, source, dest))
    test = clicktest.CliRunner()
    _ = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEqual(expected, self.client.count(index=dest)['count'])
def test_retention_from_name_no_capture_group(self):
    # Test extraction of unit_count from index name when pattern contains no capture group.
    # Create indices for 10 months with retention time of 2 months in index name.
    # Expected: all indices remain, as the pattern cannot be used to extract a retention time.
    self.args['prefix'] = 'logstash_2_'
    self.args['time_unit'] = 'months'
    self.create_indices(10)
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.delete_pattern_proto.format(
            'age', 'name', 'older', '\'%Y.%m\'', 'months', -1,
            '_[0-9]+_', ' ', ' ', ' '))
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEquals(10, len(curator.get_indices(self.client)))
def test_reindex_from_remote_no_connection(self):
    wait_interval = 1
    max_wait = 3
    bad_port = 70000
    dest = 'my_dest'
    expected = 1
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.remote_reindex.format(
            wait_interval, max_wait,
            'http://{0}:{1}'.format(rhost, bad_port),
            'REINDEX_SELECTION', dest, 'my_'))
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEqual(expected, result.exit_code)
def test_reindex_manual(self):
    wait_interval = 1
    max_wait = 3
    source = 'my_source'
    dest = 'my_dest'
    expected = 3
    self.create_index(source)
    self.add_docs(source)
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.reindex.format(wait_interval, max_wait, source, dest))
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEqual(expected, self.client.count(index=dest)['count'])
def test_delete_indices_huge_list(self):
    self.create_indices(365)
    pre = curator.get_indices(self.client)
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        [
            '--logfile', os.devnull,
            '--host', host,
            '--port', str(port),
            'delete', 'indices',
            '--all-indices',
            '--exclude', pre[0],
        ],
        obj={"filters": []})
    post = curator.get_indices(self.client)
    self.assertEquals(1, len(post))
def test_filter_by_array_of_aliases(self):
    alias = 'testalias'
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.filter_by_alias.format(' [ testalias, foo ]', False))
    self.create_index('my_index')
    self.create_index('dummy')
    self.client.indices.put_alias(index='dummy', name=alias)
    test = clicktest.CliRunner()
    _ = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    ver = curator.get_version(self.client)
    if ver >= (5, 5, 0):
        self.assertEquals(2, len(curator.get_indices(self.client)))
    else:
        self.assertEquals(1, len(curator.get_indices(self.client)))
def setUp(self):
    super(CliBeamEndToEndTest, self).setUp()

    # Change the encoding for Click since Python 3 is configured to use ASCII as
    # encoding for the environment.
    if codecs.lookup(locale.getpreferredencoding()).name == 'ascii':
        os.environ['LANG'] = 'en_US.utf-8'

    # Set home folders for engines.
    self._home = os.path.join(
        os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
        self._testMethodName)
    self._original_home_value = os.environ.get('HOME', '')
    os.environ['HOME'] = self._home
    self._original_beam_home_value = os.environ.get('BEAM_HOME', '')
    os.environ['BEAM_HOME'] = os.path.join(os.environ['HOME'], 'beam')

    self.chicago_taxi_pipeline_dir = os.path.join(
        os.path.dirname(os.path.dirname(__file__)), 'testdata')

    self.runner = click_testing.CliRunner()
def test_index_selection_all_indices_skip_non_exclude_filters(self):
    self.create_index('my_index')
    self.create_index('your_index')
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        [
            '--logfile', os.devnull,
            '--host', host,
            '--port', str(port),
            'show', 'indices',
            '--all-indices',
            '--suffix', 'index',
        ],
        obj={"filters": []})
    self.assertEqual(['my_index', 'your_index'],
                     result.output.splitlines()[:2])
def test_skopeo_copy_insecure(insecure):
    """Test falling back to HTTP when talking to a server."""
    runner = testing.CliRunner()
    content_type = MEDIA_TYPE_MANIFEST_V2
    reg1 = MockRegistry('registry1.example.com', insecure=insecure)
    digest = reg1.add_fake_image('repo1', '1.2.3', content_type)

    result = runner.invoke(skopeo_lite.copy, [
        '--src-tls-verify', 'false',
        '--dest-tls-verify', 'false',
        'docker://registry1.example.com/repo1:1.2.3',
        'docker://registry1.example.com/repo1:latest'
    ], catch_exceptions=False)
    assert result.exit_code == 0
    reg1.check_fake_image('repo1', 'latest', digest, content_type)
def test_open_closed(self):
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(self.args['actionfile'],
                      testvars.optionless_proto.format('open'))
    t1, t2 = ('dummy', 'my_index')
    self.create_index(t1)
    self.create_index(t2)
    # Decorators make this pylint exception necessary
    # pylint: disable=E1123
    self.client.indices.close(index=t2, ignore_unavailable=True)
    test = clicktest.CliRunner()
    _ = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    csi = self.client.cluster.state(
        metric='metadata')['metadata']['indices']
    self.assertNotEqual('close', csi[t2]['state'])
    self.assertNotEqual('close', csi[t1]['state'])
def test_reindex_empty_list(self):
    wait_interval = 1
    max_wait = 3
    source = 'my_source'
    dest = 'my_dest'
    expected = '.tasks'
    self.write_config(
        self.args['configfile'], testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.reindex.format(wait_interval, max_wait, source, dest))
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEqual(expected, curator.get_indices(self.client)[0])
def test_retention_from_name_months(self):
    # Test extraction of unit_count from index name.
    # Create indices for 10 months with retention time of 2 months in index name.
    # Expected: 8 oldest indices are deleted, 2 remain.
    self.args['prefix'] = 'logstash_2_'
    self.args['time_unit'] = 'months'
    self.create_indices(10)
    self.write_config(self.args['configfile'],
                      testvars.client_config.format(host, port))
    self.write_config(
        self.args['actionfile'],
        testvars.delete_pattern_proto.format(
            'age', 'name', 'older', '\'%Y.%m\'', 'months', -1,
            '_([0-9]+)_', ' ', ' ', ' '))
    test = clicktest.CliRunner()
    result = test.invoke(
        curator.cli,
        ['--config', self.args['configfile'], self.args['actionfile']],
    )
    self.assertEquals(2, len(curator.get_indices(self.client)))
def test_main():
    table = [
        Data(args=['--version'],
             mocked=[],
             stdout=f'zabbixctl 0.1.19\n'),
        Data(args=['--dry-run', 'hosts', 'list'],
             mocked=[
                 ('zabbix_controller.hosts.command.get_hosts', []),
             ],
             stdout=f'{json.dumps({"message": "There is not host."})}\n'),
        Data(args=['--dry-run', 'hosts', 'list'],
             mocked=[
                 ('zabbix_controller.hosts.command.get_hosts',
                  [{'name': 'hhh'}]),
             ],
             stdout=f'{json.dumps({"hosts": [{"name": "hhh"}]})}\n'),
        Data(args=['--dry-run', 'hosts', 'graphs', 'list'],
             mocked=[
                 ('zabbix_controller.hosts.command.get_hosts',
                  [{'host': 'hhh'}]),
                 ('zabbix_controller.hosts.graphs.get_graphs',
                  [{'name': 'hhh_graph'}]),
             ],
             stdout=f'{json.dumps({"graphs": [{"name": "hhh_graph"}]})}\n'),
    ]

    for data in table:
        with ExitStack() as stack:
            ms = [
                stack.enter_context(mock.patch(d[0], return_value=d[1]))
                for d in data.mocked
            ]
            with mock.patch('zabbix_controller.cli.zabbix_auth',
                            return_value=object):
                runner = testing.CliRunner()
                result = runner.invoke(zabbix_controller.cli.main, data.args)
                assert result.exit_code == 0
                assert result.output == data.stdout
def test_policies_satisfied(self):
    """Assert correct behavior when the policies enforced by Greenwave are satisfied."""
    runner = testing.CliRunner()

    update = self.db.query(models.Update).all()[0]
    update.status = models.UpdateStatus.testing
    self.db.commit()
    with patch('bodhi.server.scripts.check_policies.greenwave_api_post') as mock_greenwave:
        greenwave_response = {
            'policies_satisified': True,
            'summary': 'All tests passed',
            'applicable_policies': ['taskotron_release_critical_tasks'],
            'unsatisfied_requirements': []
        }
        mock_greenwave.return_value = greenwave_response
        result = runner.invoke(check_policies.check, [])
        self.assertEqual(result.exit_code, 0)

        update = self.db.query(
            models.Update).filter(models.Update.id == update.id).one()
        self.assertEqual(update.test_gating_status,
                         models.TestGatingStatus.passed)
        self.assertEqual(update.greenwave_summary_string, 'All tests passed')

        expected_query = {
            'product_version': 'fedora-17',
            'decision_context': 'bodhi_update_push_stable',
            'subject': [
                {'item': u'bodhi-2.0-1.fc17', 'type': 'koji_build'},
                {'original_spec_nvr': u'bodhi-2.0-1.fc17'},
                {'item': u'FEDORA-2017-a3bbe1a8f2', 'type': 'bodhi_update'}]
        }
        mock_greenwave.assert_called_once_with(
            config['greenwave_api_url'] + '/decision', expected_query)
def test_allow_ilm_indices_false(self):
    # ILM will not be added until 6.6
    if curator.get_version(self.client) < (6, 6, 0):
        self.assertTrue(True)
    else:
        import requests
        name = 'test'
        policy = {
            'policy': {
                'phases': {
                    'hot': {
                        'min_age': '0ms',
                        'actions': {
                            'rollover': {
                                'max_age': '2h',
                                'max_docs': 4
                            }
                        }
                    }
                }
            }
        }
        url = 'http://{0}:{1}/_ilm/policy/{2}'.format(host, port, name)
        r = requests.put(url, json=policy)
        # print(r.text)  # logging reminder
        self.create_indices(10, ilm_policy=name)
        self.write_config(self.args['configfile'],
                          testvars.client_config.format(host, port))
        self.write_config(
            self.args['actionfile'],
            testvars.ilm_delete_proto.format(
                'age', 'name', 'older', '\'%Y.%m.%d\'', 'days', 5,
                ' ', ' ', ' ', 'false'))
        test = clicktest.CliRunner()
        _ = test.invoke(
            curator.cli,
            ['--config', self.args['configfile'], self.args['actionfile']],
        )
        self.assertEquals(
            10, len(exclude_ilm_history(curator.get_indices(self.client))))
def test_policies_unsatisfied(self):
    """Assert correct behavior when the policies enforced by Greenwave are unsatisfied."""
    runner = testing.CliRunner()

    update = self.db.query(models.Update).all()[0]
    update.status = models.UpdateStatus.testing
    self.db.commit()
    with patch('bodhi.server.models.util.greenwave_api_post') as mock_greenwave:
        greenwave_response = {
            'policies_satisfied': False,
            'summary': '1 of 2 tests are failed',
            'applicable_policies': ['taskotron_release_critical_tasks'],
            'unsatisfied_requirements': [
                {u'testcase': u'dist.rpmdeplint',
                 u'item': {u'item': u'glibc-1.0-1.f26', u'type': u'koji_build'},
                 u'type': u'test-result-missing', u'scenario': None},
                {u'testcase': u'dist.rpmdeplint',
                 u'item': {u'original_spec_nvr': u'glibc-1.0-1.f26'},
                 u'type': u'test-result-missing', u'scenario': None},
                {u'testcase': u'dist.rpmdeplint',
                 u'item': {u'item': update.alias, u'type': u'bodhi_update'},
                 u'type': u'test-result-missing', u'scenario': None}]}
        mock_greenwave.return_value = greenwave_response
        result = runner.invoke(check_policies.check, [])
        self.assertEqual(result.exit_code, 0)

        update = self.db.query(models.Update).filter(
            models.Update.id == update.id).one()
        self.assertEqual(update.test_gating_status,
                         models.TestGatingStatus.failed)

        # Check for the comment
        expected_comment = u"This update test gating status has been changed to 'failed'."
        self.assertEqual(update.comments[-1].text, expected_comment)

        expected_query = {
            'product_version': 'fedora-17',
            'decision_context': 'bodhi_update_push_stable',
            'subject': [
                {'item': u'bodhi-2.0-1.fc17', 'type': 'koji_build'},
                {'original_spec_nvr': u'bodhi-2.0-1.fc17'},
                {'item': u'FEDORA-{}-a3bbe1a8f2'.format(
                    datetime.datetime.utcnow().year),
                 'type': 'bodhi_update'}],
            'verbose': True
        }
        mock_greenwave.assert_called_once_with(
            config['greenwave_api_url'] + '/decision', expected_query)
def setUp(self):
    super().setUp()

    # Change the encoding for Click since Python 3 is configured to use ASCII as
    # encoding for the environment.
    if codecs.lookup(locale.getpreferredencoding()).name == 'ascii':
        os.environ['LANG'] = 'en_US.utf-8'

    # Setup beam_home in a temp directory.
    self._home = self.tmp_dir
    self._beam_home = os.path.join(self._home, 'beam')
    self.enter_context(
        test_case_utils.override_env_var('BEAM_HOME', self._beam_home))
    self.enter_context(test_case_utils.override_env_var('HOME', self._home))

    # Testdata path.
    self._testdata_dir = os.path.join(
        os.path.dirname(os.path.dirname(__file__)), 'testdata')

    # Copy data.
    chicago_taxi_pipeline_dir = os.path.join(
        os.path.dirname(
            os.path.dirname(
                os.path.dirname(os.path.dirname(os.path.abspath(__file__))))),
        'examples', 'chicago_taxi_pipeline', '')
    data_dir = os.path.join(chicago_taxi_pipeline_dir, 'data', 'simple')
    content = fileio.listdir(data_dir)
    assert content, 'content in {} is empty'.format(data_dir)
    target_data_dir = os.path.join(self._home, 'taxi', 'data', 'simple')
    io_utils.copy_dir(data_dir, target_data_dir)
    assert fileio.isdir(target_data_dir)
    content = fileio.listdir(target_data_dir)
    assert content, 'content in {} is {}'.format(target_data_dir, content)
    io_utils.copy_file(
        os.path.join(chicago_taxi_pipeline_dir, 'taxi_utils.py'),
        os.path.join(self._home, 'taxi', 'taxi_utils.py'))

    # Initialize CLI runner.
    self.runner = click_testing.CliRunner()
def test_bad_password(mock_download_logs, plugin_config_file):
    engine = "engine"
    user = "******"
    password = "******"
    directory = os.getcwd()
    mock_download_logs.side_effect = exceptions.HttpError(
        401,
        {
            "type": "APIError",
            "details": "Invalid username or password.",
            "action": "Try with a different set of credentials.",
            "id": "exception.webservices.login.failed",
        },
    )

    runner = click_testing.CliRunner()
    result = runner.invoke(
        cli.delphix_sdk,
        [
            "download-logs",
            "-e", engine,
            "-c", plugin_config_file,
            "-u", user,
            "--password", password,
            "-d", directory,
        ],
    )

    assert result.exit_code == 1
    assert result.output == (
        "API request failed with HTTP Status 401"
        "\nDetails: Invalid username or password."
        "\nAction: Try with a different set of credentials."
        "\n")
    mock_download_logs.assert_called_once_with(engine, plugin_config_file,
                                               user, password, directory)