def test_implied_non_legacy_upload(config):
    '''
    Some arguments should always imply legacy_upload=False.
    '''
    c = InsightsConfig(**config)
    c.load_all()
    assert c.legacy_upload is False


def test_output_dir_file_cant_use_both():
    '''
    Cannot supply both --output-file and --output-dir
    '''
    with pytest.raises(ValueError):
        c = InsightsConfig(output_dir='test', output_file='test')
        c.load_all()


def test_diagnosis_implies_legacy():
    '''
    --diagnosis should always imply legacy_upload=False
    '''
    c = InsightsConfig(diagnosis=True)
    c.load_all()
    assert c.legacy_upload is False


def test_config_load_legacy_ignored(open_):
    open_.return_value = TextIOWrapper(
        BytesIO(b'[insights-client]\nusername=CASVAL\n'
                b'[redhat-access-insights]\nusername=SAYLA'))
    c = InsightsConfig()
    c._load_config_file()
    assert c.username == 'CASVAL'


def test_config_load_section_error(open_):
    # defaults on incorrect conf
    open_.return_value = TextIOWrapper(
        BytesIO(b'aFUHAEFJhFhlAFJKhnfjeaf\nusername=RAMBA'))
    c = InsightsConfig()
    c._load_config_file()
    assert c.username == DEFAULT_OPTS['username']['default']


def test_config_load_value_error(open_):
    # defaults on incorrect conf
    open_.return_value = TextIOWrapper(
        BytesIO(b'[insights-client]\nhttp_timeout=ZGOK'))
    c = InsightsConfig()
    c._load_config_file()
    assert c.http_timeout == DEFAULT_OPTS['http_timeout']['default']


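# The three config-file tests above receive an ``open_`` mock as an argument;
# in the original module it presumably comes from a ``@patch`` decorator (or an
# equivalent fixture) that intercepts the open() call made by
# InsightsConfig._load_config_file(). A minimal sketch of that wiring follows,
# assuming the built-in open is the right patch target; the real module may
# patch a different path.
@patch('builtins.open' if six.PY3 else '__builtin__.open')
def test_config_load_sketch(open_):
    open_.return_value = TextIOWrapper(
        BytesIO(b'[insights-client]\nusername=EXAMPLE'))
    c = InsightsConfig()
    c._load_config_file()
    assert c.username == 'EXAMPLE'

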
def test_to_json_quiet_implies_diagnosis():
    '''
    --to-json with --quiet should imply --diagnosis,
    which in turn implies legacy_upload=False
    '''
    c = InsightsConfig(to_json=True, quiet=True)
    c.load_all()
    assert c.diagnosis is True
    assert c.legacy_upload is False


def test_cleanup_tmp():
    '''
    cleanup_tmp() always removes tmp_dir, but removes archive_tmp_dir
    only when keep_archive is False
    '''
    config = InsightsConfig(keep_archive=True)
    arch = InsightsArchive(config)
    arch.cleanup_tmp()
    assert not os.path.exists(arch.tmp_dir)
    assert os.path.exists(arch.archive_tmp_dir)

    config.keep_archive = False
    arch.cleanup_tmp()
    assert not os.path.exists(arch.tmp_dir)
    assert not os.path.exists(arch.archive_tmp_dir)


def test_legacy_upload_systemd(_, path_exists, read_pidfile, systemd_notify, op, wtd):
    '''
    Pidfile is read and systemd-notify is called for legacy upload
    '''
    config = InsightsConfig(legacy_upload=True)
    config.account_number = ''  # legacy registration thing
    client = InsightsClient(config)
    client.upload('test.gar.gz', 'test.content.type')
    read_pidfile.assert_called_once()
    systemd_notify.assert_called_once_with(read_pidfile.return_value)


def test_delete_archive_internal():
    config = InsightsConfig(keep_archive=True)
    arch = InsightsArchive()
    _delete_archive_internal(config, arch)
    assert os.path.exists(arch.tmp_dir)
    assert os.path.exists(arch.archive_tmp_dir)

    config.keep_archive = False
    _delete_archive_internal(config, arch)
    assert not os.path.exists(arch.tmp_dir)
    assert not os.path.exists(arch.archive_tmp_dir)


def test_validate_remove_file():
    tf = '/tmp/remove.cfg'
    with open(tf, 'wb') as f:
        f.write(remove_file_content)

    # a nonexistent remove file fails validation
    assert util.validate_remove_file(
        InsightsConfig(remove_file='/tmp/boop')) is False

    # a remove file with permissions other than 600 fails validation
    os.chmod(tf, 0o644)
    assert util.validate_remove_file(InsightsConfig(remove_file=tf)) is False

    # a remove file with mode 600 passes validation
    os.chmod(tf, 0o600)
    assert util.validate_remove_file(
        InsightsConfig(remove_file=tf)) is not False

    os.remove(tf)


def test_offline_disables_options():
    '''
    Can't use certain options in conjunction with --offline
    '''
    with pytest.raises(ValueError):
        InsightsConfig(to_json=True, offline=True)

    with pytest.raises(ValueError):
        InsightsConfig(test_connection=True, offline=True)

    with pytest.raises(ValueError):
        InsightsConfig(status=True, offline=True)


def test_inventory_url_from_phase(try_satellite6_configuration,
                                  try_satellite5_configuration,
                                  get_proxies,
                                  init_session,
                                  config_kwargs):
    """
    Inventory URL is composed correctly from the default configuration.
    """
    config = InsightsConfig(**config_kwargs)
    config.load_all()  # Disables legacy upload.
    try_auto_configuration(config)  # Updates base_url if legacy upload is disabled.

    connection = InsightsConnection(config)
    assert connection.inventory_url == (
        "https://cert-api.access.redhat.com/r/insights/platform/inventory/v1")


def test_dir_returned(_):
    c = InsightsConfig(output_dir='test')
    r = {}  # rm_conf
    d = DataCollector(c)
    ret = d.done(c, r)
    d.archive.create_tar_file.assert_not_called()
    assert ret == d.archive.archive_dir


def test_redact_exclude_none(_process_content_redaction):
    '''
    Verify that the _process_content_redaction call is made with
    exclude == None and regex == False when the "patterns" key is
    defined but its value is an empty dict
    '''
    conf = InsightsConfig()
    arch = InsightsArchive(conf)
    arch.create_archive_dir()

    # put something in the archive to redact
    test_file = os.path.join(arch.archive_dir, 'test.file')
    with open(test_file, 'w') as t:
        t.write(test_file_data)

    dc = DataCollector(conf, arch)
    rm_conf = {'patterns': {}}

    if six.PY3:
        open_name = 'builtins.open'
    else:
        open_name = '__builtin__.open'

    with patch(open_name, create=True):
        dc.redact(rm_conf)
        _process_content_redaction.assert_called_once_with(test_file, None, False)


def test_ansible_host_no_reg_forces_legacy_false():
    '''
    When not specifying --register, using --ansible-host on the CLI
    forces legacy_upload to False
    '''
    conf = InsightsConfig(register=False, ansible_host="test", legacy_upload=True)
    conf._cli_opts = ["ansible_host"]
    conf._imply_options()
    assert not conf.legacy_upload

    conf = InsightsConfig(register=False, ansible_host="test", legacy_upload=False)
    conf._cli_opts = ["ansible_host"]
    conf._imply_options()
    assert not conf.legacy_upload


def insights_client():
    '''
    Build an InsightsClient with a mocked session and connection
    '''
    config = InsightsConfig(http_timeout=123)
    client = InsightsClient(config)
    client.session = Mock(
        **{"get.return_value.headers.items.return_value": []})
    client.connection = Mock(base_url="http://www.example.com/")
    return client


def test_ansible_host_reg_legacy_no_change():
    '''
    When specifying --register, using --ansible-host on the CLI
    does not affect legacy_upload
    '''
    conf = InsightsConfig(register=True, ansible_host="test", legacy_upload=True)
    conf._cli_opts = ["ansible_host"]
    conf._imply_options()
    assert conf.legacy_upload

    conf = InsightsConfig(register=True, ansible_host="test", legacy_upload=False)
    conf._cli_opts = ["ansible_host"]
    conf._imply_options()
    assert not conf.legacy_upload


def test_omit_symbolic_name(InsightsCommand, InsightsFile, parse_file_spec):
    """
    Files/commands are omitted based on their symbolic name in uploader.json
    """
    c = InsightsConfig()
    data_collector = DataCollector(c)

    collection_rules = {
        'files': [{
            "file": "/etc/pam.d/vsftpd",
            "pattern": [],
            "symbolic_name": "vsftpd"
        }],
        'commands': [{
            "command": "/sbin/chkconfig --list",
            "pattern": [],
            "symbolic_name": "chkconfig"
        }],
        'pre_commands': []
    }
    rm_conf = {'files': ["vsftpd"], "commands": ["chkconfig"]}
    data_collector.run_collection(collection_rules, rm_conf, {}, '')
    parse_file_spec.assert_not_called()
    InsightsFile.assert_not_called()
    InsightsCommand.assert_not_called()


def test_symbolic_name_bc(_, InsightsArchive, InsightsFile, InsightsCommand):
    """
    WICKED EDGE CASE: in case uploader.json is old and doesn't have
    symbolic names, don't crash
    """
    c = InsightsConfig()
    data_collector = DataCollector(c)

    collection_rules = {
        'files': [{
            "file": "/etc/pam.d/vsftpd",
            "pattern": []
        }],
        'commands': [{
            "command": "/sbin/chkconfig --list",
            "pattern": []
        }],
        'pre_commands': []
    }
    rm_conf = {'files': ["vsftpd"], "commands": ["chkconfig"]}
    data_collector.run_collection(collection_rules, rm_conf, {}, {})
    InsightsFile.assert_called_once()
    InsightsCommand.assert_called_once()
    InsightsArchive.return_value.add_to_archive.assert_has_calls(
        [call(InsightsFile.return_value), call(InsightsCommand.return_value)],
        any_order=True)


def test_archive_returned(_):
    c = InsightsConfig()
    r = {}  # rm_conf
    d = DataCollector(c)
    ret = d.done(c, r)
    d.archive.create_tar_file.assert_called_once()
    assert ret == d.archive.create_tar_file.return_value


def test_config_conflicts():
    '''
    Ensure --payload requires --content-type
    '''
    with raises(ValueError) as v:
        InsightsConfig(payload='aaa')
    assert str(v.value) == '--payload requires --content-type'


def test_redact_call_process_redaction(_process_content_redaction):
    '''
    Verify that redact() calls _process_content_redaction,
    then writes the returned data back to the same file

    Also verifies that the "exclude" parameter is None and the
    "regex" parameter is False in the _process_content_redaction
    call when rm_conf is empty
    '''
    conf = InsightsConfig()
    arch = InsightsArchive(conf)
    arch.create_archive_dir()

    # put something in the archive to redact
    test_file = os.path.join(arch.archive_dir, 'test.file')
    with open(test_file, 'w') as t:
        t.write(test_file_data)

    dc = DataCollector(conf, arch)
    rm_conf = {}

    if six.PY3:
        open_name = 'builtins.open'
    else:
        open_name = '__builtin__.open'

    with patch(open_name, create=True) as mock_open:
        dc.redact(rm_conf)
        _process_content_redaction.assert_called_once_with(test_file, None, False)
        mock_open.assert_called_once_with(test_file, 'wb')
        mock_open.return_value.__enter__.return_value.write.assert_called_once_with(
            _process_content_redaction.return_value)


def test_payload_upload(op, post, c, _legacy_upload_archive):
    '''
    Ensure a payload upload occurs with the right URL and params
    '''
    conf = InsightsConfig(legacy_upload=False)
    c = InsightsConnection(conf)
    c.upload_archive('testp', 'testct', None)
    post.assert_called_with(
        c.base_url + '/ingress/v1/upload',
        files={
            # ANY = return call from mocked open(), acts as file pointer here
            'file': ('testp', ANY, 'testct'),
            'metadata': json.dumps({
                'test': 'facts',
                'branch_info': {
                    'remote_branch': -1,
                    'remote_leaf': -1
                },
                'satellite_id': -1,
            })
        },
        headers={})
    _legacy_upload_archive.assert_not_called()


def test_redact_exclude_no_regex(_process_content_redaction):
    '''
    Verify that the _process_content_redaction call is made with
    exclude == list of strings and regex == False when a list of
    pattern strings is defined in rm_conf
    '''
    conf = InsightsConfig()
    arch = InsightsArchive(conf)
    arch.create_archive_dir()

    # put something in the archive to redact
    test_file = os.path.join(arch.archive_dir, 'test.file')
    with open(test_file, 'w') as t:
        t.write(test_file_data)

    dc = DataCollector(conf, arch)
    rm_conf = {'patterns': ['1234', 'abcd']}

    if six.PY3:
        open_name = 'builtins.open'
    else:
        open_name = '__builtin__.open'

    with patch(open_name, create=True):
        dc.redact(rm_conf)
        _process_content_redaction.assert_called_once_with(
            test_file, ['1234', 'abcd'], False)


def test_platform_upload(_legacy_upload, path_exists):
    '''
    _legacy_upload not called when platform upload
    '''
    config = InsightsConfig(legacy_upload=False)
    client = InsightsClient(config)
    client.upload('test.gar.gz', 'test.content.type')
    _legacy_upload.assert_not_called()


def test_reg_check_unregistered_unreachable():
    # unregister the machine first
    config = InsightsConfig(unregister=True)
    client = InsightsClient(config)
    client.connection = FakeConnection(registered=True)
    client.session = True
    assert client.unregister() is True

    # reset config and try to check registration
    config.unregister = False
    client.connection = FakeConnection(registered=False)
    assert client.get_registration_status()['unreachable'] is True
    assert client.register() is None
    for r in constants.registered_files:
        assert os.path.isfile(r) is False
    for u in constants.unregistered_files:
        assert os.path.isfile(u) is True


def test_legacy_unregister(handle_unregistration):
    '''
    handle_unregistration called when legacy upload
    '''
    config = InsightsConfig(legacy_upload=True)
    client = InsightsClient(config)
    client.unregister()
    handle_unregistration.assert_called_once()


def test_platform_unregister_skip(handle_unregistration):
    '''
    handle_unregistration not called when platform upload
    '''
    config = InsightsConfig(legacy_upload=False)
    client = InsightsClient(config)
    assert client.unregister()  # short circuits to True
    handle_unregistration.assert_not_called()


def test_legacy_upload(_legacy_upload, path_exists):
    '''
    _legacy_upload called when legacy upload
    '''
    config = InsightsConfig(legacy_upload=True)
    client = InsightsClient(config)
    client.upload('test.gar.gz', 'test.content.type')
    _legacy_upload.assert_called_once()


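# The tests above reference names that are defined elsewhere in the original
# test modules: fixtures such as ``config`` and ``config_kwargs``, mocked
# collaborators injected as function arguments by ``@patch`` decorators, the
# ``FakeConnection`` helper, and module-level data like ``remove_file_content``
# and ``test_file_data``. A rough sketch of the shared imports they rely on is
# below; the module paths follow the usual insights-client layout and are
# assumptions that may differ between versions.
import json
import os
from io import BytesIO, TextIOWrapper

import pytest
import six
from mock import ANY, Mock, call, patch
from pytest import raises

from insights.client import InsightsClient
from insights.client.archive import InsightsArchive
from insights.client.auto_config import try_auto_configuration
from insights.client.config import DEFAULT_OPTS, InsightsConfig
from insights.client.connection import InsightsConnection
from insights.client.constants import InsightsConstants as constants
from insights.client.data_collector import DataCollector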