def test_checkin_error():
    """An exception raised by the connection's checkin propagates to the caller."""
    cfg = InsightsConfig()
    cli = InsightsClient(cfg)
    # Force the connection-level checkin to blow up.
    cli.connection = Mock(**{"checkin.side_effect": Exception})
    cli.session = True
    with raises(Exception):
        cli.checkin()
    # The failing call must have been attempted exactly once, with no args.
    cli.connection.checkin.assert_called_once_with()
def test_set_ansible_host():
    """set_ansible_host reports success on HTTP 200 and failure on 404/500."""
    cfg = InsightsConfig()
    conn = InsightsConnection(cfg)

    conn.session.status_code = 200
    assert conn.set_ansible_host('GO STICK YOUR HEAD IN A PIG')

    conn.session.status_code = 404
    assert not conn.set_ansible_host('GO STICK YOUR HEAD IN A PIG')

    conn.session.status_code = 500
    conn.session.text = 'oops'
    assert not conn.set_ansible_host('GO STICK YOUR HEAD IN A PIG')
def test_registration_check_legacy_upload_on(registration_check, InsightsConnection):
    """Registration IS checked during support collection when legacy_upload=True."""
    cfg = InsightsConfig(legacy_upload=True)
    sup = InsightsSupport(cfg)
    sup.collect_support_info()
    registration_check.assert_called_once()
def test_skip_registration_check_legacy_upload_off(registration_check, InsightsConnection):
    """Registration is NOT checked during support collection when legacy_upload=False."""
    cfg = InsightsConfig(legacy_upload=False)
    sup = InsightsSupport(cfg)
    sup.collect_support_info()
    registration_check.assert_not_called()
def test_get_diagnosis():
    """get_diagnosis returns the payload on HTTP 200 and None on 404/500."""
    cfg = InsightsConfig()
    conn = InsightsConnection(cfg)

    conn.session.status_code = 200
    assert conn.get_diagnosis() == {'big_dumb_error': 'you_done_goofed'}

    conn.session.status_code = 404
    assert conn.get_diagnosis() is None

    conn.session.status_code = 500
    conn.session.text = 'oops'
    assert conn.get_diagnosis() is None
def test_get_diagnosis_with_id():
    '''
    With a remediation ID, get_diagnosis returns the ID-specific payload
    on HTTP 200 and None on 404/500.
    '''
    conf = InsightsConfig()
    c = InsightsConnection(conf)
    c.session.status_code = 200
    assert c.get_diagnosis(TEST_REMEDIATION_ID) == {'specific_dumb_error': 'stop_goofin'}
    # Pass the ID on the error statuses too; previously these calls dropped
    # the ID and exercised the no-ID code path instead of the one under test.
    c.session.status_code = 404
    assert c.get_diagnosis(TEST_REMEDIATION_ID) is None
    c.session.status_code = 500
    c.session.text = 'oops'
    assert c.get_diagnosis(TEST_REMEDIATION_ID) is None
def test_copy_to_output_file(shutil_, _copy_soscleaner_files):
    """
    Copying the collection to an output file goes through shutil.copyfile;
    with obfuscation off, no soscleaner files are copied.
    """
    cfg = InsightsConfig()
    cli = InsightsClient(cfg)
    cli.copy_to_output_file('test')
    shutil_.copyfile.assert_called_once()
    _copy_soscleaner_files.assert_not_called()
def test_register():
    """
    A successful registration creates every registered marker file and
    leaves no unregistered marker files behind.
    """
    cfg = InsightsConfig(register=True)
    cli = InsightsClient(cfg)
    cli.connection = FakeConnection()
    cli.session = True
    assert cli.register() is True
    for reg_file in constants.registered_files:
        assert os.path.isfile(reg_file) is True
    for unreg_file in constants.unregistered_files:
        assert os.path.isfile(unreg_file) is False
def test_cleanup_tmp():
    """
    cleanup_tmp always removes the working tmp dir, and removes the archive
    tmp dir only when keep_archive is False.
    """
    cfg = InsightsConfig(keep_archive=True)
    archive = InsightsArchive(cfg)

    # keep_archive=True: working dir goes away, archive dir is preserved.
    archive.cleanup_tmp()
    assert not os.path.exists(archive.tmp_dir)
    assert os.path.exists(archive.archive_tmp_dir)

    # keep_archive=False: both directories are removed.
    cfg.keep_archive = False
    archive.cleanup_tmp()
    assert not os.path.exists(archive.tmp_dir)
    assert not os.path.exists(archive.archive_tmp_dir)
def test_get_diagnosis_success(get):
    """
    Fetching a diagnosis without an ID succeeds and returns the decoded
    JSON dict when the HTTP response is valid.
    """
    cfg = InsightsConfig()
    conn = InsightsConnection(cfg)
    get.return_value = MockResponse(status_code=200, text="OK", content="{\"test\": \"test\"}")
    assert conn.get_diagnosis() == {"test": "test"}
def test_legacy_upload_systemd(_, path_exists, read_pidfile, systemd_notify, op, wtd):
    """The pidfile is read and systemd-notify is invoked for a legacy upload."""
    cfg = InsightsConfig(legacy_upload=True)
    cfg.account_number = ''  # legacy registration thing
    cli = InsightsClient(cfg)
    cli.upload('test.gar.gz', 'test.content.type')
    read_pidfile.assert_called_once()
    systemd_notify.assert_called_once_with(read_pidfile.return_value)
def collect_args(*insights_config_args, **insights_config_custom_kwargs):
    """
    Build an InsightsConfig with default logging_file/remove_file kwargs
    (overridable by the caller) and pair it with a fresh Mock.
    """
    kwargs = {
        "logging_file": "/tmp/insights.log",
        "remove_file": conf_remove_file,
    }
    kwargs.update(insights_config_custom_kwargs)
    return InsightsConfig(*insights_config_args, **kwargs), Mock()
def test_legacy_upload(op, session):
    """A legacy tar upload POSTs to the legacy endpoint with the expected params."""
    cfg = InsightsConfig()
    conn = InsightsConnection(cfg)
    conn.upload_archive('testp', 'testct', None)
    expected_url = 'https://' + conn.config.base_url + '/uploads/XXXXXXXX'
    conn.session.post.assert_called_with(
        expected_url,
        # ANY stands in for the file pointer returned by the mocked open()
        files={'file': ('testp', ANY, 'application/gzip')},
        headers={'x-rh-collection-time': 'None'})
def test_payload_upload(op, session):
    """A platform payload upload POSTs to the platform endpoint with the expected params."""
    cfg = InsightsConfig(legacy_upload=False)
    conn = InsightsConnection(cfg)
    conn.upload_archive('testp', 'testct', None)
    expected_url = 'https://' + conn.config.base_url + '/platform/upload/api/v1/upload'
    conn.session.post.assert_called_with(
        expected_url,
        # ANY stands in for the file pointer returned by the mocked open()
        files={'upload': ('testp', ANY, 'testct')},
        headers={})
def test_raw_config_parser():
    """
    get_rm_conf and json.loads (used to load uploader.json) must agree on
    a filename that contains regex-style escape sequences.
    """
    raw_filename = '/etc/yum/pluginconf.d/()*\\\\w+\\\\.conf'
    uploader_snip = json.loads(
        '{"pattern": [], "symbolic_name": "pluginconf_d", "file": "' + raw_filename + '"}')
    with open(conf_remove_file, 'w') as rm_conf:
        rm_conf.write('[remove]\nfiles=' + raw_filename)
    coll = InsightsUploadConf(InsightsConfig(remove_file=conf_remove_file))
    items = coll.get_rm_conf()
    assert items['files'][0] == uploader_snip['file']
def test_delete_archive_internal():
    """
    _delete_archive_internal keeps both tmp dirs while keep_archive is True
    and removes both once it is False.
    """
    cfg = InsightsConfig(keep_archive=True)
    archive = InsightsArchive()

    # keep_archive=True: nothing is deleted.
    _delete_archive_internal(cfg, archive)
    assert os.path.exists(archive.tmp_dir)
    assert os.path.exists(archive.archive_tmp_dir)

    # keep_archive=False: both directories are removed.
    cfg.keep_archive = False
    _delete_archive_internal(cfg, archive)
    assert not os.path.exists(archive.tmp_dir)
    assert not os.path.exists(archive.archive_tmp_dir)
def test_omit_after_parse_command(InsightsCommand, run_pre_command):
    """
    A command is omitted when rm_conf matches the expanded form of the
    uploader.json pre_command entry.
    """
    cfg = InsightsConfig()
    collector = DataCollector(cfg)
    collection_rules = {
        'commands': [
            {"command": "/sbin/ethtool -i",
             "pattern": [],
             "pre_command": "iface",
             "symbolic_name": "ethtool"}
        ],
        'files': [],
        "pre_commands": {
            "iface": "/sbin/ip -o link | awk -F ': ' '/.*link\\/ether/ {print $2}'"
        },
    }
    rm_conf = {'commands': ["/sbin/ethtool -i eth0"]}
    collector.run_collection(collection_rules, rm_conf, {})
    InsightsCommand.assert_not_called()
def test_soscleaner_archive_returned(_, soscleaner):
    """
    With obfuscate=True, SOSCleaner runs and done() returns the cleaned
    archive path by default.
    """
    cfg = InsightsConfig(obfuscate=True)
    report = {'keywords': ['test']}
    collector = DataCollector(cfg)
    result = collector.done(cfg, report)
    soscleaner.assert_called_once()
    soscleaner.return_value.clean_report.assert_called_once()
    assert result == soscleaner.return_value.archive_path
def test_copy_to_output_file_obfuscate_on(shutil_, _copy_soscleaner_files):
    """
    With obfuscation on, copying to the output file uses shutil AND copies
    the soscleaner files as well.
    """
    # obfuscate on, so soscleaner files must be copied too
    cfg = InsightsConfig(obfuscate=True)
    cli = InsightsClient(cfg)
    cli.copy_to_output_file('test')
    shutil_.copyfile.assert_called_once()
    _copy_soscleaner_files.assert_called_once()
def test_inventory_url_from_phase(try_satellite6_configuration, try_satellite5_configuration, get_proxies, init_session, config_kwargs):
    """The inventory URL is composed correctly from the default configuration."""
    cfg = InsightsConfig(**config_kwargs)
    cfg.load_all()  # Disables legacy upload.
    try_auto_configuration(cfg)  # Updates base_url if legacy upload is disabled.

    conn = InsightsConnection(cfg)
    expected = "https://cert-api.access.redhat.com/r/insights/platform/inventory/v1"
    assert conn.inventory_url == expected
def test_redact_called_classic(redact):
    """redact is always invoked (with rm_conf) during classic collection."""
    cfg = InsightsConfig()
    upload_conf = {'commands': [], 'files': [], 'globs': []}
    rm_conf = {'test': 'test'}
    branch_info = {'test1': 'test2'}
    blacklist_report = {'test3': 'test4'}
    collector = DataCollector(cfg)
    collector.run_collection(upload_conf, rm_conf, branch_info, blacklist_report)
    redact.assert_called_once_with(rm_conf)
def test_soscleaner_dir_returned(_, soscleaner):
    """SOSCleaner returns a directory path when output_dir is specified."""
    cfg = InsightsConfig(obfuscate=True, output_dir='test')
    report = {'keywords': ['test']}
    collector = DataCollector(cfg)
    result = collector.done(cfg, report)
    soscleaner.assert_called_once()
    soscleaner.return_value.clean_report.assert_called_once()
    assert result == soscleaner.return_value.dir_path
def test_omit_before_expanded_paths(InsightsFile, parse_file_spec):
    """
    A file is omitted when rm_conf matches the uploader.json path exactly,
    before any path expansion takes place.
    """
    cfg = InsightsConfig()
    collector = DataCollector(cfg)
    collection_rules = {
        'files': [
            {"file": "/etc/pam.d/vsftpd", "pattern": [], "symbolic_name": "vsftpd"}
        ],
        'commands': {},
    }
    rm_conf = {'files': ["/etc/pam.d/vsftpd"]}
    collector.run_collection(collection_rules, rm_conf, {})
    # Exact match means expansion is never even attempted.
    parse_file_spec.assert_not_called()
    InsightsFile.assert_not_called()
def test_omit_after_expanded_paths(InsightsFile, parse_file_spec):
    """
    A file is omitted when rm_conf matches one of the expanded paths of a
    glob-style uploader.json entry.
    """
    cfg = InsightsConfig()
    collector = DataCollector(cfg)
    collection_rules = {
        'files': [
            {"file": "/etc/yum.repos.d/()*.*\\.repo",
             "pattern": [],
             "symbolic_name": "yum_repos_d"}
        ],
        'commands': {},
    }
    rm_conf = {'files': ["/etc/yum/repos.d/test.repo"]}
    collector.run_collection(collection_rules, rm_conf, {})
    # Expansion happens, but the expanded match is still filtered out.
    parse_file_spec.assert_called_once()
    InsightsFile.assert_not_called()
def test_redact_called_core(redact):
    """redact is always invoked (with rm_conf) during core collection."""
    cfg = InsightsConfig(core_collect=True)
    upload_conf = None
    rm_conf = {'test': 'test'}
    branch_info = {'test1': 'test2'}
    blacklist_report = {'test3': 'test4'}
    collector = CoreCollector(cfg)
    collector.run_collection(upload_conf, rm_conf, branch_info, blacklist_report)
    redact.assert_called_once_with(rm_conf)
def test_dont_archive_when_missing_dep(write_data_to_file):
    """Command output flagging missing dependencies is not written to the archive."""
    archive = InsightsArchive(InsightsConfig())
    command = MagicMock(spec=InsightsCommand)
    command.get_output.return_value = "Missing Dependencies:"
    command.archive_path = '/path/to/command'
    archive.add_to_archive(command)
    write_data_to_file.assert_not_called()
def test_command_line_parse_twice():
    '''
    Calling _load_command_line() twice must not raise. A previous bug made
    the second call exit via SystemExit because argparse was not loaded
    with the correct options.
    '''
    cfg = InsightsConfig()
    for _ in range(2):
        cfg._load_command_line()
        assert cfg.status
def test_version():
    """client.version() reports the package VERSION-RELEASE string."""
    # Hack: stash argv so the client doesn't try to parse py.test's args.
    saved_argv = sys.argv
    sys.argv = []
    try:
        cfg = InsightsConfig(logging_file='/tmp/insights.log')
        cli = InsightsClient(cfg)
        assert cli.version() == "%s-%s" % (package_info["VERSION"], package_info["RELEASE"])
    finally:
        sys.argv = saved_argv
def test_upload_412_no_retry(upload_archive, handle_fail_rcs):
    """An HTTP 412 response aborts the upload without consuming retries."""
    # Hack: stash argv so the client doesn't try to parse py.test's args.
    saved_argv = sys.argv
    sys.argv = []
    try:
        cfg = InsightsConfig(logging_file='/tmp/insights.log', retries=3)
        cli = InsightsClient(cfg)
        cli.upload('/tmp/insights.tar.gz')
        # Despite retries=3, only one attempt is made.
        upload_archive.assert_called_once()
    finally:
        sys.argv = saved_argv
def test_reg_check_unregistered():
    """
    An unregistered machine reports an unregistered status, register()
    fails, and only the unregistered marker files exist.
    """
    # Start from an unregistered connection.
    cfg = InsightsConfig()
    cli = InsightsClient(cfg)
    cli.connection = FakeConnection(registered='unregistered')
    cli.session = True
    # NOTE: 'get_registation_status' spelling matches the client API name.
    assert cli.get_registation_status()['status'] is False
    # Integration: register() must also fail on an unregistered connection.
    assert cli.register() is False
    for reg_file in constants.registered_files:
        assert os.path.isfile(reg_file) is False
    for unreg_file in constants.unregistered_files:
        assert os.path.isfile(unreg_file) is True