def test_dir_returned(_):
    """When output_dir is set, done() returns the archive dir and builds no tarball."""
    config = InsightsConfig(output_dir='test')
    remove_conf = {}  # rm_conf
    collector = DataCollector(config)
    result = collector.done(config, remove_conf)
    collector.archive.create_tar_file.assert_not_called()
    assert result == collector.archive.archive_dir
def test_omit_symbolic_name(InsightsCommand, InsightsFile, parse_file_spec):
    """
    Files/commands are omitted based on their symbolic name in uploader.json
    """
    config = InsightsConfig()
    collector = DataCollector(config)
    uploader_json = {
        'files': [{
            "file": "/etc/pam.d/vsftpd",
            "pattern": [],
            "symbolic_name": "vsftpd"
        }],
        'commands': [{
            "command": "/sbin/chkconfig --list",
            "pattern": [],
            "symbolic_name": "chkconfig"
        }],
        'pre_commands': []
    }
    remove_conf = {'files': ["vsftpd"], "commands": ["chkconfig"]}
    collector.run_collection(uploader_json, remove_conf, {}, '')
    # both specs were removed by symbolic name, so nothing gets parsed or collected
    parse_file_spec.assert_not_called()
    InsightsFile.assert_not_called()
    InsightsCommand.assert_not_called()
def test_read_pidfile_called(read_pidfile):
    """Pidfile is read when collection starts."""
    collector = DataCollector(MagicMock(display_name=None))
    collector.run_collection({'commands': [], 'files': []}, None, None, '')
    read_pidfile.assert_called_once()
def test_redact_exclude_none(_process_content_redaction):
    """
    Verify that the _process_content_redaction call is made with
    exclude == None and regex == False when the patterns key is
    defined but its value is an empty dict
    """
    conf = InsightsConfig()
    archive = InsightsArchive(conf)
    archive.create_archive_dir()
    # seed the archive with one file so redact() has something to process
    target = os.path.join(archive.archive_dir, 'test.file')
    with open(target, 'w') as fh:
        fh.write(test_file_data)
    collector = DataCollector(conf, archive)
    # patch the correct builtin for py2/py3 so the rewrite is a no-op
    open_name = 'builtins.open' if six.PY3 else '__builtin__.open'
    with patch(open_name, create=True):
        collector.redact({'patterns': {}})
    _process_content_redaction.assert_called_once_with(target, None, False)
def test_symbolic_name_bc(_, InsightsArchive, InsightsFile, InsightsCommand):
    """
    WICKED EDGE CASE: in case uploader.json is old and doesn't have
    symbolic names, don't crash
    """
    config = InsightsConfig()
    collector = DataCollector(config)
    # legacy uploader.json specs: no "symbolic_name" keys at all
    legacy_rules = {
        'files': [{
            "file": "/etc/pam.d/vsftpd",
            "pattern": []
        }],
        'commands': [{
            "command": "/sbin/chkconfig --list",
            "pattern": []
        }],
        'pre_commands': []
    }
    remove_conf = {'files': ["vsftpd"], "commands": ["chkconfig"]}
    collector.run_collection(legacy_rules, remove_conf, {}, {})
    # nothing matched the rm_conf names, so both specs are still collected
    InsightsFile.assert_called_once()
    InsightsCommand.assert_called_once()
    InsightsArchive.return_value.add_to_archive.assert_has_calls(
        [call(InsightsFile.return_value), call(InsightsCommand.return_value)],
        any_order=True)
def test_redact_exclude_no_regex(_process_content_redaction):
    """
    Verify that the _process_content_redaction call is made with
    exclude == list of strings and regex == False when a list of
    pattern strings is defined in rm_conf
    """
    conf = InsightsConfig()
    archive = InsightsArchive(conf)
    archive.create_archive_dir()
    # seed the archive with one file so redact() has something to process
    target = os.path.join(archive.archive_dir, 'test.file')
    with open(target, 'w') as fh:
        fh.write(test_file_data)
    collector = DataCollector(conf, archive)
    open_name = 'builtins.open' if six.PY3 else '__builtin__.open'
    with patch(open_name, create=True):
        collector.redact({'patterns': ['1234', 'abcd']})
    _process_content_redaction.assert_called_once_with(target, ['1234', 'abcd'], False)
def test_redact_call_process_redaction(_process_content_redaction):
    """
    Verify that redact() calls _process_content_redaction then writes the
    returned data back to the same file (opened 'wb').

    Also verifies that the "exclude" parameter is None and the "regex"
    parameter is False in the _process_content_redaction call when
    rm_conf is empty
    """
    conf = InsightsConfig()
    archive = InsightsArchive(conf)
    archive.create_archive_dir()
    # seed the archive with one file so redact() has something to rewrite
    target = os.path.join(archive.archive_dir, 'test.file')
    with open(target, 'w') as fh:
        fh.write(test_file_data)
    collector = DataCollector(conf, archive)
    open_name = 'builtins.open' if six.PY3 else '__builtin__.open'
    with patch(open_name, create=True) as mock_open:
        collector.redact({})
    _process_content_redaction.assert_called_once_with(target, None, False)
    # the redacted content is written back over the original file
    mock_open.assert_called_once_with(target, 'wb')
    mock_open.return_value.__enter__.return_value.write.assert_called_once_with(
        _process_content_redaction.return_value)
def test_archive_returned(_):
    """Without output_dir, done() creates a tar file and returns it."""
    config = InsightsConfig()
    remove_conf = {}  # rm_conf
    collector = DataCollector(config)
    result = collector.done(config, remove_conf)
    collector.archive.create_tar_file.assert_called_once()
    assert result == collector.archive.create_tar_file.return_value
def test_omit_after_parse_command(InsightsCommand, run_pre_command):
    """
    Commands are omitted based on the expanded paths of the uploader.json
    pre_command (e.g. "/sbin/ethtool -i" expanded with an interface name).
    """
    c = InsightsConfig()
    data_collector = DataCollector(c)
    collection_rules = {
        'commands': [{
            "command": "/sbin/ethtool -i",
            "pattern": [],
            "pre_command": "iface",
            "symbolic_name": "ethtool"
        }],
        'files': [],
        "pre_commands": {
            "iface": "/sbin/ip -o link | awk -F ': ' '/.*link\\/ether/ {print $2}'"
        }
    }
    rm_conf = {'commands': ["/sbin/ethtool -i eth0"]}
    # FIX: pass the blacklist_report argument ('') — every other
    # run_collection test in this file uses the 4-argument form; the
    # original 3-argument call omitted it.
    data_collector.run_collection(collection_rules, rm_conf, {}, '')
    InsightsCommand.assert_not_called()
def test_soscleaner_archive_returned(_, soscleaner):
    """
    Test that SOSCleaner is enabled when obfuscate=True, and returns
    an archive by default
    """
    config = InsightsConfig(obfuscate=True)
    remove_conf = {'keywords': ['test']}
    collector = DataCollector(config)
    result = collector.done(config, remove_conf)
    soscleaner.assert_called_once()
    soscleaner.return_value.clean_report.assert_called_once()
    assert result == soscleaner.return_value.archive_path
def test_soscleaner_dir_returned(_, soscleaner):
    """
    Test that SOSCleaner returns a directory when output_dir is specified.
    """
    config = InsightsConfig(obfuscate=True, output_dir='test')
    remove_conf = {'keywords': ['test']}
    collector = DataCollector(config)
    result = collector.done(config, remove_conf)
    soscleaner.assert_called_once()
    soscleaner.return_value.clean_report.assert_called_once()
    assert result == soscleaner.return_value.dir_path
def test_redact_called_classic(redact):
    """
    Verify that redact is always called during classic collection
    """
    conf = InsightsConfig()
    upload_conf = {'commands': [], 'files': [], 'globs': []}
    rm_conf = {'test': 'test'}
    collector = DataCollector(conf)
    collector.run_collection(upload_conf, rm_conf,
                             {'test1': 'test2'},   # branch_info
                             {'test3': 'test4'})   # blacklist_report
    redact.assert_called_once_with(rm_conf)
def test_omit_before_expanded_paths(InsightsFile, parse_file_spec):
    """
    Files are omitted based on representation of exact string matching
    in uploader.json
    """
    c = InsightsConfig()
    data_collector = DataCollector(c)
    collection_rules = {
        'files': [{
            "file": "/etc/pam.d/vsftpd",
            "pattern": [],
            "symbolic_name": "vsftpd"
        }],
        'commands': {}
    }
    rm_conf = {'files': ["/etc/pam.d/vsftpd"]}
    # FIX: pass the blacklist_report argument ('') — the sibling
    # run_collection tests use the 4-argument form; the original
    # 3-argument call omitted it.
    data_collector.run_collection(collection_rules, rm_conf, {}, '')
    # exact match is filtered out before any spec expansion happens
    parse_file_spec.assert_not_called()
    InsightsFile.assert_not_called()
def test_omit_after_expanded_paths(InsightsFile, parse_file_spec):
    """
    Files are omitted based on the expanded paths of the uploader.json path
    """
    c = InsightsConfig()
    data_collector = DataCollector(c)
    collection_rules = {
        'files': [{
            "file": "/etc/yum.repos.d/()*.*\\.repo",
            "pattern": [],
            "symbolic_name": "yum_repos_d"
        }],
        'commands': {}
    }
    rm_conf = {'files': ["/etc/yum/repos.d/test.repo"]}
    # FIX: pass the blacklist_report argument ('') — the sibling
    # run_collection tests use the 4-argument form; the original
    # 3-argument call omitted it.
    data_collector.run_collection(collection_rules, rm_conf, {}, '')
    # wildcard spec must be expanded first, then the match is filtered
    parse_file_spec.assert_called_once()
    InsightsFile.assert_not_called()
def test_cmd_blacklist():
    """
    Dangerous commands — alone, embedded in shells, or chained — are
    rejected by DataCollector._blacklist_check.
    """
    # FIX: the second value from collect_args() (pconn) was an unused
    # local; discard it explicitly.
    config, _ = collect_args()
    dc = DataCollector(config)
    blacklisted = (
        'rm',
        'reboot',
        'kill',
        'shutdown',
        'echo ""; shutdown',
        '/bin/bash -c "rm -rf /"',
        'echo ""; /bin/bash -c "rm -rf /"; reboot',
    )
    for cmd in blacklisted:
        assert dc._blacklist_check(cmd)
def test_redact_call_walk(walk):
    """
    Verify that redact() calls os.walk when an archive structure is
    present in /var/tmp/**/insights-*
    """
    conf = InsightsConfig()
    archive = InsightsArchive(conf)
    archive.create_archive_dir()
    collector = DataCollector(conf, archive)
    collector.redact({})
    walk.assert_called_once_with(archive.archive_dir)
def test_redact_call_walk_core(walk):
    """
    Verify that redact() calls os.walk when an archive structure is
    present in /var/tmp/**/insights-*

    With core collection, /data is added to the path
    """
    conf = InsightsConfig(core_collect=True)
    archive = InsightsArchive(conf)
    archive.create_archive_dir()
    collector = DataCollector(conf, archive)
    collector.redact({})
    # core collection walks the data subdirectory, not the archive root
    walk.assert_called_once_with(os.path.join(archive.archive_dir, 'data'))
def test_run_collection_logs_skipped_commands_by_symbolic_name(warn):
    """A command skipped via its symbolic name is logged with the real command string."""
    config = InsightsConfig()
    collector = DataCollector(config)
    rules = {
        'commands': [{
            'command': '/bin/date',
            'pattern': [],
            'symbolic_name': 'date'
        }],
        'files': [],
        'globs': []
    }
    collector.run_collection(rules, {'commands': ["date"]}, {}, '')
    warn.assert_called_once_with("WARNING: Skipping command %s", "/bin/date")
def test_redact_bad_location(_process_content_redaction, walk):
    """
    Verify that redact() raises a RuntimeError if the directory present
    in InsightsArchive is in a location other than /var/tmp/**/insights-*
    """
    conf = InsightsConfig()
    arch = InsightsArchive(conf)
    bad_paths = ('/', '/home', '/etc', '/var/log/', '/home/test',
                 '/var/tmp/f22D1d/ins2ghts')
    for path in bad_paths:
        arch.archive_dir = path
        collector = DataCollector(conf, arch)
        with pytest.raises(RuntimeError):
            collector.redact({})
        # the guard fires before any walking or redaction happens
        walk.assert_not_called()
        _process_content_redaction.assert_not_called()
def test_run_collection_logs_skipped_files_by_symbolic_name(warn):
    """A file skipped via its symbolic name is logged with the real file path."""
    config = InsightsConfig()
    collector = DataCollector(config)
    rules = {
        'commands': [],
        'files': [{
            'file': '/etc/machine-id',
            'pattern': [],
            'symbolic_name': 'etc_machine_id'
        }],
        'globs': []
    }
    collector.run_collection(rules, {'files': ["etc_machine_id"]}, {}, '')
    warn.assert_called_once_with("WARNING: Skipping file %s", "/etc/machine-id")
def test_run_collection_logs_skipped_globs(warn, parse_glob_spec):
    """A glob-expanded file listed in rm_conf is skipped and logged."""
    config = InsightsConfig()
    collector = DataCollector(config)
    rules = {
        'commands': [],
        'files': [],
        'globs': [{
            'glob': '/etc/yum.repos.d/*.repo',
            'symbolic_name': 'yum_repos_d',
            'pattern': []
        }]
    }
    collector.run_collection(rules, {'files': ["/etc/yum.repos.d/test.repo"]}, {}, '')
    warn.assert_called_once_with("WARNING: Skipping file %s",
                                 "/etc/yum.repos.d/test.repo")
def test_run_collection_logs_skipped_files_by_wildcard(warn, parse_file_spec):
    """A file matched by a wildcard spec and listed in rm_conf is skipped and logged."""
    config = InsightsConfig()
    collector = DataCollector(config)
    rules = {
        'commands': [],
        'files': [{
            'file': '/etc/sysconfig/network-scripts/()*ifcfg-.*',
            'pattern': [],
            'symbolic_name': 'ifcfg'
        }],
        'globs': []
    }
    collector.run_collection(
        rules, {'files': ["/etc/sysconfig/network-scripts/ifcfg-enp0s3"]}, {}, '')
    warn.assert_called_once_with(
        "WARNING: Skipping file %s", "/etc/sysconfig/network-scripts/ifcfg-enp0s3")
def test_egg_release_file_read_memory_error(archive, remove):
    """
    Verify that a memory error on the egg release file read is not fatal.
    """
    open_name = 'builtins.open' if six.PY3 else '__builtin__.open'
    with patch(open_name, create=True) as mock_open:
        # reading the egg release file blows up with MemoryError
        failing_file = mock.mock_open().return_value
        failing_file.read.side_effect = MemoryError()
        mock_open.side_effect = [failing_file]
        collector = DataCollector(InsightsConfig())
        collector._write_egg_release()
        remove.assert_called_once_with(constants.egg_release_file)
        # a blank egg release is written to the archive instead
        collector.archive.add_metadata_to_archive.assert_called_once_with(
            '', '/egg_release')
def test_egg_release_file_read_and_written_no_read(archive, remove):
    """
    Verify that when the egg release file cannot be read, a blank string
    is written to the archive
    """
    open_name = 'builtins.open' if six.PY3 else '__builtin__.open'
    # removing the egg release file also fails; that must not be fatal either
    remove.side_effect = OSError('test')
    with patch(open_name, create=True) as mock_open:
        mock_open.side_effect = IOError('test')
        collector = DataCollector(InsightsConfig())
        collector._write_egg_release()
        remove.assert_called_once_with(constants.egg_release_file)
        collector.archive.add_metadata_to_archive.assert_called_once_with(
            '', '/egg_release')
def test_egg_release_file_read_and_written(archive, remove):
    """
    Verify the egg release file is read from file and written to the archive
    """
    open_name = 'builtins.open' if six.PY3 else '__builtin__.open'
    with patch(open_name, create=True) as mock_open:
        mock_open.side_effect = [
            mock.mock_open(read_data='/testvalue').return_value
        ]
        collector = DataCollector(InsightsConfig())
        collector._write_egg_release()
        remove.assert_called_once_with(constants.egg_release_file)
        # the value read from disk is what lands in the archive
        collector.archive.add_metadata_to_archive.assert_called_once_with(
            '/testvalue', '/egg_release')
def test_egg_release_file_write_os_error(archive, remove):
    """
    Verify that an OS Error (e.g. no space left) on the egg release file
    write is not fatal - an empty file is written instead.
    """
    open_name = 'builtins.open' if six.PY3 else '__builtin__.open'
    with patch(open_name, create=True) as mock_open:
        # only one mock file is queued, so a subsequent open() call fails —
        # presumably this simulates the write failing; verify against
        # _write_egg_release if this test changes.
        mock_open.side_effect = [
            mock.mock_open(read_data='/testvalue').return_value
        ]
        collector = DataCollector(InsightsConfig())
        collector._write_egg_release()
        remove.assert_called_once_with(constants.egg_release_file)
        # first the real value is attempted, then the empty-string rescue
        attempted = call('/testvalue', '/egg_release')
        rescue = call('', '/egg_release')
        collector.archive.add_metadata_to_archive.assert_has_calls(
            [attempted, rescue])
def test_run_collection_logs_skipped_commands_by_pre_command(
        warn, parse_command_spec):
    """A command skipped via its pre_command expansion is logged with the expanded command."""
    config = InsightsConfig()
    collector = DataCollector(config)
    rules = {
        'commands': [{
            'command': '/sbin/ethtool',
            'pattern': [],
            'pre_command': 'iface',
            'symbolic_name': 'ethtool'
        }],
        'files': [],
        'globs': [],
        'pre_commands': {
            'iface': '/sbin/ip -o link | awk -F \': \' \'/.*link\\/ether/ {print $2}\''
        }
    }
    collector.run_collection(rules, {'commands': ["/sbin/ethtool enp0s3"]}, {}, '')
    warn.assert_called_once_with("WARNING: Skipping command %s",
                                 "/sbin/ethtool enp0s3")