def testFindDisksToCopy(self, mock_StartAnalysisVm, mock_get_instance,
                        mock_list_disks, mock_get_disk, mock_GetBootDisk,
                        mock_GoogleComputeBaseResource):
  """Tests the FindDisksToCopy function with different SetUp() calls."""
  dft_state = state.DFTimewolfState(config.Config)
  collector = gcloud.GoogleCloudCollector(dft_state)
  mock_StartAnalysisVm.return_value = (mock_GoogleComputeBaseResource, None)
  mock_list_disks.return_value = ['bootdisk', 'disk1']
  mock_get_disk.side_effect = ReturnFakeDisk
  mock_get_instance.return_value = FAKE_INSTANCE
  mock_GetBootDisk.return_value = FAKE_BOOT_DISK

  # Case 1: nothing specified -> only the instance's boot disk is collected.
  collector.SetUp(
      'test-analysis-project-name',
      'test-target-project-name',
      'fake_incident_id',
      'fake_zone',
      42.0,
      16,
      remote_instance_name='my-owned-instance',
  )
  # pylint: disable=protected-access
  found = collector._FindDisksToCopy()
  self.assertEqual(len(found), 1)
  self.assertEqual(found[0].name, 'bootdisk')
  mock_GetBootDisk.assert_called_once()

  # Case 2: all_disks=True -> every disk reported for the instance
  # (see mock_list_disks return value).
  collector.SetUp(
      'test-analysis-project-name',
      'test-target-project-name',
      'fake_incident_id',
      'fake_zone',
      42.0,
      16,
      remote_instance_name='my-owned-instance',
      all_disks=True)
  found = collector._FindDisksToCopy()
  self.assertEqual(len(found), 2)
  self.assertEqual(found[0].name, 'bootdisk')
  self.assertEqual(found[1].name, 'disk1')

  # Case 3: an explicit disk name list -> only that disk is returned.
  collector.SetUp(
      'test-analysis-project-name',
      'test-target-project-name',
      'fake_incident_id',
      'fake_zone',
      42.0,
      16,
      remote_instance_name='my-owned-instance',
      disk_names='disk1')
  found = collector._FindDisksToCopy()
  self.assertEqual(len(found), 1)
  self.assertEqual(found[0].name, 'disk1')
def testSetupError(self, mock_subprocess_call):
  """Tests that a failed connection check is recorded as a state error."""
  mock_subprocess_call.return_value = -1
  dft_state = state.DFTimewolfState(config.Config)
  exporter = scp_ex.SCPExporter(dft_state)
  exporter.SetUp('/path1,/path2', '/destination', 'fakeuser',
                 'fakehost', 'fakeid', True)
  self.assertEqual(
      dft_state.errors[0], ('Unable to connect to host.', True))
def testOutput(self, mock_exists):
  """Tests that the module output is consistent with the input."""
  dft_state = state.DFTimewolfState(config.Config)
  collector = filesystem.FilesystemCollector(dft_state)
  fake_paths = '/fake/path/1,/fake/path/2'
  collector.setup(paths=fake_paths)
  mock_exists.return_value = True
  collector.process()
  expected_output = [('1', '/fake/path/1'), ('2', '/fake/path/2')]
  self.assertEqual(dft_state.output, expected_output)
def testSetupManualDir(self, mock_makedirs):
  """Tests that an explicitly requested target directory is used."""
  mock_makedirs.return_value = True
  dft_state = state.DFTimewolfState(config.Config)
  fs_copy = local_filesystem.LocalFilesystemCopy(dft_state)
  fs_copy.SetUp(target_directory='/nonexistent')
  # pylint: disable=protected-access
  self.assertEqual(fs_copy._target_directory, '/nonexistent')
def setUp(self):
  """Instantiates a GRRFlowCollector with canned test parameters."""
  self.test_state = state.DFTimewolfState(config.Config)
  self.grr_flow_collector = grr_hosts.GRRFlowCollector(self.test_state)
  self.grr_flow_collector.setup(
      host='tomchop',
      flow_id='F:12345',
      reason='random reason',
      grr_server_url='http://fake/endpoint',
      grr_username='******',
      grr_password='******',
      approvers='approver1,approver2')
def setUp(self):
  """Instantiates a GRRHuntDownloader with canned test parameters."""
  self.test_state = state.DFTimewolfState(config.Config)
  self.grr_hunt_downloader = grr_hunt.GRRHuntDownloader(self.test_state)
  self.grr_hunt_downloader.SetUp(
      hunt_id='H:12345',
      reason='random reason',
      grr_server_url='http://fake/endpoint',
      grr_username='******',
      grr_password='******',
      approvers='approver1,approver2')
  self.grr_hunt_downloader.output_path = '/tmp/test'
def setUp(self):
  """Instantiates a GRRFlow module with canned test parameters."""
  self.test_state = state.DFTimewolfState(config.Config)
  self.grr_flow_module = grr_hosts.GRRFlow(self.test_state)
  self.grr_flow_module.setup(
      reason='random reason',
      grr_server_url='http://fake/endpoint',
      grr_username='******',
      grr_password='******',
      approvers='[email protected],[email protected]',
      verify=True)
def testPlasoCheckFail(self, mock_exists):
  """Tests that SetUp fails when no plaso executable is found."""
  dft_state = state.DFTimewolfState(config.Config)
  mock_exists.return_value = False
  processor = localplaso.LocalPlasoProcessor(dft_state)
  with self.assertRaises(errors.DFTimewolfError) as error:
    processor.SetUp()
  self.assertEqual(
      ('log2timeline.py was not found in your PATH. To fix: \n'
       ' apt install plaso-tools'),
      error.exception.message)
def testSetup(self, mock_mkdtemp):
  """Tests that the generated temporary directory is used by default."""
  mock_mkdtemp.return_value = '/fake/random'
  dft_state = state.DFTimewolfState(config.Config)
  fs_copy = local_filesystem.LocalFilesystemCopy(dft_state)
  fs_copy.SetUp()
  # pylint: disable=protected-access
  self.assertEqual(fs_copy._target_directory, '/fake/random')
def testPlasoCheck(self, mock_exists):
  """Tests that a plaso executable is correctly located."""
  dft_state = state.DFTimewolfState(config.Config)
  mock_exists.return_value = True
  processor = localplaso.LocalPlasoProcessor(dft_state)
  # We're testing module internals here.
  # pylint: disable=protected-access
  processor._DeterminePlasoPath()
  self.assertEqual(processor._plaso_path, '/fake/path/log2timeline.py')
def setUp(self):
  """Instantiates a GRRFileCollector with canned test parameters."""
  self.test_state = state.DFTimewolfState(config.Config)
  self.grr_file_collector = grr_hosts.GRRFileCollector(self.test_state)
  self.grr_file_collector.setup(
      hosts='tomchop,tomchop2',
      files='/etc/passwd',
      use_tsk=True,
      reason='random reason',
      grr_server_url='http://fake/endpoint',
      grr_username='******',
      grr_password='******',
      approvers='approver1,approver2')
def testProcess(self, mock_subprocess_call):
  """Tests that the scp command line is built from the SetUp arguments."""
  mock_subprocess_call.return_value = 0
  dft_state = state.DFTimewolfState(config.Config)
  exporter = scp_ex.SCPExporter(dft_state)
  exporter.SetUp('/path1,/path2', '/destination', 'fakeuser',
                 'fakehost', 'fakeid', True)
  exporter.Process()
  mock_subprocess_call.assert_called_with(
      ['scp', '/path1', '/path2', 'fakeuser@fakehost:/destination'])
def testCleanup(self, mock_call):
  """Tests that the SSH CLI is called with the expected arguments."""
  mock_call.return_value = 0
  dft_state = state.DFTimewolfState(config.Config)
  multiplexer = ssh_multiplexer.SSHMultiplexer(dft_state)
  multiplexer.SetUp('fakeuser', 'fakehost', None)
  multiplexer.CleanUp()
  mock_call.assert_called_with([
      'ssh', '-O', 'exit',
      '-o', 'ControlPath=~/.ssh/ctrl-%C',
      'fakehost',
  ])
def testLoadRecipeWithRuntimeNames(self):
  """Tests that a recipe can be loaded correctly."""
  dft_state = state.DFTimewolfState(config.Config)
  dft_state.LoadRecipe(test_recipe.named_modules_contents)
  # pylint: disable=protected-access
  pool = dft_state._module_pool
  for runtime_name in ('DummyModule1', 'DummyModule2', 'DummyModule1-2',
                       'DummyModule2-2', 'DummyPreflightModule-runtime'):
    self.assertIn(runtime_name, pool)
  self.assertEqual(len(pool), 5)
def testSetup(self):
  """Tests that no paths specified in setup will generate an error."""
  dft_state = state.DFTimewolfState(config.Config)
  collector = filesystem.FilesystemCollector(dft_state)
  with self.assertRaises(errors.DFTimewolfError) as error:
    collector.SetUp(paths=None)
  self.assertEqual(
      'No `paths` argument provided in recipe, bailing',
      error.exception.message)
  # pylint: disable=protected-access
  self.assertIsNone(collector._paths)
def setUp(self):
  """Instantiates a GRRHuntFileCollector with canned test parameters."""
  self.test_state = state.DFTimewolfState(config.Config)
  self.grr_hunt_file_collector = grr_hunt.GRRHuntFileCollector(
      self.test_state)
  self.grr_hunt_file_collector.SetUp(
      file_path_list='/etc/passwd,/etc/shadow',
      reason='random reason',
      grr_server_url='http://fake/endpoint',
      grr_username='******',
      grr_password='******',
      approvers='approver1,approver2')
def testSetupError(self, mock_subprocess_call):
  """Tests that SetUp raises a critical error when the connection fails."""
  mock_subprocess_call.return_value = -1
  dft_state = state.DFTimewolfState(config.Config)
  exporter = scp_ex.SCPExporter(dft_state)
  with self.assertRaises(errors.DFTimewolfError) as error:
    exporter.SetUp('/path1,/path2', '/destination', 'fakeuser',
                   'fakehost', 'fakeid', True)
  self.assertEqual(dft_state.errors[0], error.exception)
  self.assertEqual(error.exception.message, 'Unable to connect to host.')
  self.assertTrue(error.exception.critical)
def testStreamingCallbackNotCalled(self, mock_callback):
  """Tests that streaming callbacks fire only for registered types."""
  dft_state = state.DFTimewolfState(config.Config)
  dft_state.LoadRecipe(test_recipe.contents)
  dft_state.SetupModules()
  # DummyModule1's registered StreamingConsumer only consumes Reports, not
  # TicketAttributes.
  attributes = containers.TicketAttribute(
      type_='asd', name='asd', value='asd')
  dft_state.StreamContainer(attributes)
  mock_callback.assert_not_called()
def testSetupError(self, mock_subprocess_call):
  """Tests that recipe errors out if connection check fails."""
  mock_subprocess_call.return_value = -1
  dft_state = state.DFTimewolfState(config.Config)
  exporter = scp_ex.SCPExporter(dft_state)
  with self.assertRaises(errors.DFTimewolfError) as error:
    exporter.SetUp('/path1,/path2', '/destination', 'fakeuser',
                   'fakehost', 'fakeid', 'upload', False, True)
  self.assertEqual(dft_state.errors[0], error.exception)
  self.assertEqual(error.exception.message, 'Unable to connect to fakehost.')
  self.assertTrue(error.exception.critical)
def testDownloadFilesFromGCS(self, mock_GCSOutputWriter):
  """Tests that _DownloadFilesFromGCS maps GCS paths to local copies."""
  dft_state = state.DFTimewolfState(config.Config)
  processor = turbinia.TurbiniaProcessor(dft_state)
  writer_mock = mock.MagicMock()
  writer_mock.copy_from.return_value = '/fake/local/hashes.json'
  mock_GCSOutputWriter.return_value = writer_mock
  fake_paths = ['gs://hashes.json']
  # pylint: disable=protected-access
  local_paths = processor._DownloadFilesFromGCS('fake', fake_paths)
  self.assertEqual(local_paths, [('fake', '/fake/local/hashes.json')])
def testSetupError(self, mock_copytree, mock_isdir):
  """Tests that an error is generated if target_directory is unavailable."""
  mock_copytree.side_effect = OSError('FAKEERROR')
  mock_isdir.return_value = False
  dft_state = state.DFTimewolfState(config.Config)
  dft_state.StoreContainer(
      containers.File(name='blah', path='/sourcefile'))
  fs_copy = local_filesystem.LocalFilesystemCopy(dft_state)
  fs_copy.SetUp(target_directory="/nonexistent")
  fs_copy.Process()
  self.assertEqual(len(dft_state.errors), 1)
def testSetupError(self, mock_makedirs):
  """Tests that an error is generated if target_directory is unavailable."""
  mock_makedirs.side_effect = OSError('FAKEERROR')
  # Fix: DFTimewolfState is constructed with config.Config everywhere else
  # in these tests; the original call omitted the argument.
  test_state = state.DFTimewolfState(config.Config)
  local_filesystem_copy = local_filesystem.LocalFilesystemCopy(
      test_state)
  # NOTE(review): this test uses the lowercase `setup` API while sibling
  # tests call `SetUp` — confirm against the LocalFilesystemCopy module.
  local_filesystem_copy.setup(target_directory="/nonexistent")
  # pylint: disable=protected-access
  self.assertIsNone(local_filesystem_copy._target_directory)
  # Fix: assertEquals is a deprecated alias (removed in Python 3.12);
  # use assertEqual.
  self.assertEqual(test_state.errors[0][0],
                   'An unknown error occurred: FAKEERROR')
  self.assertEqual(test_state.errors[0][1], True)
def testSetupNamedModules(self, mock_setup1, mock_setup2):
  """Tests that module's setup functions are correctly called."""
  dft_state = state.DFTimewolfState(config.Config)
  dft_state.command_line_options = {}
  dft_state.LoadRecipe(test_recipe.named_modules_contents)
  dft_state.SetupModules()
  self.assertEqual(
      mock_setup1.call_args_list,
      [mock.call(runtime_value='1-1'), mock.call(runtime_value='1-2')])
  self.assertEqual(
      mock_setup2.call_args_list,
      [mock.call(runtime_value='2-1'), mock.call(runtime_value='2-2')])
def setUp(self):
  """Instantiates a GRRHuntArtifactCollector with canned test parameters."""
  self.test_state = state.DFTimewolfState(config.Config)
  self.grr_hunt_artifact_collector = grr_hunt.GRRHuntArtifactCollector(
      self.test_state)
  self.grr_hunt_artifact_collector.SetUp(
      artifacts='RandomArtifact',
      use_tsk=True,
      reason='random reason',
      grr_server_url='http://fake/endpoint',
      grr_username='******',
      grr_password='******',
      approvers='approver1,approver2')
def setUp(self, mock_InitHttp):
  """Sets up a GRRFlowCollector backed by a mocked GRR API client."""
  self.mock_grr_api = mock.Mock()
  mock_InitHttp.return_value = self.mock_grr_api
  self.test_state = state.DFTimewolfState(config.Config)
  self.grr_flow_collector = grr_hosts.GRRFlowCollector(self.test_state)
  self.grr_flow_collector.SetUp(
      host='tomchop',
      flow_id='F:12345',
      reason='random reason',
      grr_server_url='http://fake/endpoint',
      grr_username='******',
      grr_password='******',
      approvers='approver1,approver2')
def setUp(self, mock_InitHttp):
  """Sets up a GRRFlow module backed by a mocked GRR API client."""
  self.mock_grr_api = mock.Mock()
  mock_InitHttp.return_value = self.mock_grr_api
  self.test_state = state.DFTimewolfState(config.Config)
  self.grr_flow_module = grr_hosts.GRRFlow(self.test_state)
  self.grr_flow_module.SetUp(
      reason='random reason',
      grr_server_url='http://fake/endpoint',
      grr_username='******',
      grr_password='******',
      approvers='[email protected],[email protected]',
      verify=True)
def testProcess(self, _mock_TurbiniaClient, mock_GoogleCloudDisk,
                mock_GCSOutputWriter, mock_exists):
  """Tests that the processor processes data correctly."""
  dft_state = state.DFTimewolfState(config.Config)
  processor = turbinia.TurbiniaProcessor(dft_state)
  processor.setup(
      disk_name='disk-1',
      project='turbinia-project',
      turbinia_zone='europe-west1')
  processor.client.get_task_data.return_value = [{
      'saved_paths': [
          '/fake/data.plaso',
          '/fake/data2.plaso',
          'gs://bucket/data3.plaso'
      ]
  }]
  # Return true so the test assumes the above files exist.
  mock_exists.return_value = True
  # Our GS path will be downloaded to this fake local path.
  writer_mock = mock.MagicMock()
  writer_mock.copy_from.return_value = '/fake/local/path'
  mock_GCSOutputWriter.return_value = writer_mock
  processor.process()
  mock_GoogleCloudDisk.assert_called_with(
      disk_name='disk-1', project='turbinia-project', zone='europe-west1')
  # These are mock classes, so there is a member.
  # pylint: disable=no-member
  processor.client.send_request.assert_called()
  processor.client.get_task_data.assert_called()
  # pylint: disable=protected-access
  mock_GCSOutputWriter.assert_called_with(
      'gs://bucket/data3.plaso',
      local_output_dir=processor._output_path)
  self.assertEqual(dft_state.errors, [])
  self.assertEqual(dft_state.output, [
      ('turbinia-project-disk-1', '/fake/data.plaso'),
      ('turbinia-project-disk-1', '/fake/data2.plaso'),
      ('turbinia-project-disk-1', '/fake/local/path')
  ])
def testFindVolumesToCopy(self, mock_StartAnalysisVm, mock_GetInstanceById,
                          mock_ListVolumes, mock_GetVolumeById,
                          mock_GetBootVolume, mock_loader):
  """Tests the FindVolumesToCopy function with different SetUp() calls."""
  dft_state = state.DFTimewolfState(config.Config)
  collector = aws.AWSCollector(dft_state)
  mock_StartAnalysisVm.return_value = (FAKE_INSTANCE, None)
  mock_loader.return_value = None
  mock_ListVolumes.return_value = {
      FAKE_BOOT_VOLUME.volume_id: FAKE_BOOT_VOLUME,
      FAKE_VOLUME.volume_id: FAKE_VOLUME
  }
  mock_GetVolumeById.return_value = FAKE_VOLUME
  mock_GetInstanceById.return_value = FAKE_INSTANCE
  mock_GetBootVolume.return_value = FAKE_BOOT_VOLUME

  # Case 1: nothing specified -> only the instance's boot volume.
  collector.SetUp(
      'test-remote-profile-name',
      'test-remote-zone',
      'fake_incident_id',
      remote_instance_id='my-owned-instance-id')
  # pylint: disable=protected-access
  volumes = collector._FindVolumesToCopy()
  self.assertEqual(1, len(volumes))
  self.assertEqual('fake-boot-volume-id', volumes[0].volume_id)
  mock_GetInstanceById.assert_called_once()
  mock_GetBootVolume.assert_called_once()
  mock_ListVolumes.assert_not_called()

  # Case 2: all_volumes=True -> every volume for the instance
  # (see mock_ListVolumes return value).
  collector.SetUp(
      'test-remote-profile-name',
      'test-remote-zone',
      'fake_incident_id',
      remote_instance_id='my-owned-instance-id',
      all_volumes=True)
  volumes = collector._FindVolumesToCopy()
  self.assertEqual(2, len(volumes))
  self.assertEqual('fake-boot-volume-id', volumes[0].volume_id)
  self.assertEqual('fake-volume-id', volumes[1].volume_id)
  mock_ListVolumes.assert_called_once()

  # Case 3: a list of one volume ID -> only that volume is returned.
  collector.SetUp(
      'test-remote-profile-name',
      'test-remote-zone',
      'fake_incident_id',
      remote_instance_id='',
      volume_ids=FAKE_VOLUME.volume_id)
  volumes = collector._FindVolumesToCopy()
  self.assertEqual(1, len(volumes))
  self.assertEqual('fake-volume-id', volumes[0].volume_id)
  mock_GetVolumeById.assert_called_once()
def testOutput(self, mock_exists):
  """Tests that the module output is consistent with the input."""
  dft_state = state.DFTimewolfState(config.Config)
  collector = filesystem.FilesystemCollector(dft_state)
  fake_paths = '/fake/path/1,/fake/path/2'
  collector.SetUp(paths=fake_paths)
  mock_exists.return_value = True
  collector.Process()
  files = dft_state.GetContainers(containers.File)
  self.assertEqual(files[0].path, '/fake/path/1')
  self.assertEqual(files[0].name, '1')
  self.assertEqual(files[1].path, '/fake/path/2')
  self.assertEqual(files[1].name, '2')
def testGetClientByHostname(self, mock_SearchClients):
  """Tests that GetClientByHostname fetches the most recent GRR client."""
  mock_SearchClients.return_value = mock_grr_hosts.MOCK_CLIENT_LIST
  # Fix: DFTimewolfState is constructed with config.Config everywhere else
  # in these tests; the original call omitted the argument.
  test_state = state.DFTimewolfState(config.Config)
  base_grr_flow_collector = grr_hosts.GRRFlow(test_state)
  # NOTE(review): this test uses the older positional `setup` API and the
  # `_get_client_by_hostname` internal — confirm against the GRRFlow module.
  base_grr_flow_collector.setup(
      'random reason',
      'http://fake/endpoint',
      ('admin', 'admin'),
      '[email protected],[email protected]')
  # pylint: disable=protected-access
  client = base_grr_flow_collector._get_client_by_hostname('tomchop')
  mock_SearchClients.assert_called_with('tomchop')
  self.assertEqual(
      client.data.client_id,
      mock_grr_hosts.MOCK_CLIENT_RECENT.data.client_id)