def testUnequalCloudDiskEmbeddedArgs(self, mock_copyDisk, _): """Test unequal number of args for cloud embedded disk evidence type.""" # Fail when zones don't match config.SHARED_FILESYSTEM = False config.TASK_MANAGER = 'PSQ' self.assertRaises(TurbiniaException, turbiniactl.process_args, [ 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', '--zone', 'zone1,zone2', '--project', 'proj1,proj2,proj3', '--embedded_path', 'path1,path2,path3' ]) # Fail when embedded path don't match self.assertRaises(TurbiniaException, turbiniactl.process_args, [ 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--embedded_path', 'path1,path2' ]) # Fail when name don't match self.assertRaises(TurbiniaException, turbiniactl.process_args, [ 'googleclouddiskembedded', '--disk_name', 'disk1,disk2', '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--name', 'name1,name2', '--embedded_path', 'path1,path2,path3' ]) # Fail when mount source don't match self.assertRaises(TurbiniaException, turbiniactl.process_args, [ 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--source', 'source1,source2', '--embedded_path', 'path1,path2,path3' ]) # Fail when project don't match self.assertRaises(TurbiniaException, turbiniactl.process_args, [ 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2', '--source', 'source1,source2', '--embedded_path', 'path1,path2,path3' ]) # Pass when all the args match turbiniactl.process_evidence = mock.MagicMock(return_value=None) mock_copyDisk.return_value = compute.GoogleComputeDisk( 'fake-proj', 'fake-zone', 'fake-disk-copy') turbiniactl.process_args([ 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--embedded_path', 'path1,path2,path3' ]) 
self.assertTrue(turbiniactl.process_evidence.called) # Raise error when running locally config.SHARED_FILESYSTEM = True with self.assertRaisesRegex(TurbiniaException, 'Cloud only'): turbiniactl.process_args([ 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--embedded_path', 'path1,path2,path3' ])
# NOTE(review): this fragment combines imports, module-level test fixtures,
# and a test case — presumably from a dftimewolf gcloud collector test
# module. `unittest` is used below but not imported here; its import is
# presumably above this view — confirm against the full file.
from libcloudforensics.providers.gcp.internal import project as gcp_project
from libcloudforensics.providers.gcp.internal import compute
from dftimewolf import config
from dftimewolf.lib import state
from dftimewolf.lib.containers import containers
from dftimewolf.lib.collectors import gcloud

# Fake GCP fixtures shared by the tests below. These are plain object
# constructions; nothing here is shown to perform API calls.
FAKE_PROJECT = gcp_project.GoogleCloudProject(
    'test-target-project-name', 'fake_zone')
FAKE_ANALYSIS_VM = compute.GoogleComputeInstance(
    FAKE_PROJECT.project_id, 'fake_zone', 'fake-analysis-vm')
FAKE_INSTANCE = compute.GoogleComputeInstance(
    FAKE_PROJECT.project_id, 'fake_zone', 'fake-instance')
FAKE_DISK = compute.GoogleComputeDisk(
    FAKE_PROJECT.project_id, 'fake_zone', 'disk1')
FAKE_BOOT_DISK = compute.GoogleComputeDisk(
    FAKE_PROJECT.project_id, 'fake_zone', 'bootdisk')
FAKE_SNAPSHOT = compute.GoogleComputeSnapshot(FAKE_DISK, 'fake_snapshot')
FAKE_DISK_COPY = compute.GoogleComputeDisk(
    FAKE_PROJECT.project_id, 'fake_zone', 'disk1-copy')


class GoogleCloudCollectorTest(unittest.TestCase):
  """Tests for the GCloud collector."""

  def testInitialization(self):
    """Tests that the collector can be initialized."""
    test_state = state.DFTimewolfState(config.Config)
    gcloud_collector = gcloud.GoogleCloudCollector(test_state)
    self.assertIsNotNone(gcloud_collector)
# NOTE(review): this chunk is truncated — the leading "'fake-zone')" closes a
# call whose opening (presumably the FAKE_ANALYSIS_PROJECT construction,
# given the references below) lies outside this view. Left byte-identical;
# confirm against the full file.
    'fake-zone')
FAKE_ANALYSIS_VM = compute.GoogleComputeInstance(
    FAKE_ANALYSIS_PROJECT.project_id, 'fake-zone', 'fake-analysis-vm')
# NOTE(review): the image zone is the empty string here — confirm intended.
FAKE_IMAGE = compute.GoogleComputeImage(
    FAKE_ANALYSIS_PROJECT.project_id, '', 'fake-image')
# Source project with the instance that needs forensicating.
FAKE_SOURCE_PROJECT = gcp_project.GoogleCloudProject(
    'fake-source-project', 'fake-zone')
FAKE_INSTANCE = compute.GoogleComputeInstance(
    FAKE_SOURCE_PROJECT.project_id, 'fake-instance')
FAKE_DISK = compute.GoogleComputeDisk(
    FAKE_SOURCE_PROJECT.project_id, 'fake-zone', 'fake-disk')
FAKE_BOOT_DISK = compute.GoogleComputeDisk(
    FAKE_SOURCE_PROJECT.project_id, 'fake-zone', 'fake-boot-disk')
FAKE_SNAPSHOT = compute.GoogleComputeSnapshot(FAKE_DISK, 'fake-snapshot')
# Snapshot whose name literal is over 63 characters, per its own text.
FAKE_SNAPSHOT_LONG_NAME = compute.GoogleComputeSnapshot(
    FAKE_DISK,
    'this-is-a-kind-of-long-fake-snapshot-name-and-is-definitely-over-63-chars'
)
FAKE_DISK_COPY = compute.GoogleComputeDisk(
    FAKE_SOURCE_PROJECT.project_id, 'fake-zone', 'fake-disk-copy')
# NOTE(review): gcp_log is used without a visible import — presumably
# imported above this view.
FAKE_LOGS = gcp_log.GoogleCloudLog('fake-target-project')
def testUnequalCloudDiskArgs(self, mockParser, mock_copyDisk, _):
    """Test unequal number of args for cloud disk evidence type.

    Drives process_args through a mocked argument parser, mutating the
    parsed Namespace between calls so that each mismatched list length
    (zone, project, name, source) raises TurbiniaException, then verifies a
    fully matched argument set reaches evidence processing.

    Args:
      mockParser: mock for the argument parser (patched by a decorator
        outside this view — confirm against the full file).
      mock_copyDisk: mock for the disk-copy call.
      _: unused mock injected by an outer patch decorator.
    """
    # Force cloud (non-local) processing with the PSQ task manager.
    config.SHARED_FILESYSTEM = False
    config.TASK_MANAGER = 'PSQ'
    # Fully populated parse result; individual list attributes are mutated
    # below to produce each mismatch. Starts with 3 disks but only 2 zones.
    mockArgs = argparse.Namespace(
        all_fields=False, command='googleclouddisk', config_file=None,
        copy_only=False, debug=False, debug_tasks=False, decryption_keys=[],
        disk_name=['disk1', 'disk2', 'disk3'], dump_json=False,
        embedded_path=['path1', 'path2', 'path3'],
        filter_patterns_file=None, force_evidence=False, jobs_allowlist=[],
        jobs_denylist=[], log_file=None, mount_partition=None, name=None,
        output_dir=None, poll_interval=60,
        project=['proj1', 'proj2', 'proj3'], quiet=False, recipe=None,
        recipe_path=None, request_id=None, server=False,
        skip_recipe_validation=False, source=[None], verbose=True,
        wait=False, yara_rules_file=None, zone=['zone1', 'zone2'],
        group_name=None, reason=None, all_args=None)
    mockParser.return_value = mockArgs
    # Fail when zones don't match (3 disks vs. 2 zones).
    self.assertRaises(
        TurbiniaException, turbiniactl.process_args, [
            'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone',
            'zone1,zone2', '--project', 'proj1,proj2,proj3'
        ])
    # Fail when projects don't match (3 zones vs. 2 projects).
    mockArgs.zone = ['zone1', 'zone2', 'zone3']
    mockArgs.project = ['proj1', 'proj2']
    mockParser.return_value = mockArgs
    self.assertRaises(
        TurbiniaException, turbiniactl.process_args, [
            'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone',
            'zone1,zone2,zone3', '--project', 'proj1,proj2'
        ])
    # Fail when names don't match (3 disks vs. 2 names).
    mockArgs.project = ['proj1', 'proj2', 'proj3']
    mockArgs.name = ['name1', 'name2']
    mockParser.return_value = mockArgs
    self.assertRaises(
        TurbiniaException, turbiniactl.process_args, [
            'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone',
            'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--name',
            'name1,name2'
        ])
    # Fail when sources don't match (3 disks vs. 2 sources). mockArgs is
    # the same object mockParser already returns, so mutating it in place
    # is sufficient here.
    mockArgs.name = ['name1', 'name2', 'name3']
    mockArgs.source = ['source1', 'source2']
    self.assertRaises(
        TurbiniaException, turbiniactl.process_args, [
            'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone',
            'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3',
            '--source', 'source1,source2'
        ])
    # Pass when all argument lists match: stub out downstream evidence
    # processing and the disk copy so no cloud calls are made.
    mockArgs.source = ['source1', 'source2', 'source3']
    turbiniactl.process_evidence = mock.MagicMock(return_value=None)
    mock_copyDisk.return_value = compute.GoogleComputeDisk(
        'fake-proj', 'fake-zone', 'fake-disk-copy')
    turbiniactl.process_args([
        'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone',
        'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3'
    ])
    self.assertTrue(turbiniactl.process_evidence.called)
import unittest
import mock

from libcloudforensics.providers.gcp.internal import project as gcp_project
from libcloudforensics.providers.gcp.internal import compute
from dftimewolf import config
from dftimewolf.lib import state
from dftimewolf.lib.containers import containers
from dftimewolf.lib.exporters import gce_disk_export

# Fake GCP fixtures for the disk-export tests; plain object constructions,
# nothing here is shown to perform API calls.
FAKE_SOURCE_PROJECT = gcp_project.GoogleCloudProject(
    'fake-source-project', 'fake-zone')
# NOTE(review): 'fake_zone' (underscore) diverges from the 'fake-zone'
# (hyphen) used by the surrounding fixtures — confirm whether intentional.
FAKE_DISK = compute.GoogleComputeDisk(
    FAKE_SOURCE_PROJECT.project_id, 'fake_zone', 'fake-source-disk')
FAKE_IMAGE = compute.GoogleComputeImage(
    FAKE_SOURCE_PROJECT.project_id, 'fake-zone',
    'fake-source-disk-image-df-export-temp')


class GoogleCloudDiskExportTest(unittest.TestCase):
  """Tests for the Google Cloud disk exporter."""

  def testInitialization(self):
    """Tests that the disk exporter can be initialized."""
    # NOTE(review): this chunk appears to end mid-test — no assertion
    # follows the construction below (unlike the parallel collector test);
    # confirm against the full file.
    test_state = state.DFTimewolfState(config.Config)
    google_disk_export = gce_disk_export.GoogleCloudDiskExport(
        test_state)