def StartAnalysisVm(args: 'argparse.Namespace') -> None:
  """Start forensic analysis VM.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser. Uses
        args.project, args.instance_name, args.zone, args.disk_size,
        args.disk_type, args.cpu_cores and optionally args.attach_disks
        (comma separated disk names).
  """
  attach_disks = []
  if args.attach_disks:
    attach_disks = args.attach_disks.split(',')
    # Validate only when --attach_disks was actually supplied: reject
    # malformed values containing empty entries (e.g. 'disk1,,disk2').
    # Splitting a non-empty string always yields at least one element,
    # so all() alone is sufficient here.
    if not all(attach_disks):
      logger.error('parameter --attach_disks: {0:s}'.format(
          args.attach_disks))
      return

  logger.info('Starting analysis VM...')
  vm = forensics.StartAnalysisVm(args.project,
                                 args.instance_name,
                                 args.zone,
                                 int(args.disk_size),
                                 args.disk_type,
                                 int(args.cpu_cores),
                                 attach_disks=attach_disks)

  logger.info('Analysis VM started.')
  logger.info('Name: {0:s}, Started: {1:s}'.format(vm[0].name, str(vm[1])))
def test_end_to_end_boot_disk(self):
  """End to end test on GCP.

  This end-to-end test runs directly on GCP and tests that:
    1. The project.py module connects to the target instance and makes a
        snapshot of the boot disk.
    2. A new disk is created from the taken snapshot.
    3. If an analysis VM already exists, the module will attach the disk
        copy to the VM. Otherwise, it will create a new GCP instance for
        analysis purpose and attach the boot disk copy to it.
  """
  # Copy the boot disk of the instance to analyse; disk_name is left at
  # its default (None), so the boot disk is the one copied.
  self.boot_disk_copy = forensics.CreateDiskCopy(
      src_proj=self.project_id,
      dst_proj=self.project_id,
      instance_name=self.instance_to_analyse,
      zone=self.zone)

  api_client = common.GoogleCloudComputeClient(self.project_id).GceApi()

  # The disk copy should now exist in the analysis project.
  disks_client = api_client.disks()
  copied_disk = disks_client.get(
      project=self.project_id,
      zone=self.zone,
      disk=self.boot_disk_copy.name).execute()
  self.assertEqual(copied_disk['name'], self.boot_disk_copy.name)

  # Get (or create) the analysis VM and attach the evidence boot disk.
  self.analysis_vm, _ = forensics.StartAnalysisVm(
      project=self.project_id,
      vm_name=self.analysis_vm_name,
      zone=self.zone,
      boot_disk_size=10,
      boot_disk_type='pd-ssd',
      cpu_cores=4,
      attach_disks=[self.boot_disk_copy.name])

  # The forensic instance should be live in the analysis GCP project and
  # the disk should be attached.
  instances_client = api_client.instances()
  vm_info = instances_client.get(
      project=self.project_id,
      zone=self.zone,
      instance=self.analysis_vm_name).execute()
  self.assertEqual(vm_info['name'], self.analysis_vm_name)

  for attached in vm_info['disks']:
    if attached['boot']:
      # The analysis VM's own boot disk must be the requested pd-ssd.
      boot_disk = disks_client.get(
          project=self.project_id,
          zone=self.zone,
          disk=attached['source'].split('/')[-1]).execute()
      self.assertEqual('pd-ssd', boot_disk['type'].rsplit('/', 1)[-1])
    # Success as soon as the evidence disk copy is found attached.
    if attached['source'].split('/')[-1] == self.boot_disk_copy.name:
      return
  self.fail(
      'Error: could not find the disk {0:s} in instance {1:s}'.format(
          self.boot_disk_copy.name, self.analysis_vm_name))
def setUpClass(cls):
  """Read the end-to-end project configuration and start the analysis VM.

  Skips the whole test class when the configuration cannot be read.
  """
  try:
    config = utils.ReadProjectInfo(['project_id', 'instance', 'zone'])
  except (OSError, RuntimeError, ValueError) as exception:
    raise unittest.SkipTest(str(exception))

  cls.project_id = config['project_id']
  cls.zone = config['zone']
  cls.instance_to_analyse = config['instance']
  # Optional: test a disk other than the boot disk.
  cls.disk_to_forensic = config.get('disk', None)
  cls.gcp = gcp_project.GoogleCloudProject(cls.project_id, cls.zone)
  cls.analysis_vm_name = 'new-vm-for-analysis'
  # Create and start the analysis VM shared by the tests.
  cls.analysis_vm, _ = forensics.StartAnalysisVm(
      project=cls.project_id,
      vm_name=cls.analysis_vm_name,
      zone=cls.zone,
      boot_disk_size=10,
      boot_disk_type='pd-ssd',
      cpu_cores=4)
def SetUp(self,
          analysis_project_name,
          remote_project_name,
          incident_id,
          zone,
          boot_disk_size,
          boot_disk_type,
          cpu_cores,
          remote_instance_name=None,
          disk_names=None,
          all_disks=False,
          image_project='ubuntu-os-cloud',
          image_family='ubuntu-1804-lts'):
  """Sets up a Google Cloud Platform(GCP) collector.

  This method creates and starts an analysis VM in the analysis project
  and selects disks to copy from the remote project.

  If disk_names is specified, it will copy the corresponding disks from
  the project, ignoring disks belonging to any specific instances.

  If remote_instance_name is specified, two behaviors are possible:
  * If no other parameters are specified, it will select the instance's
    boot disk
  * if all_disks is set to True, it will select all disks in the project
    that are attached to the instance

  disk_names takes precedence over instance_names

  Args:
    analysis_project_name (str): name of the project that contains
        the analysis VM.
    remote_project_name (str): name of the remote project where the disks
        must be copied from.
    incident_id (str): incident identifier on which the name of the
        analysis VM will be based.
    zone (str): GCP zone in which new resources should be created.
    boot_disk_size (float): size of the analysis VM boot disk (in GB).
    boot_disk_type (str): Disk type to use [pd-standard, pd-ssd]
    cpu_cores (int): number of CPU cores to create the VM with.
    remote_instance_name (Optional[str]): name of the instance in
        the remote project containing the disks to be copied.
    disk_names (Optional[str]): Comma separated disk names to copy.
    all_disks (Optional[bool]): True if all disks attached to the source
        instance should be copied.
    image_project (Optional[str]): name of the project where the analysis
        VM image is hosted.
    image_family (Optional[str]): name of the image to use to create the
        analysis VM.
  """
  # At least one disk-selection mechanism is required.
  if not (remote_instance_name or disk_names):
    self.state.AddError(
        'You need to specify at least an instance name or disks to copy',
        critical=True)
    return

  self.analysis_project = gcp_project.GoogleCloudProject(
      analysis_project_name, default_zone=zone)
  self.remote_project = gcp_project.GoogleCloudProject(remote_project_name)
  self.remote_instance_name = remote_instance_name
  self.disk_names = disk_names.split(',') if disk_names else []
  self.incident_id = incident_id
  self.all_disks = all_disks
  # Every created resource gets tagged with the incident it belongs to.
  self._gcp_label = {'incident_id': self.incident_id}

  analysis_vm_name = 'gcp-forensics-vm-{0:s}'.format(self.incident_id)
  self.logger.info(
      'Your analysis VM will be: {0:s}'.format(analysis_vm_name))
  self.logger.info('Complimentary gcloud command:')
  self.logger.info(
      'gcloud compute ssh --project {0:s} {1:s} --zone {2:s}'.format(
          self.analysis_project.project_id, analysis_vm_name, zone))

  self.state.StoreContainer(
      containers.TicketAttribute(
          name=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_NAME,
          type_=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_TYPE,
          value=analysis_vm_name))

  try:
    # TODO: Make creating an analysis VM optional
    # pylint: disable=too-many-function-args
    # pylint: disable=redundant-keyword-arg
    self.analysis_vm, _ = gcp_forensics.StartAnalysisVm(
        self.analysis_project.project_id,
        analysis_vm_name,
        zone,
        boot_disk_size,
        boot_disk_type,
        int(cpu_cores),
        image_project=image_project,
        image_family=image_family)
    self.analysis_vm.AddLabels(self._gcp_label)
    self.analysis_vm.GetBootDisk().AddLabels(self._gcp_label)
  except (RefreshError, DefaultCredentialsError) as exception:
    msg = (
        'Something is wrong with your Application Default Credentials. '
        'Try running:\n $ gcloud auth application-default login\n')
    msg += str(exception)
    self.ModuleError(msg, critical=True)
def SetUp(self,
          analysis_project_name,
          remote_project_name,
          incident_id=None,
          zone='us-central1-f',
          create_analysis_vm=True,
          boot_disk_size=50,
          boot_disk_type='pd-standard',
          cpu_cores=4,
          remote_instance_name=None,
          disk_names=None,
          all_disks=False,
          image_project='ubuntu-os-cloud',
          image_family='ubuntu-1804-lts'):
  """Sets up a Google Cloud Platform(GCP) collector.

  This method creates and starts an analysis VM in the analysis project
  and selects disks to copy from the remote project.

  If analysis_project_name is not specified, analysis_project will be same
  as remote_project.

  If disk_names is specified, it will copy the corresponding disks from
  the project, ignoring disks belonging to any specific instances.

  If remote_instance_name is specified, two behaviors are possible:
  * If no other parameters are specified, it will select the instance's
    boot disk
  * if all_disks is set to True, it will select all disks in the project
    that are attached to the instance

  disk_names takes precedence over instance_names

  Args:
    analysis_project_name (str): Optional. name of the project that
        contains the analysis VM. Default is None.
    remote_project_name (str): name of the remote project where the disks
        must be copied from.
    incident_id (Optional[str]): Optional. Incident identifier on which
        the name of the analysis VM will be based. Default is None, which
        means add no label and format VM name as
        "gcp-forensics-vm-{TIMESTAMP('%Y%m%d%H%M%S')}".
    zone (Optional[str]): Optional. GCP zone in which new resources should
        be created. Default is us-central1-f.
    create_analysis_vm (Optional[bool]): Optional. Create analysis VM in
        the analysis project. Default is True.
    boot_disk_size (Optional[float]): Optional. Size of the analysis VM
        boot disk (in GB). Default is 50.
    boot_disk_type (Optional[str]): Optional. Disk type to use.
        Default is pd-standard.
    cpu_cores (Optional[int]): Optional. Number of CPU cores to
        create the VM with. Default is 4.
    remote_instance_name (Optional[str]): Optional. Name of the instance
        in the remote project containing the disks to be copied.
    disk_names (Optional[str]): Optional. Comma separated disk names
        to copy.
    all_disks (Optional[bool]): Optional. True if all disks attached to
        the source instance should be copied.
    image_project (Optional[str]): Optional. Name of the project where the
        analysis VM image is hosted.
    image_family (Optional[str]): Optional. Name of the image to use to
        create the analysis VM.
  """
  # At least one disk-selection mechanism is required.
  if not (remote_instance_name or disk_names):
    self.ModuleError(
        'You need to specify at least an instance name or disks to copy',
        critical=True)
    return

  self.remote_project = gcp_project.GoogleCloudProject(
      remote_project_name, default_zone=zone)
  if analysis_project_name:
    self.analysis_project = gcp_project.GoogleCloudProject(
        analysis_project_name, default_zone=zone)
  else:
    # No dedicated analysis project: analyse in place.
    self.analysis_project = self.remote_project

  self.remote_instance_name = remote_instance_name
  self.disk_names = disk_names.split(',') if disk_names else []
  self.all_disks = all_disks

  if incident_id:
    self.incident_id = incident_id
    self._gcp_label = {'incident_id': self.incident_id}
  else:
    # Bug fix: self.incident_id and self._gcp_label are read
    # unconditionally below, but were only assigned when incident_id was
    # passed. Fall back to any value already set on the instance
    # (presumably by __init__ — confirm), else to empty defaults, so this
    # method never raises AttributeError.
    self.incident_id = getattr(self, 'incident_id', None)
    self._gcp_label = getattr(self, '_gcp_label', {})

  if self.remote_instance_name:
    # Fail early if the source instance is missing or not visible; only
    # the API call itself is kept inside the try block.
    try:
      self.remote_project.compute.GetInstance(self.remote_instance_name)
    except ResourceNotFoundError:
      self.ModuleError(
          message='Instance "{0:s}" not found or insufficient permissions'
          .format(self.remote_instance_name),
          critical=True)
      return

  if create_analysis_vm:
    if self.incident_id:
      analysis_vm_name = 'gcp-forensics-vm-{0:s}'.format(self.incident_id)
    else:
      analysis_vm_name = common.GenerateUniqueInstanceName(
          'gcp-forensics-vm', common.COMPUTE_NAME_LIMIT)

    self.logger.info(
        'Your analysis VM will be: {0:s}'.format(analysis_vm_name))
    self.logger.info('Complimentary gcloud command:')
    self.logger.info(
        'gcloud compute ssh --project {0:s} {1:s} --zone {2:s}'.format(
            self.analysis_project.project_id,
            analysis_vm_name,
            self.analysis_project.default_zone))

    self.state.StoreContainer(
        containers.TicketAttribute(
            name=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_NAME,
            type_=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_TYPE,
            value=analysis_vm_name))

    try:
      # pylint: disable=too-many-function-args
      # pylint: disable=redundant-keyword-arg
      self.analysis_vm, _ = gcp_forensics.StartAnalysisVm(
          self.analysis_project.project_id,
          analysis_vm_name,
          self.analysis_project.default_zone,
          boot_disk_size,
          boot_disk_type,
          int(cpu_cores),
          image_project=image_project,
          image_family=image_family)
      # Only label resources when an incident_id produced a label.
      if self._gcp_label:
        self.analysis_vm.AddLabels(self._gcp_label)
        self.analysis_vm.GetBootDisk().AddLabels(self._gcp_label)
    except (RefreshError, DefaultCredentialsError) as exception:
      msg = (
          'Something is wrong with your Application Default Credentials. '
          'Try running:\n $ gcloud auth application-default login\n')
      msg += str(exception)
      self.ModuleError(msg, critical=True)