Code example #1
  def testStreamingCallbackNotCalled(self, mock_callback):
    """Tests that registered callbacks are called only on types for which
    they are registered."""
    test_state = state.DFTimewolfState(config.Config)
    test_state.LoadRecipe(test_recipe.contents)
    test_state.SetupModules()
    # DummyModule1's registered StreamingConsumer only consumes Reports, not
    # TicketAttributes.
    attributes = containers.TicketAttribute(
        type_='asd', name='asd', value='asd')
    test_state.StreamContainer(attributes)
    mock_callback.assert_not_called()
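
For contrast, the sketch below shows the type-keyed dispatch this test exercises in a minimal, self-contained form. MiniState, Report, and TicketAttribute here are illustrative stand-ins written for this sketch, not dftimewolf's actual implementation.

class Report:
  """Illustrative container type."""

class TicketAttribute:
  """Illustrative container type."""

class MiniState:
  """Toy stand-in for DFTimewolfState's type-keyed streaming dispatch."""

  def __init__(self):
    self._streaming_callbacks = {}  # maps container type -> list of callbacks

  def RegisterStreamingCallback(self, callback, container_type):
    self._streaming_callbacks.setdefault(container_type, []).append(callback)

  def StreamContainer(self, container):
    # Only callbacks registered for this exact container type are invoked.
    for callback in self._streaming_callbacks.get(type(container), []):
      callback(container)

mini_state = MiniState()
mini_state.RegisterStreamingCallback(print, Report)
mini_state.StreamContainer(TicketAttribute())  # nothing fires: wrong type
mini_state.StreamContainer(Report())           # print receives the Report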
Code example #2
    def SetUp(self,
              analysis_project_name,
              remote_project_name,
              incident_id,
              zone,
              boot_disk_size,
              boot_disk_type,
              cpu_cores,
              remote_instance_name=None,
              disk_names=None,
              all_disks=False,
              image_project='ubuntu-os-cloud',
              image_family='ubuntu-1804-lts'):
        """Sets up a Google Cloud Platform(GCP) collector.

    This method creates and starts an analysis VM in the analysis project and
    selects disks to copy from the remote project.

    If disk_names is specified, it will copy the corresponding disks from the
    project, ignoring disks belonging to any specific instances.

    If remote_instance_name is specified, two behaviors are possible:
    * If no other parameters are specified, it will select the instance's boot
      disk
    * if all_disks is set to True, it will select all disks in the project
      that are attached to the instance

    disk_names takes precedence over instance_names

    Args:
      analysis_project_name (str): name of the project that contains
          the analysis VM.
      remote_project_name (str): name of the remote project where the disks
          must be copied from.
      incident_id (str): incident identifier on which the name of the analysis
          VM will be based.
      zone (str): GCP zone in which new resources should be created.
      boot_disk_size (float): size of the analysis VM boot disk (in GB).
      boot_disk_type (str): Disk type to use [pd-standard, pd-ssd]
      cpu_cores (int): number of CPU cores to create the VM with.
      remote_instance_name (Optional[str]): name of the instance in
          the remote project containing the disks to be copied.
      disk_names (Optional[str]): Comma separated disk names to copy.
      all_disks (Optional[bool]): True if all disks attached to the source
          instance should be copied.
      image_project (Optional[str]): name of the project where the analysis
          VM image is hosted.
      image_family (Optional[str]): name of the image to use to create the
          analysis VM.
    """
        if not (remote_instance_name or disk_names):
            self.state.AddError(
                'You need to specify at least an instance name or disks to copy',
                critical=True)
            return

        disk_names = disk_names.split(',') if disk_names else []

        self.analysis_project = gcp_project.GoogleCloudProject(
            analysis_project_name, default_zone=zone)
        self.remote_project = gcp_project.GoogleCloudProject(
            remote_project_name)

        self.remote_instance_name = remote_instance_name
        self.disk_names = disk_names
        self.incident_id = incident_id
        self.all_disks = all_disks
        self._gcp_label = {'incident_id': self.incident_id}

        analysis_vm_name = 'gcp-forensics-vm-{0:s}'.format(self.incident_id)

        self.logger.info(
            'Your analysis VM will be: {0:s}'.format(analysis_vm_name))
        self.logger.info('Complementary gcloud command:')
        self.logger.info(
            'gcloud compute ssh --project {0:s} {1:s} --zone {2:s}'.format(
                self.analysis_project.project_id, analysis_vm_name, zone))

        self.state.StoreContainer(
            containers.TicketAttribute(
                name=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_NAME,
                type_=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_TYPE,
                value=analysis_vm_name))

        try:
            # TODO: Make creating an analysis VM optional
            # pylint: disable=too-many-function-args
            # pylint: disable=redundant-keyword-arg
            self.analysis_vm, _ = gcp_forensics.StartAnalysisVm(
                self.analysis_project.project_id,
                analysis_vm_name,
                zone,
                boot_disk_size,
                boot_disk_type,
                int(cpu_cores),
                image_project=image_project,
                image_family=image_family)
            self.analysis_vm.AddLabels(self._gcp_label)
            self.analysis_vm.GetBootDisk().AddLabels(self._gcp_label)

        except (RefreshError, DefaultCredentialsError) as exception:
            msg = (
                'Something is wrong with your Application Default Credentials. '
                'Try running:\n  $ gcloud auth application-default login\n')
            msg += str(exception)
            self.ModuleError(msg, critical=True)
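
As a usage sketch, a hypothetical invocation of this SetUp follows; 'collector' is assumed to be an instance of the enclosing GCP collector module, and every project, zone, and incident value is a placeholder.

# Hypothetical call; all argument values below are placeholders.
collector.SetUp(
    analysis_project_name='acme-forensics',   # project hosting the analysis VM
    remote_project_name='acme-prod',          # project to copy disks from
    incident_id='20200505-001',
    zone='us-central1-f',
    boot_disk_size=50.0,
    boot_disk_type='pd-standard',
    cpu_cores=4,
    remote_instance_name='compromised-vm-1')  # copy this instance's boot disk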
Code example #3
File: aws.py Project: Ctfbuster/dftimewolf
  def SetUp(self,
            remote_profile_name,
            remote_zone,
            incident_id,
            remote_instance_id=None,
            volume_ids=None,
            all_volumes=False,
            analysis_profile_name=None,
            analysis_zone=None,
            boot_volume_size=50,
            cpu_cores=16,
            ami=None):
    """Sets up an Amazon web Services (AWS) collector.

    This method creates and starts an analysis VM in the AWS account and
    selects volumes to copy, either from the target instance or from the list
    of volumes passed as a parameter.

    If volume_ids is specified, it will copy the corresponding volumes from the
    account, ignoring volumes belonging to any specific instances.

    If remote_instance_id is specified, two behaviors are possible:
    * If no other parameters are specified, it will select the instance's boot
      volume.
    * If all_volumes is set to True, it will select all volumes in the account
      that are attached to the instance.

    volume_ids takes precedence over remote_instance_id.

    Args:
      remote_profile_name (str): The AWS account in which the
          volume(s) exist(s). This is the profile name that is defined in
          your AWS credentials file.
      remote_zone (str): The AWS zone in which the source volume(s) exist(s).
      incident_id (str): Incident identifier used to name the analysis VM.
      remote_instance_id (str): Optional. Instance ID that needs forensicating.
      volume_ids (str): Optional. Comma-separated list of volume ids to
          copy.
      all_volumes (bool): Optional. True if all volumes attached to the source
          instance should be copied.
      analysis_profile_name (str): Optional. The AWS account in which to
          create the analysis VM. This is the profile name that is defined in
          your AWS credentials file.
      analysis_zone (str): Optional. The AWS zone in which to create the VM.
          If not specified, the VM will be created in the same zone where the
          volume(s) exist(s).
      boot_volume_size (int): Optional. The size (in GB) of the boot volume
          for the analysis VM. Default is 50 GB.
      cpu_cores (int): Optional. The number of CPU cores to use for the
          analysis VM. Default is 16.
      ami (str): Optional. The Amazon Machine Image ID to use to create the
          analysis VM. If not specified, will default to selecting Ubuntu 18.04
          LTS.
    """

    if not (remote_instance_id or volume_ids):
      self.ModuleError(
          'You need to specify at least an instance name or volume ids to copy',
          critical=True)
      return

    if not (remote_profile_name and remote_zone):
      self.ModuleError('You must specify "remote_profile_name" and '
                       '"remote_zone" parameters', critical=True)
      return

    self.remote_profile_name = remote_profile_name
    self.remote_zone = remote_zone
    self.source_account = aws_account.AWSAccount(
        self.remote_zone, aws_profile=self.remote_profile_name)

    self.incident_id = incident_id
    self.remote_instance_id = remote_instance_id

    self.volume_ids = volume_ids.split(',') if volume_ids else []
    self.all_volumes = all_volumes
    self.analysis_zone = analysis_zone or remote_zone
    self.analysis_profile_name = analysis_profile_name or remote_profile_name

    analysis_vm_name = 'aws-forensics-vm-{0:s}'.format(self.incident_id)
    print('Your analysis VM will be: {0:s}'.format(analysis_vm_name))
    self.state.StoreContainer(
        containers.TicketAttribute(
            name=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_NAME,
            type_=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_TYPE,
            value=analysis_vm_name))
    self.analysis_vm, _ = aws_forensics.StartAnalysisVm(
        analysis_vm_name,
        self.analysis_zone,
        boot_volume_size,
        ami=ami,
        cpu_cores=cpu_cores,
        dst_profile=self.analysis_profile_name,
    )
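
A hypothetical invocation follows; the profile names, zone, and volume IDs are placeholders. Passing volume_ids alone satisfies the instance-or-volumes check, and if remote_instance_id were also given, volume_ids would take precedence.

# Hypothetical call; 'collector' stands in for an instance of this module.
collector.SetUp(
    remote_profile_name='incident-response',  # profile from the AWS credentials file
    remote_zone='us-east-2b',
    incident_id='20200505-001',
    volume_ids='vol-0123456789abcdef0,vol-0fedcba9876543210',
    analysis_profile_name='forensics-lab')    # analysis VM in a separate account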
Code example #4
    def SetUp(self,
              analysis_project_name,
              remote_project_name,
              incident_id=None,
              zone='us-central1-f',
              create_analysis_vm=True,
              boot_disk_size=50,
              boot_disk_type='pd-standard',
              cpu_cores=4,
              remote_instance_name=None,
              disk_names=None,
              all_disks=False,
              image_project='ubuntu-os-cloud',
              image_family='ubuntu-1804-lts'):
        """Sets up a Google Cloud Platform(GCP) collector.

    This method creates and starts an analysis VM in the analysis project and
    selects disks to copy from the remote project.

    If analysis_project_name is not specified, analysis_project will be same
    as remote_project.

    If disk_names is specified, it will copy the corresponding disks from the
    project, ignoring disks belonging to any specific instances.

    If remote_instance_name is specified, two behaviors are possible:
    * If no other parameters are specified, it will select the instance's boot
      disk
    * if all_disks is set to True, it will select all disks in the project
      that are attached to the instance

    disk_names takes precedence over instance_names

    Args:
      analysis_project_name (str): Optional. name of the project that contains
          the analysis VM. Default is None.
      remote_project_name (str): name of the remote project where the disks
          must be copied from.
      incident_id (Optional[str]): Optional. Incident identifier on which the
          name of the analysis VM will be based. Default is None, which means
          add no label and format VM name as
          "gcp-forensics-vm-{TIMESTAMP('%Y%m%d%H%M%S')}".
      zone (Optional[str]): Optional. GCP zone in which new resources should
          be created. Default is us-central1-f.
      create_analysis_vm (Optional[bool]): Optional. Create analysis VM in
          the analysis project. Default is True.
      boot_disk_size (Optional[float]): Optional. Size of the analysis VM boot
          disk (in GB). Default is 50.
      boot_disk_type (Optional[str]): Optional. Disk type to use.
          Default is pd-standard.
      cpu_cores (Optional[int]): Optional. Number of CPU cores to
          create the VM with. Default is 4.
      remote_instance_name (Optional[str]): Optional. Name of the instance in
          the remote project containing the disks to be copied.
      disk_names (Optional[str]): Optional. Comma separated disk names to copy.
      all_disks (Optional[bool]): Optional. True if all disks attached to the
          source instance should be copied.
      image_project (Optional[str]): Optional. Name of the project where the
          analysis VM image is hosted.
      image_family (Optional[str]): Optional. Name of the image to use to
          create the analysis VM.
    """
        if not (remote_instance_name or disk_names):
            self.ModuleError(
                'You need to specify at least an instance name or disks to copy',
                critical=True)
            return

        disk_names = disk_names.split(',') if disk_names else []
        self.remote_project = gcp_project.GoogleCloudProject(
            remote_project_name, default_zone=zone)
        if analysis_project_name:
            self.analysis_project = gcp_project.GoogleCloudProject(
                analysis_project_name, default_zone=zone)
        else:
            self.analysis_project = self.remote_project

        self.remote_instance_name = remote_instance_name
        self.disk_names = disk_names
        self.all_disks = all_disks
        if incident_id:
            self.incident_id = incident_id
            self._gcp_label = {'incident_id': self.incident_id}
        else:
            self.incident_id = None
            self._gcp_label = {}

        try:
            if self.remote_instance_name:
                self.remote_project.compute.GetInstance(
                    self.remote_instance_name)
        except ResourceNotFoundError:
            self.ModuleError(
                message='Instance "{0:s}" not found or insufficient permissions'
                .format(self.remote_instance_name),
                critical=True)
            return

        if create_analysis_vm:
            if self.incident_id:
                analysis_vm_name = 'gcp-forensics-vm-{0:s}'.format(
                    self.incident_id)
            else:
                analysis_vm_name = common.GenerateUniqueInstanceName(
                    'gcp-forensics-vm', common.COMPUTE_NAME_LIMIT)

            self.logger.info(
                'Your analysis VM will be: {0:s}'.format(analysis_vm_name))
            self.logger.info('Complementary gcloud command:')
            self.logger.info(
                'gcloud compute ssh --project {0:s} {1:s} --zone {2:s}'.format(
                    self.analysis_project.project_id, analysis_vm_name,
                    self.analysis_project.default_zone))

            self.state.StoreContainer(
                containers.TicketAttribute(
                    name=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_NAME,
                    type_=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_TYPE,
                    value=analysis_vm_name))

            try:
                # pylint: disable=too-many-function-args
                # pylint: disable=redundant-keyword-arg
                self.analysis_vm, _ = gcp_forensics.StartAnalysisVm(
                    self.analysis_project.project_id,
                    analysis_vm_name,
                    self.analysis_project.default_zone,
                    boot_disk_size,
                    boot_disk_type,
                    int(cpu_cores),
                    image_project=image_project,
                    image_family=image_family)
                if self._gcp_label:
                    self.analysis_vm.AddLabels(self._gcp_label)
                    self.analysis_vm.GetBootDisk().AddLabels(self._gcp_label)

            except (RefreshError, DefaultCredentialsError) as exception:
                msg = (
                    'Something is wrong with your Application Default Credentials. '
                    'Try running:\n  $ gcloud auth application-default login\n'
                )
                msg += str(exception)
                self.ModuleError(msg, critical=True)
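
Because most parameters in this variant have defaults, a minimal hypothetical call needs little more than the project names; with no incident_id, no label is attached and the VM name falls back to a generated unique name.

# Hypothetical minimal call; both project names and the instance name are
# placeholders. incident_id is omitted, so a unique VM name is generated.
collector.SetUp(
    analysis_project_name='acme-forensics',
    remote_project_name='acme-prod',
    remote_instance_name='compromised-vm-1')  # an instance or disks is required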
Code example #5
    def SetUp(self,
              remote_profile_name,
              analysis_resource_group_name,
              incident_id,
              ssh_public_key,
              remote_instance_name=None,
              disk_names=None,
              all_disks=False,
              analysis_profile_name=None,
              analysis_region=None,
              boot_disk_size=50,
              cpu_cores=4,
              memory_in_mb=8192):
        """Sets up a Microsoft Azure collector.

    This method creates and starts an analysis VM in the analysis account and
    selects disks to copy from the remote account.

    If disk_names is specified, it will copy the corresponding disks from the
    account, ignoring disks belonging to any specific instances.

    If remote_instance_name is specified, two behaviors are possible:
    * If no other parameters are specified, it will select the instance's boot
      disk
    * if all_disks is set to True, it will select all disks in the account
      that are attached to the instance

    disk_names takes precedence over instance_names

    Args:
      remote_profile_name (str): The Azure account in which the disk(s)
          exist(s). This is the profile name that is defined in your credentials
          file.
      analysis_resource_group_name (str): The Azure resource group name in
          which to create the VM.
      incident_id (str): Incident identifier used to name the analysis VM.
      ssh_public_key (str): The public SSH key to attach to the instance.
      remote_instance_name (str): Instance ID that needs forensicating.
      disk_names (str): Comma-separated list of disk names to copy.
      all_disks (bool): True if all disk attached to the source
          instance should be copied.
      analysis_profile_name (str): The Azure account in which to create the
          analysis VM. This is the profile name that is defined in your
          credentials file.
      analysis_region (str): The Azure region in which to create the VM.
      boot_disk_size (int): Optional. The size (in GB) of the boot disk
          for the analysis VM. Default is 50 GB.
      cpu_cores (int): Optional. The number of CPU cores to use for the
          analysis VM. Default is 4.
      memory_in_mb (int): Optional. The amount of memory in mb to use for the
          analysis VM. Default is 8Gb.
    """
        if not (remote_instance_name or disk_names):
            self.ModuleError(
                'You need to specify at least an instance name or disks to copy',
                critical=True)
            return

        if not ssh_public_key:
            self.ModuleError(
                'You need to specify an SSH public key to add to the VM.',
                critical=True)
            return

        if not (remote_profile_name and analysis_resource_group_name):
            self.ModuleError(
                'You must specify "remote_profile_name" and '
                '"analysis_resource_group_name" parameters',
                critical=True)
            return

        self.remote_profile_name = remote_profile_name
        self.analysis_resource_group_name = analysis_resource_group_name
        self.source_account = account.AZAccount(
            self.analysis_resource_group_name,
            profile_name=self.remote_profile_name)

        self.incident_id = incident_id
        self.remote_instance_name = remote_instance_name

        self.disk_names = disk_names.split(',') if disk_names else []
        self.all_disks = all_disks
        self.analysis_region = analysis_region
        self.analysis_profile_name = analysis_profile_name or remote_profile_name

        analysis_vm_name = 'azure-forensics-vm-{0:s}'.format(self.incident_id)
        print('Your analysis VM will be: {0:s}'.format(analysis_vm_name))
        self.state.StoreContainer(
            containers.TicketAttribute(
                name=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_NAME,
                type_=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_TYPE,
                value=analysis_vm_name))
        self.analysis_vm, _ = az_forensics.StartAnalysisVm(
            self.analysis_resource_group_name,
            analysis_vm_name,
            boot_disk_size,
            ssh_public_key=ssh_public_key,
            cpu_cores=cpu_cores,
            memory_in_mb=memory_in_mb,
            region=self.analysis_region,
            dst_profile=self.analysis_profile_name,
        )
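
Finally, a hypothetical Azure invocation; the profile name, resource group, key material, and instance name are placeholders. Unlike the GCP and AWS variants, ssh_public_key is mandatory here.

# Hypothetical call; all argument values below are placeholders.
collector.SetUp(
    remote_profile_name='ir-subscription',    # profile from the credentials file
    analysis_resource_group_name='forensics-rg',
    incident_id='20200505-001',
    ssh_public_key='ssh-rsa AAAA...placeholder... analyst@workstation',
    remote_instance_name='compromised-vm-1',
    all_disks=True)  # copy every disk attached to the instance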