Example #1
    def setUp(self):
        # Replaces the Compute Engine API client and operation helpers with
        # mocks so that no real GCP calls are made during the tests.
        self.project = libcloudforensics.GoogleCloudProject('test-project')
        self.project.GceApi = mock.MagicMock()
        self.project.GceOperation = mock.MagicMock()
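
A minimal, self-contained sketch (an assumption, not taken from the original test file) of how a test built on a setUp like this can assert against the mocked API; the _FakeProject stand-in and the test names below are illustrative only:

import unittest

import mock


class _FakeProject(object):
    """Stand-in for GoogleCloudProject, used only for this sketch."""

    def __init__(self, project_id):
        self.project_id = project_id


class GoogleCloudProjectMockTest(unittest.TestCase):

    def setUp(self):
        self.project = _FakeProject('test-project')
        self.project.GceApi = mock.MagicMock()

    def test_api_calls_are_recorded(self):
        # Every chained call returns another MagicMock, so no HTTP request
        # ever reaches Compute Engine.
        self.project.GceApi().instances().list(project='test-project')
        # The outer mock recorded the call, which the test can assert on.
        self.assertTrue(self.project.GceApi.called)


if __name__ == '__main__':
    unittest.main()
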
Example #2
    def setup(self,
              analysis_project_name,
              remote_project_name,
              incident_id,
              zone,
              boot_disk_size,
              remote_instance_name=None,
              disk_names=None,
              all_disks=False,
              image_project='ubuntu-os-cloud',
              image_family='ubuntu-1604-lts'):
        """Sets up a Google cloud collector.

    This method creates and starts an analysis VM in the analysis project and
    selects disks to copy from the remote project.

    If disk_names is specified, it will copy the corresponding disks from the
    project, ignoring disks belonging to any specific instances.

    If remote_instance_name is specified, two behaviors are possible:
      - If no other parameters are specified, it will select the instance's
        boot disk.
      - If all_disks is set to True, it will select all disks in the project
        that are attached to the instance.

    disk_names takes precedence over remote_instance_name.

    Args:
      analysis_project_name: The name of the project that contains the analysis
          VM (string).
      remote_project_name: The name of the remote project where the disks must
          be copied from (string).
      incident_id: The incident ID on which the name of the analysis VM will be
          based (string).
      zone: The zone in which new resources should be created (string).
      boot_disk_size: The size of the analysis VM boot disk (in GB) (float).
      remote_instance_name: The name of the instance in the remote project
          containing the disks to be copied (string).
      disk_names: Comma separated string with disk names to copy (string).
      all_disks: Copy all disks attached to the source instance (bool).
      image_project: Name of the project where the analysis VM image is hosted.
      image_family: Name of the image to use to create the analysis VM.
    """

        disk_names = disk_names.split(",") if disk_names else []

        self.analysis_project = libcloudforensics.GoogleCloudProject(
            analysis_project_name, default_zone=zone)
        remote_project = libcloudforensics.GoogleCloudProject(
            remote_project_name)

        if not (remote_instance_name or disk_names):
            self.state.add_error(
                "You need to specify at least an instance name or disks to copy",
                critical=True)
            return

        self.incident_id = incident_id
        analysis_vm_name = "gcp-forensics-vm-{0:s}".format(incident_id)
        print("Your analysis VM will be: {0:s}".format(analysis_vm_name))
        print("Complimentary gcloud command:")
        print("gcloud compute ssh --project {0:s} {1:s} --zone {2:s}".format(
            analysis_project_name, analysis_vm_name, zone))

        try:
            # TODO: Make creating an analysis VM optional
            # pylint: disable=too-many-function-args
            self.analysis_vm, _ = libcloudforensics.start_analysis_vm(
                self.analysis_project.project_id, analysis_vm_name, zone,
                boot_disk_size, image_project, image_family)

            if disk_names:
                for name in disk_names:
                    try:
                        self.disks_to_copy.append(
                            remote_project.get_disk(name))
                    except RuntimeError:
                        self.state.add_error(
                            "Disk '{0:s}' was not found in project {1:s}".
                            format(name, remote_project_name),
                            critical=True)
                        break

            elif remote_instance_name:
                remote_instance = remote_project.get_instance(
                    remote_instance_name)

                if all_disks:
                    self.disks_to_copy = [
                        remote_project.get_disk(disk_name)
                        for disk_name in remote_instance.list_disks()
                    ]
                else:
                    self.disks_to_copy = [remote_instance.get_boot_disk()]

                if not self.disks_to_copy:
                    self.state.add_error("Could not find any disks to copy",
                                         critical=True)

        except AccessTokenRefreshError as err:
            self.state.add_error(
                "Something is wrong with your gcloud access token.")
            self.state.add_error(err, critical=True)

        except HttpError as err:
            if err.resp.status == 403:
                self.state.add_error(
                    "Make sure you have the appropriate permissions on the project"
                )
            if err.resp.status == 404:
                self.state.add_error(
                    "GCP resource not found. Maybe a typo in the project / instance / "
                    "disk name?")
            self.state.add_error(err, critical=True)
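
The disk-selection rules spelled out in the docstring can be summarized in a standalone helper. This is only an illustrative sketch of the precedence logic; select_disks and its parameters are not part of dftimewolf or libcloudforensics:

def select_disks(disk_names, remote_instance_name, all_disks,
                 instance_disk_names, boot_disk_name):
    # Explicit disk names win outright.
    if disk_names:
        return list(disk_names)
    # Otherwise fall back to the instance: all attached disks, or just boot.
    if remote_instance_name:
        return list(instance_disk_names) if all_disks else [boot_disk_name]
    # Nothing to copy; the collector treats this as a critical error.
    return []


# disk_names takes precedence over the instance's disks:
assert select_disks(['disk1'], 'owned-instance', True,
                    ['bootdisk', 'data1'], 'bootdisk') == ['disk1']
# With only an instance name, the boot disk is selected by default:
assert select_disks([], 'owned-instance', False,
                    ['bootdisk', 'data1'], 'bootdisk') == ['bootdisk']
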
Example #3
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests the GoogleCloudCollector."""

from __future__ import unicode_literals

import unittest

import mock
from turbinia.lib import libcloudforensics

from dftimewolf import config
from dftimewolf.lib import state
from dftimewolf.lib.collectors import gcloud

FAKE_PROJECT = libcloudforensics.GoogleCloudProject('test-target-project-name',
                                                    'fake_zone')
FAKE_ANALYSIS_VM = libcloudforensics.GoogleComputeInstance(
    FAKE_PROJECT, 'fake_zone', 'fake-analysis-vm')
FAKE_INSTANCE = libcloudforensics.GoogleComputeInstance(
    FAKE_PROJECT, 'fake_zone', 'fake-instance')
FAKE_DISK = libcloudforensics.GoogleComputeDisk(FAKE_PROJECT, 'fake_zone',
                                                'disk1')
FAKE_BOOT_DISK = libcloudforensics.GoogleComputeDisk(FAKE_PROJECT, 'fake_zone',
                                                     'bootdisk')
FAKE_SNAPSHOT = libcloudforensics.GoogleComputeSnapshot(
    FAKE_DISK, FAKE_PROJECT)
FAKE_DISK_COPY = libcloudforensics.GoogleComputeDisk(FAKE_PROJECT, 'fake_zone',
                                                     'disk1-copy')


def ReturnFakeDisk(disk_name):
    # The body is truncated in this listing; a plausible completion (an
    # assumption, not verified against the original file) wraps the requested
    # name in a fake GoogleComputeDisk:
    return libcloudforensics.GoogleComputeDisk(FAKE_PROJECT, 'fake_zone',
                                               disk_name)
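
Continuing the module above, a hedged sketch of how a factory such as ReturnFakeDisk is typically wired into a test through mock.patch's side_effect; the patch target, class name, and test name below are illustrative and may differ from the real test file:

class GoogleCloudCollectorSketchTest(unittest.TestCase):

    @mock.patch('turbinia.lib.libcloudforensics.GoogleCloudProject.get_disk')
    def testGetDiskIsFaked(self, mock_get_disk):
        # Each requested disk name yields a fresh fake GoogleComputeDisk.
        mock_get_disk.side_effect = ReturnFakeDisk
        FAKE_PROJECT.get_disk('disk1')
        mock_get_disk.assert_called_once_with('disk1')
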
Example #4
    def SetUp(self,
              analysis_project_name,
              remote_project_name,
              incident_id,
              zone,
              boot_disk_size,
              cpu_cores,
              remote_instance_name=None,
              disk_names=None,
              all_disks=False,
              image_project='ubuntu-os-cloud',
              image_family='ubuntu-1804-lts'):
        """Sets up a Google Cloud (GCP) collector.

    This method creates and starts an analysis VM in the analysis project and
    selects disks to copy from the remote project.

    If disk_names is specified, it will copy the corresponding disks from the
    project, ignoring disks belonging to any specific instances.

    If remote_instance_name is specified, two behaviors are possible:
    * If no other parameters are specified, it will select the instance's
      boot disk.
    * If all_disks is set to True, it will select all disks in the project
      that are attached to the instance.

    disk_names takes precedence over remote_instance_name.

    Args:
      analysis_project_name (str): name of the project that contains
          the analysis VM.
      remote_project_name (str): name of the remote project where the disks
          must be copied from.
      incident_id (str): incident identifier on which the name of the analysis
          VM will be based.
      zone (str): GCP zone in which new resources should be created.
      boot_disk_size (float): size of the analysis VM boot disk (in GB).
      cpu_cores (int): number of CPU cores to create the VM with.
      remote_instance_name (Optional[str]): name of the instance in
          the remote project containing the disks to be copied.
      disk_names (Optional[str]): Comma separated disk names to copy.
      all_disks (Optional[bool]): True if all disks attached to the source
          instance should be copied.
      image_project (Optional[str]): name of the project where the analysis
          VM image is hosted.
      image_family (Optional[str]): name of the image to use to create the
          analysis VM.
    """
        disk_names = disk_names.split(',') if disk_names else []

        self.analysis_project = libcloudforensics.GoogleCloudProject(
            analysis_project_name, default_zone=zone)
        self.remote_project = libcloudforensics.GoogleCloudProject(
            remote_project_name)

        self.remote_instance_name = remote_instance_name
        self.disk_names = disk_names
        self.incident_id = incident_id
        self.all_disks = all_disks

        analysis_vm_name = 'gcp-forensics-vm-{0:s}'.format(self.incident_id)

        print('Your analysis VM will be: {0:s}'.format(analysis_vm_name))
        print('Complimentary gcloud command:')
        print('gcloud compute ssh --project {0:s} {1:s} --zone {2:s}'.format(
            self.analysis_project.project_id, analysis_vm_name, zone))

        self.state.StoreContainer(
            containers.TicketAttribute(
                name=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_NAME,
                type_=self._ANALYSIS_VM_CONTAINER_ATTRIBUTE_TYPE,
                value=analysis_vm_name))

        try:
            # TODO: Make creating an analysis VM optional
            # pylint: disable=too-many-function-args

            self.analysis_vm, _ = libcloudforensics.start_analysis_vm(
                self.analysis_project.project_id,
                analysis_vm_name,
                zone,
                boot_disk_size,
                int(cpu_cores),
                attach_disk=None,
                image_project=image_project,
                image_family=image_family)

        except AccessTokenRefreshError as exception:
            self.state.AddError(
                'Something is wrong with your gcloud access token.')
            self.state.AddError(exception, critical=True)

        except ApplicationDefaultCredentialsError as exception:
            self.state.AddError(
                'Something is wrong with your Application Default Credentials. '
                'Try running:\n  $ gcloud auth application-default login')
            self.state.AddError(exception, critical=True)
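
For orientation, a hypothetical invocation of this collector, using the modules imported in Example #3, might look like the following. The argument values are placeholders, the state construction follows the pattern seen elsewhere in dftimewolf tests but is an assumption here, and without mocking the libcloudforensics calls this would attempt to create real GCP resources:

# Hypothetical wiring; all values below are placeholders.
test_state = state.DFTimewolfState(config.Config)
collector = gcloud.GoogleCloudCollector(test_state)
collector.SetUp(
    analysis_project_name='my-analysis-project',
    remote_project_name='my-target-project',
    incident_id='20200101-demo',
    zone='us-central1-f',
    boot_disk_size=50.0,
    cpu_cores=4,
    remote_instance_name='compromised-instance')
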
Example #5
    def SetUp(self,
              analysis_project_name,
              remote_project_name,
              incident_id,
              zone,
              boot_disk_size,
              cpu_cores,
              remote_instance_name=None,
              disk_names=None,
              all_disks=False,
              image_project='ubuntu-os-cloud',
              image_family='ubuntu-1804-lts'):
        """Sets up a Google Cloud (GCP) collector.

    This method creates and starts an analysis VM in the analysis project and
    selects disks to copy from the remote project.

    If disk_names is specified, it will copy the corresponding disks from the
    project, ignoring disks belonging to any specific instances.

    If remote_instance_name is specified, two behaviors are possible:
    * If no other parameters are specified, it will select the instance's
      boot disk.
    * If all_disks is set to True, it will select all disks in the project
      that are attached to the instance.

    disk_names takes precedence over remote_instance_name.

    Args:
      analysis_project_name (str): name of the project that contains
          the analysis VM.
      remote_project_name (str): name of the remote project where the disks
          must be copied from.
      incident_id (str): incident identifier on which the name of the analysis
          VM will be based.
      zone (str): GCP zone in which new resources should be created.
      boot_disk_size (float): size of the analysis VM boot disk (in GB).
      cpu_cores (int): number of CPU cores to create the VM with.
      remote_instance_name (Optional[str]): name of the instance in
          the remote project containing the disks to be copied.
      disk_names (Optional[str]): Comma separated disk names to copy.
      all_disks (Optional[bool]): True if all disks attached to the source
          instance should be copied.
      image_project (Optional[str]): name of the project where the analysis
          VM image is hosted.
      image_family (Optional[str]): name of the image to use to create the
          analysis VM.
    """
        disk_names = disk_names.split(',') if disk_names else []

        self.analysis_project = libcloudforensics.GoogleCloudProject(
            analysis_project_name, default_zone=zone)
        remote_project = libcloudforensics.GoogleCloudProject(
            remote_project_name)

        if not (remote_instance_name or disk_names):
            self.state.AddError(
                'You need to specify at least an instance name or disks to copy',
                critical=True)
            return

        self.incident_id = incident_id
        analysis_vm_name = 'gcp-forensics-vm-{0:s}'.format(incident_id)
        print('Your analysis VM will be: {0:s}'.format(analysis_vm_name))
        print('Complimentary gcloud command:')
        print('gcloud compute ssh --project {0:s} {1:s} --zone {2:s}'.format(
            analysis_project_name, analysis_vm_name, zone))

        try:
            # TODO: Make creating an analysis VM optional
            # pylint: disable=too-many-function-args
            self.analysis_vm, _ = libcloudforensics.start_analysis_vm(
                self.analysis_project.project_id,
                analysis_vm_name,
                zone,
                boot_disk_size,
                int(cpu_cores),
                attach_disk=None,
                image_project=image_project,
                image_family=image_family)

            if disk_names:
                for name in disk_names:
                    try:
                        self.disks_to_copy.append(
                            remote_project.get_disk(name))
                    except RuntimeError:
                        self.state.AddError(
                            'Disk "{0:s}" was not found in project {1:s}'.
                            format(name, remote_project_name),
                            critical=True)
                        break

            elif remote_instance_name:
                remote_instance = remote_project.get_instance(
                    remote_instance_name)

                if all_disks:
                    self.disks_to_copy = [
                        remote_project.get_disk(disk_name)
                        for disk_name in remote_instance.list_disks()
                    ]
                else:
                    self.disks_to_copy = [remote_instance.get_boot_disk()]

                if not self.disks_to_copy:
                    self.state.AddError('Could not find any disks to copy',
                                        critical=True)

        except AccessTokenRefreshError as err:
            self.state.AddError(
                'Something is wrong with your gcloud access token.')
            self.state.AddError(err, critical=True)

        except ApplicationDefaultCredentialsError as err:
            self.state.AddError(
                'Something is wrong with your Application Default Credentials. '
                'Try running:\n  $ gcloud auth application-default login')
            self.state.AddError(err, critical=True)

        except HttpError as err:
            if err.resp.status == 403:
                self.state.AddError(
                    'Make sure you have the appropriate permissions on the project'
                )
            if err.resp.status == 404:
                self.state.AddError(
                    'GCP resource not found. Maybe a typo in the project / instance / '
                    'disk name?')
            self.state.AddError(err, critical=True)
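
The HttpError branch above maps two status codes to user-facing hints. A table-driven equivalent (illustrative only, not part of dftimewolf) makes the mapping explicit and easy to extend:

# Illustrative helper: the same hints as the except-block above, keyed by status.
_HTTP_ERROR_HINTS = {
    403: 'Make sure you have the appropriate permissions on the project',
    404: 'GCP resource not found. Maybe a typo in the project / instance / '
         'disk name?',
}


def HttpErrorHint(status):
    # Returns the hint for a known status code, or None for anything else.
    return _HTTP_ERROR_HINTS.get(status)


print(HttpErrorHint(404))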