    def ListInstances(self) -> Dict[str, 'GoogleComputeInstance']:
        """List instances in project.

        Returns:
          Dict[str, GoogleComputeInstance]: Dictionary mapping instance names (str)
              to their respective GoogleComputeInstance object.
        """

        instances = {}
        gce_instance_client = self.GceApi().instances()
        responses = common.ExecuteRequest(gce_instance_client,
                                          'aggregatedList',
                                          {'project': self.project_id})

        for response in responses:
            for zone in response['items']:
                try:
                    for instance in response['items'][zone]['instances']:
                        _, zone = instance['zone'].rsplit('/', 1)
                        name = instance['name']
                        instances[name] = GoogleComputeInstance(
                            self.project_id,
                            zone,
                            name,
                            labels=instance.get('labels'))
                except KeyError:
                    pass

        return instances
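
The nested loops above rely on the general shape of a Compute Engine `aggregatedList` page: `items` is keyed by zone scope, and scopes with no instances carry a `warning` instead of an `instances` list, which is what the `KeyError` guard skips. A rough, illustrative page is sketched below (project, instance, and label values are made up); the disks variant used by `ListDisks` further down has the same layout with a `disks` list.

# Illustrative aggregatedList page (made-up values, trimmed to relevant fields).
response = {
    'items': {
        'zones/us-central1-a': {
            'instances': [{
                'name': 'instance-1',
                # Full zone URL; rsplit('/', 1) keeps only 'us-central1-a'.
                'zone': ('https://www.googleapis.com/compute/v1/'
                         'projects/my-project/zones/us-central1-a'),
                'labels': {'env': 'test'},
            }],
        },
        # Empty zones have no 'instances' key, hence the KeyError guard above.
        'zones/europe-west1-b': {
            'warning': {'code': 'NO_RESULTS_ON_PAGE'},
        },
    },
}
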
Example #2

    def setup_publisher(self):
        """Set up the pubsub publisher."""
        config.LoadConfig()
        # For the publisher we use the pubsub client from googleapiclient.discovery.
        # For more information on using the APIs, see
        # https://cloud.google.com/pubsub/docs/reference/rest
        self.pubsub_api_client = gcp_common.CreateService('pubsub', 'v1')
        self.topic_path = 'projects/{0:s}/topics/{1:s}'.format(
            config.TURBINIA_PROJECT, self.topic_name)
        try:
            log.debug('Trying to create pubsub topic {0:s}'.format(
                self.topic_path))
            topics_client = self.pubsub_api_client.projects().topics()
            # ExecuteRequest takes the API client object, the method name as a
            # string, and the request parameters as a dict; it executes the API
            # call, handles paging, and returns the responses.
            gcp_common.ExecuteRequest(topics_client, 'create',
                                      {'name': self.topic_path})
        except HttpError as exception:
            if exception.resp.status == 409:
                log.debug('PubSub topic {0:s} already exists.'.format(
                    self.topic_path))
            else:
                raise TurbiniaException(
                    'Unknown error occurred when creating Topic:'
                    ' {0!s}'.format(exception), __name__) from exception
        log.debug('Setup PubSub publisher at {0:s}'.format(self.topic_path))
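
The comment above describes the helper's contract rather than its implementation. A minimal sketch of what a paging helper along these lines could look like is shown below, assuming googleapiclient's usual `<method>_next` pagination helpers; this is illustrative only, not the actual `ExecuteRequest` from the common module.

def execute_request_sketch(api_client, method_name, params):
    """Illustrative only: run a googleapiclient method and follow result pages."""
    responses = []
    request = getattr(api_client, method_name)(**params)
    while request is not None:
        response = request.execute()
        responses.append(response)
        # List-style methods expose '<method>_next' helpers for paging; methods
        # like 'create' do not, so we stop after the first response.
        next_page = getattr(api_client, method_name + '_next', None)
        request = next_page(request, response) if next_page else None
    return responses
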
Example #3

    def ListDisks(self) -> Dict[str, 'GoogleComputeDisk']:
        """List disks in project.

        Returns:
          Dict[str, GoogleComputeDisk]: Dictionary mapping disk names (str) to
              their respective GoogleComputeDisk object.
        """

        disks = {}
        gce_disk_client = self.GceApi().disks()
        responses = common.ExecuteRequest(gce_disk_client, 'aggregatedList',
                                          {'project': self.project_id})

        for response in responses:
            for zone in response['items']:
                try:
                    for disk in response['items'][zone]['disks']:
                        _, zone = disk['zone'].rsplit('/', 1)
                        name = disk['name']
                        disks[name] = GoogleComputeDisk(
                            self.project_id,
                            zone,
                            name,
                            labels=disk.get('labels'))
                except KeyError:
                    pass

        return disks
Example #4

    def ExecuteQuery(self, qfilter: str) -> List[Dict[str, Any]]:
        """Query logs in GCP project.

    Args:
      qfilter (str): The query filter to use.

    Returns:
      List[Dict]: Log entries returned by the query, e.g. [{'projectIds':
          [...], 'resourceNames': [...]}, {...}]

    Raises:
      RuntimeError: If API call failed.
    """

        body = {
            'resourceNames': 'projects/' + self.project_id,
            'filter': qfilter,
            'orderBy': 'timestamp desc',
        }

        entries = []
        gcl_instance_client = self.GclApi().entries()
        responses = common.ExecuteRequest(gcl_instance_client,
                                          'list', {'body': body},
                                          throttle=True)

        for response in responses:
            for entry in response.get('entries', []):
                entries.append(entry)

        return entries
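
For context, the `qfilter` argument uses the Cloud Logging filter syntax. A typical filter for Admin Activity audit logs on Compute Engine instances might look like the sketch below; the project ID and timestamp are placeholders.

# Illustrative query filter (placeholder project ID and timestamp).
qfilter = ('resource.type="gce_instance" '
           'logName="projects/my-project/logs/'
           'cloudaudit.googleapis.com%2Factivity" '
           'timestamp>="2023-01-01T00:00:00Z"')
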
Example #5

    def GetBucketSize(self, bucket: str, timeframe: int = 1) -> Dict[str, int]:
        """List the size of a Google Storage Bucket in a project (default: last
        1 day).

        Note: This will list the _maximum size_ (in bytes) the bucket had in
            the timeframe.

        Ref: https://cloud.google.com/monitoring/api/metrics_gcp#gcp-storage

        Args:
          bucket (str): Name of a bucket in GCS.
          timeframe (int): Optional. The number (in days) for which to measure
              activity. Default: 1 day.

        Returns:
          Dict[str, int]: Dictionary mapping bucket name to its size (in bytes).
        """

        start_time = common.FormatRFC3339(datetime.datetime.utcnow() -
                                          datetime.timedelta(days=timeframe))
        end_time = common.FormatRFC3339(datetime.datetime.utcnow())
        period = timeframe * 24 * 60 * 60

        assert self.project_id  # Necessary for mypy check
        gcm = gcp_monitoring.GoogleCloudMonitoring(self.project_id)
        gcm_api = gcm.GcmApi()
        gcm_timeseries_client = gcm_api.projects().timeSeries()
        qfilter = ('metric.type="storage.googleapis.com/storage/total_bytes" '
                   'resource.type="gcs_bucket"')
        qfilter += ' resource.label.bucket_name="{0:s}"'.format(bucket)

        responses = common.ExecuteRequest(
            gcm_timeseries_client, 'list', {
                'name': 'projects/{0:s}'.format(self.project_id),
                'filter': qfilter,
                'interval_startTime': start_time,
                'interval_endTime': end_time,
                'aggregation_groupByFields': 'resource.label.bucket_name',
                'aggregation_perSeriesAligner': 'ALIGN_MAX',
                'aggregation_alignmentPeriod': '{0:d}s'.format(period),
                'aggregation_crossSeriesReducer': 'REDUCE_NONE'
            })

        ret = {}
        for response in responses:
            for ts in response.get('timeSeries', []):
                bucket = ts.get('resource', {}).get('labels',
                                                    {}).get('bucket_name', '')
                if bucket:
                    points = ts.get('points', [])
                    for point in points:
                        val = point.get('value', {}).get('doubleValue', 0)
                        if bucket not in ret:
                            ret[bucket] = val
                        elif val > ret[bucket]:
                            ret[bucket] = val
        return ret
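
The parsing loop above walks a Cloud Monitoring `timeSeries.list` response. One matching series looks roughly like the sketch below (made-up values, trimmed to the fields the loop reads). Note that `total_bytes` is reported as a `doubleValue`, while the request-count metric used by `ActiveServices` further down comes back as an `int64Value` string.

# Illustrative timeSeries.list response (trimmed, made-up values).
response = {
    'timeSeries': [{
        'metric': {'type': 'storage.googleapis.com/storage/total_bytes'},
        'resource': {
            'type': 'gcs_bucket',
            'labels': {'project_id': 'my-project', 'bucket_name': 'my-bucket'},
        },
        'points': [{
            'interval': {'startTime': '...', 'endTime': '...'},
            'value': {'doubleValue': 1073741824.0},
        }],
    }],
}
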
Example #6

    def ListLogs(self) -> List[str]:
        """List logs in project.

    Returns:
      List[str]: The project logs available.

    Raises:
      RuntimeError: If API call failed.
    """

        logs = []
        gcl_instance_client = self.GclApi().logs()
        responses = common.ExecuteRequest(
            gcl_instance_client, 'list',
            {'parent': 'projects/' + self.project_id})

        for response in responses:
            for logtypes in response.get('logNames', []):
                logs.append(logtypes)

        return logs
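
The `logNames` returned by the Logging API are full resource names, so the result typically looks like the following (illustrative values only):

# Illustrative return value of ListLogs (placeholder project ID).
logs = [
    'projects/my-project/logs/syslog',
    'projects/my-project/logs/cloudaudit.googleapis.com%2Factivity',
]
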
Example #7

    def ActiveServices(self, timeframe: int = 30) -> Dict[str, int]:
        """List active services in the project (default: last 30 days).

    Args:
      timeframe (int): Optional. The number (in days) for
          which to measure activity.

    Returns:
      Dict[str, int]: Dictionary mapping service name to number of uses.
    """
        start_time = common.FormatRFC3339(datetime.datetime.utcnow() -
                                          datetime.timedelta(days=timeframe))
        end_time = common.FormatRFC3339(datetime.datetime.utcnow())
        period = timeframe * 24 * 60 * 60
        service = self.GcmApi()
        gcm_timeseries_client = service.projects().timeSeries()
        responses = common.ExecuteRequest(
            gcm_timeseries_client, 'list', {
                'name': 'projects/{0:s}'.format(self.project_id),
                'filter':
                'metric.type="serviceruntime.googleapis.com/api/request_count"',
                'interval_startTime': start_time,
                'interval_endTime': end_time,
                'aggregation_groupByFields': 'resource.labels.service',
                'aggregation_perSeriesAligner': 'ALIGN_SUM',
                'aggregation_alignmentPeriod': '{0:d}s'.format(period),
                'aggregation_crossSeriesReducer': 'REDUCE_SUM',
            })
        ret = {}
        for response in responses:
            for ts in response.get('timeSeries', []):
                service = ts.get('resource', {}).get('labels',
                                                     {}).get('service', '')
                if service:
                    points = ts.get('points', [])
                    if points:
                        val = points[0].get('value', {}).get('int64Value', '')
                        if val:
                            ret[service] = int(val)
        return ret
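
The result maps consumed-API service names to request counts over the window; an illustrative return value (made-up numbers) might be:

# Illustrative return value of ActiveServices.
ret = {
    'compute.googleapis.com': 12345,
    'storage.googleapis.com': 678,
}
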
Example #8

    def send_message(self, message):
        """Send a pubsub message.

        Args:
          message: The message to send.
        """
        base64_data = base64.b64encode(message.encode('utf-8'))
        request_body = {
            "messages": [{
                "data": base64_data.decode('utf-8')  # base64 encoded string
            }]
        }
        publish_client = self.pubsub_api_client.projects().topics()
        response = gcp_common.ExecuteRequest(publish_client, 'publish', {
            'topic': self.topic_path,
            'body': request_body
        })
        # Safe to unpack since response is unpaged.
        if not response[0]['messageIds']:
            raise TurbiniaException(
                'Message {0:s} was not published to topic {1:s}'.format(
                    message, self.topic_path))
        msg_id = response[0]['messageIds'][0]
        log.info('Published message {0!s} to topic {1!s}'.format(
            msg_id, self.topic_name))
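
For reference, the Pub/Sub `projects.topics.publish` exchange behind this helper looks roughly like the sketch below (made-up values); the single unpaged response is why indexing `response[0]` is safe.

# Illustrative publish request body and response list (made-up values).
request_body = {
    'messages': [{'data': 'eyJ0YXNrIjogIi4uLiJ9'}]  # base64-encoded payload
}
response = [{'messageIds': ['1234567890123456']}]   # as returned by ExecuteRequest
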