Example 1
  def _start_upload(self):
    # This starts the uploader thread.  We are forced to run the uploader in
    # another thread because the apitools uploader insists on taking a stream
    # as input. Happily, this also means we get asynchronous I/O to GCS.
    #
    # The uploader by default transfers data in chunks of 1024 * 1024 bytes at
    # a time, buffering writes until that size is reached.

    project_number = self._get_project_number(self._bucket)

    # Create a request count metric
    resource = resource_identifiers.GoogleCloudStorageBucket(self._bucket)
    labels = {
        monitoring_infos.SERVICE_LABEL: 'Storage',
        monitoring_infos.METHOD_LABEL: 'Objects.insert',
        monitoring_infos.RESOURCE_LABEL: resource,
        monitoring_infos.GCS_BUCKET_LABEL: self._bucket,
        monitoring_infos.GCS_PROJECT_ID_LABEL: project_number
    }
    service_call_metric = ServiceCallMetric(
        request_count_urn=monitoring_infos.API_REQUEST_COUNT_URN,
        base_labels=labels)
    try:
      self._client.objects.Insert(self._insert_request, upload=self._upload)
      service_call_metric.call('ok')
    except Exception as e:  # pylint: disable=broad-except
      service_call_metric.call(e)
      _LOGGER.error(
          'Error in _start_upload while inserting file %s: %s',
          self._path,
          traceback.format_exc())
      self._upload_thread.last_error = e
    finally:
      self._child_conn.close()
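
Every API call in these snippets is wrapped the same way: build the base labels, create a ServiceCallMetric keyed by the request-count URN, and record either 'ok' or the caught exception. Below is a minimal sketch of that pattern as a standalone helper; _call_with_request_count_metric is hypothetical, not part of the Beam SDK, and the import paths are assumed from the SDK layout these snippets come from.

# Import paths assumed from the Beam SDK layout these snippets come from.
from apache_beam.internal.metrics.metric import ServiceCallMetric
from apache_beam.metrics import monitoring_infos


def _call_with_request_count_metric(base_labels, api_call):
  """Runs api_call() and records one API request-count metric for the attempt."""
  service_call_metric = ServiceCallMetric(
      request_count_urn=monitoring_infos.API_REQUEST_COUNT_URN,
      base_labels=base_labels)
  try:
    result = api_call()
    service_call_metric.call('ok')  # Success: status label is 'ok'.
    return result
  except Exception as e:  # pylint: disable=broad-except
    service_call_metric.call(e)  # Failure: status label is derived from the error.
    raise

Note that _start_upload above does not re-raise; it stores the failure on the uploader thread via last_error, because the insert runs on a background thread.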
Example 2
    def test_uploader_monitoring_info(self):
        # Clear the process wide metric container.
        MetricsEnvironment.process_wide_container().reset()

        file_name = 'gs://gcsio-metrics-test/dummy_mode_file'
        file_size = 5 * 1024 * 1024 + 100
        random_file = self._insert_random_file(self.client, file_name,
                                               file_size)
        f = self.gcs.open(file_name, 'w')

        resource = resource_identifiers.GoogleCloudStorageBucket(
            random_file.bucket)
        labels = {
            monitoring_infos.SERVICE_LABEL: 'Storage',
            monitoring_infos.METHOD_LABEL: 'Objects.insert',
            monitoring_infos.RESOURCE_LABEL: resource,
            monitoring_infos.GCS_BUCKET_LABEL: random_file.bucket,
            monitoring_infos.GCS_PROJECT_ID_LABEL: str(DEFAULT_PROJECT_NUMBER),
            monitoring_infos.STATUS_LABEL: 'ok'
        }

        f.close()
        metric_name = MetricName(None,
                                 None,
                                 urn=monitoring_infos.API_REQUEST_COUNT_URN,
                                 labels=labels)
        metric_value = MetricsEnvironment.process_wide_container().get_counter(
            metric_name).get_cumulative()

        self.assertEqual(metric_value, 1)
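
Reading the counter back always takes the same three steps: build a MetricName keyed by the request-count URN and the full label set, fetch the counter from the process-wide container, and take its cumulative value. The helper below is a hypothetical sketch of that; get_request_count is not part of the Beam SDK, and the import paths are assumed.

# Import paths assumed from the Beam SDK layout these snippets come from.
from apache_beam.metrics import monitoring_infos
from apache_beam.metrics.execution import MetricsEnvironment
from apache_beam.metrics.metricbase import MetricName


def get_request_count(labels):
  """Cumulative API request count recorded for the given label set."""
  metric_name = MetricName(
      None, None, urn=monitoring_infos.API_REQUEST_COUNT_URN, labels=labels)
  return MetricsEnvironment.process_wide_container().get_counter(
      metric_name).get_cumulative()

With such a helper, the assertion above would read self.assertEqual(get_request_count(labels), 1).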
Example 3
  def __init__(self, client, path, buffer_size, get_project_number):
    self._client = client
    self._path = path
    self._bucket, self._name = parse_gcs_path(path)
    self._buffer_size = buffer_size
    self._get_project_number = get_project_number

    project_number = self._get_project_number(self._bucket)

    # Create a request count metric
    resource = resource_identifiers.GoogleCloudStorageBucket(self._bucket)
    labels = {
        monitoring_infos.SERVICE_LABEL: 'Storage',
        monitoring_infos.METHOD_LABEL: 'Objects.get',
        monitoring_infos.RESOURCE_LABEL: resource,
        monitoring_infos.GCS_BUCKET_LABEL: self._bucket,
        monitoring_infos.GCS_PROJECT_ID_LABEL: project_number
    }
    service_call_metric = ServiceCallMetric(
        request_count_urn=monitoring_infos.API_REQUEST_COUNT_URN,
        base_labels=labels)

    # Get object state.
    self._get_request = (
        storage.StorageObjectsGetRequest(
            bucket=self._bucket, object=self._name))
    try:
      metadata = self._get_object_metadata(self._get_request)
      service_call_metric.call('ok')
    except HttpError as http_error:
      service_call_metric.call(http_error)
      if http_error.status_code == 404:
        raise IOError(errno.ENOENT, 'Not found: %s' % self._path)
      else:
        _LOGGER.error(
            'HTTP error while requesting file %s: %s', self._path, http_error)
        raise
    self._size = metadata.size

    # Ensure read is from file of the correct generation.
    self._get_request.generation = metadata.generation

    # Initialize read buffer state.
    self._download_stream = io.BytesIO()
    self._downloader = transfer.Download(
        self._download_stream,
        auto_transfer=False,
        chunksize=self._buffer_size,
        num_retries=20)

    try:
      self._client.objects.Get(self._get_request, download=self._downloader)
      service_call_metric.call('ok')
    except HttpError as e:
      service_call_metric.call(e)
      # Re-raise after recording the failure; otherwise the downloader would
      # be left without data.
      raise
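
The downloader builds the same label dictionary as the uploader in Example 1, differing only in the method name. A hypothetical helper (again not part of the Beam SDK; the resource_identifiers import path is assumed) could remove that duplication:

# Import paths assumed from the Beam SDK layout these snippets come from.
from apache_beam.io.gcp import resource_identifiers
from apache_beam.metrics import monitoring_infos


def _request_count_labels(bucket, method, project_number):
  """Base labels shared by the Objects.insert and Objects.get metrics above."""
  return {
      monitoring_infos.SERVICE_LABEL: 'Storage',
      monitoring_infos.METHOD_LABEL: method,  # e.g. 'Objects.get'
      monitoring_infos.RESOURCE_LABEL:
          resource_identifiers.GoogleCloudStorageBucket(bucket),
      monitoring_infos.GCS_BUCKET_LABEL: bucket,
      monitoring_infos.GCS_PROJECT_ID_LABEL: project_number,
  }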
Example 4
    def test_downloader_fail_to_get_project_number(self, mock_get):
        # Raise an error when looking up the GCS bucket so that the project
        # number fails to be retrieved.
        mock_get.side_effect = HttpError({'status': 403}, None, None)
        # Clear the process wide metric container.
        MetricsEnvironment.process_wide_container().reset()

        file_name = 'gs://gcsio-metrics-test/dummy_mode_file'
        file_size = 5 * 1024 * 1024 + 100
        random_file = self._insert_random_file(self.client, file_name,
                                               file_size)
        self.gcs.open(file_name, 'r')

        resource = resource_identifiers.GoogleCloudStorageBucket(
            random_file.bucket)
        labels = {
            monitoring_infos.SERVICE_LABEL: 'Storage',
            monitoring_infos.METHOD_LABEL: 'Objects.get',
            monitoring_infos.RESOURCE_LABEL: resource,
            monitoring_infos.GCS_BUCKET_LABEL: random_file.bucket,
            monitoring_infos.GCS_PROJECT_ID_LABEL: str(DEFAULT_PROJECT_NUMBER),
            monitoring_infos.STATUS_LABEL: 'ok'
        }

        metric_name = MetricName(None,
                                 None,
                                 urn=monitoring_infos.API_REQUEST_COUNT_URN,
                                 labels=labels)
        metric_value = MetricsEnvironment.process_wide_container().get_counter(
            metric_name).get_cumulative()

        self.assertEqual(metric_value, 0)

        labels_without_project_id = {
            monitoring_infos.SERVICE_LABEL: 'Storage',
            monitoring_infos.METHOD_LABEL: 'Objects.get',
            monitoring_infos.RESOURCE_LABEL: resource,
            monitoring_infos.GCS_BUCKET_LABEL: random_file.bucket,
            monitoring_infos.STATUS_LABEL: 'ok'
        }
        metric_name = MetricName(None,
                                 None,
                                 urn=monitoring_infos.API_REQUEST_COUNT_URN,
                                 labels=labels_without_project_id)
        metric_value = MetricsEnvironment.process_wide_container().get_counter(
            metric_name).get_cumulative()

        self.assertEqual(metric_value, 2)
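
The expected values follow from Example 3: the downloader's __init__ wraps two Objects.get requests (the metadata lookup and the chunked download) with the same metric, and because the project number cannot be resolved both requests are counted under the label set without GCS_PROJECT_ID_LABEL, which is why the first assertion expects 0 and the second expects 2. With the hypothetical get_request_count helper sketched after Example 2, the two assertions at the end of the test would collapse to:

        # Usage sketch only; get_request_count is the hypothetical helper
        # sketched after Example 2, not part of the Beam SDK.
        self.assertEqual(get_request_count(labels), 0)
        self.assertEqual(get_request_count(labels_without_project_id), 2)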