def setUpModule():
    """Create the shared Bigtable client and test instance for the module.

    In emulator mode, builds a client with placeholder credentials and
    skips listing/creating real instances.  Against the real service,
    lists existing instances (retrying transient UNAVAILABLE errors) and
    then creates the test instance.

    :raises ValueError: if listing instances reports failed locations.
    :raises RuntimeError: if instance creation does not complete in time.
    """
    from google.cloud.exceptions import GrpcRendezvous

    Config.IN_EMULATOR = os.getenv(BIGTABLE_EMULATOR) is not None
    if Config.IN_EMULATOR:
        # The emulator performs no auth; use placeholder credentials.
        credentials = EmulatorCreds()
        Config.CLIENT = Client(admin=True, credentials=credentials)
    else:
        Config.CLIENT = Client(admin=True)
    Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID, LOCATION_ID)
    if not Config.IN_EMULATOR:
        retry = RetryErrors(GrpcRendezvous,
                            error_predicate=_retry_on_unavailable)
        instances, failed_locations = retry(Config.CLIENT.list_instances)()
        if failed_locations:
            raise ValueError('List instances failed in module set up.')
        EXISTING_INSTANCES[:] = instances
        # After listing, create the test instance.
        created_op = Config.INSTANCE.create()
        if not _wait_until_complete(created_op):
            # Fixed grammar: 'exceed' -> 'exceeded'.
            raise RuntimeError('Instance creation exceeded 5 seconds.')
def setUpModule():
    """Create the shared Spanner client, config, and test instance."""
    from grpc._channel import _Rendezvous

    Config.CLIENT = Client()
    unavailable_retry = RetryErrors(
        _Rendezvous, error_predicate=_retry_on_unavailable)

    configs = list(unavailable_retry(Config.CLIENT.list_instance_configs)())
    if not configs:
        raise ValueError('List instance configs failed in module set up.')
    Config.INSTANCE_CONFIG = configs[0]
    config_name = configs[0].name

    def _list_instances():
        return list(Config.CLIENT.list_instances())

    EXISTING_INSTANCES[:] = unavailable_retry(_list_instances)()

    if CREATE_INSTANCE:
        Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID, config_name)
        created_op = Config.INSTANCE.create()
        created_op.result(30)  # block until completion
    else:
        # Reuse an existing instance rather than creating one.
        Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID)
        Config.INSTANCE.reload()
def _query_timeseries_with_retries():
    """Wrap ``client.query`` to retry empty results and BadRequest errors."""
    def _has_timeseries(result):
        # Results are lazy; materialize them to see if anything came back.
        return bool(list(result))

    result_retry = RetryResult(_has_timeseries, max_tries=7)(client.query)
    return RetryErrors(BadRequest)(result_retry)
def setUpModule():
    """Create the shared Vision client plus a fresh GCS bucket for tests."""
    Config.CLIENT = vision.Client()
    bucket_name = 'new' + unique_resource_id()
    Config.TEST_BUCKET = storage.Client().bucket(bucket_name)
    # Retry creation on 429 Too Many Requests (API rate limiting).
    retry_429 = RetryErrors(exceptions.TooManyRequests)
    retry_429(Config.TEST_BUCKET.create)()
def setUpModule():
    """Create the shared Speech client and a GCS bucket for stored content."""
    Config.CLIENT = speech.Client()
    Config.USE_GAX = Config.CLIENT._use_gax
    # Now create a bucket for GCS stored content.
    bucket_name = 'new' + unique_resource_id()
    Config.TEST_BUCKET = storage.Client().bucket(bucket_name)
    # Retry creation on 429 Too Many Requests (API rate limiting).
    RetryErrors(exceptions.TooManyRequests)(Config.TEST_BUCKET.create)()
def tearDown(self):
    """Delete resources queued in ``self.to_delete``, retrying transients."""
    from google.cloud.bigquery.dataset import Dataset
    from google.cloud.storage import Bucket
    from google.cloud.exceptions import BadRequest
    from google.cloud.exceptions import Conflict

    def _still_in_use(bad_request):
        # Only retry 400s whose reason says the resource is busy.
        errors = bad_request._errors
        return any(error['reason'] == 'resourceInUse' for error in errors)

    retry_in_use = RetryErrors(BadRequest, error_predicate=_still_in_use)
    retry_409 = RetryErrors(Conflict)
    for item in self.to_delete:
        if isinstance(item, Bucket):
            retry_409(item.delete)(force=True)
        elif isinstance(item, Dataset):
            retry_in_use(item.delete)()
        else:
            item.delete()
def test_reload_sink(self):
    """Reload must restore server-side sink state over local edits."""
    SINK_NAME = 'test-reload-sink%s' % (_RESOURCE_ID, )
    uri = self._init_bigquery_dataset()
    sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, uri)
    self.assertFalse(sink.exists())
    RetryErrors(Conflict)(sink.create)()
    self.to_delete.append(sink)
    # Clobber local state, then reload from the server.
    sink.filter_ = 'BOGUS FILTER'
    sink.destination = 'BOGUS DESTINATION'
    sink.reload()
    self.assertEqual(sink.filter_, DEFAULT_FILTER)
    self.assertEqual(sink.destination, uri)
def test_reload_metric(self):
    """Reload must restore server-side metric state over local edits."""
    METRIC_NAME = 'test-reload-metric%s' % (_RESOURCE_ID, )
    metric = Config.CLIENT.metric(
        METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION)
    self.assertFalse(metric.exists())
    RetryErrors(Conflict)(metric.create)()
    self.to_delete.append(metric)
    # Clobber local state, then reload from the server.
    metric.filter_ = 'logName:other'
    metric.description = 'local changes'
    metric.reload()
    self.assertEqual(metric.filter_, DEFAULT_FILTER)
    self.assertEqual(metric.description, DEFAULT_DESCRIPTION)
def test_reload_sink(self):
    """Reload must restore server-side sink state over local edits."""
    from gcloud.exceptions import Conflict

    uri = self._init_bigquery_dataset()
    sink = Config.CLIENT.sink(DEFAULT_SINK_NAME, DEFAULT_FILTER, uri)
    self.assertFalse(sink.exists())
    RetryErrors(Conflict)(sink.create)()
    self.to_delete.append(sink)
    # Clobber local state, then reload from the server.
    sink.filter_ = 'BOGUS FILTER'
    sink.destination = 'BOGUS DESTINATION'
    sink.reload()
    self.assertEqual(sink.filter_, DEFAULT_FILTER)
    self.assertEqual(sink.destination, uri)
def test_reload_metric(self):
    """Reload must restore server-side metric state over local edits."""
    from gcloud.exceptions import Conflict

    metric = Config.CLIENT.metric(
        DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION)
    self.assertFalse(metric.exists())
    RetryErrors(Conflict)(metric.create)()
    self.to_delete.append(metric)
    # Clobber local state, then reload from the server.
    metric.filter_ = 'logName:other'
    metric.description = 'local changes'
    metric.reload()
    self.assertEqual(metric.filter_, DEFAULT_FILTER)
    self.assertEqual(metric.description, DEFAULT_DESCRIPTION)
def test_update_sink(self):
    """Update must push local sink changes to the server."""
    SINK_NAME = 'test-update-sink%s' % (_RESOURCE_ID, )
    UPDATED_FILTER = 'logName:syslog'
    bucket_uri = self._init_storage_bucket()
    dataset_uri = self._init_bigquery_dataset()
    sink = Config.CLIENT.sink(SINK_NAME, DEFAULT_FILTER, bucket_uri)
    self.assertFalse(sink.exists())
    RetryErrors(Conflict)(sink.create)()
    self.to_delete.append(sink)
    # Re-point the sink and push the change to the server.
    sink.filter_ = UPDATED_FILTER
    sink.destination = dataset_uri
    sink.update()
    self.assertEqual(sink.filter_, UPDATED_FILTER)
    self.assertEqual(sink.destination, dataset_uri)
def _list_entries(logger):
    """Retry-ing list entries in a logger.

    Retry until there are actual results and retry on any failures.

    :type logger: :class:`~google.cloud.logging.logger.Logger`
    :param logger: A Logger containing entries.

    :rtype: list
    :returns: List of all entries consumed.
    """
    wrapped = RetryResult(_has_entries)(_consume_entries)
    wrapped = RetryErrors(GaxError, _retry_on_unavailable)(wrapped)
    return wrapped(logger)
def setUpModule():
    """Create the shared Bigtable client and test instance for the module.

    Lists existing instances (retrying transient UNAVAILABLE errors)
    before creating the test instance.

    :raises ValueError: if listing instances reports failed locations.
    :raises RuntimeError: if instance creation does not complete in time.
    """
    from grpc._channel import _Rendezvous

    _helpers.PROJECT = TESTS_PROJECT
    Config.CLIENT = Client(admin=True)
    Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID, LOCATION_ID)
    retry = RetryErrors(_Rendezvous, error_predicate=_retry_on_unavailable)
    instances, failed_locations = retry(Config.CLIENT.list_instances)()
    if failed_locations:
        raise ValueError('List instances failed in module set up.')
    EXISTING_INSTANCES[:] = instances
    # After listing, create the test instance.
    created_op = Config.INSTANCE.create()
    if not _wait_until_complete(created_op):
        # Fixed grammar: 'exceed' -> 'exceeded'.
        raise RuntimeError('Instance creation exceeded 5 seconds.')
def test_update_metric(self):
    """Update must push local metric changes to the server."""
    from gcloud.exceptions import Conflict

    NEW_FILTER = 'logName:other'
    NEW_DESCRIPTION = 'updated'
    metric = Config.CLIENT.metric(
        DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION)
    self.assertFalse(metric.exists())
    RetryErrors(Conflict)(metric.create)()
    self.to_delete.append(metric)
    metric.filter_ = NEW_FILTER
    metric.description = NEW_DESCRIPTION
    metric.update()
    # Re-list metrics and confirm the server saw the update.
    after_metrics, _ = Config.CLIENT.list_metrics()
    after_info = {m.name: m for m in after_metrics}
    after = after_info[DEFAULT_METRIC_NAME]
    self.assertEqual(after.filter_, NEW_FILTER)
    self.assertEqual(after.description, NEW_DESCRIPTION)
def test_update_metric(self):
    """Update must push local metric changes to the server."""
    METRIC_NAME = 'test-update-metric%s' % (_RESOURCE_ID, )
    NEW_FILTER = 'logName:other'
    NEW_DESCRIPTION = 'updated'
    metric = Config.CLIENT.metric(
        METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION)
    self.assertFalse(metric.exists())
    RetryErrors(Conflict)(metric.create)()
    self.to_delete.append(metric)
    metric.filter_ = NEW_FILTER
    metric.description = NEW_DESCRIPTION
    metric.update()
    # Re-list metrics and confirm the server saw the update.
    after_info = {m.name: m for m in Config.CLIENT.list_metrics()}
    after = after_info[METRIC_NAME]
    self.assertEqual(after.filter_, NEW_FILTER)
    self.assertEqual(after.description, NEW_DESCRIPTION)
def _list_entries(self, logger):
    """List a logger's entries, retrying empty results and GAX errors."""
    from google.gax.errors import GaxError

    retry_empty = RetryResult(_has_entries)(logger.list_entries)
    retry_errors = RetryErrors(GaxError, _retry_on_unavailable)(retry_empty)
    return retry_errors()
# See the License for the specific language governing permissions and # limitations under the License. import unittest from gcloud import _helpers from gcloud.environment_vars import TESTS_PROJECT from gcloud.exceptions import InternalServerError from gcloud.exceptions import NotFound from gcloud.exceptions import ServiceUnavailable from gcloud import monitoring from retry import RetryErrors from system_test_utils import unique_resource_id retry_404 = RetryErrors(NotFound) retry_404_500 = RetryErrors((NotFound, InternalServerError)) retry_500 = RetryErrors(InternalServerError) retry_503 = RetryErrors(ServiceUnavailable) def setUpModule(): _helpers.PROJECT = TESTS_PROJECT class TestMonitoring(unittest.TestCase): def test_fetch_metric_descriptor(self): METRIC_TYPE = ( 'pubsub.googleapis.com/topic/send_message_operation_count') METRIC_KIND = monitoring.MetricKind.DELTA VALUE_TYPE = monitoring.ValueType.INT64
def tearDown(self):
    """Delete queued resources and restore the root logger's handlers."""
    delete_retry = RetryErrors(NotFound)
    for resource in self.to_delete:
        delete_retry(resource.delete)()
    logging.getLogger().handlers = self._handlers_cache[:]
def tearDownModule():
    """Force-delete the module test bucket, retrying on 409 Conflict."""
    RetryErrors(exceptions.Conflict)(Config.TEST_BUCKET.delete)(force=True)
from google.cloud.environment_vars import PUBSUB_EMULATOR
from google.cloud.pubsub import client

from retry import RetryInstanceState
from retry import RetryResult
from retry import RetryErrors
from system_test_utils import EmulatorCreds
from system_test_utils import unique_resource_id


def _unavailable(exc):
    # Predicate: retry only errors whose code is gRPC UNAVAILABLE.
    return exc_to_code(exc) == StatusCode.UNAVAILABLE

retry_unavailable = RetryErrors(GaxError, _unavailable)


class Config(object):
    """Run-time configuration to be modified at set-up.

    This is a mutable stand-in to allow test set-up to modify
    global state.
    """
    CLIENT = None
    IN_EMULATOR = False


def setUpModule():
    # Detect whether the tests target the Pub/Sub emulator.
    Config.IN_EMULATOR = os.getenv(PUBSUB_EMULATOR) is not None
    if Config.IN_EMULATOR:
def tearDown(self):
    """Delete queued resources and restore the root logger's handlers."""
    from gcloud.exceptions import NotFound

    delete_retry = RetryErrors(NotFound)
    for resource in self.to_delete:
        delete_retry(resource.delete)()
    logging.getLogger().handlers = self._handlers_cache[:]
from google.cloud.exceptions import GrpcRendezvous
from google.cloud.pubsub import client
# pylint: enable=ungrouped-imports

from retry import RetryInstanceState
from retry import RetryResult
from retry import RetryErrors
from system_test_utils import EmulatorCreds
from system_test_utils import unique_resource_id


def _unavailable(exc):
    # Predicate: retry only errors whose code is gRPC UNAVAILABLE.
    return exc.code() == StatusCode.UNAVAILABLE

retry_unavailable = RetryErrors(GrpcRendezvous, _unavailable)


class Config(object):
    """Run-time configuration to be modified at set-up.

    This is a mutable stand-in to allow test set-up to modify
    global state.
    """
    CLIENT = None


def setUpModule():
    # Use a real client unless the emulator env var is set.
    if os.getenv(PUBSUB_EMULATOR) is None:
        Config.CLIENT = client.Client()
    else:
def _list_entries(self, logger):
    """List a logger's entries, retrying empties and UNAVAILABLE errors."""
    from grpc._channel import _Rendezvous

    retry_empty = RetryResult(_has_entries)(logger.list_entries)
    retry_errors = RetryErrors(
        _Rendezvous, _retry_on_unavailable)(retry_empty)
    return retry_errors()
from google.cloud.environment_vars import TESTS_PROJECT
from google.cloud.pubsub import client
# pylint: enable=ungrouped-imports

from retry import RetryInstanceState
from retry import RetryResult
from retry import RetryErrors
from system_test_utils import EmulatorCreds
from system_test_utils import unique_resource_id


def _unavailable(exc):
    # Predicate: retry only errors whose code is gRPC UNAVAILABLE.
    return _helpers.exc_to_code(exc) == StatusCode.UNAVAILABLE

# Retries both GAX-wrapped and raw gRPC transport errors.
retry_unavailable = RetryErrors((GaxError, _Rendezvous), _unavailable)


class Config(object):
    """Run-time configuration to be modified at set-up.

    This is a mutable stand-in to allow test set-up to modify
    global state.
    """
    CLIENT = None


def setUpModule():
    _helpers.PROJECT = TESTS_PROJECT
    # Use a real client unless the emulator env var is set.
    if os.getenv(PUBSUB_EMULATOR) is None:
        Config.CLIENT = client.Client()
def _make_dataset_name(prefix):
    # Append a unique suffix so concurrent test runs do not collide.
    return '%s%s' % (prefix, unique_resource_id())


def _rate_limit_exceeded(forbidden):
    """Predicate: pass only exceptions with 'rateLimitExceeded' as reason."""
    return any(error['reason'] == 'rateLimitExceeded'
               for error in forbidden._errors)


# We need to wait to stay within the rate limits.
# The alternative outcome is a 403 Forbidden response from upstream, which
# they return instead of the more appropriate 429.
# See: https://cloud.google.com/bigquery/quota-policy
retry_403 = RetryErrors(Forbidden, error_predicate=_rate_limit_exceeded)


class Config(object):
    """Run-time configuration to be modified at set-up.

    This is a mutable stand-in to allow test set-up to modify
    global state.
    """
    CLIENT = None


def setUpModule():
    # Create the single BigQuery client shared by the module's tests.
    Config.CLIENT = bigquery.Client()
import unittest

import httplib2
import six

from gcloud import _helpers
from gcloud.environment_vars import TESTS_PROJECT
from gcloud import exceptions
from gcloud import storage
from gcloud.storage._helpers import _base64_md5hash
from system_test_utils import unique_resource_id
from retry import RetryErrors
from retry import RetryResult

# Retry API calls rejected with 429 Too Many Requests (rate limiting).
retry_429 = RetryErrors(exceptions.TooManyRequests)

# Shared HTTP transport for direct requests in the tests.
HTTP = httplib2.Http()

_helpers.PROJECT = TESTS_PROJECT


class Config(object):
    """Run-time configuration to be modified at set-up.

    This is a mutable stand-in to allow test set-up to modify
    global state.
    """
    CLIENT = None
    TEST_BUCKET = None


def setUpModule():
    :type operation: :class:`google.cloud.operation.Operation`
    :param operation: Operation that has not completed.

    :type max_attempts: int
    :param max_attempts: (Optional) The maximum number of times to check if
                         the operation has completed. Defaults to 5.

    :rtype: bool
    :returns: Boolean indicating if the operation is complete.
    """
    retry = RetryResult(_operation_complete, max_tries=max_attempts)
    return retry(operation.poll)()


# Retry API calls rejected with 429 Too Many Requests (rate limiting).
retry_429 = RetryErrors(TooManyRequests)


def set_connection():
    # Create a Bigtable instance with a single cluster, wait for the
    # long-running create to finish, then share a Connection to it.
    client = client_mod.Client(admin=True)
    instance = client.instance(INSTANCE_ID, labels=LABELS)
    cluster = instance.cluster(
        CLUSTER_ID,
        location_id=LOCATION_ID,
        serve_nodes=SERVER_NODES
    )
    operation = instance.create(clusters=[cluster])
    operation.result(10)
    Config.CONNECTION = Connection(instance=instance)


def setUpModule():
    set_connection()
from google.cloud.storage._helpers import _base64_md5hash
from system_test_utils import unique_resource_id
from retry import RetryErrors

# Shared HTTP transport for direct requests in the tests.
HTTP = httplib2.Http()


def _bad_copy(bad_request):
    """Predicate: pass only exceptions for a failed copyTo."""
    err_msg = bad_request.message
    return (err_msg.startswith('No file found in request. (POST') and
            'copyTo' in err_msg)

# Retry API calls rejected with 429 Too Many Requests (rate limiting),
# and 400s that match the failed-copyTo signature above.
retry_429 = RetryErrors(exceptions.TooManyRequests)
retry_bad_copy = RetryErrors(exceptions.BadRequest,
                             error_predicate=_bad_copy)


def _empty_bucket(bucket):
    """Empty a bucket of all existing blobs.

    This accounts (partially) for the eventual consistency of the
    list blobs API call.
    """
    for blob in bucket.list_blobs():
        try:
            blob.delete()
        except exceptions.NotFound:  # eventual consistency
            pass
def tearDownModule():
    """Force-delete the shared test bucket.

    Retries 409 Conflict (bucket still holds objects) and 429 Too Many
    Requests (API rate limiting).
    """
    retriable = (exceptions.TooManyRequests, exceptions.Conflict)
    RetryErrors(retriable)(Config.TEST_BUCKET.delete)(force=True)
def _list_entries(self, logger):
    """List a logger's entries, retrying empties and UNAVAILABLE errors."""
    from google.cloud.exceptions import GrpcRendezvous

    retry_empty = RetryResult(_has_entries)(logger.list_entries)
    retry_errors = RetryErrors(
        GrpcRendezvous, _retry_on_unavailable)(retry_empty)
    return retry_errors()