def test_service_account_via_json_key(self):
    """Loading a PEM crypto key from JSON-key service-account creds."""
    from oauth2client import service_account
    from gcloud._testing import _Monkey
    from gcloud import credentials as MUT

    scopes = []
    PRIVATE_TEXT = 'dummy_private_key_pkcs8_text'

    def _get_private_key(private_key_pkcs8_text):
        # Identity stub: hand the PKCS8 text straight back.
        return private_key_pkcs8_text

    with _Monkey(service_account, _get_private_key=_get_private_key):
        credentials = service_account._ServiceAccountCredentials(
            'dummy_service_account_id', 'dummy_service_account_email',
            'dummy_private_key_id', PRIVATE_TEXT, scopes)

    load_result = object()
    openssl_crypto = _OpenSSLCrypto(load_result, None)

    with _Monkey(MUT, crypto=openssl_crypto):
        result = self._callFUT(credentials)

    self.assertEqual(result, load_result)
    # The PEM text must have been loaded exactly once, and never signed.
    self.assertEqual(openssl_crypto._loaded,
                     [(openssl_crypto.FILETYPE_PEM, PRIVATE_TEXT)])
    self.assertEqual(openssl_crypto._signed, [])
def test_ctor_w_implicit_inputs(self):
    """Constructing with no args falls back to inferred project/creds."""
    from gcloud._testing import _Monkey
    from gcloud.datastore import client as _MUT
    from gcloud import client as _base_client

    OTHER = "other"
    creds = object()
    default_called = []

    def fallback_mock(project):
        default_called.append(project)
        return project or OTHER

    klass = self._getTargetClass()
    with _Monkey(_MUT, _determine_default_project=fallback_mock):
        with _Monkey(_base_client, get_credentials=lambda: creds):
            client = klass()

    self.assertEqual(client.project, OTHER)
    self.assertEqual(client.namespace, None)
    self.assertTrue(isinstance(client.connection, _MockConnection))
    self.assertTrue(client.connection.credentials is creds)
    self.assertTrue(client.connection.http is None)
    self.assertTrue(client.current_batch is None)
    self.assertTrue(client.current_transaction is None)
    # The fallback must have been consulted with no explicit project.
    self.assertEqual(default_called, [None])
def test_set_implicit_with_implicit_project(self):
    """Implicit PROJECT is used to build the default CONNECTION."""
    from gcloud._testing import _Monkey
    from gcloud import storage
    from gcloud.storage import _implicit_environ

    self.assertEqual(_implicit_environ.CONNECTION, None)

    fake_cnxn = object()
    _called_args = []
    _called_kwargs = []

    def mock_get_connection(*args, **kwargs):
        _called_args.append(args)
        _called_kwargs.append(kwargs)
        return fake_cnxn

    PROJECT = 'project'
    with _Monkey(_implicit_environ, PROJECT=PROJECT):
        with _Monkey(storage, get_connection=mock_get_connection):
            self._callFUT()

    self.assertEqual(_implicit_environ.CONNECTION, fake_cnxn)
    self.assertEqual(_called_args, [(PROJECT,)])
    self.assertEqual(_called_kwargs, [{}])
def test_constructor_ignores_autoconnect(self):
    """A pool must suppress autoconnect even when asked for it."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable.happybase.connection import Connection
    from gcloud.bigtable.happybase import pool as MUT

    class ConnectionWithOpen(Connection):

        _open_called = False

        def open(self):
            self._open_called = True

    # First make sure the custom Connection class does as expected.
    cluster_copy1 = _Cluster()
    cluster_copy2 = _Cluster()
    cluster_copy3 = _Cluster()
    cluster = _Cluster(
        copies=[cluster_copy1, cluster_copy2, cluster_copy3])

    connection = ConnectionWithOpen(autoconnect=False, cluster=cluster)
    self.assertFalse(connection._open_called)
    self.assertTrue(connection._cluster is cluster_copy1)

    connection = ConnectionWithOpen(autoconnect=True, cluster=cluster)
    self.assertTrue(connection._open_called)
    self.assertTrue(connection._cluster is cluster_copy2)

    # Then make sure autoconnect=True is ignored in a pool.
    size = 1
    with _Monkey(MUT, Connection=ConnectionWithOpen):
        pool = self._makeOne(size, autoconnect=True, cluster=cluster)

    for connection in pool._queue.queue:
        self.assertTrue(isinstance(connection, ConnectionWithOpen))
        self.assertTrue(connection._cluster is cluster_copy3)
        self.assertFalse(connection._open_called)
def test_it(self):
    """Signed-query generation combines name, expiry and signature."""
    import base64
    from gcloud._testing import _Monkey
    from gcloud import credentials as MUT

    _called_get_sig = []
    SIG_BYTES = b'DEADBEEF'

    def mock_get_sig_bytes(creds, string_to_sign):
        _called_get_sig.append((creds, string_to_sign))
        return SIG_BYTES

    _called_get_name = []
    ACCOUNT_NAME = object()

    def mock_get_name(creds):
        _called_get_name.append((creds,))
        return ACCOUNT_NAME

    CREDENTIALS = object()
    EXPIRATION = 100
    STRING_TO_SIGN = 'dummy_signature'
    with _Monkey(MUT, _get_signature_bytes=mock_get_sig_bytes,
                 _get_service_account_name=mock_get_name):
        result = self._callFUT(CREDENTIALS, EXPIRATION, STRING_TO_SIGN)

    self.assertEqual(result, {
        'GoogleAccessId': ACCOUNT_NAME,
        'Expires': str(EXPIRATION),
        'Signature': base64.b64encode(b'DEADBEEF'),
    })
    self.assertEqual(_called_get_sig, [(CREDENTIALS, STRING_TO_SIGN)])
    self.assertEqual(_called_get_name, [(CREDENTIALS,)])
def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self):
    """Publishing via an alternate client stamps the message with NOW."""
    import base64
    import datetime
    from gcloud.pubsub import topic as MUT
    from gcloud._helpers import _RFC3339_MICROS
    from gcloud._testing import _Monkey

    NOW = datetime.datetime.utcnow()

    def _utcnow():
        # Freeze the clock so the timestamp attribute is predictable.
        return NOW

    PAYLOAD = b'This is the message text'
    B64 = base64.b64encode(PAYLOAD).decode('ascii')
    MSGID = 'DEADBEEF'
    MESSAGE = {
        'data': B64,
        'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)},
    }
    client1 = _Client(project=self.PROJECT)
    client2 = _Client(project=self.PROJECT)
    api = client2.publisher_api = _FauxPublisherAPI()
    api._topic_publish_response = [MSGID]
    topic = self._makeOne(self.TOPIC_NAME, client=client1,
                          timestamp_messages=True)

    with _Monkey(MUT, _NOW=_utcnow):
        msgid = topic.publish(PAYLOAD, client=client2)

    self.assertEqual(msgid, MSGID)
    self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE]))
def _run_with_fake_crypto(self, credentials, private_key_text,
                          string_to_sign):
    """Drive _callFUT with stubbed crypto modules and verify the plumbing."""
    import base64
    import six
    from gcloud._testing import _Monkey
    from gcloud import credentials as MUT

    crypt = _Crypt()
    pkcs_v1_5 = _PKCS1_v1_5()
    rsa = _RSA()
    sha256 = _SHA256()

    with _Monkey(MUT, crypt=crypt, RSA=rsa, PKCS1_v1_5=pkcs_v1_5,
                 SHA256=sha256):
        result = self._callFUT(credentials, string_to_sign)

    if crypt._pkcs12_key_as_pem_called:
        self.assertEqual(crypt._private_key_text,
                         base64.b64encode(private_key_text))
        self.assertEqual(crypt._private_key_password, 'notasecret')
    # sha256._string_to_sign is always bytes.
    if isinstance(string_to_sign, six.binary_type):
        self.assertEqual(sha256._string_to_sign, string_to_sign)
    else:
        self.assertEqual(sha256._string_to_sign,
                         string_to_sign.encode('utf-8'))
    self.assertEqual(result, b'DEADBEEF')
def test__make_table_stub(self):
    """_make_table_stub forwards the table-admin host/port to make_stub."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable import client as MUT
    from gcloud.bigtable.client import TABLE_ADMIN_HOST
    from gcloud.bigtable.client import TABLE_ADMIN_PORT
    from gcloud.bigtable.client import TABLE_STUB_FACTORY

    credentials = _Credentials()
    project = 'PROJECT'
    client = self._makeOne(project=project, credentials=credentials)

    fake_stub = object()
    make_stub_args = []

    def mock_make_stub(*args):
        make_stub_args.append(args)
        return fake_stub

    with _Monkey(MUT, make_stub=mock_make_stub):
        result = client._make_table_stub()

    self.assertTrue(result is fake_stub)
    self.assertEqual(make_stub_args, [
        (
            client,
            TABLE_STUB_FACTORY,
            TABLE_ADMIN_HOST,
            TABLE_ADMIN_PORT,
        ),
    ])
def test_constructor_infers_cluster(self):
    """With no cluster given, the pool discovers one via _get_cluster."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable.happybase.connection import Connection
    from gcloud.bigtable.happybase import pool as MUT

    size = 1
    cluster_copy = _Cluster()
    all_copies = [cluster_copy] * size
    cluster = _Cluster(copies=all_copies)
    get_cluster_calls = []

    def mock_get_cluster(timeout=None):
        get_cluster_calls.append(timeout)
        return cluster

    with _Monkey(MUT, _get_cluster=mock_get_cluster):
        pool = self._makeOne(size)

    for connection in pool._queue.queue:
        self.assertTrue(isinstance(connection, Connection))
        # We know that the Connection() constructor will
        # call cluster.copy().
        self.assertTrue(connection._cluster is cluster_copy)

    self.assertEqual(get_cluster_calls, [None])
def test_delete_bad_wal(self):
    """A non-sentinel ``wal`` triggers the WAL warning before deleting."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable.happybase import batch as MUT

    warned = []

    def mock_warn(message):
        warned.append(message)
        # Raise an exception so we don't have to mock the entire
        # environment needed for delete().
        raise RuntimeError('No need to execute the rest.')

    table = object()
    batch = self._makeOne(table)

    row = 'row-key'
    columns = []
    wal = None
    self.assertNotEqual(wal, MUT._WAL_SENTINEL)

    with _Monkey(MUT, _WARN=mock_warn):
        with self.assertRaises(RuntimeError):
            batch.delete(row, columns=columns, wal=wal)

    self.assertEqual(warned, [MUT._WAL_WARNING])
def _start_method_helper(self, admin):
    """Shared driver for Client.start() with and without admin stubs."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable import client as MUT

    credentials = _Credentials()
    project = 'PROJECT'
    client = self._makeOne(project=project, credentials=credentials,
                           admin=admin)

    stub = _FakeStub()
    make_stub_args = []

    def mock_make_stub(*args):
        make_stub_args.append(args)
        return stub

    with _Monkey(MUT, make_stub=mock_make_stub):
        client.start()

    self.assertTrue(client._data_stub_internal is stub)
    if admin:
        # Admin mode opens the three extra stubs as well.
        self.assertTrue(client._cluster_stub_internal is stub)
        self.assertTrue(client._operations_stub_internal is stub)
        self.assertTrue(client._table_stub_internal is stub)
        self.assertEqual(stub._entered, 4)
        self.assertEqual(len(make_stub_args), 4)
    else:
        self.assertTrue(client._cluster_stub_internal is None)
        self.assertTrue(client._operations_stub_internal is None)
        self.assertTrue(client._table_stub_internal is None)
        self.assertEqual(stub._entered, 1)
        self.assertEqual(len(make_stub_args), 1)
    self.assertEqual(stub._exited, [])
def _determine_default_helper(self, prod=None, gcd=None, gae=None,
                              gce=None, dataset_id=None):
    """Patch the four dataset-id fallbacks and record which were tried."""
    from gcloud._testing import _Monkey
    from gcloud.datastore import client

    _callers = []

    def prod_mock():
        _callers.append('prod_mock')
        return prod

    def gcd_mock():
        _callers.append('gcd_mock')
        return gcd

    def gae_mock():
        _callers.append('gae_mock')
        return gae

    def gce_mock():
        _callers.append('gce_mock')
        return gce

    patched_methods = {
        '_get_production_dataset_id': prod_mock,
        '_get_gcd_dataset_id': gcd_mock,
        '_app_engine_id': gae_mock,
        '_compute_engine_id': gce_mock,
    }

    with _Monkey(client, **patched_methods):
        returned_dataset_id = self._callFUT(dataset_id)

    return returned_dataset_id, _callers
def test_publish_single_bytes_wo_attrs_w_add_timestamp(self):
    """publish() adds an RFC3339 timestamp attribute when enabled."""
    import base64
    import datetime
    from gcloud.pubsub import topic as MUT
    from gcloud._helpers import _RFC3339_MICROS
    from gcloud._testing import _Monkey

    NOW = datetime.datetime.utcnow()

    def _utcnow():
        # Freeze the clock so the timestamp attribute is predictable.
        return NOW

    TOPIC_NAME = 'topic_name'
    PROJECT = 'PROJECT'
    PAYLOAD = b'This is the message text'
    B64 = base64.b64encode(PAYLOAD).decode('ascii')
    MSGID = 'DEADBEEF'
    MESSAGE = {'data': B64,
               'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)}}
    PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)
    conn = _Connection({'messageIds': [MSGID]})
    topic = self._makeOne(TOPIC_NAME, project=PROJECT,
                          connection=conn, timestamp_messages=True)

    with _Monkey(MUT, _NOW=_utcnow):
        msgid = topic.publish(PAYLOAD)

    self.assertEqual(msgid, MSGID)
    self.assertEqual(len(conn._requested), 1)
    req = conn._requested[0]
    self.assertEqual(req['method'], 'POST')
    self.assertEqual(req['path'], '/%s:publish' % PATH)
    self.assertEqual(req['data'], {'messages': [MESSAGE]})
def test_create(self):
    """Cluster.create() issues CreateCluster and wraps the operation."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable._generated import operations_pb2
    from gcloud.bigtable._testing import _FakeStub
    from gcloud.bigtable import cluster as MUT

    project = 'PROJECT'
    zone = 'zone'
    cluster_id = 'cluster-id'
    timeout_seconds = 578

    client = _Client(project, timeout_seconds=timeout_seconds)
    cluster = self._makeOne(zone, cluster_id, client)

    # Create request_pb. Just a mock since we monkey patch
    # _prepare_create_request
    request_pb = object()

    # Create response_pb
    op_id = 5678
    op_begin = object()
    op_name = ('operations/projects/%s/zones/%s/clusters/%s/'
               'operations/%d' % (project, zone, cluster_id, op_id))
    current_op = operations_pb2.Operation(name=op_name)
    response_pb = data_pb2.Cluster(current_operation=current_op)

    # Patch the stub used by the API method.
    client._cluster_stub = stub = _FakeStub(response_pb)

    # Create expected_result.
    expected_result = MUT.Operation('create', op_id, op_begin)

    # Create the mocks.
    prep_create_called = []

    def mock_prep_create_req(cluster):
        prep_create_called.append(cluster)
        return request_pb

    process_operation_called = []

    def mock_process_operation(operation_pb):
        process_operation_called.append(operation_pb)
        return op_id, op_begin

    # Perform the method and check the result.
    with _Monkey(MUT,
                 _prepare_create_request=mock_prep_create_req,
                 _process_operation=mock_process_operation):
        result = cluster.create()

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls, [(
        'CreateCluster',
        (request_pb, timeout_seconds),
        {},
    )])
    self.assertEqual(prep_create_called, [cluster])
    self.assertEqual(process_operation_called, [current_op])
def test_generate_signed_url_w_method_arg(self):
    """The HTTP method argument is threaded through to the signer."""
    from gcloud._testing import _Monkey
    from gcloud.storage import blob as MUT

    BLOB_NAME = 'blob-name'
    EXPIRATION = '2014-10-16T20:34:37.000Z'
    connection = _Connection()
    client = _Client(connection)
    bucket = _Bucket(client)
    blob = self._makeOne(BLOB_NAME, bucket=bucket)
    URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF'
           '&Expiration=2014-10-16T20:34:37.000Z')

    SIGNER = _Signer()
    with _Monkey(MUT, generate_signed_url=SIGNER):
        signed_uri = blob.generate_signed_url(EXPIRATION, method='POST')

    self.assertEqual(signed_uri, URI)

    PATH = '/name/%s' % (BLOB_NAME,)
    EXPECTED_ARGS = (_Connection.credentials,)
    EXPECTED_KWARGS = {
        'api_access_endpoint': 'https://storage.googleapis.com',
        'expiration': EXPIRATION,
        'method': 'POST',
        'resource': PATH,
    }
    self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)])
def test_subscriber_api_w_gax(self):
    """With _USE_GAX on, subscriber_api builds and caches a GAX wrapper."""
    from gcloud.pubsub import client as MUT
    from gcloud._testing import _Monkey

    wrapped = object()
    _called_with = []

    def _generated_api(*args, **kw):
        _called_with.append((args, kw))
        return wrapped

    class _GaxSubscriberAPI(object):

        def __init__(self, _wrapped):
            self._wrapped = _wrapped

    creds = _Credentials()
    client = self._makeOne(project=self.PROJECT, credentials=creds)

    with _Monkey(MUT, _USE_GAX=True,
                 GeneratedSubscriberAPI=_generated_api,
                 GAXSubscriberAPI=_GaxSubscriberAPI):
        api = client.subscriber_api

    self.assertIsInstance(api, _GaxSubscriberAPI)
    self.assertTrue(api._wrapped is wrapped)
    # API instance is cached
    again = client.subscriber_api
    self.assertTrue(again is api)
def test_no_value(self):
    """With no app_identity module available, the helper returns None."""
    from gcloud._testing import _Monkey
    from gcloud import _helpers

    with _Monkey(_helpers, app_identity=None):
        dataset_id = self._callFUT()

    self.assertEqual(dataset_id, None)
def test_get_multi_max_loops(self):
    """With _MAX_LOOPS forced negative, get_multi returns nothing."""
    from gcloud._testing import _Monkey
    from gcloud.datastore import client as _MUT
    from gcloud.datastore.key import Key

    KIND = "Kind"
    ID = 1234

    # Make a found entity pb to be returned from mock backend.
    entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, "foo", "Foo")

    # Make a connection to return the entity pb.
    creds = object()
    client = self._makeOne(credentials=creds)
    client.connection._add_lookup_result([entity_pb])

    key = Key(KIND, ID, project=self.PROJECT)
    deferred = []
    missing = []
    with _Monkey(_MUT, _MAX_LOOPS=-1):
        result = client.get_multi([key], missing=missing,
                                  deferred=deferred)

    # Make sure we have no results, even though the connection has been
    # set up as in `test_hit` to return a single result.
    self.assertEqual(result, [])
    self.assertEqual(missing, [])
    self.assertEqual(deferred, [])
def test_it(self):
    """Stub construction passes certs and the transformed metadata."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable import _helpers as MUT

    mock_result = object()
    stub_inputs = []

    def mock_stub_factory(host, port, metadata_transformer=None,
                          secure=None, root_certificates=None):
        stub_inputs.append((host, port, metadata_transformer,
                            secure, root_certificates))
        return mock_result

    transformed = object()
    clients = []

    def mock_transformer(client):
        clients.append(client)
        return transformed

    host = 'HOST'
    port = 1025
    certs = 'FOOBAR'
    client = object()
    with _Monkey(MUT, get_certs=lambda: certs,
                 MetadataTransformer=mock_transformer):
        result = self._callFUT(client, mock_stub_factory, host, port)

    self.assertTrue(result is mock_result)
    self.assertEqual(stub_inputs,
                     [(host, port, transformed, True, certs)])
    self.assertEqual(clients, [client])
def test_generate_signed_url_w_slash_in_name(self):
    """Slashes in blob names must be percent-escaped in the resource."""
    from gcloud._testing import _Monkey
    from gcloud.storage import blob as MUT

    BLOB_NAME = 'parent/child'
    EXPIRATION = '2014-10-16T20:34:37.000Z'
    connection = _Connection()
    bucket = _Bucket()
    blob = self._makeOne(BLOB_NAME, bucket=bucket)
    URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF'
           '&Expiration=2014-10-16T20:34:37.000Z')

    SIGNER = _Signer()
    with _Monkey(MUT, generate_signed_url=SIGNER):
        signed_url = blob.generate_signed_url(EXPIRATION,
                                              connection=connection)

    self.assertEqual(signed_url, URI)

    EXPECTED_ARGS = (_Connection.credentials,)
    EXPECTED_KWARGS = {
        'api_access_endpoint': 'https://storage.googleapis.com',
        'expiration': EXPIRATION,
        'method': 'GET',
        'resource': '/name/parent%2Fchild',
    }
    self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)])
def test_upload_from_file_resumable(self):
    """A large-enough upload switches to the resumable protocol."""
    from six.moves.http_client import OK
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    from tempfile import NamedTemporaryFile
    from gcloud._testing import _Monkey
    from apitools.base.py import http_wrapper
    from apitools.base.py import transfer

    BLOB_NAME = 'blob-name'
    UPLOAD_URL = 'http://example.com/upload/name/key'
    DATA = b'ABCDEF'
    loc_response = {'status': OK, 'location': UPLOAD_URL}
    chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
                       'range': 'bytes 0-4'}
    chunk2_response = {'status': OK}
    # Need valid JSON on last response, since resumable.
    connection = _Connection(
        (loc_response, b''),
        (chunk1_response, b''),
        (chunk2_response, b'{}'),
    )
    bucket = _Bucket()
    blob = self._makeOne(BLOB_NAME, bucket=bucket)
    blob._CHUNK_SIZE_MULTIPLE = 1
    blob.chunk_size = 5

    # Set the threshold low enough that we force a resumable upload.
    with _Monkey(transfer, _RESUMABLE_UPLOAD_THRESHOLD=5):
        with NamedTemporaryFile() as fh:
            fh.write(DATA)
            fh.flush()
            blob.upload_from_file(fh, rewind=True,
                                  connection=connection)

    rq = connection.http._requested
    self.assertEqual(len(rq), 3)

    # Request 0: initiate the resumable session.
    self.assertEqual(rq[0]['method'], 'POST')
    uri = rq[0]['uri']
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual(scheme, 'http')
    self.assertEqual(netloc, 'example.com')
    self.assertEqual(path, '/b/name/o')
    self.assertEqual(dict(parse_qsl(qs)),
                     {'uploadType': 'resumable', 'name': BLOB_NAME})
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[0]['headers'].items()])
    self.assertEqual(headers['X-Upload-Content-Length'], '6')
    self.assertEqual(headers['X-Upload-Content-Type'],
                     'application/octet-stream')

    # Request 1: first (incomplete) chunk.
    self.assertEqual(rq[1]['method'], 'PUT')
    self.assertEqual(rq[1]['uri'], UPLOAD_URL)
    self.assertEqual(rq[1]['body'], DATA[:5])
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[1]['headers'].items()])
    self.assertEqual(headers['Content-Range'], 'bytes 0-4/6')

    # Request 2: final chunk.
    self.assertEqual(rq[2]['method'], 'PUT')
    self.assertEqual(rq[2]['uri'], UPLOAD_URL)
    self.assertEqual(rq[2]['body'], DATA[5:])
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[2]['headers'].items()])
    self.assertEqual(headers['Content-Range'], 'bytes 5-5/6')
def test_it_with_stubs(self):
    """Hashing reads 8 KiB blocks until EOF, then base64s the digest."""
    from gcloud._testing import _Monkey
    from gcloud.storage import _helpers as MUT

    class _Buffer(object):

        def __init__(self, return_vals):
            self.return_vals = return_vals
            self._block_sizes = []

        def read(self, block_size):
            self._block_sizes.append(block_size)
            return self.return_vals.pop()

    BASE64 = _Base64()
    DIGEST_VAL = object()
    BYTES_TO_SIGN = b'BYTES_TO_SIGN'
    BUFFER = _Buffer([b'', BYTES_TO_SIGN])
    MD5 = _MD5(DIGEST_VAL)

    with _Monkey(MUT, base64=BASE64, MD5=MD5):
        SIGNED_CONTENT = self._callFUT(BUFFER)

    self.assertEqual(BUFFER._block_sizes, [8192, 8192])
    self.assertTrue(SIGNED_CONTENT is DIGEST_VAL)
    self.assertEqual(BASE64._called_b64encode, [DIGEST_VAL])
    self.assertEqual(MD5._new_called, [None])
    self.assertEqual(MD5.hash_obj.num_digest_calls, 1)
    self.assertEqual(MD5.hash_obj._blocks, [BYTES_TO_SIGN])
def test_ctor_defaults(self):
    """Default ctor resolves the project, then fetches credentials."""
    from gcloud._testing import _Monkey
    from gcloud import client

    PROJECT = 'PROJECT'
    CREDENTIALS = object()
    FUNC_CALLS = []

    def mock_determine_proj(project):
        FUNC_CALLS.append((project, '_determine_default_project'))
        return PROJECT

    def mock_get_credentials():
        FUNC_CALLS.append('get_credentials')
        return CREDENTIALS

    with _Monkey(client, get_credentials=mock_get_credentials,
                 _determine_default_project=mock_determine_proj):
        client_obj = self._makeOne()

    self.assertEqual(client_obj.project, PROJECT)
    self.assertTrue(isinstance(client_obj.connection, _MockConnection))
    self.assertTrue(client_obj.connection.credentials is CREDENTIALS)
    # Project determination must precede the credentials fetch.
    self.assertEqual(
        FUNC_CALLS,
        [(None, '_determine_default_project'), 'get_credentials'])
def _determine_default_helper(self, prod=None, gae=None, gce=None,
                              project=None):
    """Patch the three project fallbacks and record which were tried."""
    from gcloud._testing import _Monkey
    from gcloud import _helpers

    _callers = []

    def prod_mock():
        _callers.append('prod_mock')
        return prod

    def gae_mock():
        _callers.append('gae_mock')
        return gae

    def gce_mock():
        _callers.append('gce_mock')
        return gce

    patched_methods = {
        '_get_production_project': prod_mock,
        '_app_engine_id': gae_mock,
        '_compute_engine_id': gce_mock,
    }

    with _Monkey(_helpers, **patched_methods):
        returned_project = self._callFUT(project)

    return returned_project, _callers
def test__make_operations_stub(self):
    """_make_operations_stub forwards the V2 host/port to _make_stub."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable import client as MUT
    from gcloud.bigtable.client import OPERATIONS_API_HOST_V2
    from gcloud.bigtable.client import OPERATIONS_API_PORT_V2
    from gcloud.bigtable.client import OPERATIONS_STUB_FACTORY_V2

    credentials = _Credentials()
    project = 'PROJECT'
    client = self._makeOne(project=project, credentials=credentials)

    fake_stub = object()
    make_stub_args = []

    def mock_make_stub(*args):
        make_stub_args.append(args)
        return fake_stub

    with _Monkey(MUT, _make_stub=mock_make_stub):
        result = client._make_operations_stub()

    self.assertTrue(result is fake_stub)
    self.assertEqual(make_stub_args, [
        (
            client,
            OPERATIONS_STUB_FACTORY_V2,
            OPERATIONS_API_HOST_V2,
            OPERATIONS_API_PORT_V2,
        ),
    ])
def test_get_multi_max_loops(self):
    """With _MAX_LOOPS forced negative, get_multi returns nothing."""
    from gcloud._testing import _Monkey
    from gcloud.datastore import client as _MUT
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    KIND = 'Kind'
    ID = 1234

    # Make a found entity pb to be returned from mock backend.
    entity_pb = _make_entity_pb(self.DATASET_ID, KIND, ID, 'foo', 'Foo')

    # Make a connection to return the entity pb.
    connection = _Connection(entity_pb)
    client = self._makeOne(connection=connection)

    key = Key(KIND, ID, dataset_id=self.DATASET_ID)
    deferred = []
    missing = []
    with _Monkey(_MUT, _MAX_LOOPS=-1):
        result = client.get_multi([key], missing=missing,
                                  deferred=deferred)

    # Make sure we have no results, even though the connection has been
    # set up as in `test_hit` to return a single result.
    self.assertEqual(result, [])
    self.assertEqual(missing, [])
    self.assertEqual(deferred, [])
def test_families(self):
    """families() converts each low-level GC rule via _gc_rule_to_dict."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable.happybase import table as MUT

    name = 'table-name'
    connection = None
    table = self._makeOne(name, connection)
    table._low_level_table = _MockLowLevelTable()

    # Mock the column families to be returned.
    col_fam_name = 'fam'
    gc_rule = object()
    col_fam = _MockLowLevelColumnFamily(col_fam_name, gc_rule=gc_rule)
    col_fams = {col_fam_name: col_fam}
    table._low_level_table.column_families = col_fams

    to_dict_result = object()
    to_dict_calls = []

    def mock_gc_rule_to_dict(gc_rule):
        to_dict_calls.append(gc_rule)
        return to_dict_result

    with _Monkey(MUT, _gc_rule_to_dict=mock_gc_rule_to_dict):
        result = table.families()

    self.assertEqual(result, {col_fam_name: to_dict_result})
    self.assertEqual(table._low_level_table.list_column_families_calls, 1)
    self.assertEqual(to_dict_calls, [gc_rule])
def test_it(self):
    """Each default setter receives exactly the value passed in."""
    from gcloud._testing import _Monkey
    from gcloud import storage

    BUCKET = object()
    PROJECT = object()
    CONNECTION = object()

    SET_BUCKET_CALLED = []

    def call_set_bucket(bucket=None):
        SET_BUCKET_CALLED.append(bucket)

    SET_PROJECT_CALLED = []

    def call_set_project(project=None):
        SET_PROJECT_CALLED.append(project)

    SET_CONNECTION_CALLED = []

    def call_set_connection(connection=None):
        SET_CONNECTION_CALLED.append(connection)

    with _Monkey(storage, set_default_bucket=call_set_bucket,
                 set_default_connection=call_set_connection,
                 set_default_project=call_set_project):
        self._callFUT(bucket=BUCKET, project=PROJECT,
                      connection=CONNECTION)

    self.assertEqual(SET_PROJECT_CALLED, [PROJECT])
    self.assertEqual(SET_CONNECTION_CALLED, [CONNECTION])
    self.assertEqual(SET_BUCKET_CALLED, [BUCKET])
def test_batch(self):
    """batch() forwards all keyword options into the Batch constructor."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable.happybase import table as MUT

    name = 'table-name'
    connection = None
    table = self._makeOne(name, connection)

    timestamp = object()
    batch_size = 42
    transaction = False  # Must be False when batch_size is non-null
    wal = object()

    with _Monkey(MUT, Batch=_MockBatch):
        result = table.batch(timestamp=timestamp, batch_size=batch_size,
                             transaction=transaction, wal=wal)

    self.assertTrue(isinstance(result, _MockBatch))
    self.assertEqual(result.args, (table,))
    expected_kwargs = {
        'timestamp': timestamp,
        'batch_size': batch_size,
        'transaction': transaction,
        'wal': wal,
    }
    self.assertEqual(result.kwargs, expected_kwargs)
def _run_test_with_credentials(self, credentials, account_name):
    """Drive _callFUT with stubbed crypto and verify the signed query."""
    import base64
    from gcloud._testing import _Monkey
    from gcloud import credentials as MUT

    crypt = _Crypt()
    pkcs_v1_5 = _PKCS1_v1_5()
    rsa = _RSA()
    sha256 = _SHA256()

    EXPIRATION = '100'
    SIGNATURE_STRING = b'dummy_signature'
    with _Monkey(MUT, crypt=crypt, RSA=rsa, PKCS1_v1_5=pkcs_v1_5,
                 SHA256=sha256):
        result = self._callFUT(credentials, EXPIRATION, SIGNATURE_STRING)

    if crypt._pkcs12_key_as_pem_called:
        self.assertEqual(crypt._private_key_text,
                         base64.b64encode(b'dummy_private_key_text'))
        self.assertEqual(crypt._private_key_password, 'notasecret')
    self.assertEqual(sha256._signature_string, SIGNATURE_STRING)
    SIGNED = base64.b64encode(b'DEADBEEF')
    expected_query = {
        'Expires': EXPIRATION,
        'GoogleAccessId': account_name,
        'Signature': SIGNED,
    }
    self.assertEqual(result, expected_query)
def _monkey(self, connection):
    """Return a _Monkey context patching the implicit CONNECTION."""
    from gcloud.datastore import _implicit_environ
    from gcloud._testing import _Monkey

    return _Monkey(_implicit_environ, CONNECTION=connection)
def test_upload_from_file_resumable(self):
    """A large-enough upload switches to the resumable protocol."""
    from six.moves.http_client import OK
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    from tempfile import NamedTemporaryFile
    from gcloud._testing import _Monkey
    from _gcloud_vendor.apitools.base.py import http_wrapper
    from _gcloud_vendor.apitools.base.py import transfer

    BLOB_NAME = 'blob-name'
    UPLOAD_URL = 'http://example.com/upload/name/key'
    DATA = b'ABCDEF'
    loc_response = {'status': OK, 'location': UPLOAD_URL}
    chunk1_response = {
        'status': http_wrapper.RESUME_INCOMPLETE,
        'range': 'bytes 0-4'
    }
    chunk2_response = {'status': OK}
    connection = _Connection(
        (loc_response, b''),
        (chunk1_response, b''),
        (chunk2_response, b''),
    )
    bucket = _Bucket(connection)
    blob = self._makeOne(BLOB_NAME, bucket=bucket)
    blob.CHUNK_SIZE = 5

    # Set the threshold low enough that we force a resumable upload.
    with _Monkey(transfer, _RESUMABLE_UPLOAD_THRESHOLD=5):
        with NamedTemporaryFile() as fh:
            fh.write(DATA)
            fh.flush()
            blob.upload_from_file(fh, rewind=True)

    rq = connection.http._requested
    self.assertEqual(len(rq), 3)

    # Request 0: initiate the resumable session.
    self.assertEqual(rq[0]['method'], 'POST')
    uri = rq[0]['uri']
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual(scheme, 'http')
    self.assertEqual(netloc, 'example.com')
    self.assertEqual(path, '/b/name/o')
    self.assertEqual(dict(parse_qsl(qs)), {
        'uploadType': 'resumable',
        'name': BLOB_NAME
    })
    headers = dict([(x.title(), str(y))
                    for x, y in rq[0]['headers'].items()])
    self.assertEqual(headers['X-Upload-Content-Length'], '6')
    self.assertEqual(headers['X-Upload-Content-Type'],
                     'application/octet-stream')

    # Request 1: first (incomplete) chunk.
    self.assertEqual(rq[1]['method'], 'PUT')
    self.assertEqual(rq[1]['uri'], UPLOAD_URL)
    self.assertEqual(rq[1]['body'], DATA[:5])
    headers = dict([(x.title(), str(y))
                    for x, y in rq[1]['headers'].items()])
    self.assertEqual(headers['Content-Range'], 'bytes 0-4/6')

    # Request 2: final chunk.
    self.assertEqual(rq[2]['method'], 'PUT')
    self.assertEqual(rq[2]['uri'], UPLOAD_URL)
    self.assertEqual(rq[2]['body'], DATA[5:])
    headers = dict([(x.title(), str(y))
                    for x, y in rq[2]['headers'].items()])
    self.assertEqual(headers['Content-Range'], 'bytes 5-5/6')
def _monkey_defaults(*args, **kwargs):
    """Return a _Monkey context swapping in mock _DEFAULTS."""
    return _Monkey(_implicit_environ,
                   _DEFAULTS=_DefaultsContainer(*args, **kwargs))
def test_update(self):
    """Cluster.update() issues UpdateCluster and wraps the operation."""
    from google.longrunning import operations_pb2
    from gcloud._testing import _Monkey
    from gcloud.bigtable._generated import (bigtable_cluster_data_pb2 as
                                            data_pb2)
    from gcloud.bigtable._testing import _FakeStub
    from gcloud.bigtable import cluster as MUT

    project = 'PROJECT'
    zone = 'zone'
    cluster_id = 'cluster-id'
    serve_nodes = 81
    display_name = 'display_name'
    timeout_seconds = 9

    client = _Client(project, timeout_seconds=timeout_seconds)
    cluster = self._makeOne(zone, cluster_id, client,
                            display_name=display_name,
                            serve_nodes=serve_nodes)

    # Create request_pb
    cluster_name = ('projects/' + project + '/zones/' + zone +
                    '/clusters/' + cluster_id)
    request_pb = data_pb2.Cluster(
        name=cluster_name,
        display_name=display_name,
        serve_nodes=serve_nodes,
    )

    # Create response_pb
    current_op = operations_pb2.Operation()
    response_pb = data_pb2.Cluster(current_operation=current_op)

    # Patch the stub used by the API method.
    client._cluster_stub = stub = _FakeStub(response_pb)

    # Create expected_result.
    op_id = 5678
    op_begin = object()
    expected_result = MUT.Operation('update', op_id, op_begin,
                                    cluster=cluster)

    # Create mocks
    process_operation_called = []

    def mock_process_operation(operation_pb):
        process_operation_called.append(operation_pb)
        return op_id, op_begin

    # Perform the method and check the result.
    with _Monkey(MUT, _process_operation=mock_process_operation):
        result = cluster.update()

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls, [(
        'UpdateCluster',
        (request_pb, timeout_seconds),
        {},
    )])
    self.assertEqual(process_operation_called, [current_op])
def _monkey(self, implicit_project):
    """Patch os.getenv so only the storage project env var is set."""
    import os
    from gcloud.storage import _PROJECT_ENV_VAR_NAME
    from gcloud._testing import _Monkey

    env = {_PROJECT_ENV_VAR_NAME: implicit_project}
    return _Monkey(os, getenv=env.get)
def _monkey(self, dataset_id):
    """Return a _Monkey context patching the implicit DATASET_ID."""
    from gcloud.datastore import _implicit_environ
    from gcloud._testing import _Monkey

    return _Monkey(_implicit_environ, DATASET_ID=dataset_id)
def _monkeyImplicit(self, connection):
    """Return a _Monkey context patching the implicit storage CONNECTION."""
    from gcloud._testing import _Monkey
    from gcloud.storage import _implicit_environ

    return _Monkey(_implicit_environ, CONNECTION=connection)
def test_create(self):
    """Cluster.create() issues CreateCluster and wraps the operation."""
    from google.longrunning import operations_pb2
    from gcloud._testing import _Monkey
    from gcloud.bigtable._generated import (bigtable_cluster_data_pb2 as
                                            data_pb2)
    from gcloud.bigtable._testing import _FakeStub
    from gcloud.bigtable import cluster as MUT

    project = 'PROJECT'
    zone = 'zone'
    cluster_id = 'cluster-id'
    timeout_seconds = 578

    client = _Client(project, timeout_seconds=timeout_seconds)
    cluster = self._makeOne(zone, cluster_id, client)

    # Create request_pb. Just a mock since we monkey patch
    # _prepare_create_request
    request_pb = object()

    # Create response_pb
    op_id = 5678
    op_begin = object()
    op_name = ('operations/projects/%s/zones/%s/clusters/%s/'
               'operations/%d' % (project, zone, cluster_id, op_id))
    current_op = operations_pb2.Operation(name=op_name)
    response_pb = data_pb2.Cluster(current_operation=current_op)

    # Patch the stub used by the API method.
    client._cluster_stub = stub = _FakeStub(response_pb)

    # Create expected_result.
    expected_result = MUT.Operation('create', op_id, op_begin,
                                    cluster=cluster)

    # Create the mocks.
    prep_create_called = []

    def mock_prep_create_req(cluster):
        prep_create_called.append(cluster)
        return request_pb

    process_operation_called = []

    def mock_process_operation(operation_pb):
        process_operation_called.append(operation_pb)
        return op_id, op_begin

    # Perform the method and check the result.
    with _Monkey(MUT,
                 _prepare_create_request=mock_prep_create_req,
                 _process_operation=mock_process_operation):
        result = cluster.create()

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls, [(
        'CreateCluster',
        (request_pb, timeout_seconds),
        {},
    )])
    self.assertEqual(prep_create_called, [cluster])
    self.assertEqual(process_operation_called, [current_op])
def _monkeyDatasetID(self, dataset_id=_DEFAULT_DATASET):
    """Return a context manager patching the implicit dataset ID.

    Defaults to the module-level test dataset constant.
    """
    from gcloud.datastore import _implicit_environ
    from gcloud._testing import _Monkey
    return _Monkey(_implicit_environ, DATASET_ID=dataset_id)
def test_it(self):
    """The stub factory should build SSL + metadata call credentials,
    composite them, open a secure channel, and hand that channel to the
    supplied stub factory.

    The gRPC ``implementations`` module and the ``_MetadataPlugin`` class are
    replaced with recorders so every argument can be asserted exactly.
    """
    from gcloud._testing import _Monkey
    from gcloud.bigtable import client as MUT

    mock_result = object()
    stub_inputs = []
    # Sentinels returned by the fake credential/channel factories below.
    SSL_CREDS = object()
    METADATA_CREDS = object()
    COMPOSITE_CREDS = object()
    CHANNEL = object()

    class _ImplementationsModule(object):
        # Fake replacement for the gRPC ``implementations`` module.  Each
        # method records its arguments and returns the matching sentinel.

        def __init__(self):
            self.ssl_channel_credentials_args = None
            self.metadata_call_credentials_args = None
            self.composite_channel_credentials_args = None
            self.secure_channel_args = None

        def ssl_channel_credentials(self, *args):
            self.ssl_channel_credentials_args = args
            return SSL_CREDS

        def metadata_call_credentials(self, *args, **kwargs):
            self.metadata_call_credentials_args = (args, kwargs)
            return METADATA_CREDS

        def composite_channel_credentials(self, *args):
            self.composite_channel_credentials_args = args
            return COMPOSITE_CREDS

        def secure_channel(self, *args):
            self.secure_channel_args = args
            return CHANNEL

    implementations_mod = _ImplementationsModule()

    def mock_stub_factory(channel):
        stub_inputs.append(channel)
        return mock_result

    metadata_plugin = object()
    clients = []

    def mock_plugin(client):
        clients.append(client)
        return metadata_plugin

    host = 'HOST'
    port = 1025
    client = object()
    with _Monkey(MUT, implementations=implementations_mod,
                 _MetadataPlugin=mock_plugin):
        result = self._callFUT(client, mock_stub_factory, host, port)

    self.assertTrue(result is mock_result)
    # The stub factory received the (fake) secure channel.
    self.assertEqual(stub_inputs, [CHANNEL])
    self.assertEqual(clients, [client])
    self.assertEqual(implementations_mod.ssl_channel_credentials_args,
                     (None, None, None))
    self.assertEqual(implementations_mod.metadata_call_credentials_args,
                     ((metadata_plugin,), {'name': 'google_creds'}))
    self.assertEqual(
        implementations_mod.composite_channel_credentials_args,
        (SSL_CREDS, METADATA_CREDS))
    self.assertEqual(implementations_mod.secure_channel_args,
                     (host, port, COMPOSITE_CREDS))
def test_w_negative_jitter_lt_max_wait(self):
    """With jitter forced to the interval's lower bound, the computed wait
    stays below the maximum."""
    import random
    from gcloud._testing import _Monkey

    def _pick_lower(lower, upper):
        # Pin random.uniform to its most-negative outcome.
        return lower

    with _Monkey(random, uniform=_pick_lower):
        self.assertEqual(self._callFUT(1, 60), 1.5)
def test_it(self):
    """The helper should build SSL + metadata call credentials from the given
    credentials/user-agent, composite them, open a secure channel to
    ``host:port``, and pass the channel to the stub class.

    The ``grpc`` module and ``MetadataPlugin`` are replaced with recorders so
    every argument can be asserted exactly.
    """
    from gcloud._testing import _Monkey
    from gcloud import _helpers as MUT

    mock_result = object()
    stub_inputs = []
    # Sentinels returned by the fake credential/channel factories below.
    SSL_CREDS = object()
    METADATA_CREDS = object()
    COMPOSITE_CREDS = object()
    CHANNEL = object()

    class _GRPCModule(object):
        # Fake replacement for the ``grpc`` module.  Each method records its
        # arguments and returns the matching sentinel.

        def __init__(self):
            self.ssl_channel_credentials_args = None
            self.metadata_call_credentials_args = None
            self.composite_channel_credentials_args = None
            self.secure_channel_args = None

        def ssl_channel_credentials(self, *args):
            self.ssl_channel_credentials_args = args
            return SSL_CREDS

        def metadata_call_credentials(self, *args, **kwargs):
            self.metadata_call_credentials_args = (args, kwargs)
            return METADATA_CREDS

        def composite_channel_credentials(self, *args):
            self.composite_channel_credentials_args = args
            return COMPOSITE_CREDS

        def secure_channel(self, *args):
            self.secure_channel_args = args
            return CHANNEL

    grpc_mod = _GRPCModule()

    def mock_stub_class(channel):
        stub_inputs.append(channel)
        return mock_result

    metadata_plugin = object()
    plugin_args = []

    def mock_plugin(*args):
        plugin_args.append(args)
        return metadata_plugin

    host = 'HOST'
    port = 1025
    credentials = object()
    user_agent = 'USER_AGENT'
    with _Monkey(MUT, grpc=grpc_mod, MetadataPlugin=mock_plugin):
        result = self._callFUT(credentials, user_agent,
                               mock_stub_class, host, port)

    self.assertTrue(result is mock_result)
    # The stub class received the (fake) secure channel.
    self.assertEqual(stub_inputs, [CHANNEL])
    self.assertEqual(plugin_args, [(credentials, user_agent)])
    self.assertEqual(grpc_mod.ssl_channel_credentials_args, ())
    self.assertEqual(grpc_mod.metadata_call_credentials_args,
                     ((metadata_plugin,), {'name': 'google_creds'}))
    self.assertEqual(grpc_mod.composite_channel_credentials_args,
                     (SSL_CREDS, METADATA_CREDS))
    # Unlike the bigtable variant, host and port are joined into one target.
    target = '%s:%d' % (host, port)
    self.assertEqual(grpc_mod.secure_channel_args,
                     (target, COMPOSITE_CREDS))
def test_create_table(self):
    """create_table() should build one low-level table, parse each column
    family's option into a GC rule, and accept family names given as str,
    unicode, bytes, with or without a trailing ':'.
    """
    import operator
    from gcloud._testing import _Monkey
    from gcloud.bigtable.happybase import connection as MUT

    instance = _Instance()  # Avoid implicit environ check.
    connection = self._makeOne(autoconnect=False, instance=instance)
    mock_gc_rule = object()
    called_options = []

    def mock_parse_family_option(option):
        called_options.append(option)
        return mock_gc_rule

    name = 'table-name'
    col_fam1 = 'cf1'
    col_fam_option1 = object()
    col_fam2 = u'cf2'
    col_fam_option2 = object()
    col_fam3 = b'cf3'
    col_fam_option3 = object()
    families = {
        col_fam1: col_fam_option1,
        # A trailing colon is also allowed.
        col_fam2 + ':': col_fam_option2,
        col_fam3 + b':': col_fam_option3,
    }

    tables_created = []

    def make_table(*args, **kwargs):
        result = _MockLowLevelTable(*args, **kwargs)
        tables_created.append(result)
        return result

    with _Monkey(MUT, _LowLevelTable=make_table,
                 _parse_family_option=mock_parse_family_option):
        connection.create_table(name, families)

    # Just one table would have been created.
    table_instance, = tables_created
    self.assertEqual(table_instance.args, (name, instance))
    self.assertEqual(table_instance.kwargs, {})
    self.assertEqual(table_instance.create_calls, 1)

    # Check that our mock was called once per family (three times), but we
    # don't know the order.
    self.assertEqual(
        set(called_options),
        set([col_fam_option1, col_fam_option2, col_fam_option3]))

    # We expect three column family instances created, but don't know the
    # order due to non-deterministic dict.items().
    col_fam_created = table_instance.col_fam_created
    self.assertEqual(len(col_fam_created), 3)
    col_fam_created.sort(key=operator.attrgetter('column_family_id'))
    self.assertEqual(col_fam_created[0].column_family_id, col_fam1)
    self.assertEqual(col_fam_created[0].gc_rule, mock_gc_rule)
    self.assertEqual(col_fam_created[1].column_family_id, col_fam2)
    self.assertEqual(col_fam_created[1].gc_rule, mock_gc_rule)
    # The bytes name is decoded; any trailing ':' has been stripped.
    self.assertEqual(col_fam_created[2].column_family_id,
                     col_fam3.decode('utf-8'))
    self.assertEqual(col_fam_created[2].gc_rule, mock_gc_rule)
def test_w_positive_jitter_gt_max_wait(self):
    """With jitter forced to the interval's upper bound, the computed wait
    is capped at the maximum."""
    import random
    from gcloud._testing import _Monkey

    def _pick_upper(lower, upper):
        # Pin random.uniform to its most-positive outcome.
        return upper

    with _Monkey(random, uniform=_pick_upper):
        self.assertEqual(self._callFUT(4, 10), 10)
def test_upload_from_file_resumable(self):
    """A file larger than the (patched) resumable threshold should produce an
    initiation POST followed by two chunked PUTs with Content-Range headers.
    """
    from six.moves.http_client import OK
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    from gcloud._testing import _Monkey
    from gcloud._testing import _NamedTemporaryFile
    from gcloud.streaming import http_wrapper
    from gcloud.streaming import transfer

    BLOB_NAME = 'blob-name'
    UPLOAD_URL = 'http://example.com/upload/name/key'
    DATA = b'ABCDEF'
    # Three canned responses: the initiation (with the upload location), an
    # incomplete first chunk, and the final chunk.
    loc_response = {'status': OK, 'location': UPLOAD_URL}
    chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
                       'range': 'bytes 0-4'}
    chunk2_response = {'status': OK}
    # Need valid JSON on last response, since resumable.
    connection = _Connection(
        (loc_response, b''),
        (chunk1_response, b''),
        (chunk2_response, b'{}'),
    )
    client = _Client(connection)
    bucket = _Bucket(client)
    blob = self._makeOne(BLOB_NAME, bucket=bucket)
    blob._CHUNK_SIZE_MULTIPLE = 1
    blob.chunk_size = 5

    # Set the threshold low enough that we force a resumable upload.
    with _Monkey(transfer, RESUMABLE_UPLOAD_THRESHOLD=5):
        with _NamedTemporaryFile() as temp:
            with open(temp.name, 'wb') as file_obj:
                file_obj.write(DATA)
            with open(temp.name, 'rb') as file_obj:
                blob.upload_from_file(file_obj, rewind=True)

    rq = connection.http._requested
    self.assertEqual(len(rq), 3)

    # Requested[0]: the resumable-session initiation POST.
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[0].pop('headers').items()])
    self.assertEqual(headers['X-Upload-Content-Length'], '6')
    self.assertEqual(headers['X-Upload-Content-Type'],
                     'application/octet-stream')
    uri = rq[0].pop('uri')
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual(scheme, 'http')
    self.assertEqual(netloc, 'example.com')
    self.assertEqual(path, '/b/name/o')
    self.assertEqual(dict(parse_qsl(qs)),
                     {'uploadType': 'resumable', 'name': BLOB_NAME})
    self.assertEqual(rq[0], {
        'method': 'POST',
        'body': '',
        'connection_type': None,
        'redirections': 5,
    })

    # Requested[1]: first 5-byte chunk.
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[1].pop('headers').items()])
    self.assertEqual(headers['Content-Range'], 'bytes 0-4/6')
    self.assertEqual(rq[1], {
        'method': 'PUT',
        'uri': UPLOAD_URL,
        'body': DATA[:5],
        'connection_type': None,
        'redirections': 5,
    })

    # Requested[2]: final 1-byte chunk.
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[2].pop('headers').items()])
    self.assertEqual(headers['Content-Range'], 'bytes 5-5/6')
    self.assertEqual(rq[2], {
        'method': 'PUT',
        'uri': UPLOAD_URL,
        'body': DATA[5:],
        'connection_type': None,
        'redirections': 5,
    })
def _monkey(self, implicit_dataset_id):
    """Patch ``os.getenv`` so the dataset env var reports the given ID."""
    import os
    from gcloud._testing import _Monkey
    from gcloud.datastore import _DATASET_ENV_VAR_NAME
    fake_environ = {_DATASET_ENV_VAR_NAME: implicit_dataset_id}
    return _Monkey(os, getenv=fake_environ.get)