def testSetOptions(self):
    """set_options(dataset=...) must rebuild the default connection.

    After resetting the cached connection and calling set_options, the
    connection returned by get_default_connection() must point at the
    dataset-specific v1beta1 URL and carry credentials obtained from
    helper.get_credentials_from_env().
    """
    # Drop any cached connection so get_default_connection() rebuilds it.
    datastore._conn = None
    self.mox.StubOutWithMock(os, 'getenv')
    self.mox.StubOutWithMock(helper, 'get_credentials_from_env')
    # Record phase: the rebuild reads the host from the environment and
    # fetches credentials exactly once each.
    os.getenv('DATASTORE_HOST').AndReturn('http://localhost:8080')
    helper.get_credentials_from_env().AndReturn(FakeCredentialsFromEnv())
    self.mox.ReplayAll()
    datastore.set_options(dataset='bar')
    conn = datastore.get_default_connection()
    # The URL embeds both the stubbed host and the dataset set above.
    self.assertEqual('http://localhost:8080/datastore/v1beta1/datasets/bar/',
                     conn._url)
    self.assertEqual(FakeCredentialsFromEnv, type(conn._credentials))
    self.mox.VerifyAll()
def get_default_connection():
    """Return the default datastore connection for the calling thread.

    Any option not supplied through set_options falls back to
    os.getenv('DATASTORE_DATASET'), os.getenv('DATASTORE_HOST'), and
    helper.get_credentials_from_env() respectively.
    """
    thread_key = id(threading.current_thread())
    connection = _conn_holder.get(thread_key)
    if connection:
        return connection
    with _rlock:
        # Only the owning thread ever writes to its own slot, so existence
        # does not need to be re-checked under the lock.
        env_defaults = (
            ('dataset', lambda: os.getenv('DATASTORE_DATASET')),
            ('host', lambda: os.getenv('DATASTORE_HOST')),
            ('credentials', helper.get_credentials_from_env),
        )
        # Lazily fill only the missing options, in the same order as before.
        for option, fetch in env_defaults:
            if option not in _options:
                _options[option] = fetch()
        # Caching the thread-local connection under the lock avoids racing
        # with _conn_holder.clear() in set_options().
        connection = Datastore(**_options)
        _conn_holder[thread_key] = connection
    return connection
def get_default_connection():
    """Return the default datastore connection for the calling thread.

    Defaults dataset to os.getenv('DATASTORE_DATASET'), host to
    os.getenv('DATASTORE_HOST'), and credentials to
    helper.get_credentials_from_env().  Use set_options to override
    defaults.
    """
    # One cached connection per thread, keyed by the thread object's id.
    tid = id(threading.current_thread())
    conn = _conn_holder.get(tid)
    if not conn:
        with (_rlock):
            # No other thread would insert a value in our slot, so no need
            # to recheck existence inside the lock.
            if 'dataset' not in _options:
                _options['dataset'] = os.getenv('DATASTORE_DATASET')
            if 'host' not in _options:
                _options['host'] = os.getenv('DATASTORE_HOST')
            if 'credentials' not in _options:
                _options['credentials'] = helper.get_credentials_from_env()
            # We still need the lock when caching the thread local connection
            # so we don't race with _conn_holder.clear() in set_options().
            _conn_holder[tid] = conn = Datastore(**_options)
    return conn
def _make_cloud_datastore_context(app_id, external_app_ids=(), host=None):
    """Creates a new context to connect to a remote Cloud Datastore instance.

    This should only be used outside of Google App Engine.

    Args:
      app_id: The application id to connect to. This differs from the project
        id as it may have an additional prefix, e.g. "s~" or "e~".
      external_app_ids: A list of apps that may be referenced by data in your
        application. For example, if you are connected to s~my-app and store
        keys for s~my-other-app, you should include s~my-other-app in the
        external_apps list.
      host: The hostname to provide to the datastore connection. If None, the
        default is used.

    Returns:
      An ndb.Context that can connect to a Remote Cloud Datastore. You can use
      this context by passing it to ndb.set_context.

    Raises:
      datastore_errors.BadArgumentError: if Cloud Datastore support is not
        available.
      ValueError: if already connected to a different application.
    """
    from . import model  # Late import to deal with circular imports.
    # Late import since it might not exist.
    if not datastore_pbs._CLOUD_DATASTORE_ENABLED:
        raise datastore_errors.BadArgumentError(
            datastore_pbs.MISSING_CLOUD_DATASTORE_MESSAGE)
    import googledatastore
    try:
        from google.appengine.datastore import cloud_datastore_v1_remote_stub
    except ImportError:
        from google3.apphosting.datastore import cloud_datastore_v1_remote_stub

    current_app_id = os.environ.get("APPLICATION_ID", None)
    if current_app_id and current_app_id != app_id:
        # TODO(pcostello): We should support this so users can connect to
        # different applications.
        raise ValueError(
            "Cannot create a Cloud Datastore context that connects "
            "to an application (%s) that differs from the application "
            "already connected to (%s)." % (app_id, current_app_id))
    os.environ["APPLICATION_ID"] = app_id

    id_resolver = datastore_pbs.IdResolver((app_id,) + tuple(external_app_ids))
    project_id = id_resolver.resolve_project_id(app_id)

    from googledatastore import helper
    credentials = helper.get_credentials_from_env()
    datastore = googledatastore.Datastore(
        project_id, credentials=credentials, host=host)

    conn = model.make_connection(
        _api_version=datastore_rpc._CLOUD_DATASTORE_V1,
        _id_resolver=id_resolver)

    # If necessary, install the stubs.
    try:
        stub = cloud_datastore_v1_remote_stub.CloudDatastoreV1RemoteStub(
            datastore)
        apiproxy_stub_map.apiproxy.RegisterStub(
            datastore_rpc._CLOUD_DATASTORE_V1, stub)
    except Exception:
        # Was a bare "except:", which would also swallow KeyboardInterrupt
        # and SystemExit; Exception suffices for the already-installed case.
        pass  # The stub is already installed.
    # TODO(pcostello): Ensure the current stub is connected to the right
    # project.
    return make_context(conn=conn)
def testGetCredentialsFromEnvCompute(self):
    """On Compute Engine the helper returns AppAssertionCredentials.

    get_credentials_from_env() must construct AppAssertionCredentials with
    the datastore SCOPE, exercise it by authorizing and refreshing an
    httplib2.Http client, and return that credentials object unchanged.
    """
    self.mox.StubOutWithMock(gce, 'AppAssertionCredentials')
    credentials = self.mox.CreateMockAnything()
    # Record phase: construction with the connection scope, then one
    # authorize() and one refresh() against an httplib2.Http instance.
    gce.AppAssertionCredentials(connection.SCOPE).AndReturn(credentials)
    credentials.authorize(mox.IsA(httplib2.Http))
    credentials.refresh(mox.IsA(httplib2.Http))
    self.mox.ReplayAll()
    self.assertIs(credentials, helper.get_credentials_from_env())
    self.mox.VerifyAll()
def testGetCredentialsFromEnvJwt(self):
    """Service-account env vars select SignedJwtAssertionCredentials.

    When DATASTORE_SERVICE_ACCOUNT and DATASTORE_PRIVATE_KEY_FILE are set,
    get_credentials_from_env() must build SignedJwtAssertionCredentials
    from the account, the key read from the file, and the SCOPE.
    """
    self.mox.StubOutWithMock(os, 'getenv')
    self.mox.StubOutWithMock(client, 'SignedJwtAssertionCredentials')
    credentials = self.mox.CreateMockAnything()
    os.getenv('DATASTORE_SERVICE_ACCOUNT').AndReturn('*****@*****.**')
    os.getenv('DATASTORE_PRIVATE_KEY_FILE').AndReturn(self.certificate.name)
    # NOTE(review): self.certificate presumably contains 'not-a-secret-key'
    # (written in the fixture's setUp) -- confirm there.
    client.SignedJwtAssertionCredentials('*****@*****.**', 'not-a-secret-key',
                                         connection.SCOPE).AndReturn(
                                             credentials)
    self.mox.ReplayAll()
    self.assertIs(credentials, helper.get_credentials_from_env())
    self.mox.VerifyAll()
def testGetCredentialsFromEnvJwt(self):
    """JWT path: env-configured service account yields signed credentials.

    Stubs os.getenv so the helper sees a service account and a private key
    file; the helper must then return the SignedJwtAssertionCredentials
    built from that account, the file's key, and the connection SCOPE.
    """
    self.mox.StubOutWithMock(os, 'getenv')
    self.mox.StubOutWithMock(client, 'SignedJwtAssertionCredentials')
    credentials = self.mox.CreateMockAnything()
    os.getenv('DATASTORE_SERVICE_ACCOUNT').AndReturn('*****@*****.**')
    os.getenv('DATASTORE_PRIVATE_KEY_FILE').AndReturn(
        self.certificate.name)
    # NOTE(review): the certificate fixture file presumably holds
    # 'not-a-secret-key' -- verify against the test's setUp.
    client.SignedJwtAssertionCredentials(
        '*****@*****.**', 'not-a-secret-key',
        connection.SCOPE).AndReturn(credentials)
    self.mox.ReplayAll()
    self.assertIs(credentials, helper.get_credentials_from_env())
    self.mox.VerifyAll()
def get_default_connection():
    """Return the process-wide default datastore connection.

    Defaults dataset to os.getenv('DATASTORE_DATASET'), host to
    os.getenv('DATASTORE_HOST'), and credentials to
    helper.get_credentials_from_env().  Use set_options to override
    defaults.
    """
    global _conn
    if _conn:
        return _conn
    # Fill in any option not supplied through set_options, evaluating each
    # environment/credentials source lazily and in the original order.
    option_sources = (
        ('dataset', lambda: os.getenv('DATASTORE_DATASET')),
        ('host', lambda: os.getenv('DATASTORE_HOST')),
        ('credentials', helper.get_credentials_from_env),
    )
    for option, fetch in option_sources:
        if option not in _options:
            _options[option] = fetch()
    _conn = Datastore(**_options)
    return _conn
def testSetOptions(self):
    """set_options() must invalidate every thread's cached connection.

    Runs a helper thread alongside the test thread and checks that:
      * each thread gets its own connection,
      * repeated calls on one thread return the same connection, and
      * after set_options(dataset='bar') both threads build new
        connections with the new dataset URL while the old connections
        keep the old one.
    """
    other_thread_conn = []
    lock1 = threading.Lock()
    lock2 = threading.Lock()
    # Both locks start held so the helper thread can signal/block on them.
    lock1.acquire()
    lock2.acquire()

    def target():
        # Grab two connections
        other_thread_conn.append(datastore.get_default_connection())
        other_thread_conn.append(datastore.get_default_connection())
        lock1.release()  # Notify that we have grabbed the first 2 connections.
        lock2.acquire()  # Wait for the signal to grab the 3rd.
        other_thread_conn.append(datastore.get_default_connection())
    other_thread = threading.Thread(target=target)

    # Resetting options and state.
    datastore._options = {}
    datastore.set_options(dataset='foo')

    self.mox.StubOutWithMock(os, 'getenv')
    self.mox.StubOutWithMock(helper, 'get_credentials_from_env')
    os.getenv('DATASTORE_HOST').AndReturn('http://localhost:8080')
    # Four lookups recorded -- presumably one per connection constructed
    # (two threads x two option generations); confirm in datastore module.
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    helper.get_credentials_from_env().AndReturn(FakeCredentialsFromEnv())
    self.mox.ReplayAll()

    # Start the thread and wait for the first lock.
    other_thread.start()
    lock1.acquire()
    t1_conn1 = datastore.get_default_connection()
    t2_conn1, t2_conn1b = other_thread_conn
    # Rebinding is visible to target()'s closure: its 3rd append lands in
    # this new list.
    other_thread_conn = []
    # The two threads get different connections.
    self.assertIsNot(t1_conn1, t2_conn1)
    # Multiple calls on the same thread get the same connection.
    self.assertIs(t1_conn1, datastore.get_default_connection())
    self.assertIs(t2_conn1, t2_conn1b)

    # Change the global options and grab the connections again.
    datastore.set_options(dataset='bar')
    lock2.release()
    other_thread.join()
    t1_conn2 = datastore.get_default_connection()
    t2_conn2 = other_thread_conn[0]

    # Changing the options causes all threads to create new connections.
    self.assertIsNot(t1_conn1, t1_conn2)
    self.assertIsNot(t2_conn1, t2_conn2)
    # The new connections are still different for each thread.
    self.assertIsNot(t1_conn2, t2_conn2)
    # The old connections has the old settings.
    self.assertEqual(
        'http://localhost:8080/datastore/v1beta2/datasets/foo/',
        t1_conn1._url)
    self.assertEqual(
        'http://localhost:8080/datastore/v1beta2/datasets/foo/',
        t2_conn1._url)
    # The new connections has the new settings.
    self.assertEqual(
        'http://localhost:8080/datastore/v1beta2/datasets/bar/',
        t1_conn2._url)
    self.assertEqual(
        'http://localhost:8080/datastore/v1beta2/datasets/bar/',
        t2_conn2._url)
    self.assertEqual(FakeCredentialsFromEnv, type(t1_conn2._credentials))
    self.assertEqual(FakeCredentialsFromEnv, type(t2_conn2._credentials))
    self.mox.VerifyAll()
def testSetOptions(self):
    """set_options(project_id=...) rebuilds per-thread connections (v1 API).

    A helper thread and the test thread each cache a connection; switching
    the project id via set_options() must hand every thread a fresh
    connection that points at the new project endpoint, while the old
    connections keep the old endpoint.
    """
    other_thread_conn = []
    lock1 = threading.Lock()
    lock2 = threading.Lock()
    # Both locks start held so the helper thread can signal/block on them.
    lock1.acquire()
    lock2.acquire()

    def target():
        # Grab two connections
        other_thread_conn.append(datastore.get_default_connection())
        other_thread_conn.append(datastore.get_default_connection())
        lock1.release()  # Notify that we have grabbed the first 2 connections.
        lock2.acquire()  # Wait for the signal to grab the 3rd.
        other_thread_conn.append(datastore.get_default_connection())
    other_thread = threading.Thread(target=target)

    # Resetting options and state.
    datastore._options = {}
    datastore.set_options(project_id="foo")

    self.mox.StubOutWithMock(helper, "get_credentials_from_env")
    self.mox.StubOutWithMock(helper, "get_project_endpoint_from_env")
    endpoint = "http://localhost:8080/datastore/v1/projects/%s"
    # Presumably one endpoint lookup per connection built: two under "foo",
    # then two under "bar" -- confirm against get_default_connection().
    helper.get_project_endpoint_from_env(project_id="foo").AndReturn(
        endpoint % "foo")
    helper.get_project_endpoint_from_env(project_id="foo").AndReturn(
        endpoint % "foo")
    helper.get_project_endpoint_from_env(project_id="bar").AndReturn(
        endpoint % "bar")
    helper.get_project_endpoint_from_env(project_id="bar").AndReturn(
        endpoint % "bar")
    helper.get_credentials_from_env().AndReturn(FakeCredentialsFromEnv())
    self.mox.ReplayAll()

    # Start the thread and wait for the first lock.
    other_thread.start()
    lock1.acquire()
    t1_conn1 = datastore.get_default_connection()
    t2_conn1, t2_conn1b = other_thread_conn
    # Rebinding is visible to target()'s closure: its 3rd append lands in
    # this new list.
    other_thread_conn = []
    # The two threads get different connections.
    self.assertIsNot(t1_conn1, t2_conn1)
    # Multiple calls on the same thread get the same connection.
    self.assertIs(t1_conn1, datastore.get_default_connection())
    self.assertIs(t2_conn1, t2_conn1b)

    # Change the global options and grab the connections again.
    datastore.set_options(project_id="bar")
    lock2.release()
    other_thread.join()
    t1_conn2 = datastore.get_default_connection()
    t2_conn2 = other_thread_conn[0]

    # Changing the options causes all threads to create new connections.
    self.assertIsNot(t1_conn1, t1_conn2)
    self.assertIsNot(t2_conn1, t2_conn2)
    # The new connections are still different for each thread.
    self.assertIsNot(t1_conn2, t2_conn2)
    # The old connections has the old settings.
    self.assertEqual("http://localhost:8080/datastore/v1/projects/foo",
                     t1_conn1._url)
    self.assertEqual("http://localhost:8080/datastore/v1/projects/foo",
                     t2_conn1._url)
    # The new connections has the new settings.
    self.assertEqual("http://localhost:8080/datastore/v1/projects/bar",
                     t1_conn2._url)
    self.assertEqual("http://localhost:8080/datastore/v1/projects/bar",
                     t2_conn2._url)
    self.assertEqual(FakeCredentialsFromEnv, type(t1_conn2._credentials))
    self.assertEqual(FakeCredentialsFromEnv, type(t2_conn2._credentials))
    self.mox.VerifyAll()
def testSetOptions(self):
    """Verify set_options() resets connections for all threads (v1beta2).

    Uses two hand-rolled locks to sequence a helper thread against the
    test thread, proving that connections are per-thread, stable across
    repeated calls, and rebuilt (with the new dataset URL) after
    set_options(dataset='bar').
    """
    other_thread_conn = []
    lock1 = threading.Lock()
    lock2 = threading.Lock()
    # Acquire both up front; the helper thread releases/blocks on them.
    lock1.acquire()
    lock2.acquire()

    def target():
        # Grab two connections
        other_thread_conn.append(datastore.get_default_connection())
        other_thread_conn.append(datastore.get_default_connection())
        lock1.release()  # Notify that we have grabbed the first 2 connections.
        lock2.acquire()  # Wait for the signal to grab the 3rd.
        other_thread_conn.append(datastore.get_default_connection())
    other_thread = threading.Thread(target=target)

    # Resetting options and state.
    datastore._options = {}
    datastore.set_options(dataset='foo')

    self.mox.StubOutWithMock(os, 'getenv')
    self.mox.StubOutWithMock(helper, 'get_credentials_from_env')
    os.getenv('DATASTORE_HOST').AndReturn('http://localhost:8080')
    # Four lookups recorded -- presumably one per connection constructed
    # (two threads x two option generations); confirm in datastore module.
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    helper.get_credentials_from_env().AndReturn(FakeCredentialsFromEnv())
    self.mox.ReplayAll()

    # Start the thread and wait for the first lock.
    other_thread.start()
    lock1.acquire()
    t1_conn1 = datastore.get_default_connection()
    t2_conn1, t2_conn1b = other_thread_conn
    # Rebinding is visible to target()'s closure: its 3rd append lands in
    # this new list.
    other_thread_conn = []
    # The two threads get different connections.
    self.assertIsNot(t1_conn1, t2_conn1)
    # Multiple calls on the same thread get the same connection.
    self.assertIs(t1_conn1, datastore.get_default_connection())
    self.assertIs(t2_conn1, t2_conn1b)

    # Change the global options and grab the connections again.
    datastore.set_options(dataset='bar')
    lock2.release()
    other_thread.join()
    t1_conn2 = datastore.get_default_connection()
    t2_conn2 = other_thread_conn[0]

    # Changing the options causes all threads to create new connections.
    self.assertIsNot(t1_conn1, t1_conn2)
    self.assertIsNot(t2_conn1, t2_conn2)
    # The new connections are still different for each thread.
    self.assertIsNot(t1_conn2, t2_conn2)
    # The old connections has the old settings.
    self.assertEqual('http://localhost:8080/datastore/v1beta2/datasets/foo/',
                     t1_conn1._url)
    self.assertEqual('http://localhost:8080/datastore/v1beta2/datasets/foo/',
                     t2_conn1._url)
    # The new connections has the new settings.
    self.assertEqual('http://localhost:8080/datastore/v1beta2/datasets/bar/',
                     t1_conn2._url)
    self.assertEqual('http://localhost:8080/datastore/v1beta2/datasets/bar/',
                     t2_conn2._url)
    self.assertEqual(FakeCredentialsFromEnv, type(t1_conn2._credentials))
    self.assertEqual(FakeCredentialsFromEnv, type(t2_conn2._credentials))
    self.mox.VerifyAll()
def _make_cloud_datastore_context(app_id, external_app_ids=(), host=None):
    """Creates a new context to connect to a remote Cloud Datastore instance.

    This should only be used outside of Google App Engine.

    Args:
      app_id: The application id to connect to. This differs from the project
        id as it may have an additional prefix, e.g. "s~" or "e~".
      external_app_ids: A list of apps that may be referenced by data in your
        application. For example, if you are connected to s~my-app and store
        keys for s~my-other-app, you should include s~my-other-app in the
        external_apps list.
      host: The hostname to provide to the datastore connection. If None, the
        default is used.

    Returns:
      An ndb.Context that can connect to a Remote Cloud Datastore. You can use
      this context by passing it to ndb.set_context.

    Raises:
      datastore_errors.BadArgumentError: if Cloud Datastore support is not
        available.
      ValueError: if already connected to a different application.
    """
    from . import model  # Late import to deal with circular imports.
    # Late import since it might not exist.
    if not datastore_pbs._CLOUD_DATASTORE_ENABLED:
        raise datastore_errors.BadArgumentError(
            datastore_pbs.MISSING_CLOUD_DATASTORE_MESSAGE)
    import googledatastore
    try:
        from google.appengine.datastore import cloud_datastore_v1_remote_stub
    except ImportError:
        from google3.apphosting.datastore import cloud_datastore_v1_remote_stub

    current_app_id = os.environ.get('APPLICATION_ID', None)
    if current_app_id and current_app_id != app_id:
        # TODO(pcostello): We should support this so users can connect to
        # different applications.
        raise ValueError(
            'Cannot create a Cloud Datastore context that connects '
            'to an application (%s) that differs from the application '
            'already connected to (%s).' % (app_id, current_app_id))
    os.environ['APPLICATION_ID'] = app_id

    id_resolver = datastore_pbs.IdResolver((app_id,) + tuple(external_app_ids))
    project_id = id_resolver.resolve_project_id(app_id)

    from googledatastore import helper
    credentials = helper.get_credentials_from_env()
    datastore = googledatastore.Datastore(
        project_id, credentials=credentials, host=host)

    conn = model.make_connection(
        _api_version=datastore_rpc._CLOUD_DATASTORE_V1,
        _id_resolver=id_resolver)

    # If necessary, install the stubs.
    try:
        stub = cloud_datastore_v1_remote_stub.CloudDatastoreV1RemoteStub(
            datastore)
        apiproxy_stub_map.apiproxy.RegisterStub(
            datastore_rpc._CLOUD_DATASTORE_V1, stub)
    except Exception:
        # Was a bare "except:", which would also swallow KeyboardInterrupt
        # and SystemExit; Exception suffices for the already-installed case.
        pass  # The stub is already installed.
    # TODO(pcostello): Ensure the current stub is connected to the right
    # project.
    return make_context(conn=conn)