def test_to_json_and_from_json(self):
    credentials = AppAssertionCredentials(
        scope=['http://example.com/a', 'http://example.com/b'])
    json = credentials.to_json()
    credentials_from_json = Credentials.new_from_json(json)
    self.assertEqual(credentials.access_token,
                     credentials_from_json.access_token)


def test_create_scoped(self, warn_mock):
    credentials = AppAssertionCredentials()
    new_credentials = credentials.create_scoped(['dummy_scope'])
    self.assertNotEqual(credentials, new_credentials)
    self.assertTrue(isinstance(new_credentials, AppAssertionCredentials))
    self.assertEqual('dummy_scope', new_credentials.scope)
    warn_mock.assert_called_once_with(_SCOPES_WARNING)


def test_serialize_deserialize(self, get_metadata):
    credentials = AppAssertionCredentials()
    credentials_from_json = Credentials.new_from_json(
        credentials.to_json())
    self.assertEqual(credentials.service_account_info,
                     credentials_from_json.service_account_info)


def test_retrieve_scopes_bad_email(self, metadata):
    http_request = mock.MagicMock()
    http_mock = mock.MagicMock(request=http_request)
    credentials = AppAssertionCredentials(email='*****@*****.**')
    with self.assertRaises(httplib2.HttpLib2Error):
        credentials.retrieve_scopes(http_mock)
    metadata.assert_called_once_with(http_request,
                                     service_account='*****@*****.**')


def test_refresh_token(self, metadata):
    credentials = AppAssertionCredentials()
    self.assertIsNone(credentials.access_token)

    credentials.get_access_token()
    self.assertEqual(credentials.access_token, 'A')
    self.assertTrue(credentials.access_token_expired)

    credentials.get_access_token()
    self.assertEqual(credentials.access_token, 'B')
    self.assertFalse(credentials.access_token_expired)


def test_refresh_failure_400(self):
    http = mock.MagicMock()
    content = '{}'
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.BAD_REQUEST), content))
    credentials = AppAssertionCredentials()
    with self.assertRaises(HttpAccessTokenRefreshError):
        credentials.refresh(http)

def EnableGceAuth(self):
    """Selects the local metadata service for authentication.

    The project ID and project number are also retrieved from the metadata
    service. This is done lazily from the worker thread. The motivation is to
    speed up initialization and be able to recover from failures.
    """
    self._credentials = AppAssertionCredentials()
    self._project_id = lambda: self._QueryGcpProject('project-id')
    self._project_number = lambda: self._QueryGcpProject('numeric-project-id')

def test_refresh_token_failed_fetch(self):
    http_request = request_mock(
        http_client.NOT_FOUND,
        'application/json',
        json.dumps({'access_token': 'a', 'expires_in': 100}))
    credentials = AppAssertionCredentials()
    with self.assertRaises(HttpAccessTokenRefreshError):
        credentials._refresh(http_request=http_request)


def test_refresh_token_failed_fetch(self):
    http_request = request_mock(
        http_client.NOT_FOUND,
        'application/json',
        json.dumps({'access_token': 'a', 'expires_in': 100}))
    credentials = AppAssertionCredentials()
    credentials.invalid = False
    credentials.service_account_email = '*****@*****.**'
    with self.assertRaises(HttpAccessTokenRefreshError):
        credentials._refresh(http_request)


def test_retrieve_scopes(self, metadata):
    http_request = mock.MagicMock()
    http_mock = mock.MagicMock(request=http_request)
    credentials = AppAssertionCredentials()
    self.assertTrue(credentials.invalid)
    self.assertIsNone(credentials.scopes)

    scopes = credentials.retrieve_scopes(http_mock)
    self.assertEqual(scopes, SERVICE_ACCOUNT_INFO['scopes'])
    self.assertFalse(credentials.invalid)

    credentials.retrieve_scopes(http_mock)
    # Assert scopes weren't refetched
    metadata.assert_called_once_with(http_request,
                                     service_account='default')


def test_scopes_failure(self):
    # Set-up the mock.
    http = mock.MagicMock()
    content = '{}'
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.NOT_FOUND), content))
    # Test the failure.
    credentials = AppAssertionCredentials()
    with self.assertRaises(MetadataServerHttpError):
        credentials._retrieve_scopes(http.request)
    self.assertEqual(credentials._service_account_info, {})


def test_get_access_token(self):
    http = mock.MagicMock()
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=200),
                      '{"accessToken": "this-is-a-token"}'))

    credentials = AppAssertionCredentials(['dummy_scope'])
    token = credentials.get_access_token(http=http)
    self.assertEqual('this-is-a-token', token.access_token)
    self.assertEqual(None, token.expires_in)

    http.request.assert_called_once_with(
        'http://metadata.google.internal/0.1/meta-data/service-accounts/'
        'default/acquire?scope=dummy_scope')


def test_get_access_token(self):
    http = mock.MagicMock()
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.OK),
                      '{"accessToken": "this-is-a-token"}'))

    credentials = AppAssertionCredentials(['dummy_scope'])
    token = credentials.get_access_token(http=http)
    self.assertEqual('this-is-a-token', token.access_token)
    self.assertEqual(None, token.expires_in)

    http.request.assert_called_once_with(
        'http://metadata.google.internal/0.1/meta-data/service-accounts/'
        'default/acquire?scope=dummy_scope')


def test_get_access_token(self):
    http = mock.MagicMock()
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.OK),
                      '{"access_token": "this-is-a-token"}'))

    credentials = AppAssertionCredentials()
    token = credentials.get_access_token(http=http)
    self.assertEqual('this-is-a-token', token.access_token)
    self.assertEqual(None, token.expires_in)

    http.request.assert_called_once_with(
        'http://metadata.google.internal/computeMetadata/v1/instance/'
        'service-accounts/default/token',
        headers={'Metadata-Flavor': 'Google'})

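# A minimal sketch (not part of any of the test files above) of the request
# the mocked tests emulate: fetching an access token for the default service
# account from the GCE metadata server. The endpoint and the required
# Metadata-Flavor header are taken from the assertions above; the function
# name is illustrative, and the call only succeeds on a GCE instance.
import json

import httplib2

_METADATA_TOKEN_URI = (
    'http://metadata.google.internal/computeMetadata/v1/instance/'
    'service-accounts/default/token')


def fetch_default_token(http=None):
    """Returns the parsed token payload from the metadata server."""
    http = http or httplib2.Http()
    response, content = http.request(
        _METADATA_TOKEN_URI, headers={'Metadata-Flavor': 'Google'})
    if response.status != 200:
        raise RuntimeError('Metadata server returned %s' % response.status)
    return json.loads(content.decode('utf-8'))
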
def test_refresh_failure_400(self):
    http = mock.MagicMock()
    content = '{}'
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.BAD_REQUEST), content))

    credentials = AppAssertionCredentials()

    exception_caught = None
    try:
        credentials.refresh(http)
    except AccessTokenRefreshError as exc:
        exception_caught = exc

    self.assertNotEqual(exception_caught, None)
    self.assertEqual(str(exception_caught), content)


def test_get_access_token(self):
    http = mock.MagicMock()
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.OK),
                      '{"accessToken": "this-is-a-token"}'))

    credentials = AppAssertionCredentials(['dummy_scope'])
    token = credentials.get_access_token(http=http)
    self.assertEqual('this-is-a-token', token.access_token)
    self.assertEqual(None, token.expires_in)

    http.request.assert_called_once_with(
        'http://metadata.google.internal/computeMetadata/v1/instance/'
        'service-accounts/default/acquire?scope=dummy_scope',
        headers={'Metadata-Flavor': 'Google'})

def test_service_account_email(self, get_email):
    credentials = AppAssertionCredentials([])
    self.assertIsNone(credentials._service_account_email)
    self.assertEqual(credentials.service_account_email,
                     get_email.return_value[1])
    self.assertIsNotNone(credentials._service_account_email)
    get_email.assert_called_once_with()


def test_constructor(self):
    scope = 'http://example.com/a http://example.com/b'
    scopes = scope.split()
    credentials = AppAssertionCredentials(scope=scopes, foo='bar')
    self.assertEqual(credentials.scope, scope)
    self.assertEqual(credentials.kwargs, {'foo': 'bar'})
    self.assertEqual(credentials.assertion_type, None)


def test_refresh_failure_404(self):
    http = mock.MagicMock()
    content = '{}'
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.NOT_FOUND), content))

    credentials = AppAssertionCredentials()

    exception_caught = None
    try:
        credentials.refresh(http)
    except AccessTokenRefreshError as exc:
        exception_caught = exc

    self.assertNotEqual(exception_caught, None)
    expanded_content = content + (' This can occur if a VM was created'
                                  ' with no service account or scopes.')
    self.assertEqual(str(exception_caught), expanded_content)


def test_constructor_with_scopes(self, warn_mock):
    scope = 'http://example.com/a http://example.com/b'
    scopes = scope.split()
    credentials = AppAssertionCredentials(scope=scopes, foo='bar')
    self.assertEqual(credentials.scope, scope)
    self.assertEqual(credentials.kwargs, {'foo': 'bar'})
    self.assertEqual(credentials.assertion_type, None)
    warn_mock.assert_called_once_with(_SCOPES_WARNING)

def test_refresh_failure_bad_json(self):
    http = mock.MagicMock()
    content = '{BADJSON'
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.OK), content))

    credentials = AppAssertionCredentials()
    self.assertRaises(AccessTokenRefreshError, credentials.refresh, http)


def test_refresh_failure_400(self):
    http = mock.MagicMock()
    content = '{}'
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=400), content))

    credentials = AppAssertionCredentials(
        scope=['http://example.com/a', 'http://example.com/b'])

    exception_caught = None
    try:
        credentials.refresh(http)
    except AccessTokenRefreshError as exc:
        exception_caught = exc

    self.assertNotEqual(exception_caught, None)
    self.assertEqual(str(exception_caught), content)


def _refresh_success_helper(self, bytes_response=False):
    access_token = u'this-is-a-token'
    return_val = json.dumps({u'access_token': access_token})
    if bytes_response:
        return_val = _to_bytes(return_val)

    http = mock.MagicMock()
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.OK), return_val))

    credentials = AppAssertionCredentials()
    self.assertEquals(None, credentials.access_token)
    credentials.refresh(http)
    self.assertEquals(access_token, credentials.access_token)

    base_metadata_uri = (
        'http://metadata.google.internal/computeMetadata/v1/instance/'
        'service-accounts/default/token')
    http.request.assert_called_once_with(
        base_metadata_uri, headers={'Metadata-Flavor': 'Google'})

def test_refresh_token(self, get_metadata):
    credentials = AppAssertionCredentials()
    self.assertIsNone(credentials.access_token)

    with mock.patch('oauth2client.contrib.gce._NOW',
                    side_effect=[datetime.datetime.min,
                                 datetime.datetime.max]):
        credentials.get_access_token()
        self.assertTrue(credentials.access_token_expired)

    force_refresh_time = datetime.datetime.max - datetime.timedelta(
        seconds=get_metadata.return_value['expires_in'])
    with mock.patch('oauth2client.contrib.gce._NOW',
                    side_effect=[force_refresh_time,
                                 datetime.datetime.min,
                                 datetime.datetime.min]):
        credentials.get_access_token()
        self.assertFalse(credentials.access_token_expired)

def test_save_to_well_known_file(self):
    import os
    ORIGINAL_ISDIR = os.path.isdir
    try:
        os.path.isdir = lambda path: True
        credentials = AppAssertionCredentials([])
        self.assertRaises(NotImplementedError, save_to_well_known_file,
                          credentials)
    finally:
        os.path.isdir = ORIGINAL_ISDIR


def test_token_info(self):
    credentials = AppAssertionCredentials([])
    http = httplib2.Http()

    # First refresh to get the access token.
    self.assertIsNone(credentials.access_token)
    credentials.refresh(http)
    self.assertIsNotNone(credentials.access_token)

    # Then check the access token against the token info API.
    query_params = {'access_token': credentials.access_token}
    token_uri = (GOOGLE_TOKEN_INFO_URI + '?' +
                 urllib.parse.urlencode(query_params))
    response, content = http.request(token_uri)
    self.assertEqual(response.status, http_client.OK)

    content = content.decode('utf-8')
    payload = json.loads(content)
    self.assertEqual(payload['access_type'], 'offline')
    self.assertLessEqual(int(payload['expires_in']), 3600)


def _refresh_success_helper(self, bytes_response=False):
    access_token = u'this-is-a-token'
    return_val = json.dumps({u'accessToken': access_token})
    if bytes_response:
        return_val = _to_bytes(return_val)

    http = mock.MagicMock()
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=200), return_val))

    scopes = ['http://example.com/a', 'http://example.com/b']
    credentials = AppAssertionCredentials(scope=scopes)
    self.assertEquals(None, credentials.access_token)
    credentials.refresh(http)
    self.assertEquals(access_token, credentials.access_token)

    base_metadata_uri = ('http://metadata.google.internal/0.1/meta-data/'
                         'service-accounts/default/acquire')
    escaped_scopes = urllib.parse.quote(' '.join(scopes), safe='')
    request_uri = base_metadata_uri + '?scope=' + escaped_scopes
    http.request.assert_called_once_with(request_uri)


def _refresh_success_helper(self, bytes_response=False):
    access_token = u'this-is-a-token'
    return_val = json.dumps({u'accessToken': access_token})
    if bytes_response:
        return_val = _to_bytes(return_val)

    http = mock.MagicMock()
    http.request = mock.MagicMock(
        return_value=(mock.Mock(status=http_client.OK), return_val))

    scopes = ['http://example.com/a', 'http://example.com/b']
    credentials = AppAssertionCredentials(scope=scopes)
    self.assertEquals(None, credentials.access_token)
    credentials.refresh(http)
    self.assertEquals(access_token, credentials.access_token)

    base_metadata_uri = ('http://metadata.google.internal/0.1/meta-data/'
                         'service-accounts/default/acquire')
    escaped_scopes = urllib.parse.quote(' '.join(scopes), safe='')
    request_uri = base_metadata_uri + '?scope=' + escaped_scopes
    http.request.assert_called_once_with(request_uri)

def main():
    config.load_incluster_config()

    crds = client.CustomObjectsApi()

    creds = AppAssertionCredentials()
    cloudbuild = discovery_build('cloudbuild', 'v1', credentials=creds)

    def watch_until_done(obj, operation):
        name = operation["name"]
        while not operation.get("done", False):
            logging.error("Waiting on: %s", name)
            time.sleep(1)
            operation = cloudbuild.operations().get(name=name).execute()
        logging.error("Complete: %s", name)

        spec = obj["spec"]
        spec["Status"] = "DONE"
        if "error" in operation:
            spec["error"] = operation["error"]
        else:
            spec["response"] = operation["response"]
        crds.replace_namespaced_custom_object(
            DOMAIN, "v1", obj["metadata"]["namespace"], "builds",
            obj["metadata"]["name"], obj)

    def build(obj):
        spec = obj["spec"]
        if "Operation" in spec:
            return
        operation = cloudbuild.projects().builds().create(
            projectId='convoy-adapter', body=spec).execute()
        spec["Operation"] = operation["name"]
        obj = crds.replace_namespaced_custom_object(
            DOMAIN, "v1", obj["metadata"]["namespace"], "builds",
            obj["metadata"]["name"], obj)
        logging.error("Waiting until %s is done", operation["name"])
        watch_until_done(obj, operation)

    # TODO(mattmoor): On startup we should start a thread to watch any
    # in-progress builds.
    stream = watch.Watch().stream(crds.list_cluster_custom_object,
                                  DOMAIN, "v1", "builds")
    for event in stream:
        # TODO(mattmoor): Execute in a threadpool.
        try:
            build(event["object"])
        except:
            logging.exception("Error handling event")

def test_service_account_email_failure(self, get_email):
    # Set-up the mock.
    bad_response = httplib2.Response({'status': http_client.NOT_FOUND})
    content = b'bad-bytes-nothing-here'
    get_email.return_value = (bad_response, content)
    # Test the failure.
    credentials = AppAssertionCredentials([])
    self.assertIsNone(credentials._service_account_email)
    with self.assertRaises(AttributeError) as exc_manager:
        getattr(credentials, 'service_account_email')
    error_msg = ('Failed to retrieve the email from the '
                 'Google Compute Engine metadata service')
    self.assertEqual(exc_manager.exception.args,
                     (error_msg, bad_response, content))
    self.assertIsNone(credentials._service_account_email)
    get_email.assert_called_once_with()


def test_refresh_token(self, get_info, get_token):
    http_request = mock.MagicMock()
    http_mock = mock.MagicMock(request=http_request)
    credentials = AppAssertionCredentials()
    credentials.invalid = False
    credentials.service_account_email = '*****@*****.**'
    self.assertIsNone(credentials.access_token)

    credentials.get_access_token(http=http_mock)
    self.assertEqual(credentials.access_token, 'A')
    self.assertTrue(credentials.access_token_expired)
    get_token.assert_called_with(http_request,
                                 service_account='*****@*****.**')

    credentials.get_access_token(http=http_mock)
    self.assertEqual(credentials.access_token, 'B')
    self.assertFalse(credentials.access_token_expired)
    get_token.assert_called_with(http_request,
                                 service_account='*****@*****.**')
    get_info.assert_not_called()

def main():
    config.load_incluster_config()

    crds = client.CustomObjectsApi()

    # TODO(mattmoor): Share a library with the meta controller
    name = os.environ["API_NAME"]
    domain = "%s.googleapis.com" % name
    version = os.environ["API_VERSION"]
    resource = os.environ["API_RESOURCE"]
    plural = resource.lower() + "s"

    creds = AppAssertionCredentials()
    api = discovery_build(name, version, credentials=creds)

    def call(obj):
        spec = obj["spec"]
        logging.error("TODO call %s/%s %s on %s",
                      name, version, resource, json.dumps(obj, indent=1))

    resource_version = ""
    while True:
        stream = watch.Watch().stream(crds.list_cluster_custom_object,
                                      domain, version, plural,
                                      resource_version=resource_version)
        for event in stream:
            # TODO(mattmoor): Execute in a threadpool.
            try:
                obj = event["object"]
                call(obj)

                # Configure where to resume streaming.
                metadata = obj.get("metadata")
                if metadata:
                    resource_version = metadata["resourceVersion"]
            except:
                logging.exception("Error handling event")

def test_create_scoped_required_without_scopes(self):
    credentials = AppAssertionCredentials([])
    self.assertTrue(credentials.create_scoped_required())


def test_serialization_data(self):
    credentials = AppAssertionCredentials(scope=[])
    self.assertRaises(NotImplementedError, getattr, credentials,
                      'serialization_data')


def test_sign_blob_not_implemented(self):
    credentials = AppAssertionCredentials([])
    with self.assertRaises(NotImplementedError):
        credentials.sign_blob(b'blob')


def test_to_json_and_from_json(self):
    credentials = AppAssertionCredentials()
    json = credentials.to_json()
    credentials_from_json = Credentials.new_from_json(json)
    self.assertEqual(credentials.access_token,
                     credentials_from_json.access_token)


def test_create_scoped_required_with_scopes(self, warn_mock):
    credentials = AppAssertionCredentials(['dummy_scope'])
    self.assertFalse(credentials.create_scoped_required())
    warn_mock.assert_called_once_with(_SCOPES_WARNING)


def test_from_json(self):
    with self.assertRaises(NotImplementedError):
        AppAssertionCredentials.from_json({})


def test_to_json(self):
    credentials = AppAssertionCredentials()
    with self.assertRaises(NotImplementedError):
        credentials.to_json()


def test_constructor(self):
    credentials = AppAssertionCredentials(foo='bar')
    self.assertEqual(credentials.scope, '')
    self.assertEqual(credentials.kwargs, {'foo': 'bar'})
    self.assertEqual(credentials.assertion_type, None)


def test_create_scoped_required_with_scopes(self):
    credentials = AppAssertionCredentials(['dummy_scope'])
    self.assertFalse(credentials.create_scoped_required())


def test_create_scoped(self):
    credentials = AppAssertionCredentials([])
    new_credentials = credentials.create_scoped(['dummy_scope'])
    self.assertNotEqual(credentials, new_credentials)
    self.assertTrue(isinstance(new_credentials, AppAssertionCredentials))
    self.assertEqual('dummy_scope', new_credentials.scope)


def test_create_scoped_required(self):
    credentials = AppAssertionCredentials()
    self.assertFalse(credentials.create_scoped_required())

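# A minimal usage sketch (not taken from any of the surrounding files),
# assuming code running on a GCE instance: AppAssertionCredentials is
# typically either passed to a discovery client or used to authorize an
# httplib2.Http object directly. The 'compute' API name below is only an
# example.
import httplib2

from googleapiclient.discovery import build
from oauth2client.contrib.gce import AppAssertionCredentials

credentials = AppAssertionCredentials([])

# Option 1: hand the credentials to a discovery-based client.
compute = build('compute', 'v1', credentials=credentials)

# Option 2: authorize an httplib2.Http object and issue requests with it.
http = credentials.authorize(httplib2.Http())
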
#!/usr/bin/env python

from google.cloud import monitoring

'''
# Using a service account with credentials in a json file:
JSON_CREDS = '/path/to/json'
from oauth2client.service_account import ServiceAccountCredentials
scopes = ["https://www.googleapis.com/auth/monitoring", ]
credentials = ServiceAccountCredentials.from_json_keyfile_name(
    JSON_CREDS, scopes)
'''

# From inside a GCE instance, with default account:
from oauth2client.contrib.gce import AppAssertionCredentials
credentials = AppAssertionCredentials([])

# 'project' is project ID, not name
myproject = 'main-shade-732'
client = monitoring.Client(project=myproject, credentials=credentials)

# Delete ALL custom metrics from this project.
all = client.list_metric_descriptors(type_prefix='custom.')
for a in all:
    descriptor = client.metric_descriptor(str(a.type))
    descriptor.delete()

class GcpHubClient(object):
    """Controller API client.

    Registers the debuggee, queries the active breakpoints and sends
    breakpoint updates to the backend.

    This class supports two types of authentication: metadata service and
    service account. The mode is selected by calling the
    EnableServiceAccountAuth or EnableGceAuth method.

    GcpHubClient creates a worker thread that communicates with the backend.
    The thread can be stopped with the Stop function, but that is optional
    since the worker thread is marked as daemon.
    """

    def __init__(self):
        self.on_active_breakpoints_changed = lambda x: None
        self.on_idle = lambda: None
        self._debuggee_labels = {}
        self._service_account_auth = False
        self._debuggee_id = None
        self._wait_token = 'init'
        self._breakpoints = []
        self._main_thread = None
        self._transmission_thread = None
        self._transmission_thread_startup_lock = threading.Lock()
        self._transmission_queue = deque(maxlen=100)
        self._new_updates = threading.Event(False)

        # Disable logging in the discovery API to avoid excessive logging.
        class _ChildLogFilter(logging.Filter):
            """Filter to eliminate info-level logging when called from this module."""

            def __init__(self, filter_levels=None):
                super(_ChildLogFilter, self).__init__()
                self._filter_levels = filter_levels or {logging.INFO}
                # Get name without extension to avoid .py vs .pyc issues
                self._my_filename = os.path.splitext(
                    inspect.getmodule(_ChildLogFilter).__file__)[0]

            def filter(self, record):
                if record.levelno not in self._filter_levels:
                    return True
                callerframes = inspect.getouterframes(inspect.currentframe())
                for f in callerframes:
                    if os.path.splitext(f[1])[0] == self._my_filename:
                        return False
                return True

        self._log_filter = _ChildLogFilter({logging.INFO})
        discovery.logger.addFilter(self._log_filter)

        #
        # Configuration options (constants only modified by unit test)
        #

        # Delay before retrying failed request.
        self.register_backoff = Backoff()  # Register debuggee.
        self.list_backoff = Backoff()  # Query active breakpoints.
        self.update_backoff = Backoff()  # Update breakpoint.

        # Maximum number of times that the message is re-transmitted before it
        # is assumed to be poisonous and discarded.
        self.max_transmit_attempts = 10

    def InitializeDebuggeeLabels(self, flags):
        """Initializes debuggee labels from environment variables and flags.

        The caller passes all the flags that the debuglet got. This function
        only uses the flags needed to label the debuggee. Flags take
        precedence over environment variables.

        The debuggee description is formatted from the available flags.

        The project ID is not set here. It is obtained from the metadata
        service or specified as a parameter to EnableServiceAccountAuth.

        Args:
          flags: dictionary of debuglet command line flags.
        """
        self._debuggee_labels = {}

        for (label, env) in _DEBUGGEE_LABELS.iteritems():
            if env and env in os.environ:
                # Special case for GAE_MODULE_NAME. We omit the "default"
                # module to stay consistent with AppEngine.
                if env == 'GAE_MODULE_NAME' and os.environ[env] == 'default':
                    continue
                self._debuggee_labels[label] = os.environ[env]

        if flags:
            self._debuggee_labels.update(
                {name: value for (name, value) in flags.iteritems()
                 if name in _DEBUGGEE_LABELS})

        self._debuggee_labels['projectid'] = self._project_id()

    def EnableServiceAccountAuthP12(self, project_id, project_number,
                                    email, p12_file):
        """Selects service account authentication with a p12 file.

        Using this function is not recommended. Use
        EnableServiceAccountAuthJson for authentication instead; the p12 file
        format is no longer recommended.

        Args:
          project_id: GCP project ID (e.g. myproject).
          project_number: numeric GCP project ID (e.g. 72386324623).
          email: service account identifier for use with p12_file
              ([email protected]).
          p12_file: (deprecated) path to an old-style p12 file with the
              private key.

        Raises:
          NotImplementedError indicates that the installed version of
          oauth2client does not support using a p12 file.
        """
        try:
            with open(p12_file, 'rb') as f:
                self._credentials = oauth2client.client.SignedJwtAssertionCredentials(
                    email, f.read(), scope=_CLOUD_PLATFORM_SCOPE)
        except AttributeError:
            raise NotImplementedError(
                'P12 key files are no longer supported. Please use a JSON '
                'credentials file instead.')
        self._project_id = lambda: project_id
        self._project_number = lambda: project_number

    def EnableServiceAccountAuthJson(self, project_id, project_number,
                                     auth_json_file):
        """Selects service account authentication using JSON credentials.

        Args:
          project_id: GCP project ID (e.g. myproject).
          project_number: numeric GCP project ID (e.g. 72386324623).
          auth_json_file: the JSON key file.
        """
        self._credentials = (
            service_account.ServiceAccountCredentials
            .from_json_keyfile_name(auth_json_file,
                                    scopes=_CLOUD_PLATFORM_SCOPE))
        self._project_id = lambda: project_id
        self._project_number = lambda: project_number

    def EnableGceAuth(self):
        """Selects the local metadata service for authentication.

        The project ID and project number are also retrieved from the metadata
        service. This is done lazily from the worker thread. The motivation is
        to speed up initialization and be able to recover from failures.
        """
        self._credentials = AppAssertionCredentials()
        self._project_id = lambda: self._QueryGcpProject('project-id')
        self._project_number = lambda: self._QueryGcpProject(
            'numeric-project-id')

    def Start(self):
        """Starts the worker thread."""
        self._shutdown = False

        self._main_thread = threading.Thread(target=self._MainThreadProc)
        self._main_thread.name = 'Cloud Debugger main worker thread'
        self._main_thread.daemon = True
        self._main_thread.start()

    def Stop(self):
        """Signals the worker threads to shut down and waits until they exit."""
        self._shutdown = True
        self._new_updates.set()  # Wake up the transmission thread.

        if self._main_thread is not None:
            self._main_thread.join()
            self._main_thread = None

        if self._transmission_thread is not None:
            self._transmission_thread.join()
            self._transmission_thread = None

    def EnqueueBreakpointUpdate(self, breakpoint):
        """Asynchronously updates the specified breakpoint on the backend.

        This function returns immediately. The worker thread actually does all
        the work and is responsible for retrying the transmission in case of
        transient errors.

        Args:
          breakpoint: breakpoint in either final or non-final state.
        """
        with self._transmission_thread_startup_lock:
            if self._transmission_thread is None:
                self._transmission_thread = threading.Thread(
                    target=self._TransmissionThreadProc)
                self._transmission_thread.name = (
                    'Cloud Debugger transmission thread')
                self._transmission_thread.daemon = True
                self._transmission_thread.start()

        self._transmission_queue.append((breakpoint, 0))
        self._new_updates.set()  # Wake up the worker thread to send immediately.

    def _BuildService(self):
        http = httplib2.Http(timeout=_HTTP_TIMEOUT_SECONDS)
        http = self._credentials.authorize(http)
        api = apiclient.discovery.build('clouddebugger', 'v2', http=http)
        return api.controller()

    def _MainThreadProc(self):
        """Entry point for the worker thread."""
        registration_required = True
        while not self._shutdown:
            if registration_required:
                service = self._BuildService()
                registration_required, delay = self._RegisterDebuggee(service)
            if not registration_required:
                registration_required, delay = self._ListActiveBreakpoints(
                    service)
                if self.on_idle is not None:
                    self.on_idle()

            if not self._shutdown:
                time.sleep(delay)

    def _TransmissionThreadProc(self):
        """Entry point for the transmission worker thread."""
        reconnect = True

        while not self._shutdown:
            self._new_updates.clear()

            if reconnect:
                service = self._BuildService()
                reconnect = False

            reconnect, delay = self._TransmitBreakpointUpdates(service)

            self._new_updates.wait(delay)

    def _RegisterDebuggee(self, service):
        """Single attempt to register the debuggee.

        If the registration succeeds, sets self._debuggee_id to the registered
        debuggee ID.

        Args:
          service: client to use for API calls

        Returns:
          (registration_required, delay) tuple
        """
        try:
            request = {'debuggee': self._GetDebuggee()}

            try:
                response = service.debuggees().register(
                    body=request).execute()

                self._debuggee_id = response['debuggee']['id']
                native.LogInfo('Debuggee registered successfully, ID: %s' % (
                    self._debuggee_id))
                self.register_backoff.Succeeded()
                # Proceed immediately to list active breakpoints.
                return (False, 0)
            except BaseException:
                native.LogInfo('Failed to register debuggee: %s, %s' %
                               (request, traceback.format_exc()))
        except BaseException:
            native.LogWarning('Debuggee information not available: ' +
                              traceback.format_exc())

        return (True, self.register_backoff.Failed())

    def _ListActiveBreakpoints(self, service):
        """Single attempt to query the list of active breakpoints.

        Must not be called before the debuggee has been registered. If the
        request fails, this function resets self._debuggee_id, which triggers
        repeated debuggee registration.

        Args:
          service: client to use for API calls

        Returns:
          (registration_required, delay) tuple
        """
        try:
            response = service.debuggees().breakpoints().list(
                debuggeeId=self._debuggee_id, waitToken=self._wait_token,
                successOnTimeout=True).execute()

            breakpoints = response.get('breakpoints') or []
            self._wait_token = response.get('nextWaitToken')
            if cmp(self._breakpoints, breakpoints) != 0:
                self._breakpoints = breakpoints
                native.LogInfo(
                    'Breakpoints list changed, %d active, wait token: %s' % (
                        len(self._breakpoints), self._wait_token))
                self.on_active_breakpoints_changed(
                    copy.deepcopy(self._breakpoints))
        except Exception as e:
            native.LogInfo('Failed to query active breakpoints: ' +
                           traceback.format_exc())

            # Forget the debuggee ID to trigger repeated debuggee
            # registration. Once the registration succeeds, the worker thread
            # will retry this query.
            self._debuggee_id = None

            return (True, self.list_backoff.Failed())

        self.list_backoff.Succeeded()
        return (False, 0)

    def _TransmitBreakpointUpdates(self, service):
        """Tries to send pending breakpoint updates to the backend.

        Sends all the pending breakpoint updates. In case of transient
        failures, the breakpoint is inserted back to the top of the queue.
        Application failures are not retried (for example, updating a
        breakpoint that is in a final state).

        Each pending breakpoint maintains a retry counter. After repeated
        transient failures the breakpoint is discarded and dropped from the
        queue.

        Args:
          service: client to use for API calls

        Returns:
          (reconnect, timeout) tuple. The first element ("reconnect") is set
          to true on unexpected HTTP responses. The caller should discard the
          HTTP connection and create a new one. The second element ("timeout")
          is set to None if all pending breakpoints were sent successfully.
          Otherwise it is the time interval in seconds to stall before
          retrying.
        """
        reconnect = False
        retry_list = []

        # There is only one consumer, so two-step pop is safe.
        while self._transmission_queue:
            breakpoint, retry_count = self._transmission_queue.popleft()

            try:
                service.debuggees().breakpoints().update(
                    debuggeeId=self._debuggee_id, id=breakpoint['id'],
                    body={'breakpoint': breakpoint}).execute()

                native.LogInfo(
                    'Breakpoint %s update transmitted successfully' % (
                        breakpoint['id']))
            except apiclient.errors.HttpError as err:
                # Treat 400 error codes (except timeout) as application errors
                # that will not be retried. All other errors are assumed to be
                # transient.
                status = err.resp.status
                is_transient = ((status >= 500) or (status == 408))
                if is_transient and retry_count < self.max_transmit_attempts - 1:
                    native.LogInfo('Failed to send breakpoint %s update: %s' % (
                        breakpoint['id'], traceback.format_exc()))
                    retry_list.append((breakpoint, retry_count + 1))
                elif is_transient:
                    native.LogWarning(
                        'Breakpoint %s retry count exceeded maximum' %
                        breakpoint['id'])
                else:
                    # This is very common if multiple instances are sending
                    # the final update simultaneously.
                    native.LogInfo('%s, breakpoint: %s' % (
                        err, breakpoint['id']))
            except Exception:
                native.LogWarning(
                    'Fatal error sending breakpoint %s update: %s' % (
                        breakpoint['id'], traceback.format_exc()))
                reconnect = True

        self._transmission_queue.extend(retry_list)

        if not self._transmission_queue:
            self.update_backoff.Succeeded()
            # Nothing to send, wait until the next breakpoint update.
            return (reconnect, None)
        else:
            return (reconnect, self.update_backoff.Failed())

    def _QueryGcpProject(self, resource):
        """Queries a project resource on the local metadata service."""
        url = _LOCAL_METADATA_SERVICE_PROJECT_URL + resource
        http = httplib2.Http()
        response, content = http.request(
            url, headers={'Metadata-Flavor': 'Google'})
        if response['status'] != '200':
            raise RuntimeError(
                'HTTP error %s %s when querying local metadata service at %s' %
                (response['status'], content, url))

        return content

    def _GetDebuggee(self):
        """Builds the debuggee structure."""
        major_version = version.__version__.split('.')[0]
        debuggee = {
            'project': self._project_number(),
            'description': self._GetDebuggeeDescription(),
            'labels': self._debuggee_labels,
            'agentVersion': 'google.com/python2.7-' + major_version
        }

        source_context = self._ReadAppJsonFile('source-context.json')
        if source_context:
            debuggee['sourceContexts'] = [source_context]

        source_contexts = self._ReadAppJsonFile('source-contexts.json')
        if source_contexts:
            debuggee['extSourceContexts'] = source_contexts
        elif source_context:
            debuggee['extSourceContexts'] = [{'context': source_context}]

        debuggee['uniquifier'] = self._ComputeUniquifier(debuggee)

        return debuggee

    def _GetDebuggeeDescription(self):
        """Formats the debuggee description based on debuggee labels."""
        return '-'.join(self._debuggee_labels[label]
                        for label in _DESCRIPTION_LABELS
                        if label in self._debuggee_labels)

    def _ComputeUniquifier(self, debuggee):
        """Computes the debuggee uniquifier.

        The debuggee uniquifier has to be identical on all instances.
        Therefore the uniquifier should not include any random numbers and
        should only be based on inputs that are guaranteed to be the same on
        all instances.

        Args:
          debuggee: complete debuggee message without the uniquifier

        Returns:
          Hex string of SHA1 hash of project information, debuggee labels and
          debuglet version.
        """
        uniquifier = hashlib.sha1()

        # Project information.
        uniquifier.update(self._project_id())
        uniquifier.update(self._project_number())

        # Debuggee information.
        uniquifier.update(str(debuggee))

        # Compute a hash of application files if we don't have a source
        # context. This way we can still distinguish between different
        # deployments.
        if ('minorversion' not in debuggee.get('labels', []) and
                'sourceContexts' not in debuggee and
                'extSourceContexts' not in debuggee):
            uniquifier_computer.ComputeApplicationUniquifier(uniquifier)

        return uniquifier.hexdigest()

    def _ReadAppJsonFile(self, relative_path):
        """Reads a JSON file from the application directory.

        Args:
          relative_path: file name relative to the application root directory.

        Returns:
          Parsed JSON data, or None if the file does not exist, can't be read,
          or is not a valid JSON file.
        """
        try:
            with open(os.path.join(sys.path[0], relative_path), 'r') as f:
                return json.load(f)
        except (IOError, ValueError):
            return None

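# A hypothetical wiring sketch (not part of the module above) showing how the
# client is typically configured when running on GCE: enable metadata-service
# authentication, label the debuggee, and start the worker thread. The flags
# dictionary below is only a placeholder.
hub_client = GcpHubClient()
hub_client.EnableGceAuth()  # Use AppAssertionCredentials via the metadata server.
hub_client.InitializeDebuggeeLabels({'module': 'my-service', 'version': '1'})
hub_client.Start()
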
def test_service_account_email_already_set(self, get_email):
    credentials = AppAssertionCredentials([])
    acct_name = '*****@*****.**'
    credentials._service_account_email = acct_name
    self.assertEqual(credentials.service_account_email, acct_name)
    get_email.assert_not_called()