def _get_bigquery_service(self):
    """Build a BigQuery v2 API client authorized via this GCE instance's
    default service-account credentials (metadata-server assertion)."""
    creds = gce.AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/bigquery')
    authorized_http = creds.authorize(httplib2.Http())
    return build("bigquery", "v2", http=authorized_http)
def AcquireFromGCE(account=None):
    """Fetch service-account credentials from a GCE metadata server.

    Args:
      account: str, Account name to use; when None, the instance's default
        service account is used.

    Returns:
      client.Credentials, Credentials obtained from the metadata server.

    Raises:
      c_gce.CannotConnectToMetadataServerException: If the metadata server
        cannot be reached.
      RefreshError: If the credentials fail to refresh.
      Error: If a non-default service account is requested.
    """
    default_account = c_gce.Metadata().DefaultAccount()
    # Only the default account is supported; anything else is rejected.
    if account is not None and account != default_account:
        raise Error('Unable to use non-default GCE service accounts.')
    # TODO(user): Update oauth2client to fetch alternate credentials. This
    # inability is not currently a problem, because the metadata server does
    # not yet provide multiple service accounts.
    credentials = oauth2client_gce.AppAssertionCredentials(
        config.CLOUDSDK_SCOPES)
    Refresh(credentials)
    return credentials
def testGetCredentialsFromEnvCompute(self):
    """get_credentials_from_env returns GCE credentials when available."""
    self.mox.StubOutWithMock(gce, 'AppAssertionCredentials')
    mock_credentials = self.mox.CreateMockAnything()
    # Expect the helper to build, authorize and refresh GCE credentials.
    gce.AppAssertionCredentials(connection.SCOPE).AndReturn(mock_credentials)
    mock_credentials.authorize(mox.IsA(httplib2.Http))
    mock_credentials.refresh(mox.IsA(httplib2.Http))
    self.mox.ReplayAll()
    self.assertIs(mock_credentials, helper.get_credentials_from_env())
    self.mox.VerifyAll()
def __init__(self, **kwargs):
    """Create a GCloud client from the 'gcloud' configuration section.

    Keyword Args:
        scope: list of OAuth scopes to request (defaults to storage
            full-control plus BigQuery).
        name: project name used to resolve "api.project.<name>.*" config
            keys (default "default").
        config: configuration name (default "default").
    """
    scope = kwargs.get("scope") or [
        'https://www.googleapis.com/auth/devstorage.full_control',
        'https://www.googleapis.com/auth/bigquery'
    ]
    self.config = dict(configuration.get_config().items('gcloud'))
    self._project_name = kwargs.get("name", "default")
    self._config_name = kwargs.get("config", "default")
    logger.debug("GCloudClient client created client name: " +
                 self._project_name + ", config name: " + self._config_name)
    # All per-project settings live under this configuration-key prefix;
    # hoisted once instead of rebuilding the string for every lookup.
    key_prefix = "api.project." + self._project_name
    auth_method = self.config.get(key_prefix + ".auth.method", "service")
    if auth_method == 'secret':
        # Three-legged OAuth: run the installed-app flow once, then cache
        # the resulting token on disk for later runs.
        secret_file = self.config.get(key_prefix + ".auth.secret.file",
                                      "secret.json")
        credentials_file = self.config.get(
            key_prefix + ".auth.credentials.file", "credentials.json")
        flow = oauthclient.flow_from_clientsecrets(
            secret_file, scope=scope,
            redirect_uri='urn:ietf:wg:oauth:2.0:oob')
        try:
            # "with" closes the handle even if JSON parsing raises; the
            # previous open()/close() pair leaked the file on error.
            with open(credentials_file, 'r') as token_file:
                self.credentials = OAuth2Credentials.from_json(
                    token_file.read())
        except IOError:
            # No cached token yet: have the user authorize in a browser
            # and exchange the pasted code for credentials.
            auth_uri = flow.step1_get_authorize_url()
            webbrowser.open(auth_uri)
            auth_code = raw_input('Enter the auth code: ')
            self.credentials = flow.step2_exchange(auth_code)
            with open(credentials_file, 'w') as token_file:
                token_file.write(self.credentials.to_json())
    elif auth_method == 'service':
        # GCE instance service account via the metadata server.
        self.credentials = gce.AppAssertionCredentials(scope=scope)
    elif auth_method == 'default':
        # Application Default Credentials (env var / gcloud SDK).
        self.credentials = GoogleCredentials.get_application_default()
    self._project_number = self.config.get(key_prefix + ".number")
    self._project_id = self.config.get(key_prefix + ".id")
    self._staging = self.config.get(key_prefix + ".staging")
    self._zone = self.config.get(key_prefix + ".zone")
def _load_credentials(self, credentials_file_path):
    """Resolve credentials from a sentinel value or a credentials file."""
    if credentials_file_path == GCE_CREDENTIALS:
        return gce.AppAssertionCredentials(self._SCOPES)
    if credentials_file_path == APPENGINE_CREDENTIALS:  # pragma: no cover
        # This import doesn't work outside appengine, so delay it until
        # it's used.
        from oauth2client import appengine
        return appengine.AppAssertionCredentials(self._SCOPES)
    with open(credentials_file_path, 'r') as credentials_file:
        parsed = json.load(credentials_file)
    if parsed.get('type', None):
        # A "type" field marks a service-account JSON key file.
        scoped = GoogleCredentials.from_stream(
            credentials_file_path).create_scoped(self._SCOPES)
        return scoped
    return Storage(credentials_file_path).get()
def get_credentials_from_env():
    """Build datastore credentials from the environment.

    Tries, in order:
    - Google APIs signed-JWT credentials from DATASTORE_SERVICE_ACCOUNT
      and DATASTORE_PRIVATE_KEY_FILE,
    - the Compute Engine metadata-server service account,
    - no credentials at all (development server only).

    Returns:
      datastore credentials, or None when running without auth.
    """
    # Signed JWT first: both env vars must be set, and the corresponding
    # service account should be an admin of the datastore application.
    service_account = os.getenv('DATASTORE_SERVICE_ACCOUNT')
    key_path = os.getenv('DATASTORE_PRIVATE_KEY_FILE')
    if service_account and key_path:
        with open(key_path, 'rb') as key_file:
            private_key = key_file.read()
        jwt_credentials = client.SignedJwtAssertionCredentials(
            service_account, private_key, connection.SCOPE)
        logging.info('connecting using DatastoreSignedJwtCredentials')
        return jwt_credentials
    try:
        # Fall back to Compute Engine credentials from the metadata server.
        # The immediate refresh doubles as a probe for whether we are
        # actually running on Compute Engine.
        compute_credentials = gce.AppAssertionCredentials(connection.SCOPE)
        http = httplib2.Http()
        compute_credentials.authorize(http)
        compute_credentials.refresh(http)
        logging.info('connecting using compute credentials')
        return compute_credentials
    except (client.AccessTokenRefreshError, httplib2.HttpLib2Error):
        # Last resort: no credentials. Only calls to the development
        # server will be authorized.
        logging.info('connecting using no credentials')
        return None
def get_credentials_from_env():
    """Derive datastore credentials from DATASTORE_* environment variables.

    Fallback order:
    - Compute Engine service account,
    - signed-JWT credentials built from DATASTORE_SERVICE_ACCOUNT and
      DATASTORE_PRIVATE_KEY_FILE,
    - no credentials (development server).

    Returns:
      datastore credentials, or None when none are available.
    """
    try:
        # Prefer Compute Engine credentials; the explicit refresh below
        # doubles as a probe for whether we are running on GCE at all. The
        # service account should be an admin of the datastore application.
        compute_creds = gce.AppAssertionCredentials(connection.SCOPE)
        http = httplib2.Http()
        compute_creds.authorize(http)
        compute_creds.refresh(http)
        logging.info('connect using compute credentials')
        return compute_creds
    except (client.AccessTokenRefreshError, httplib2.HttpLib2Error):
        # Not on GCE (or refresh failed): try Google APIs Console service
        # accounts (signed JWT) configured through the environment.
        account = os.getenv('DATASTORE_SERVICE_ACCOUNT')
        key_path = os.getenv('DATASTORE_PRIVATE_KEY_FILE')
        if account and key_path:
            with open(key_path, 'rb') as key_file:
                private_key = key_file.read()
            jwt_creds = client.SignedJwtAssertionCredentials(
                account, private_key, connection.SCOPE)
            logging.info('connect using DatastoreSignedJwtCredentials')
            return jwt_creds
        # No DATASTORE_* variables either: proceed without credentials,
        # which only the development server accepts.
        logging.info('connect using no credentials')
        return None
def get_botmap_entry(slave_name): credentials = gce.AppAssertionCredentials( scope='https://www.googleapis.com/auth/userinfo.email') http = credentials.authorize(httplib2.Http()) botmap = ('https://chrome-infra-botmap.appspot.com/_ah/api/botmap/v1/bots/' '%s' % slave_name) try: response, content = http.request(botmap) if response['status'] != '200': # Request did not succeed. Try again. print 'response: %s' % response print 'content: %s' % content print 'Error requesting bot map.' raise httplib.HTTPException('HTTP status %s != 200' % response['status']) bot_entry = json.loads(content) except Exception as e: print 'Error requesting bot map. Host may be missing authentication.' print str(e) raise return bot_entry
def _GetAppCredentials():
    """Build GCE assertion credentials scoped for Gerrit code review."""
    gerrit_scope = 'https://www.googleapis.com/auth/gerritcodereview'
    return gce.AppAssertionCredentials(scope=gerrit_scope)
def create(self, scopes):
    """Return GCE metadata-server credentials for the given scopes."""
    credentials = gce.AppAssertionCredentials(scopes)
    return credentials
def gce_authenticate():
    """Authenticate to the build service using the GCE instance credentials."""
    build_scopes = ['https://www.googleapis.com/auth/androidbuild.internal']
    return gce.AppAssertionCredentials(scope=build_scopes)
def main(argv):
    """Worker daemon main loop: lease taskqueue tasks and run them; when
    idle, restart crashed minecraft servers and upload leftover archives."""
    global client
    global project
    global app_bucket
    global logger
    init_logger()
    logger = logging.getLogger('main')
    # Ensure the working directories exist before any server is handled.
    if not os.path.exists(SERVERS_DIR):
        os.makedirs(SERVERS_DIR)
    if not os.path.exists(ARCHIVES_DIR):
        os.makedirs(ARCHIVES_DIR)
    if not os.path.exists(MINECRAFT_DIR):
        os.makedirs(MINECRAFT_DIR)
    try:
        # Instance identity/config is provisioned as files under /coal.
        with open('/coal/project_id', 'r') as f:
            project = f.read().strip()
        with open('/coal/client_id', 'r') as f:
            client_id = f.read().strip()
        with open('/coal/secret', 'r') as f:
            secret = f.read().strip()
        init_external_ip()
        client = ControllerClient('{0}.appspot.com'.format(project),
                                  client_id, secret)
        app_bucket = '{0}.appspot.com'.format(project)
        # Authorize against the task queue API with GCE instance credentials.
        credentials = gce.AppAssertionCredentials(scope=TQ_API_SCOPE)
        http = credentials.authorize(httplib2.Http())
        service = build('taskqueue', TQ_API_VERSION, http=http)
        while True:
            tasks = lease_tasks(service)
            if tasks:
                completed_tasks = []
                try:
                    completed_tasks = complete_tasks(tasks)
                finally:
                    # Delete whatever finished, even if completion raised.
                    delete_tasks(service, completed_tasks)
            else:
                # Restart crashed servers
                servers = get_servers()
                if servers:
                    for server_key, server in servers.iteritems():
                        server_dir = get_server_dir(server['port'])
                        pid = get_server_pid(server_dir)
                        # A recorded pid that no longer exists means the
                        # server process died outside our control.
                        if pid and not pid_exists(pid):
                            logger.error(
                                "Server '{0}' has crashed. Restarting minecraft."
                                .format(server_key))
                            run_server_script = os.path.join(
                                server_dir, RUN_SERVER_FILENAME)
                            start_minecraft(server_key, server_dir,
                                            run_server_script)
                # Save non-uploaded archives
                archives = get_archives()
                if archives:
                    # One archive per idle pass; the rest are picked up on
                    # subsequent iterations.
                    server_key, archive_file = archives[0]
                    upload_zip_to_gcs(server_key, archive_file, backup=True)
                else:
                    # Nothing to do: back off before polling again.
                    time.sleep(10.0)
    except KeyboardInterrupt:
        logger.info(u"Canceled")
    except SystemExit:
        logger.info(u"System Exit")
    except Exception, e:
        logger.error(u"Unexpected {0}: {1}".format(type(e).__name__, e))
def upload_zip_to_gcs(server_key, archive_file, backup=False):
    """Resumably upload a server archive to GCS, reporting progress to the
    controller (unless backup=True), and delete the local file when done.

    Raises HttpError after NUM_RETRIES consecutive transient failures, or
    immediately on non-retryable HTTP errors.
    """
    name = get_gcs_archive_name(server_key)
    credentials = gce.AppAssertionCredentials(scope=STORAGE_API_SCOPE)
    http = credentials.authorize(httplib2.Http())
    service = build('storage', STORAGE_API_VERSION, http=http)
    # Outer loop restarts the whole upload from scratch (e.g. after a 404
    # invalidates the resumable session); inner loop sends chunks.
    retry = True
    while retry:
        media = MediaFileUpload(archive_file, chunksize=CHUNKSIZE,
                                resumable=True)
        if not media.mimetype():
            # Fall back to a generic zip mimetype when detection fails.
            media = MediaFileUpload(archive_file, 'application/zip',
                                    resumable=True)
        request = service.objects().insert(bucket=app_bucket, name=name,
                                           media_body=media)
        progress = previous_progress = None
        tries = 0  # consecutive transient-error count; reset on success
        response = None
        while response is None:
            try:
                status, response = request.next_chunk()
                tries = 0
                progress = int(status.progress() * 100) if status is not None else 0
                if response is not None:
                    # Done
                    retry = False
                    progress = 100
                if progress != previous_progress:
                    if progress % 10 == 0:
                        logger.info(
                            "Server {0} archive is {1}% uploaded".format(
                                server_key, progress))
                    if not backup:
                        # Progress events are best-effort; never let a
                        # controller hiccup abort the upload.
                        try:
                            client.post_event(server_key, STOP_EVENT, progress)
                        except Exception as e:
                            logger.exception(
                                "Error sending controller save event for server [{0}]: {1}"
                                .format(server_key, e))
                    previous_progress = progress
            except HttpError as e:
                if e.resp.status in [404]:
                    # Start upload all over again
                    response = None
                    logging.error(
                        "Error ({0}) uploading archive for server {1}. Retrying...."
                        .format(str(e), server_key))
                elif e.resp.status in [500, 502, 503, 504]:
                    # Retry with backoff
                    tries += 1
                    if tries > NUM_RETRIES:
                        raise
                    # Exponential backoff capped at 2**4 = 16 seconds.
                    sleeptime = 2**min(tries, 4)
                    logger.error(
                        "Error ({0}) uploading archive for server {1}. Sleeping {2} seconds."
                        .format(str(e), server_key, sleeptime))
                    time.sleep(sleeptime)
                else:
                    raise
    # Upload succeeded: the local archive is no longer needed.
    os.remove(archive_file)
def load_zip_from_gcs(server_key):
    """Resumably download a server archive from GCS to the local archive
    path, posting progress events to the controller.

    Returns True on success, False when the object is missing or the
    download ultimately fails; re-raises HttpError from the listing call.
    """
    name = get_gcs_archive_name(server_key)
    credentials = gce.AppAssertionCredentials(scope=STORAGE_API_SCOPE)
    http = credentials.authorize(httplib2.Http())
    service = build('storage', STORAGE_API_VERSION, http=http)
    try:
        # Probe for the object first; an empty listing means no archive.
        request = service.objects().list(bucket=app_bucket, prefix=name)
        result = request.execute()
        items = result.get('items', [])
        if not items:
            return False
    except HttpError as e:
        logging.error("Error ({0}) calling {1}".format(
            e.resp, getattr(e, 'operationType', None)))
        raise
    archive = get_archive_file_path(server_key)
    # Outer loop restarts the whole download (e.g. after a 404); inner loop
    # pulls chunks until the media download reports completion.
    retry = True
    while retry:
        with file(archive, 'w') as f:
            request = service.objects().get_media(bucket=app_bucket,
                                                  object=name)
            media = MediaIoBaseDownload(f, request, chunksize=CHUNKSIZE)
            tries = 0  # consecutive transient-error count; reset on success
            done = False
            progress = previous_progress = None
            while not done:
                try:
                    status, done = media.next_chunk()
                    tries = 0
                    progress = int(status.progress() * 100) if status is not None else 0
                    if done:
                        # Done
                        retry = False
                        progress = 100
                    if progress != previous_progress:
                        if progress % 10 == 0:
                            logger.info(
                                "Server {0} archive is {1}% downloaded".format(
                                    server_key, progress))
                        # Progress events are best-effort; never let a
                        # controller hiccup abort the download.
                        try:
                            client.post_event(server_key, START_EVENT, progress)
                        except Exception as e:
                            logger.exception(
                                "Error sending controller load event for server [{0}]: {1}"
                                .format(server_key, e))
                        previous_progress = progress
                except HttpError as e:
                    if e.resp.status in [404]:
                        # Start download all over again
                        os.remove(archive)
                        done = True
                        logging.error(
                            "Error ({0}) downloading archive for server {1}. Retrying...."
                            .format(str(e), server_key))
                    elif e.resp.status in [500, 502, 503, 504]:
                        # Retry with backoff
                        tries += 1
                        if tries > NUM_RETRIES:
                            os.remove(archive)
                            return False
                        # Exponential backoff capped at 2**4 = 16 seconds.
                        sleeptime = 2**min(tries, 4)
                        logger.error(
                            "Error ({0}) downloading archive for server {1}. Sleeping {2} seconds."
                            .format(str(e), server_key, sleeptime))
                        time.sleep(sleeptime)
                    else:
                        os.remove(archive)
                        return False
    return True