def publish(topic, project, message, **attributes):
  """Publish messages to Cloud Pub/Sub.

  Creates the topic if it doesn't exist.

  Args:
    topic: Name of the topic to publish to.
    project: Name of the project the topic should exist in.
    message: Content of the message to publish.
    **attributes: Any attributes to send with the message.
  """
  try:
    _publish(topic, project, message, **attributes)
  except net.Error as e:
    if e.status_code == 404:
      # Topic does not exist. Try to create it.
      try:
        net.json_request(
            '%s/%s/topics/%s' % (PUBSUB_BASE_URL, project, topic),
            method='PUT',
            scopes=PUBSUB_SCOPES,
        )
      except net.Error as e:
        if e.status_code != 409:
          # 409 is the status code when the topic already exists (maybe someone
          # else created it just now). Ignore 409, but raise any other error.
          raise
      # Retransmit now that the topic is created.
      _publish(topic, project, message, **attributes)
    else:
      # Unknown error.
      raise
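A hedged usage sketch for the function above; the topic, project, and attribute values are made-up examples, not names from this codebase:

# Illustrative call only. A 404 on the first attempt creates the topic and
# retransmits the message; attribute keys and values are arbitrary strings.
publish(
    'instance-events',          # hypothetical topic
    'example-cloud-project',    # hypothetical project
    'instance deleted',
    hostname='vm-123',
    zone='us-central1-b',
)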
def test_json_bad_response(self):
  self.mock_urlfetch([
      ({'url': 'http://localhost/123'}, Response(200, 'not a json', {})),
  ])
  with self.assertRaises(net.Error):
    net.json_request('http://localhost/123')
def test_json_with_jwt_auth_audience_and_scopes_fail(self):
  with self.assertRaises(ValueError):
    net.json_request(
        url='http://localhost/123',
        method='POST',
        payload={'key': 'value'},
        params={'a': '=', 'b': '&'},
        headers={'Header': 'value'},
        deadline=123,
        max_attempts=5,
        scopes=['scope'],
        audience='my-service.appspot.com')
def test_json_with_jwt_auth_and_project_id_fail(self):
  with self.assertRaises(ValueError):
    net.json_request(
        url='http://localhost/123',
        method='POST',
        payload={'key': 'value'},
        params={'a': '=', 'b': '&'},
        headers={'Header': 'value'},
        deadline=123,
        max_attempts=5,
        project_id='some-id',
        scopes=['scope'],
        use_jwt_auth=True)
def _subscribe(cls):
  """Subscribes to a Cloud Pub/Sub topic with a push endpoint."""
  net.json_request(
      '%s/%s/subscriptions/%s' % (
          PUBSUB_BASE_URL,
          cls.SUBSCRIPTION_PROJECT,
          cls.SUBSCRIPTION,
      ),
      method='PUT',
      payload={
          'topic': 'projects/%s/topics/%s' % (cls.TOPIC_PROJECT, cls.TOPIC),
          'pushConfig': {'pushEndpoint': cls.ENDPOINT},
      },
      scopes=PUBSUB_SCOPES,
  )
def test_json_request_works(self):
  self.mock_urlfetch([
      ({
          'deadline': 123,
          'headers': {
              'Authorization': 'Bearer token',
              'Content-Type': 'application/json; charset=utf-8',
              'Header': 'value',
          },
          'method': 'POST',
          'payload': '{"key":"value"}',
          'url': 'http://localhost/123?a=%3D&b=%26',
      }, Response(200, '{"a":"b"}', {})),
  ])
  response = net.json_request(
      url='http://localhost/123',
      method='POST',
      payload={'key': 'value'},
      params={'a': '=', 'b': '&'},
      headers={'Header': 'value'},
      scopes=['scope'],
      service_account_key=auth.ServiceAccountKey('a', 'b', 'c'),
      deadline=123,
      max_attempts=5)
  self.assertEqual({'a': 'b'}, response)
def get_doc(self, service, version):
  cache_key = 'discovery_doc/%s/%s/%s' % (
      modules.get_current_version_name(), service, version)
  cached = memcache.get(cache_key)
  if cached:
    return cached[0]

  logging.info('Fetching actual discovery document')
  doc_url = '%s://%s/_ah/api/discovery/v1/apis/%s/%s/rest' % (
      self.request.scheme,  # Needed for local devserver.
      self.request.host,
      service,
      version)
  try:
    doc = net.json_request(url=doc_url, deadline=45)
    logging.info('Fetched actual discovery document')
  except net.NotFoundError:
    doc = None

  if doc:
    for key in ('baseUrl', 'basePath', 'rootUrl'):
      url = urlparse.urlparse(doc.get(key))
      if url.path.startswith('/_ah/'):
        url = url._replace(path=url.path[len('/_ah'):])
      doc[key] = urlparse.urlunparse(url)
    if 'batchPath' in doc:
      del doc['batchPath']

  memcache.add(cache_key, (doc,))
  return doc
def delete(key):
  """Deletes the instance group manager for the given InstanceGroupManager.

  Args:
    key: ndb.Key for a models.InstanceGroupManager entity.

  Raises:
    net.Error: HTTP status code is not 200 (deleted) or 404 (already deleted).
  """
  url = get_instance_group_manager_to_delete(key)
  if not url:
    return

  try:
    result = net.json_request(url, method='DELETE', scopes=gce.AUTH_SCOPES)
    if result['targetLink'] != url:
      logging.warning(
          'InstanceGroupManager mismatch: %s\nExpected: %s\nFound: %s',
          key,
          url,
          result['targetLink'],
      )
  except net.Error as e:
    if e.status_code != 404:
      # If the instance group manager isn't found, assume it's already deleted.
      raise

  update_url(key, None)
def delete(key):
  """Deletes the instance template for the given InstanceTemplateRevision.

  Args:
    key: ndb.Key for a models.InstanceTemplateRevision entity.

  Raises:
    net.Error: HTTP status code is not 200 (deleted) or 404 (already deleted).
  """
  url = get_instance_template_to_delete(key)
  if not url:
    return

  try:
    result = net.json_request(url, method='DELETE', scopes=gce.AUTH_SCOPES)
    if result['targetLink'] != url:
      logging.warning(
          'InstanceTemplateRevision mismatch: %s\nExpected: %s\nFound: %s',
          key,
          url,
          result['targetLink'],
      )
  except net.Error as e:
    if e.status_code != 404:
      # If the instance template isn't found, assume it's already deleted.
      raise

  update_url(key, None)
def test_json_with_jwt_auth_works(self):
  self.mock_urlfetch([
      ({
          'deadline': 123,
          'headers': {
              'Authorization':
                  r'$^Bearer\ [a-zA-Z0-9_=-]+\.[a-zA-Z0-9_=-]+\.'
                  '[a-zA-Z0-9_=-]+$',
              'Accept': 'application/json; charset=utf-8',
              'Content-Type': 'application/json; charset=utf-8',
              'Header': 'value',
          },
          'method': 'POST',
          'payload': '{"key":"value"}',
          'url': 'http://localhost/123?a=%3D&b=%26',
      }, Response(200, ')]}\'\n{"a":"b"}', {})),
  ])
  response = net.json_request(
      url='http://localhost/123',
      method='POST',
      payload={'key': 'value'},
      params={'a': '=', 'b': '&'},
      headers={'Header': 'value'},
      deadline=123,
      max_attempts=5,
      use_jwt_auth=True,
      audience='my-service.appspot.com')
  self.assertEqual({'a': 'b'}, response)
def _export_builds(dataset, table_name, builds, deadline):
  """Saves builds to BigQuery.

  Logs insert errors and returns a list of ids of builds that could not be
  inserted.
  """
  # BigQuery API doc:
  # https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
  logging.info('sending %d rows', len(builds))

  pairs = [(b, build_pb2.Build()) for b in builds]
  model.builds_to_protos_async(
      pairs,
      load_tags=True,
      load_input_properties=True,
      load_output_properties=True,
      load_steps=True,
      load_infra=True,
  ).get_result()

  # Clear fields that we don't want in BigQuery.
  for _, proto in pairs:
    proto.infra.buildbucket.hostname = ''
    for s in proto.steps:
      s.summary_markdown = ''
      s.ClearField('logs')

  res = net.json_request(
      url=(('https://www.googleapis.com/bigquery/v2/'
            'projects/%s/datasets/%s/tables/%s/insertAll') %
           (app_identity.get_application_id(), dataset, table_name)),
      method='POST',
      payload={
          'kind': 'bigquery#tableDataInsertAllRequest',
          # Do not fail entire request because of one bad build.
          # We handle invalid rows below.
          'skipInvalidRows': True,
          'ignoreUnknownValues': False,
          'rows': [{
              'insertId': str(p.id),
              'json': bqh.message_to_dict(p),
          } for _, p in pairs],
      },
      scopes=bqh.INSERT_ROWS_SCOPE,
      # deadline parameter here is duration in seconds.
      deadline=(deadline - utils.utcnow()).total_seconds(),
  )

  failed_ids = []
  for err in res.get('insertErrors', []):
    _, bp = pairs[err['index']]
    failed_ids.append(bp.id)
    logging.error(
        'failed to insert row for build %d: %r', bp.id, err['errors'])
  return failed_ids
def ensure_topic_exists(topic, project):
  """Ensures the given Cloud Pub/Sub topic exists in the given project.

  Args:
    topic: Name of the topic which should exist.
    project: Name of the project the topic should exist in.
  """
  try:
    net.json_request(
        '%s/%s/topics/%s' % (PUBSUB_BASE_URL, project, topic),
        method='PUT',
        scopes=PUBSUB_SCOPES,
    )
  except net.Error as e:
    if e.status_code != 409:
      # 409 is the status code when the topic already exists.
      # Ignore 409, but raise any other error.
      raise
def exists(instance_url):
  """Returns whether the given instance exists or not.

  Args:
    instance_url: URL of the instance.

  Returns:
    True if the instance exists, False otherwise.

  Raises:
    net.Error: If GCE responds with an error.
  """
  try:
    net.json_request(instance_url, method='GET', scopes=gce.AUTH_SCOPES)
    return True
  except net.Error as e:
    if e.status_code == 404:
      return False
    raise
def add_machines(requests):
  """Add machines to the Machine Provider's Catalog.

  Args:
    requests: A list of rpc_messages.CatalogMachineAdditionRequest instances.
  """
  logging.info('Sending batched add_machines request')
  return net.json_request(
      '%s/add_machines' % CATALOG_BASE_URL,
      method='POST',
      payload=utils.to_json_encodable({'requests': requests}),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def _publish(topic, project, message, **attributes):
  """Publish messages to Cloud Pub/Sub.

  Args:
    topic: Name of the topic to publish to.
    project: Name of the project the topic exists in.
    message: Content of the message to publish.
    **attributes: Any attributes to send with the message.
  """
  net.json_request(
      '%s/%s/topics/%s:publish' % (PUBSUB_BASE_URL, project, topic),
      method='POST',
      payload={
          'messages': [
              {
                  'attributes': attributes,
                  'data': base64.b64encode(message),
              },
          ],
      },
      scopes=PUBSUB_SCOPES,
  )
def lease_machine(request):
  """Lease a machine from the Machine Provider.

  Args:
    request: An rpc_messages.LeaseRequest instance.
  """
  return net.json_request(
      '%s/_ah/api/machine_provider/v1/lease' %
          MachineProviderConfiguration.get_instance_url(),
      method='POST',
      payload=utils.to_json_encodable(request),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def update(key):
  """Updates instance metadata.

  Args:
    key: ndb.Key for a models.Instance entity.
  """
  entity = key.get()
  if not entity:
    logging.warning('Instance does not exist: %s', key)
    return

  if not entity.active_metadata_update:
    logging.warning('Instance active metadata update unspecified: %s', key)
    return

  if entity.active_metadata_update.url:
    return

  parent = key.parent().get()
  if not parent:
    logging.warning('InstanceGroupManager does not exist: %s', key.parent())
    return

  grandparent = parent.key.parent().get()
  if not grandparent:
    logging.warning(
        'InstanceTemplateRevision does not exist: %s', parent.key.parent())
    return

  if not grandparent.project:
    logging.warning(
        'InstanceTemplateRevision project unspecified: %s', grandparent.key)
    return

  result = net.json_request(entity.url, scopes=gce.AUTH_SCOPES)

  api = gce.Project(grandparent.project)
  operation = api.set_metadata(
      parent.key.id(),
      key.id(),
      result['metadata']['fingerprint'],
      apply_metadata_update(
          result['metadata']['items'],
          entity.active_metadata_update.metadata),
  )
  metrics.send_machine_event('METADATA_UPDATE_SCHEDULED', key.id())

  associate_metadata_operation(
      key,
      utilities.compute_checksum(entity.active_metadata_update.metadata),
      operation.url,
  )
def lease_machines(requests):
  """Lease machines from the Machine Provider.

  Args:
    requests: A list of rpc_messages.LeaseRequest instances.
  """
  logging.info('Sending batched lease_machines request')
  return net.json_request(
      '%s/_ah/api/machine_provider/v1/batched_lease' %
          MachineProviderConfiguration.get_instance_url(),
      method='POST',
      payload=utils.to_json_encodable({'requests': requests}),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def shorten_url(long_url):
  """Returns a short URL for the given long URL, memoizing results in memcache."""
  link_hash = hashlib.sha256(long_url).hexdigest()
  cache_key = 'shortlink/' + link_hash
  short_url = memcache.get(cache_key)
  if not short_url:
    res = net.json_request(
        url='https://www.googleapis.com/urlshortener/v1/url',
        method='POST',
        payload={'longUrl': long_url},
        scopes='https://www.googleapis.com/auth/urlshortener',
    )
    short_url = res['id']
    memcache.set(cache_key, short_url)
  return short_url
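A brief usage sketch, assuming the helper above; the long URL is illustrative, and repeated calls for the same URL are served from memcache:

# Illustrative only: the URL here is a made-up example.
short = shorten_url(
    'https://example.appspot.com/builders/linux-rel/builds/12345')
logging.info('Short link: %s', short)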
def release_machine(client_request_id):
  """Voluntarily releases a leased machine back to Machine Provider.

  Args:
    client_request_id: Request ID originally used by the client when creating
      the lease request.
  """
  return net.json_request(
      '%s/_ah/api/machine_provider/v1/release' %
          MachineProviderConfiguration.get_instance_url(),
      method='POST',
      payload=utils.to_json_encodable({'request_id': client_request_id}),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def add_machines(requests):
  """Add machines to the Machine Provider's Catalog.

  Args:
    requests: A list of rpc_messages.CatalogMachineAdditionRequest instances.
  """
  logging.info('Sending batched add_machines request')
  return net.json_request(
      '%s/_ah/api/catalog/v1/add_machines' %
          MachineProviderConfiguration.get_instance_url(),
      method='POST',
      payload=utils.to_json_encodable({'requests': requests}),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def update(key):
  """Updates instance metadata.

  Args:
    key: ndb.Key for a models.Instance entity.
  """
  entity = key.get()
  if not entity:
    logging.warning('Instance does not exist: %s', key)
    return

  if not entity.active_metadata_update:
    logging.warning('Instance active metadata update unspecified: %s', key)
    return

  if entity.active_metadata_update.url:
    return

  parent = key.parent().get()
  if not parent:
    logging.warning('InstanceGroupManager does not exist: %s', key.parent())
    return

  grandparent = parent.key.parent().get()
  if not grandparent:
    logging.warning(
        'InstanceTemplateRevision does not exist: %s', parent.key.parent())
    return

  if not grandparent.project:
    logging.warning(
        'InstanceTemplateRevision project unspecified: %s', grandparent.key)
    return

  result = net.json_request(entity.url, scopes=gce.AUTH_SCOPES)

  api = gce.Project(grandparent.project)
  operation = api.set_metadata(
      parent.key.id(),
      key.id(),
      result['metadata']['fingerprint'],
      apply_metadata_update(
          result['metadata']['items'],
          entity.active_metadata_update.metadata),
  )

  associate_metadata_operation(
      key,
      utilities.compute_checksum(entity.active_metadata_update.metadata),
      operation.url,
  )
def _call_token_server(method, request, project_id=None):
  """Sends an RPC to tokenserver.minter.TokenMinter service.

  Args:
    method: name of the method to call.
    request: dict with request fields.
    project_id: if set, act with the authority of this LUCI project.

  Returns:
    Dict with response fields.

  Raises:
    PermissionError on HTTP 403 reply.
    MisconfigurationError if the service account is misconfigured.
    InternalError if the RPC fails unexpectedly.
  """
  # Double check token server URL looks sane ('https://....'). This is checked
  # when it's imported from the config. This check should never fail.
  ts_url = auth.get_request_auth_db().token_server_url
  try:
    utils.validate_root_service_url(ts_url)
  except ValueError as exc:
    raise MisconfigurationError(
        'Invalid token server URL %s: %s' % (ts_url, exc))

  # See TokenMinter in
  # https://chromium.googlesource.com/infra/luci/luci-go/+/master/tokenserver/api/minter/v1/token_minter.proto
  # But beware that proto JSON serialization uses camelCase, not snake_case.
  try:
    return net.json_request(
        url='%s/prpc/tokenserver.minter.TokenMinter/%s' % (ts_url, method),
        method='POST',
        payload=request,
        project_id=project_id,
        scopes=[net.EMAIL_SCOPE])
  except net.Error as exc:
    logging.error(
        'Error calling %s (HTTP %s: %s):\n%s',
        method, exc.status_code, exc.message, exc.response)
    if exc.status_code == 403:
      raise PermissionError(
          'HTTP 403 from the token server:\n%s' % exc.response)
    if exc.status_code == 400:
      raise MisconfigurationError(
          'HTTP 400 from the token server:\n%s' % exc.response)
    # Don't put the response body into the error message, it may contain
    # internal details (that are public to Swarming server, but may not be
    # public to whoever is calling the Swarming server now).
    raise InternalError('Failed to call %s, see server logs' % method)
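A hedged caller sketch for the helper above; the RPC name matches the TokenMinter method mentioned elsewhere in this section, but the empty request body is only a placeholder, not a real token server schema:

# Illustrative only: map the typed errors raised by _call_token_server to
# whatever the caller needs. The request dict is an assumed placeholder.
try:
  resp = _call_token_server('MintOAuthTokenGrant', {})  # placeholder payload
except PermissionError:
  logging.warning('Token server denied the request')
  raise
except (MisconfigurationError, InternalError):
  logging.exception('Token server call failed')
  raise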
def delete_machine(dimensions):
  """Deletes a machine from the Machine Provider's Catalog.

  Args:
    dimensions: Dimensions for the machine.
  """
  logging.info('Sending delete_machine request')
  return net.json_request(
      '%s/_ah/api/catalog/v1/delete_machine' %
          MachineProviderConfiguration.get_instance_url(),
      method='POST',
      payload=utils.to_json_encodable({
          'dimensions': dimensions,
      }),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def retrieve_machine(hostname, backend=None):
  """Requests information about a machine from the Machine Provider's Catalog.

  Args:
    hostname: Hostname of the machine to request information about.
    backend: Backend the machine belongs to.
  """
  return net.json_request(
      '%s/_ah/api/catalog/v1/get' %
          MachineProviderConfiguration.get_instance_url(),
      method='POST',
      payload=utils.to_json_encodable({
          'backend': backend,
          'hostname': hostname,
      }),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def add_machine(dimensions, policies):
  """Add a machine to the Machine Provider's Catalog.

  Args:
    dimensions: Dimensions for this machine.
    policies: Policies governing this machine.
  """
  logging.info('Sending add_machine request')
  return net.json_request(
      '%s/_ah/api/catalog/v1/add_machine' %
          MachineProviderConfiguration.get_instance_url(),
      method='POST',
      payload=utils.to_json_encodable({
          'dimensions': dimensions,
          'policies': policies,
      }),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def _send_to_bq(snapshots):
  """Sends the snapshots to BigQuery.

  Returns:
    Timestamps, encoded as strings, of snapshots that failed to be sent.
  """
  # See doc/Monitoring.md.
  dataset = 'isolated'
  table_name = 'stats'

  # BigQuery API doc:
  # https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
  url = (
      'https://www.googleapis.com/bigquery/v2/projects/%s/datasets/%s/tables/'
      '%s/insertAll') % (
          app_identity.get_application_id(), dataset, table_name)
  payload = {
      'kind': 'bigquery#tableDataInsertAllRequest',
      # Do not fail entire request because of one bad snapshot.
      # We handle invalid rows below.
      'skipInvalidRows': True,
      'ignoreUnknownValues': False,
      'rows': [{
          'insertId': s.timestamp_str,
          'json': bqh.message_to_dict(_to_proto(s)),
      } for s in snapshots],
  }
  res = net.json_request(
      url=url,
      method='POST',
      payload=payload,
      scopes=bqh.INSERT_ROWS_SCOPE,
      deadline=600)

  failed = []
  for err in res.get('insertErrors', []):
    t = snapshots[err['index']].timestamp_str
    if not failed:
      # Log the error for the first entry, useful to diagnose schema failure.
      logging.error('Failed to insert row %s: %r', t, err['errors'])
    failed.append(t)
  return failed
def instruct_machine(request_id, swarming_server):
  """Instruct a leased machine to connect to a Swarming server.

  Args:
    request_id: Request ID for the fulfilled lease whose machine to send the
      instruction to.
    swarming_server: URL of the Swarming server to connect to.
  """
  return net.json_request(
      '%s/api/machine_provider/v1/instruct' %
          MachineProviderConfiguration.get_instance_url(),
      method='POST',
      payload=utils.to_json_encodable({
          'instruction': {
              'swarming_server': swarming_server,
          },
          'request_id': request_id,
      }),
      scopes=MACHINE_PROVIDER_SCOPES,
  )
def check(key):
  """Checks the active metadata update operation.

  Reschedules the active metadata update if the operation failed.

  Args:
    key: ndb.Key for a models.Instance entity.
  """
  entity = key.get()
  if not entity:
    logging.warning('Instance does not exist: %s', key)
    return

  if not entity.active_metadata_update:
    logging.warning('Instance active metadata operation unspecified: %s', key)
    return

  if not entity.active_metadata_update.url:
    logging.warning(
        'Instance active metadata operation URL unspecified: %s', key)
    return

  result = net.json_request(
      entity.active_metadata_update.url, scopes=gce.AUTH_SCOPES)
  if result['status'] != 'DONE':
    return

  if result.get('error'):
    logging.warning(
        'Instance metadata operation failed: %s\n%s',
        key,
        json.dumps(result, indent=2),
    )
    metrics.send_machine_event('METADATA_UPDATE_FAILED', key.id())
    reschedule_active_metadata_update(key, entity.active_metadata_update.url)
    metrics.send_machine_event('METADATA_UPDATE_READY', key.id())
  else:
    metrics.send_machine_event('METADATA_UPDATE_SUCCEEDED', key.id())
    clear_active_metadata_update(key, entity.active_metadata_update.url)
def _call_token_server(method, request):
  """Sends an RPC to tokenserver.minter.TokenMinter service.

  Args:
    method: name of the method to call.
    request: dict with request fields.

  Returns:
    Dict with response fields.

  Raises:
    auth.AuthorizationError on HTTP 403 reply.
    InternalError if the RPC fails unexpectedly.
  """
  # Double check token server URL looks sane ('https://....'). This is checked
  # when it's imported from the config. This check should never fail.
  ts_url = auth.get_request_auth_db().token_server_url
  try:
    utils.validate_root_service_url(ts_url)
  except ValueError as exc:
    raise InternalError('Invalid token server URL %s: %s' % (ts_url, exc))

  # See TokenMinter in
  # https://chromium.googlesource.com/infra/luci/luci-go/+/master/tokenserver/api/minter/v1/token_minter.proto
  # But beware that proto JSON serialization uses camelCase, not snake_case.
  try:
    return net.json_request(
        url='%s/prpc/tokenserver.minter.TokenMinter/%s' % (ts_url, method),
        method='POST',
        payload=request,
        headers={'Accept': 'application/json; charset=utf-8'},
        scopes=[net.EMAIL_SCOPE])
  except net.Error as exc:
    logging.error(
        'Error calling %s (HTTP %s: %s):\n%s',
        method, exc.status_code, exc.message, exc.response)
    if exc.status_code == 403:
      raise auth.AuthorizationError(exc.response)
    raise InternalError(
        'Failed to call MintOAuthTokenGrant, see server logs')
def call_api(
    self,
    endpoint,
    method='GET',
    payload=None,
    params=None,
    deadline=None,
    version='v1',
    service='compute'):
  """Sends JSON request (with retries) to GCE API endpoint.

  Args:
    endpoint: endpoint URL relative to the project URL (e.g. /regions).
    method: HTTP method to use, e.g. GET, POST, PUT.
    payload: object to serialize to JSON and put in request body.
    params: dict with query GET parameters (i.e. ?key=value&key=value).
    deadline: deadline for a single call attempt.
    version: API version to use.
    service: API service to call (compute or replicapool).

  Returns:
    Deserialized JSON response.

  Raises:
    net.Error on errors.
  """
  assert service in ('compute', 'replicapool')
  assert endpoint.startswith('/'), endpoint
  url = 'https://www.googleapis.com/%s/%s/projects/%s%s' % (
      service, version, self._project_id, endpoint)
  return net.json_request(
      url=url,
      method=method,
      payload=payload,
      params=params,
      scopes=AUTH_SCOPES,
      service_account_key=self._service_account_key,
      deadline=30 if deadline is None else deadline)
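A hedged usage sketch for the method above; it assumes call_api belongs to the gce.Project wrapper used elsewhere in this section, and the project id and zone are placeholders:

# Illustrative only: list instances in one zone via the Compute API wrapper.
api = gce.Project('example-cloud-project')  # hypothetical project id
listing = api.call_api(
    '/zones/us-central1-b/instances',
    params={'maxResults': 100},
)
for instance in listing.get('items', []):
  logging.info('Found instance: %s', instance['name'])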
def call_api(
    self,
    endpoint,
    method='GET',
    payload=None,
    params=None,
    deadline=None,
    version='v1',
    service='compute'):
  """Sends JSON request (with retries) to GCE API endpoint.

  Args:
    endpoint: endpoint URL relative to the project URL (e.g. /regions).
    method: HTTP method to use, e.g. GET, POST, PUT.
    payload: object to serialize to JSON and put in request body.
    params: dict with query GET parameters (i.e. ?key=value&key=value).
    deadline: deadline for a single call attempt.
    version: API version to use.
    service: API service to call (compute or replicapool).

  Returns:
    Deserialized JSON response.

  Raises:
    net.Error on errors.
  """
  assert service in ('compute', 'replicapool')
  assert endpoint.startswith('/'), endpoint
  url = 'https://www.googleapis.com/%s/%s/projects/%s%s' % (
      service, version, self._project_id, endpoint)
  return net.json_request(
      url=url,
      method=method,
      payload=payload,
      params=params,
      scopes=['https://www.googleapis.com/auth/compute'],
      service_account_key=self._service_account_key,
      deadline=30 if deadline is None else deadline)
def check(key):
  """Checks the active metadata update operation.

  Reschedules the active metadata update if the operation failed.

  Args:
    key: ndb.Key for a models.Instance entity.
  """
  entity = key.get()
  if not entity:
    logging.warning('Instance does not exist: %s', key)
    return

  if not entity.active_metadata_update:
    logging.warning('Instance active metadata operation unspecified: %s', key)
    return

  if not entity.active_metadata_update.url:
    logging.warning(
        'Instance active metadata operation URL unspecified: %s', key)
    return

  result = net.json_request(
      entity.active_metadata_update.url, scopes=gce.AUTH_SCOPES)
  if result['status'] != 'DONE':
    return

  if result.get('error'):
    logging.warning(
        'Instance metadata operation failed: %s\n%s',
        key,
        json.dumps(result, indent=2),
    )
    reschedule_active_metadata_update(key, entity.active_metadata_update.url)
  else:
    clear_active_metadata_update(key, entity.active_metadata_update.url)
def _call(method, endpoint, payload=None, accepted_http_statuses=None):
  """Makes HTTP request to Pub/Sub service.

  Args:
    method: HTTP verb, such as 'GET' or 'PUT'.
    endpoint: URL of the endpoint, relative to pubsub.googleapis.com/v1/.
    payload: Body of the request to send as JSON.
    accepted_http_statuses: List of additional status codes to treat as
      success.

  Raises:
    Error or TransientError.
  """
  try:
    return net.json_request(
        url='https://pubsub.googleapis.com/v1/' + endpoint,
        method=method,
        payload=payload,
        scopes=['https://www.googleapis.com/auth/pubsub'])
  except net.Error as e:
    if accepted_http_statuses and e.status_code in accepted_http_statuses:
      return None
    if e.status_code >= 500:
      raise TransientError(e)
    raise Error(e)
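A hedged sketch of how a caller might use _call above, treating 409 (topic already exists) as success; the helper name, project, and topic are placeholders:

# Illustrative only: create a topic if missing, ignoring "already exists".
def _example_ensure_topic(project, topic):  # hypothetical helper
  _call(
      'PUT',
      'projects/%s/topics/%s' % (project, topic),
      accepted_http_statuses=[409],
  )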