def handle_areas(request):
    """Serve a GeoJSON FeatureCollection of areas for a bounding box.

    Resolves the area type from the URL, validates the required query
    parameters (x_min, y_min, x_max, y_max, zoom), queries the matching
    database table and returns the aggregated GeoJSON.

    Raises:
        web.HTTPBadRequest: missing/unknown area type or bad parameters.
        web.HTTPNoContent: requested zoom outside the configured range.
        web.HTTPInternalServerError: database query failed.
    """
    global database
    requested_type = request.match_info.get('type', None)

    # Init time measure
    measure = TimeMeasure()

    if not requested_type:
        print("Bad Request: No area type specified")
        raise web.HTTPBadRequest(text="No area type specified.",
                                 headers=HEADERS)

    # Get area type for this resource name
    area_type = area_types_mapping.get(requested_type)

    # Check if area could be found
    if area_type is None:
        print(f"Bad Request: Area type '{requested_type}' not available.")
        raise web.HTTPBadRequest(
            text=f"Area type '{requested_type}' not available.",
            headers=HEADERS)

    # Get query parameters of the request
    query_parameters = request.query

    # Raise error if not all required parameters are contained in the request
    necessary_parameters = ["x_min", "y_min", "x_max", "y_max", "zoom"]
    if not all(param in query_parameters for param in necessary_parameters):
        print(
            f"Bad Request: Query parameters missing. Necessary: {necessary_parameters}"
        )
        # Bug fix: the message was split by a raw line break inside a
        # single-quoted f-string (a syntax error); it is now one literal.
        raise web.HTTPBadRequest(
            text=f"Query parameters missing. Necessary: {necessary_parameters}",
            headers=HEADERS)

    # Read request parameters. Bug fix: non-numeric values used to escape as
    # an unhandled ValueError (HTTP 500); answer 400 instead.
    try:
        x_min = float(query_parameters["x_min"])
        y_min = float(query_parameters["y_min"])
        x_max = float(query_parameters["x_max"])
        y_max = float(query_parameters["y_max"])
        zoom = round(float(query_parameters["zoom"]))
    except ValueError:
        print("Bad Request: Query parameters must be numeric.")
        raise web.HTTPBadRequest(
            text="Query parameters must be numeric.", headers=HEADERS)

    # Check if zoom is within range
    if (zoom < area_type[JSON_KEY_GROUP_TYPE_ZOOM_MIN]) or (
            zoom >= area_type[JSON_KEY_GROUP_TYPE_ZOOM_MAX]):
        raise web.HTTPNoContent()

    # Provide measure with meta data about request
    measure.set_meta_data(x_min, y_min, x_max, y_max, zoom, requested_type)

    # Get database table name from area type
    db_table_name = area_type[JSON_KEY_GROUP_TYPE_TABLE_NAME]

    # Build bounding box query. The interpolated values are safe against SQL
    # injection here: coordinates/zoom were coerced to float/int above and
    # db_table_name comes from the server-side area_types_mapping.
    query = f"""SELECT CONCAT( '{{ "type":"FeatureCollection", "crs":{{ "type":"name", "properties":{{ "name":"{DATA_PROJECTION}" }} }}, "features": [', string_agg(CONCAT( '{{ "type":"Feature", "id":', id, ', "geometry":', geojson, ', "properties":{{', CASE WHEN label ISNULL THEN '' ELSE CONCAT('"label":"', label, '",') END, CASE WHEN label_center ISNULL THEN '' ELSE CONCAT(' "label_center":', label_center, ', "start_angle":', start_angle, ', "end_angle":', end_angle, ', "inner_radius":', inner_radius, ', "outer_radius":', outer_radius, ',') END, '"zoom":', zoom, '}} }}'), ','), ' ] }}') FROM {db_table_name} WHERE (zoom = {zoom}) AND (geom && ST_MakeEnvelope({x_min}, {y_min}, {x_max}, {y_max}));"""

    # Replace line breaks and multiple spaces from query
    query = query.replace("\n", "")
    query = re.sub(" {2,}", " ", query)

    # Try to issue the query at the database and measure timings
    measure.query_issued()
    result = database.query_for_result(query)
    measure.query_done()

    # Sanity check for result
    if result is None:
        print("Internal Server Error: Failed to retrieve data from database")
        # Consistency fix: include HEADERS like every other error path.
        raise web.HTTPInternalServerError(
            text="Failed to retrieve data from database", headers=HEADERS)

    # Get GeoJSON from result
    geo_json = result[0][0]

    try:
        # Send success response
        return web.Response(text=geo_json,
                            content_type="application/json",
                            headers=HEADERS)
    finally:
        # Finish measuring
        measure.request_answered()
        measure.write_result()
async def delete_ability(self, request: web.Request):
    """Delete the on-disk object addressed by *request*; reply 204 No Content."""
    await self.delete_on_disk_object(request)
    empty_reply = web.HTTPNoContent()
    return empty_reply
async with request.app.db.acquire() as conn: # type: aiopg.sa.SAConnection user = await inner_auth(conn, request) data = await conn.execute( r""" DELETE FROM app_templates T WHERE (T.project_id = %s) and (T.user_id = %s) and (T.id = %s) RETURNING T.id """, (project_id, user["id"], template_id)) data = await data.fetchall() if len(data) > 1: logging.error(f"{data = }. query delete few template once") return web.HTTPServerError() elif len(data) == 1: return web.HTTPNoContent() else: return web.HTTPNotFound() def main(): app = Application() app.add_routes([ web.route('POST', '/v1/login', login_handler), web.route('POST', '/v1/projects', projects_handler), web.route('POST', '/v1/eye/metric_list', eye_metric_list_handler), web.route('POST', '/v1/eye/query', eye_query_handler), web.route('POST', '/v1/cts/overview', cts_handler), web.route('POST', '/v1/cts/detail', cts_detail_handler), web.route('POST', '/v1/cce/clusters/overview', clusters_overview_handler),
async def sign_out(request: 'Request'):
    """Wipe the caller's session data and signal success with 204 No Content."""
    active_session = await aiohttp_session.get_session(request)
    active_session.clear()
    raise web.HTTPNoContent()
def test_empty_text_204() -> None:
    """A 204 No Content response must expose no text payload."""
    response = web.HTTPNoContent()
    assert response.text is None
async def _redirect(self, request, connector):
    """Proxy *request* to its own URL through a fresh client session and
    stream the upstream response back to the caller.

    Guards against self-recursion by rejecting requests whose Host header
    matches this machine (HTTPBadRequest). Maps client-side failures to
    HTTP errors: timeout -> 504, aiohttp.ClientError -> 503, and a peer
    connection reset -> 204 (see TODO below).

    :param request: incoming aiohttp request to forward.
    :param connector: shared aiohttp connector (not owned by the session).
    """
    # Refuse to proxy to ourselves — that would recurse forever.
    if request.host in (
        socket.getfqdn(),
        f'{self.config["hostname"]}:{self.config["port"]}',
    ):
        raise web.HTTPBadRequest(
            text=(
                "ERROR: Recursion error. "
                "Invalid 'Host' header specified.\n"
            )
        )
    self.logger.debug(
        f"Request (redirecting): method={request.method!r} "
        f"path={request.path!r}, query_string={request.query_string!r}, "
        f"headers={request.headers!r}, remote={request.remote!r}"
    )
    req_headers = request.headers
    # Body is fully read before forwarding (not streamed upstream).
    body = await request.read()
    # modify headers
    if self.config["num_forwarded"]:
        # Copy so the original (immutable) request headers stay untouched.
        req_headers = copy.deepcopy(dict(req_headers))
        req_headers["X-Forwarded-For"] = request.remote
        self.logger.debug(
            f"Request headers (redirecting, modified): {req_headers!r}"
        )
    try:
        # connector_owner=False: the connector outlives this session.
        # auto_decompress=False: pass the payload through unmodified.
        async with aiohttp.ClientSession(
            headers=req_headers,
            connector=connector,
            timeout=self.client_timeout,
            connector_owner=False,
            auto_decompress=False,
        ) as session:
            async with session.request(
                request.method,
                request.url,
                data=body,
            ) as resp:
                self.logger.debug(
                    f"Response: {resp.reason}: resp.status={resp.status}, "
                    f"resp.request_info={resp.request_info}, "
                    f"resp.url={resp.url}, resp.headers={resp.headers}"
                )
                # Mirror upstream status/headers and stream chunks through.
                proxied_response = web.StreamResponse(
                    headers=resp.headers, status=resp.status
                )
                if (
                    resp.headers.get("Transfer-Encoding", "").lower()
                    == "chunked"
                ):
                    proxied_response.enable_chunked_encoding()
                await proxied_response.prepare(request)
                async for data in resp.content.iter_any():
                    await proxied_response.write(data)
                await proxied_response.write_eof()
                return proxied_response
    except ConnectionResetError as err:
        self.logger.warning(f"Connection reset by peer: {err}")
        # TODO(damb): Would implementing a retry mechanism be an
        # alternative?
        # NOTE(review): answering 204 hides the reset from the client —
        # confirm this is the intended contract.
        raise web.HTTPNoContent()
    except asyncio.TimeoutError as err:
        self.logger.warning(
            f"Error while executing request: error={type(err)}, "
            f"url={request.url!r}, method={request.method!r}"
        )
        raise web.HTTPGatewayTimeout(text=f"ERROR: {str(type(err))}\n")
    except aiohttp.ClientError as err:
        msg = (
            f"Error while executing request: error={type(err)}, "
            f"msg={err}, url={request.url!r}, method={request.method!r}"
        )
        if isinstance(err, aiohttp.ClientOSError):
            msg += f", errno={err.errno}"
        self.logger.warning(msg)
        raise web.HTTPServiceUnavailable(text=f"ERROR: {str(type(err))}\n")
# NOTE(review): the following statements are the tail of a delete-URL
# handler whose 'async def' line lies outside this chunk; 'bearer' is
# bound earlier in that handler — do not treat this fragment as complete.
data = await request.post()
if not (url := data.get('url')):
    return self.log_and_return(
        web.HTTPBadRequest(reason='Post format unexpected'),
        'Deny bad format %s %s', request)
# delete_url returns a falsy value on success, otherwise a status code.
if rt := await self.url_conn.delete_url(url, int(bearer.split(':')[0])):
    if rt == UrlDatabase.StatusCode.NotOwner:
        return web.json_response(dict(
            text='Url is not your own created', code=1), status=400)
    elif rt == UrlDatabase.StatusCode.NotFound:
        return web.json_response(dict(text='Url not found', code=2),
                                 status=404)
    # Any other non-success status code is reported as a generic 400.
    return web.HTTPBadRequest()
return web.HTTPNoContent(content_type='text/html')

async def handle_revoke_key(self, request: web.Request) -> web.Response:
    """Rotate the caller's authorized key and return the new key as JSON.

    verify_identify returns a ready error Response when the bearer token
    is invalid; that Response is passed straight through to the client.
    """
    if not isinstance(bearer := await self.verify_identify(request), str):
        return bearer
    # The numeric user id is the prefix of the 'id:token' bearer string.
    new_key = await self.url_conn.update_authorized_key(
        int(bearer.split(':')[0]))
    return web.json_response(dict(key=new_key))

@staticmethod
async def handle_help_page(_request: web.Request) -> web.Response:
    """Serve the static help page, or 404 when the file is absent."""
    try:
        # NOTE(review): aiofiles opens in text mode here, so body= receives
        # a str; web.Response(text=...) is the canonical spelling — confirm
        # the installed aiohttp accepts a str payload for body=.
        async with aiofiles.open('data/index.html') as fin:
            return web.Response(body=await fin.read())
    except FileNotFoundError:
        raise web.HTTPNotFound()
async def delete_schedule(self, request: web.Request):
    """Delete the schedule object addressed by *request*; reply 204 No Content."""
    await self.delete_object(request)
    empty_reply = web.HTTPNoContent()
    return empty_reply
async def ExtraMultisigInfoEndpoint(request):
    """Stub endpoint: acknowledge the call with an empty 204 reply."""
    empty_reply = web.HTTPNoContent()
    return empty_reply
async def OutputsEndpoint(request):
    """Stub endpoint: acknowledge the call with an empty 204 reply."""
    empty_reply = web.HTTPNoContent()
    return empty_reply
async def service_submission(request: web.Request):
    """Accept a multipart service submission (JSON metadata + optional ZIP)
    and forward it by e-mail to the support address.

    Outside of production/staging the mail is redirected to the submitting
    user's own address. Replies 204 on success.

    Raises:
        web.HTTPRequestEntityTooLarge: ZIP part exceeds 10 MB.
        web.HTTPUnsupportedMediaType: a part has an unexpected content type.
        web.HTTPServiceUnavailable: sending the e-mail failed.
    """
    reader = MultipartReader.from_response(request)
    data = None
    filedata = None
    # Read multipart email
    while True:
        part = await reader.next()  # pylint: disable=not-callable
        if part is None:
            break
        if part.headers[hdrs.CONTENT_TYPE] == "application/json":
            data = await part.json()
            continue
        if part.headers[hdrs.CONTENT_TYPE] == "application/zip":
            filedata = await part.read(decode=True)
            # Validate max file size
            maxsize = 10 * 1024 * 1024  # 10MB
            actualsize = len(filedata)
            if actualsize > maxsize:
                raise web.HTTPRequestEntityTooLarge(maxsize, actualsize)
            # NOTE(review): 'filename' is only bound when a ZIP part is
            # present; it is read below only when 'filedata' is set, so the
            # pairing holds — keep the two in sync if this loop changes.
            filename = part.filename
            continue
        raise web.HTTPUnsupportedMediaType(
            reason=f"One part had an unexpected type: {part.headers[hdrs.CONTENT_TYPE]}"
        )
    # data (dict) and file (bytearray) have the necessary information to compose the email
    support_email_address = request.app[APP_CONFIG_KEY]["smtp"]["sender"]
    # Real usage is detected from the swarm stack name environment variable.
    is_real_usage = any(
        env in os.environ.get("SWARM_STACK_NAME", "")
        for env in ("production", "staging")
    )
    db = get_storage(request.app)
    user = await db.get_user({"id": request[RQT_USERID_KEY]})
    user_email = user.get("email")
    if not is_real_usage:
        # Outside production/staging, loop the mail back to the submitter.
        support_email_address = user_email
    try:
        # NOTE: temporarily internal import to avoid render_and_send_mail to be interpreted as handler
        # TODO: Move outside when get_handlers_from_namespace is fixed
        from .login.utils import render_and_send_mail

        # NOTE(review): 'data' may still be None here if no JSON part was
        # sent; json.dumps(None) yields "null" — confirm that is acceptable.
        attachments = [("metadata.json", json.dumps(data, indent=4))]
        if filedata:
            attachments.append((filename, filedata))
        # send email
        await render_and_send_mail(
            request,
            to=support_email_address,
            template=common_themed(EMAIL_TEMPLATE_NAME),
            context={
                "user": user_email,
                "data": json2html.convert(
                    json=json.dumps(data), table_attributes='class="pure-table"'
                ),
                # "TEST: " prefix is prepended whenever this is not real usage.
                "subject": "TEST: " * (not is_real_usage) + "New service submission",
            },
            attachments=attachments,
        )
    except Exception as exc:
        log.exception("Error while sending the 'new service submission' mail.")
        raise web.HTTPServiceUnavailable() from exc
    raise web.HTTPNoContent(content_type="application/json")
def test_empty_body_304(self):
    """HTTPNoContent responses must carry no body."""
    # NOTE(review): the name says 304 but this exercises HTTPNoContent
    # (204) — consider renaming in a follow-up.
    response = web.HTTPNoContent()
    self.assertIsNone(response.body)
async def logout(self, request):
    """Log the requester out via the auth backend; reply 204 No Content.

    Bug fix: dropped the unused local ``router = request.app.router['auth']``
    — it was never read and only risked an accidental KeyError when the
    'auth' route is not registered.
    """
    await self._auth.logout(request)
    raise web.HTTPNoContent()
async def logout(request):
    """Log out the user identified by the Authorization header; 204 on success."""
    current_user = in_header_authorization(request.headers)
    current_user.logout()
    return web.HTTPNoContent()
async def put(request: web.Request) -> web.Response:
    """Publish updated metadata for a content item on the 'edits' queue; 204."""
    payload = await request.json()
    payload['content_id'] = request.match_info['content_id']
    await mq_publish(request, payload, 'edits')
    raise web.HTTPNoContent()
async def crud_endpoint(request):
    """Record the request on the app, then answer: empty JSON object for
    PATCH, 204 No Content for every other method."""
    request.app['last_request'] = request
    if request.method != "PATCH":
        raise web.HTTPNoContent()
    return web.Response(text="{}", content_type="application/json")
async def delete(request: web.Request) -> web.Response:
    """Delete stored content and announce the deletion on the 'deletes' queue."""
    content_id = request.match_info['content_id']
    # NOTE(review): storage_delete is not awaited — confirm it is synchronous.
    storage_delete(request, content_id)
    await mq_publish(request, {'content_id': content_id}, 'deletes')
    raise web.HTTPNoContent()
def test_empty_text_304() -> None:
    """A 204 No Content response must expose no text payload."""
    resp = web.HTTPNoContent()
    # Bug fix: this line was a bare expression with no effect, so the test
    # could never fail; 'assert' makes the check enforceable.
    assert resp.text is None
async def credits_history_api(request: web.Request) -> web.Response:
    """Endpoint for the website provided by :func:`credits_history` to
    retrieve its data, the credits history of a given project.
    ---
    description: >
      Provides the history of credits of the given project. The return
      format is currently optimized against ``c3.js`` which is used by the
      internal visualization. The first entry of every response array is a
      string followed by the data. The ``metrics`` array contains the
      ``friendly_name`` of the metric responsible for this billing. The
      ``timestamps`` array contains the timestamps of the measurements
      which caused the billing. To generate test entries take a look at
      ``bin/generate_credits_history.py`` at the root of this project.
      Timestamps are formatted ``%Y-%m-%d %H:%M:%S`` and sorted descending.
    tags:
      - Service
    produces:
      - application/json
    parameters:
      - name: project_name
        in: path
        type: string
        description: Name of the project
      - name: start_date
        in: query
        type: string
        format: date
        description: Start date of the credits data, format
          ``%Y-%m-%d %H:%M:%S``
      - name: end_date
        in: query
        type: string
        format: date
        description: End date of the credits data, format
          ``%Y-%m-%d %H:%M:%S``
    responses:
      200:
        description: Response with requested data.
        schema:
          type: object
          required: [timestamps, credits, metrics]
          properties:
            timestamps:
              type: array
              items:
                type: string
            credits:
              type: array
              items:
                type: number
            metrics:
              type: array
              items:
                type: string
      204:
        description: Project does have credits history but not for given
          parameters.
      400:
        description: Bad value of one or more parameters.
      404:
        description: Could not find any history data.
    """
    # Note: the swagger block above previously declared two duplicate
    # ``200:`` keys and used non-OpenAPI types (str/float); merged/fixed.
    datetime_format = "%Y-%m-%d %H:%M:%S"
    # Missing start_date defaults to the epoch (i.e. "everything so far").
    try:
        start_date = datetime.strptime(request.query["start_date"],
                                       datetime_format)
    except KeyError:
        start_date = datetime.fromtimestamp(0)
    except ValueError:
        raise web.HTTPBadRequest(reason="Invalid content for ``start_date``")
    # Missing end_date means "no upper bound".
    try:
        end_date: Optional[datetime] = datetime.strptime(
            request.query["end_date"], datetime_format)
    except KeyError:
        end_date = None
    except ValueError:
        raise web.HTTPBadRequest(reason="Invalid content for ``end_date``")
    if end_date and end_date <= start_date:
        raise web.HTTPBadRequest(
            reason="``start_date`` must be older than ``end_date``.")
    try:
        project_name = request.match_info["project_name"]
        # Swagger UI sends '{project_name}' if none is specified -.-'
        if project_name == "{project_name}" or not project_name.strip():
            raise KeyError
    except KeyError:
        raise web.HTTPBadRequest(
            reason="No non-empty ``project_name`` provided")
    influx_client: InfluxDBClient = request.app["influx_client"]
    # c3.js column format: first element is the column name, then the data.
    time_column = ["timestamps"]
    credits_column: List[Union[str, float]] = ["credits"]
    metric_column = ["metrics"]
    result = await influx_client.query_billing_history(project_name,
                                                       since=start_date)
    try:
        async for point in result:
            # entries are sorted by timestamp descending
            if end_date:
                if point.timestamp > end_date:
                    continue
            time_column.append(point.timestamp.strftime(datetime_format))
            credits_column.append(float(point.credits_left))
            metric_column.append(point.metric_friendly_name)
    except InfluxDBError:
        raise web.HTTPBadRequest(reason="Invalid project name")
    # check whether any data were retrieved
    if credits_column == ["credits"]:
        # let's check whether the project has history at all
        if await influx_client.project_has_history(project_name):
            raise web.HTTPNoContent(reason="Try changing *_date parameters")
        raise web.HTTPNotFound(
            reason="No data available for given parameters.")
    return web.json_response({
        "timestamps": time_column,
        "credits": credits_column,
        "metrics": metric_column
    })
def two_hundred_four(request):
    # Handler that always answers 204 No Content.
    raise web.HTTPNoContent()
async def handle_ping(self, _):
    """Ping the station: fire the optional notify callback, reply 204."""
    notify = self._ping_notify
    if notify:
        notify()
    return web.HTTPNoContent()
async def delete_subscriber(self, request: Request) -> StreamResponse:
    """Deregister the subscriber named in the URL; reply 204 No Content."""
    sid = request.match_info["subscriber_id"]
    await self.subscription_handler.remove_subscriber(sid)
    return web.HTTPNoContent()
async def handler(request: web.Request) -> web.Response:
    # Test handler: require a 'uri' query parameter, then reply 204.
    assert "uri" in request.query
    raise web.HTTPNoContent()
def test_empty_body_304():
    """A 204 No Content response must carry no body."""
    resp = web.HTTPNoContent()
    # Bug fix: this line was a bare expression with no effect, so the test
    # could never fail; 'assert' makes the check enforceable.
    assert resp.body is None
async def handler(request: web.Request) -> web.Response:
    # Test handler: the matched 'name' must carry the company prefix; 204.
    assert request.match_info["name"].startswith("mycompany:")
    raise web.HTTPNoContent()
async def delete_operation(self, request: web.Request):
    """Delete the operation object addressed by *request*; reply 204 No Content."""
    await self.delete_object(request)
    empty_reply = web.HTTPNoContent()
    return empty_reply
def function2613():
    """Check that HTTPNoContent (204) carries no body."""
    var2188 = web.HTTPNoContent()
    # Bug fix: the parenthesized comparison was a no-op statement;
    # 'assert' makes the check take effect.
    assert var2188.body is None