def transfer_start(request):
    """Kick off a Google Music -> Spotify playlist transfer.

    Old-style (pre async/await) aiohttp coroutine using ``yield from``.
    Expects the request body to be a JSON list of playlist objects, each
    carrying a 'uri' key. Both services must already be authorised in
    ``user_scope``.

    NOTE(review): the 401/402/403 values below are payload fields, not HTTP
    status codes — the HTTP response itself is always 200; confirm clients
    rely on that.
    """
    lists = yield from request.json()
    lists = [l['uri'] for l in lists]
    if not user_scope.google_token:
        return json_response({
            "status": 401,
            "message": "Google: not logged in.",
        })
    if not user_scope.spotify_token:
        return json_response({
            "status": 402,
            "message": "Spotify: not logged in.",
        })
    if not lists:
        return json_response({
            "status": 403,
            "message": "Please select at least one playlist.",
        })
    # TLS context backed by certifi's CA bundle for the outgoing API calls.
    sslcontext = ssl.create_default_context(cafile=certifi.where())
    conn = aiohttp.TCPConnector(ssl_context=sslcontext)
    with ClientSession(connector=conn) as session:
        g = Mobileclient(session, user_scope.google_token)
        s = SpotifyClient(session, user_scope.spotify_token)
        yield from transfer_playlists(request, s, g, lists)
    return json_response({
        "status": 200,
        "message": "transfer will start.",
    })
async def fetch_info(request):
    """Return the stored info for ``secret``, optionally long-polling.

    ``?wait=<status>`` (repeatable; only 'registered' and 'redirected' are
    honoured) makes the handler subscribe to the Redis pub/sub channel named
    after the secret and wait — up to 10 rounds of 30 s each — for one of
    the requested statuses to be published, then re-read and return the info.
    """
    secret = request.match_info.get('secret')
    redis_pool = request.app['redis_pool']
    redis_sub_pool = request.app['redis_subscribe_pool']
    # We get the status from the query string
    qs = urllib.parse.parse_qs(request.query_string)
    statuses = qs.get('wait', [])
    statuses = [s for s in statuses if s in ['registered', 'redirected']]
    async with redis_pool.get() as redis:
        parsed_info = await _info(redis, secret)
    # cases where we do not want to wait: no status or any of the status returned
    if not statuses or any(parsed_info.get(status) for status in statuses):
        return web.json_response(parsed_info)
    async with redis_sub_pool.get() as redis_sub:
        channel, = await redis_sub.subscribe(secret)
        for _ in range(10):
            # NOTE(review): a 30 s timeout leaves the task in ``not_done``,
            # which also breaks out of the loop — presumably intentional
            # (give up after one silent round), but confirm.
            done, not_done = await asyncio.wait([channel.get(encoding='utf-8')], timeout=30)
            if not_done or (done and done.pop().result() in statuses):
                break
        await redis_sub.unsubscribe(secret)
    # Re-read after (possibly) seeing the awaited status published.
    async with redis_pool.get() as redis:
        info = await _info(redis, secret)
    return web.json_response(info)
async def get_task_user_solution(req):
    """Return one user's solution (and its comments) for a task.

    A logged-in user may always fetch their own solution; fetching another
    user's requires review rights on the course. Responds with
    ``{'task_solution': None, 'comments': []}`` when no solution exists.
    """
    session = await get_session(req)
    model = req.app['model']
    if not session.get('user'):
        raise web.HTTPForbidden()
    user = await model.users.get_by_id(session['user']['id'])
    show_user_id = req.query.get('user_id') or user.id
    # NOTE(review): ``show_user_id`` is a query-string str when supplied,
    # while ``user.id`` is presumably an int — a self-request that passes
    # user_id explicitly may therefore hit the review-permission branch;
    # confirm against the model's id type.
    if show_user_id != user.id:
        if not user.can_review_course(course_id=req.query['course_id']):
            raise web.HTTPForbidden()
    ts = await model.task_solutions.get_by_task_and_user_id(
        user_id=show_user_id,
        course_id=req.rel_url.query['course_id'],
        task_id=req.rel_url.query['task_id'])
    if ts:
        task_comments = await model.task_solution_comments.find_by_task_solution_id(ts.id)
        return web.json_response({
            'task_solution': await ts.export(with_code=True),
            'comments': [c.export() for c in task_comments],
        })
    else:
        return web.json_response({
            'task_solution': None,
            'comments': [],
        })
def github_webhook(request):
    """Validate and enqueue a GitHub webhook event (old-style coroutine).

    Rejects bad signatures with 403, answers GitHub "ping" deliveries
    directly, and otherwise queues the URL inferred from the event for
    asynchronous processing by the worker pool.
    """
    logger.info("Processing GitHub webhook event.")
    payload = yield from request.read()
    yield from request.release()
    try:
        validate_signature(request.headers, payload)
    except DenySignature:
        return web.json_response({'message': 'Invalid signature.'}, status=403)
    payload = json.loads(payload.decode('utf-8'))
    if 'hook_id' in payload:
        # GitHub sends a ping (carrying hook_id) when the hook is created.
        logger.debug("Ping from GitHub.")
        return web.json_response({'message': 'Hookaïda !'}, status=200)
    try:
        url = infer_url_from_event(payload)
    except SkipEvent:
        # Event type we deliberately ignore; still acknowledge it.
        return web.json_response({'message': 'Event processed.'})
    priority = ('10-webhook', url)
    logger.info("Queuing %s.", url)
    yield from WORKERS.enqueue(
        ProcessUrlTask(priority, url, callable_=process_url)
    )
    return web.json_response({'message': 'Event processing in progress.'})
async def handle_webhook(hass, webhook_id, request):
    """Handle webhook callback.

    iOS sets the "topic" as part of the payload. Android does not set a
    topic but adds headers to the request.
    """
    context = hass.data[DOMAIN]['context']
    try:
        message = await request.json()
    except ValueError:
        _LOGGER.warning('Received invalid JSON from OwnTracks')
        return json_response([])
    # Android doesn't populate topic
    if 'topic' not in message:
        headers = request.headers
        user = headers.get('X-Limit-U')
        device = headers.get('X-Limit-D', user)
        if user:
            # Rebuild the topic from the configured MQTT base (with any
            # trailing '/#' wildcard stripped), user and device.
            topic_base = re.sub('/#$', '', context.mqtt_topic)
            message['topic'] = '{}/{}/{}'.format(topic_base, user, device)
        elif message['_type'] != 'encrypted':
            _LOGGER.warning('No topic or user found in message. If on Android,'
                            ' set a username in Connection -> Identification')
            # Keep it as a 200 response so the incorrect packet is discarded
            return json_response([])
    hass.helpers.dispatcher.async_dispatcher_send(
        DOMAIN, hass, context, message)
    return json_response([])
async def parse_vcf_metadata(request):
    """Parse VCF metadata from the request's JSON body.

    Runs the blocking parser off the event loop and returns its result as
    JSON; a ``FatalError`` becomes a 400 with the error message.
    """
    try:
        payload = await request.json()
        parsed = await run(blocking_parse_vcf_metadata, payload)
        return web.json_response(parsed)
    except FatalError as exc:
        return web.json_response({'message': exc.args[0]}, status=400)
def test_data_and_body_raises_value_error(self):
    """Passing both ``data`` and ``body`` must raise a ValueError."""
    with pytest.raises(ValueError) as exc_info:
        json_response(data='foo', body=b'bar')
    assert exc_info.value.args[0] == (
        'only one of data, text, or body should be specified'
    )
async def get_reference(request):
    """Look up reference data for the request's JSON body.

    Delegates to the blocking worker via ``run``; a ``FatalError`` is
    reported as a 400 response carrying the error message.
    """
    try:
        payload = await request.json()
        reference = await run(blocking_get_reference, payload)
        return web.json_response(reference)
    except FatalError as exc:
        return web.json_response({'message': exc.args[0]}, status=400)
async def doGet(self, request):
    """Fetch the value stored under ``uuid``; an empty object when absent."""
    uuid = request.match_info['uuid']
    db = await data.connect(self.db_url)
    stored = await data.get(db, uuid)
    db.close()
    if stored:
        return web.json_response({uuid: stored.decode()})
    return web.json_response({})
async def create(self):
    """Validate the submitted document and create a new object.

    Returns 400 with the validator's errors on invalid input, otherwise
    201 with the created object and its location headers.
    """
    validator = self.get_validator()
    document = await self.get_document()
    if not validator.validate(document):
        return web.json_response(data=validator.errors,
                                 status=HTTPStatus.BAD_REQUEST)
    created = await self.perform_create(validator.document)
    return web.json_response(data=dict(created),
                             status=HTTPStatus.CREATED,
                             headers=self.get_created_headers(created))
async def substract(request):
    """Return the subtree rooted at the element with the given id.

    Matches the element itself plus every node listing it as an ancestor;
    invalid ObjectIds yield a 404.
    """
    db = request.app["db"]
    elid = request.match_info.get("id")
    if not ObjectId.is_valid(elid):
        return web.json_response(status=404)
    root = ObjectId(elid)
    cursor = db.tree.find({"$or": [{"ancestors": root}, {"_id": root}]})
    nodes = [node async for node in cursor]
    return web.json_response(nodes, dumps=json_util.dumps)
async def value_type(request):
    """Determine the value type of the posted code snippet.

    Runs the blocking analysis off the event loop; a ``FatalError`` is
    reported as a 400 with the error message.
    """
    code = await request.json()
    info(f'value type: {code}')
    try:
        result = await run(blocking_value_type, code)
        info(f'result: {result}')
        return web.json_response(result)
    except FatalError as exc:
        return web.json_response({'message': exc.args[0]}, status=400)
async def doSet(self, request):
    """Store ``name`` from the JSON body under the path's ``uuid``."""
    uuid = request.match_info['uuid']
    body = await request.json()
    name = body.get('name')
    if not name:
        return web.json_response({'ok': False})
    db = await data.connect(self.db_url)
    resp = await data.set(db, uuid, name)
    db.close()
    return web.json_response({'ok': True})
async def list(self):
    """Return the collection as ``{'list': [...], 'total': n}``.

    Skips the item query entirely when the total is zero.
    """
    total = await self.get_total()
    if total:
        items = [dict(obj) for obj in await self.get_list()]
        return web.json_response(data={'list': items, 'total': total})
    return web.json_response(data={'list': [], 'total': 0})
async def bop_handler(request):
    """Resolve path(s) between two ids given as ``id1``/``id2`` query params.

    Invalid or missing ids yield an empty JSON list (not an error status),
    preserving the existing client contract.

    :param request: aiohttp request with ``id1`` and ``id2`` in the query.
    :return: JSON list of paths found by ``solve``.
    """
    logger.info(' ')
    d = request.GET
    try:
        id1, id2 = int(d['id1']), int(d['id2'])
    except (ValueError, KeyError):
        # Fix: logger.warn() is a deprecated alias for logger.warning().
        # Lazy %-style args defer formatting until the record is emitted.
        logger.warning("invalid request '%s'", request.query_string)
        return web.json_response([])
    logger.info('accepting request with id1=%d id2=%d', id1, id2)
    result = await solve(id1, id2)
    logger.info('%d->%d: elapsed_time=%f', id1, id2, get_elapsed_time())
    logger.info('%d->%d: %d path(s) found', id1, id2, len(result))
    return web.json_response(result)
async def update(self):
    """Validate the document and update the object with the given pk.

    ``get_object`` raises a 404 when the primary key matches nothing;
    invalid documents produce a 400 with the validator's errors.
    """
    # Try to find object with specified primary key,
    # if object not found, then 404 exception raises
    await self.get_object()
    validator = self.get_validator()
    document = await self.get_document()
    if not validator.validate(document):
        return web.json_response(data=validator.errors,
                                 status=HTTPStatus.BAD_REQUEST)
    updated = await self.perform_update(validator.document)
    return web.json_response(data=dict(updated))
async def provide_zen(request):
    """Slack slash-command handler: look up a line of the Zen of Python.

    The posted ``text`` is split into lowercase word fragments, each looked
    up in ``zencyclopedia.index`` (word -> set of matching lines); the
    intersection of those sets must identify exactly one line, which is
    answered in-channel. Empty, unmatched, or ambiguous requests get an
    ephemeral apology instead.
    """
    await request.post()
    # r'\w*' always matches (possibly the empty string), so .group() is safe.
    fragments_of_zen = [
        re.search(r'\w*', fragment.lower()).group()
        for fragment in request.POST['text'].split()
    ]
    if not any(fragments_of_zen):
        return web.json_response({
            'response_type': 'ephemeral',
            'text': (
                "Sorry, but no matter how hard I look, I just don't see what "
                "you're asking of me :("
            ),
        })
    try:
        zen_results = [
            zencyclopedia.index[fragment]
            for fragment in fragments_of_zen
        ]
        zen_candidates = set.intersection(*zen_results)
    except KeyError:
        # A fragment with no index entry means no line can possibly match.
        zen_candidates = set()
    if not zen_candidates:
        return web.json_response({
            'response_type': 'ephemeral',
            'text': (
                'Sorry, but I scoured the entire zencyclopedia and still '
                "couldn't find a match for your request :("
            ),
        })
    if len(zen_candidates) > 1:
        return web.json_response({
            'response_type': 'ephemeral',
            'text': (
                'Your request could match multiple lines, and in the face of '
                'ambiguity, I refused the temptation to guess. Try again '
                'with a request that only matches one of these lines:\n\n'
            ) + '\n'.join(' - ' + candidate for candidate in zen_candidates),
        })
    # Exactly one match: answer publicly.
    return web.json_response({
        'response_type': 'in_channel',
        'text': list(zen_candidates)[0],
    })
async def web_detail_scenarios(request: web.Request):
    """Display the scenario identified by the ``id`` path parameter."""
    scenario_id = request.match_info["id"]
    if not scenario_id:
        return web.Response(text="Resource not found", status=404)
    args: dict = testall.test_by_id(scenario_id)
    return web.json_response(text=json.dumps(args, indent=2))
async def post(self):
    """Log a user in — creating the account on first use — and issue a token.

    Missing credentials -> 400; existing login with a wrong password -> 418.
    """
    fields = self.request['fields']
    conn = self.request['conn']
    if not all([fields.login, fields.password]):
        raise abort(status=400, text='Bad Request')
    query = sa.select([
        User]).select_from(User).where(User.login == fields.login)
    user = await (await conn.execute(query)).fetchone()
    if user:
        if not validate_password(fields.password, user.password):
            raise abort(status=418, text='Wrong password')
    else:
        # First login with this name: register the user on the fly.
        user = await User.create_user(
            fields.login,
            fields.password,
            conn=conn,
        )
    token = await User.add_token(user.id, conn=conn)
    return web.json_response({
        'token': token,
    })
async def handle(request):
    """RPC over AMQP: publish a request and await the correlated reply.

    Declares an exclusive reply queue, publishes 'foobar' to the 'test'
    routing key with a fresh correlation id, and blocks until the consumer
    callback observes a reply bearing that id.
    """
    # Mutable cell so the nested callback can hand the body back to us.
    d = {'response': None}
    rid = await channel.queue_declare(queue_name='', exclusive=True)
    callback_queue = rid['queue']
    waiter = asyncio.Event()
    corr_id = str(uuid.uuid4())
    response = None

    async def on_response(channel, body, envelope, properties):
        # Ignore replies that belong to other in-flight requests.
        if corr_id == properties.correlation_id:
            d['response'] = body
            waiter.set()

    await channel.basic_consume(on_response, no_ack=True,
                                queue_name=callback_queue)
    await channel.basic_publish(
        payload='foobar',
        exchange_name='',
        routing_key='test',
        properties={
            'reply_to': callback_queue,
            'correlation_id': corr_id,
        },
    )
    await waiter.wait()
    msg = d['response']
    # Reply body is raw bytes; splice it into a JSON document by hand.
    r = b'{"message": "' + msg + b'"}'
    print('got msg, size={}'.format(r.__sizeof__()))
    return web.json_response(body=r)
async def post(self):
    """Attach a comment to a save.

    Requires an authenticated user. The comment is capped at 1024
    characters and inserted with a parameterised query (no SQL built from
    user input).
    """
    self.assertAuth()
    req = self.request
    db = req["db"]
    await req.post()
    saveId = self.sanitizeNumber(req.GET, "ID", minimum=0)
    comment = self.sanitizeString(req.POST, "Comment", maximum=1024)
    # NOTE(review): sanitizeString already receives maximum=1024, so this
    # guard is presumably defensive — confirm whether it can ever trigger.
    if len(comment) > 1024:
        return web.json_response({
            "Status": 0,
            "Error": "Comment must be 1024 characters or less!"
        }, status=400)
    async with db.cursor() as cur:
        await cur.execute("""
            insert into Comments (
                saveId, userId, date, content, host
            ) values (
                %s, %s, now(), %s, %s
            )
        """, (saveId, req["user"].id, comment, req["host"]))
    return self.response({
        "Status": 1
    })
async def post(self) -> json_response:
    """Check that the requesting client is the author of the given chat.

    Responds ``{'status': True}`` when the client owns the chat identified
    by ``id`` in the JSON body, otherwise ``{'status': False, 'error': ...}``.
    """
    response = {}
    data = await self.request.json()
    chat = Chat(
        pk=data.get('id')
    )
    if self.request.app['db']:
        chat.db = self.request.app['db']
    try:
        chat_is = await chat.get()
        if not chat_is.get('author') == self.request.client.get('_id'):
            raise NotPermissions
        response = {
            'status': True,
        }
    except (Exception,) as error:
        # Any failure (missing chat, NotPermissions, ...) becomes a
        # structured error payload.
        response = {
            'status': False,
            'error': "{}".format(error)
        }
    # Fix: the return used to live in a ``finally`` block, which silently
    # swallows BaseExceptions — notably asyncio.CancelledError — breaking
    # task cancellation. Returning after the try/except is equivalent for
    # all handled cases and lets cancellation propagate.
    return json_response(
        response
    )
async def index(request):
    """Return the API index with listing and creation URLs."""
    payload = Index({
        'list': _listing_url(request),
        'create': _create_url(request),
    })
    return web.json_response(payload)
async def get(self):
    """Return the profile pair identified by ``pair_id``."""
    pair_id = self.request.match_info.get('pair_id')
    matched_pair = get_pair(pair_id)
    return web.json_response(matched_pair)
async def get(self) -> json_response:
    """Return metadata for the chat named by the ``id`` path parameter."""
    chat = Chat(
        pk=self.request.match_info.get('id')
    )
    if self.request.app['db']:
        chat.db = self.request.app['db']
    chat_info = await chat.get()
    if not chat_info:
        return json_response({
            'status': False,
            'error': 'chat not found'
        })
    return json_response({
        'status': True,
        'chat-uid': "{}".format(chat.pk),
        'client-list': await chat.list_clients,
        'author': "{}".format(chat_info.get('author'))
    })
async def ajax_page(self, status, page):
    """Render the record-list partial for ``page`` and wrap it in JSON."""
    context = await self.get_page_context(page)
    rendered = aiohttp_jinja2.render_string(
        'admin/partials/_record_list.html', self.request, context)
    return web.json_response({'status': status, 'record_list': rendered})
async def serve_v2(request):
    """
    Handler for Docker Registry v2 root.

    The docker client uses this endpoint to discover that the V2 API is
    available; an empty JSON object (HTTP 200) is the expected probe answer.
    """
    return web.json_response({})
async def on_my_friends(self, request):
    """Serve a canned Facebook Graph-style friends list fixture."""
    friends = [
        {"name": "Bill Doe", "id": "233242342342"},
        {"name": "Mary Doe", "id": "2342342343222"},
        {"name": "Alex Smith", "id": "234234234344"},
    ]
    paging = {
        "cursors": {
            "before": "QVFIUjRtc2c5NEl0ajN",
            "after": "QVFIUlpFQWM0TmVuaDRad0dt",
        },
        "next": ("https://graph.facebook.com/v2.7/12345678901234567/"
                 "friends?access_token=EAACEdEose0cB")
    }
    return web.json_response({
        "data": friends,
        "paging": paging,
        "summary": {"total_count": 3},
    })
async def login(request):
    """Validate a hashcash proof-of-work login attempt.

    The client must present the token previously issued under the
    ``X_POFFW_HEADER`` header plus a counter solving the stored hashcash
    task. Tokens are single-use; a failed proof raises the stored
    complexity for the client's IP. All failures respond 400.
    """
    data = await request.json()
    counter = data.get('counter')
    login = data.get('login')
    password = data.get('password')
    token = request.headers.get(X_POFFW_HEADER)
    if not all((token, counter, login, password,)):
        return web.Response(status=HTTPStatus.BAD_REQUEST)
    redis = request.app['redis']
    task = await redis.get(token)
    if not task:
        return web.Response(status=HTTPStatus.BAD_REQUEST)
    # Single-use token: drop it before verifying so it cannot be replayed.
    await redis.delete(token)
    task = json.loads(task.decode('utf-8'))
    task['counter'] = counter
    if not hashcash.verify(**task):
        # Failed proof-of-work: make the next challenge harder for this IP.
        user_ip, _ = request.transport.get_extra_info('peername')
        await complexity.store(redis, user_ip)
        return web.Response(status=HTTPStatus.BAD_REQUEST)
    return web.json_response()
def action_current(request):
    """Return the current action /action/current endpoint.

    :param request: a web request object.
    :type request: request | None
    """
    return web.json_response({'current_action': current_action})
async def search_bikes_of_bike_type(self, request):
    """Search bikes restricted to the bike type taken from the URL path."""
    bike_type_id = request.match_info['bike_type_id']
    criteria = extract_search_query_parameters(request.rel_url.query)
    criteria = {**criteria, "bike_type_id": bike_type_id}
    bikes, total = await self.dao_search_bikes(criteria)
    return web.json_response({"items": bikes, "total": total})
async def remove_bike(self, request):
    """Delete the bike whose ``id`` is given in the query string."""
    bike_id = request.rel_url.query['id']
    await self.dao_remove_bike(bike_id)
    return web.json_response({"ok": True})
async def get(self: web.View) -> Response:
    """List all sources as JSON; requires READ permission."""
    await check_permission(self.request, Permissions.READ)
    sources = await sources_get()
    return web.json_response([source.dict() for source in sources])
async def hello(request):
    """Return ``{key: value}`` for the ``key`` path parameter.

    Missing keys yield ``{key: null}`` via ``dict.get``.
    """
    key = request.match_info["key"]
    # Fix: dropped the ``global store`` declaration — the module-level
    # mapping is only read here, never rebound, so the statement was
    # misleading no-op noise.
    return web.json_response({key: store.get(key)})
async def acs(request):
    """Process an ACS callback; a zero status code means success."""
    status = await asyncio.shield(process_acs(request))
    return web.json_response({
        'Success': status == 0,
        'Message': None
    })
async def get_work(request, data):
    """Hand a queued analysis/move job to a fishnet worker.

    Pops the next job from the fishnet queue and answers 202 with the work
    payload. When the queue is empty, any pending move job older than
    MOVE_WORK_TIME_OUT is re-issued; otherwise 204 (no work) is returned.
    Every decision is logged into the per-worker fishnet monitor.
    """
    fm = request.app["fishnet_monitor"]
    key = data["fishnet"]["apikey"]
    worker = FISHNET_KEYS[key]
    fishnet_work_queue = request.app["fishnet"]

    # priority can be "move" or "analysis"
    try:
        (priority, work_id) = fishnet_work_queue.get_nowait()
        try:
            fishnet_work_queue.task_done()
        except ValueError:
            log.error(
                "task_done() called more times than there were items placed in the queue in fishnet.py get_work()"
            )
        work = request.app["works"][work_id]
        # print("FISHNET ACQUIRE we have work for you:", work)

        if priority == ANALYSIS:
            fm[worker].append("%s %s %s %s of %s moves" % (datetime.utcnow(), work_id, "request", "analysis", work["moves"].count(" ") + 1))
            # delete previous analysis
            gameId = work["game_id"]
            game = await load_game(request.app, gameId)
            if game is None:
                # Game vanished; nothing to analyse.
                return web.Response(status=204)
            for step in game.steps:
                if "analysis" in step:
                    del step["analysis"]
            # Tell spectators in the game room that the job was dispatched.
            users = request.app["users"]
            user_ws = users[work["username"]].game_sockets[work["game_id"]]
            response = {
                "type": "roundchat",
                "user": "",
                "room": "spectator",
                "message": "Work for fishnet sent..."
            }
            await user_ws.send_json(response)
        else:
            fm[worker].append("%s %s %s %s for level %s" % (datetime.utcnow(), work_id, "request", "move", work["work"]["level"]))
        return web.json_response(work, status=202)
    except asyncio.QueueEmpty:
        # There was no new work in the queue. Ok
        # Now let see are there any long time pending work in app["works"]
        # (in case when worker grabbed it from queue but not responded after MOVE_WORK_TIME_OUT secs)
        pending_works = request.app["works"]
        now = monotonic()
        for work_id in pending_works:
            work = pending_works[work_id]
            if work["work"]["type"] == "move" and (now - work["time"] > MOVE_WORK_TIME_OUT):
                fm[worker].append("%s %s %s %s for level %s" % (datetime.utcnow(), work_id, "request", "move AGAIN", work["work"]["level"]))
                return web.json_response(work, status=202)
    return web.Response(status=204)
async def void(request):
    """Process a void request; a zero status code means success."""
    status = await asyncio.shield(process_void(request))
    return web.json_response({
        'Success': status == 0,
        'Message': messages.get(status)
    })
def test_text_is_json_encoded(self):
    """The response text must be the JSON encoding of the payload."""
    payload = {'foo': 42}
    resp = json_response(payload)
    assert resp.text == json.dumps(payload)
async def wallet_did_list(request: web.BaseRequest):
    """
    Request handler for searching wallet DIDs.

    Args:
        request: aiohttp request object

    Returns:
        The DID list response

    """
    context: AdminRequestContext = request["context"]
    session = await context.session()
    wallet = session.inject(BaseWallet, required=False)
    if not wallet:
        raise web.HTTPForbidden(reason="No wallet available")
    filter_did = request.query.get("did")
    filter_verkey = request.query.get("verkey")
    filter_posture = DIDPosture.get(request.query.get("posture"))
    results = []
    public_did_info = await wallet.get_public_did()
    posted_did_infos = await wallet.get_posted_dids()

    if filter_posture is DIDPosture.PUBLIC:
        # Only the single public DID can match; apply both filters to it.
        if (public_did_info
                and (not filter_verkey or public_did_info.verkey == filter_verkey)
                and (not filter_did or public_did_info.did == filter_did)):
            results.append(format_did_info(public_did_info))
    elif filter_posture is DIDPosture.POSTED:
        results = []
        for info in posted_did_infos:
            if (not filter_verkey or info.verkey == filter_verkey) and (
                    not filter_did or info.did == filter_did):
                results.append(format_did_info(info))
    elif filter_did:
        try:
            info = await wallet.get_local_did(filter_did)
        except WalletError:
            # badly formatted DID or record not found
            info = None
        if (info and (not filter_verkey or info.verkey == filter_verkey)
                and (filter_posture is None
                     or (filter_posture is DIDPosture.WALLET_ONLY
                         and not info.metadata.get("posted")))):
            results.append(format_did_info(info))
    elif filter_verkey:
        try:
            info = await wallet.get_local_did_for_verkey(filter_verkey)
        except WalletError:
            info = None
        # Fix: this branch referenced ``DID_POSTURE.WALLET_ONLY`` — an
        # undefined name that raised NameError whenever a posture filter was
        # combined with a verkey filter; the enum is ``DIDPosture``
        # everywhere else in this handler.
        if info and (filter_posture is None
                     or (filter_posture is DIDPosture.WALLET_ONLY
                         and not info.metadata.get("posted"))):
            results.append(format_did_info(info))
    else:
        # No filters: list every local DID that is wallet-only (or all,
        # when no posture filter was supplied).
        dids = await wallet.get_local_dids()
        results = [
            format_did_info(info)
            for info in dids
            if filter_posture is None
            or DIDPosture.get(info.metadata) is DIDPosture.WALLET_ONLY
        ]

    results.sort(key=lambda info: (DIDPosture.get(info["posture"]).ordinal,
                                   info["did"]))

    return web.json_response({"results": results})
async def verify_token(request):
    """Decode and verify the ES256-signed token from the JSON body."""
    body = await request.json()
    claims = decode(body["token"],
                    request.app["config"]["keys"]["public"],
                    algorithms=["ES256"])
    return web.json_response(data=claims)
async def get_processes_info(request, agent_id, pretty=False, wait_for_complete=False, offset=0, limit=None,
                             select=None, sort=None, search=None, pid=None, state=None, ppid=None, egroup=None,
                             euser=None, fgroup=None, name=None, nlwp=None, pgrp=None, priority=None,
                             rgroup=None, ruser=None, sgroup=None, suser=None, q=None):
    """ Get processes info of an agent

    :param agent_id: Agent ID
    :param pretty: Show results in human-readable format
    :param wait_for_complete: Disable timeout response
    :param offset: First element to return in the collection
    :param limit: Maximum number of elements to return
    :param select: Select which fields to return (separated by comma)
    :param sort: Sorts the collection by a field or fields (separated by comma). Use +/- at the beginning to list in
    ascending or descending order.
    :param search: Looks for elements with the specified string
    :param pid: Filters by process pid
    :param state: Filters by process state
    :param ppid: Filters by process parent pid
    :param egroup: Filters by process egroup
    :param euser: Filters by process euser
    :param fgroup: Filters by process fgroup
    :param name: Filters by process name
    :param nlwp: Filters by process nlwp
    :param pgrp: Filters by process pgrp
    :param priority: Filters by process priority
    :param rgroup: Filters by process rgroup
    :param ruser: Filters by process ruser
    :param sgroup: Filters by process sgroup
    :param suser: Filters by process suser
    :param q: Query to filter results by.
    :return: Data
    """
    # Per-field filters; None entries are stripped by remove_nones_to_dict.
    filters = {
        'state': state,
        'pid': pid,
        'ppid': ppid,
        'egroup': egroup,
        'euser': euser,
        'fgroup': fgroup,
        'name': name,
        'nlwp': nlwp,
        'pgrp': pgrp,
        'priority': priority,
        'rgroup': rgroup,
        'ruser': ruser,
        'sgroup': sgroup,
        'suser': suser
    }
    f_kwargs = {
        'agent_list': [agent_id],
        'offset': offset,
        'limit': limit,
        'select': select,
        'sort': parse_api_param(sort, 'sort'),
        'search': parse_api_param(search, 'search'),
        'filters': filters,
        'element_type': 'processes',
        'q': q
    }
    # Dispatch to the master node's syscollector module.
    dapi = DistributedAPI(
        f=syscollector.get_item_agent,
        f_kwargs=remove_nones_to_dict(f_kwargs),
        request_type='distributed_master',
        is_async=False,
        wait_for_complete=wait_for_complete,
        logger=logger,
        rbac_permissions=request['token_info']['rbac_policies'])
    data = raise_if_exc(await dapi.distribute_function())
    return web.json_response(data=data, status=200, dumps=prettify if pretty else dumps)
async def get(self) -> web.Response:
    """Return the current LedFX configuration as JSON."""
    payload = {'config': self._ledfx.config}
    return web.json_response(data=payload, status=200)
async def wrapper(request, *args, **kwargs):
    """Invoke the wrapped handler and JSON-encode its return value."""
    payload = await f(request, *args, **kwargs)
    return web.json_response(payload, dumps=json.dumps)
async def handle_spec(self, request):
    """Serve the API specification document as JSON."""
    return web.json_response(self.spec)
async def get_user_roles(request):
    """
    Return a list of project/role for a user.

    ---
    description: Return a list of user project roles
    tags:
        - Project UserRole
    parameters:
        - name: user_id
          description: id of the user
          required: true
          type: integer
    responses:
        "200":
            description: Return a dict with results
            schema:
                type: object
                properties:
                    total_result_count:
                        type: integer
                    results:
                        type: array
                        items:
                            type: object
                            properties:
                                id:
                                    type: integer
                                username:
                                    type: string
                                is_admin:
                                    type: boolean
        "400":
            description: Invalid input where given
    """
    user_id = request.match_info["user_id"]
    try:
        user_id = int(user_id)
    except (ValueError, TypeError):
        return ErrorResponse(400, "Incorrect user_id")
    user = request.cirrina.db_session.query(User).filter_by(id=user_id).first()
    if not user:
        return ErrorResponse(404, "User not found")
    data = {
        "username": user.username,
        "user_id": user.id,
        "roles": {
            "owner": [],
            "member": [],
            "manager": []
        },  # FIXME : use USER_ROLES
    }
    # Join user roles with their projects, ordered by project name.
    roles = (request.cirrina.db_session.query(UserRole).filter_by(
        user_id=user_id).join(Project).filter(
            UserRole.project_id == Project.id).order_by(Project.name).values(
                UserRole.role, Project.id, Project.name))
    for role in roles:
        data["roles"][role.role].append({"id": role.id, "name": role.name})
    return web.json_response(data)
async def schemas_send_schema(request: web.BaseRequest):
    """
    Request handler for creating a schema.

    Args:
        request: aiohttp request object

    Returns:
        The schema id sent

    """
    context: AdminRequestContext = request["context"]
    # ?create_transaction_for_endorser=true routes the write through an
    # endorser instead of writing to the ledger directly.
    create_transaction_for_endorser = json.loads(
        request.query.get("create_transaction_for_endorser", "false"))
    write_ledger = not create_transaction_for_endorser
    endorser_did = None
    connection_id = request.query.get("conn_id")

    body = await request.json()
    schema_name = body.get("schema_name")
    schema_version = body.get("schema_version")
    attributes = body.get("attributes")

    if not write_ledger:
        # Endorser flow: resolve the connection and its endorser metadata.
        try:
            async with context.session() as session:
                connection_record = await ConnRecord.retrieve_by_id(
                    session, connection_id)
        except StorageNotFoundError as err:
            raise web.HTTPNotFound(reason=err.roll_up) from err
        except BaseModelError as err:
            raise web.HTTPBadRequest(reason=err.roll_up) from err
        session = await context.session()
        endorser_info = await connection_record.metadata_get(
            session, "endorser_info")
        if not endorser_info:
            raise web.HTTPForbidden(
                reason="Endorser Info is not set up in "
                "connection metadata for this connection record")
        if "endorser_did" not in endorser_info.keys():
            raise web.HTTPForbidden(
                reason=' "endorser_did" is not set in "endorser_info"'
                " in connection metadata for this connection record")
        endorser_did = endorser_info["endorser_did"]

    ledger = context.inject_or(BaseLedger)
    if not ledger:
        reason = "No ledger available"
        if not context.settings.get_value("wallet.type"):
            reason += ": missing wallet-type?"
        raise web.HTTPForbidden(reason=reason)

    issuer = context.inject(IndyIssuer)
    async with ledger:
        try:
            # if create_transaction_for_endorser, then the returned "schema_def"
            # is actually the signed transaction
            schema_id, schema_def = await shield(
                ledger.create_and_send_schema(
                    issuer,
                    schema_name,
                    schema_version,
                    attributes,
                    write_ledger=write_ledger,
                    endorser_did=endorser_did,
                ))
        except (IndyIssuerError, LedgerError) as err:
            raise web.HTTPBadRequest(reason=err.roll_up) from err

    if not create_transaction_for_endorser:
        return web.json_response({
            "schema_id": schema_id,
            "schema": schema_def
        })
    else:
        # Wrap the signed transaction in a record for the endorser to sign.
        session = await context.session()
        transaction_mgr = TransactionManager(session)
        try:
            transaction = await transaction_mgr.create_record(
                messages_attach=schema_def["signed_txn"],
                connection_id=connection_id)
        except StorageError as err:
            raise web.HTTPBadRequest(reason=err.roll_up) from err
        return web.json_response({"txn": transaction.serialize()})
async def get_ports_info(request, agent_id, pretty=False, wait_for_complete=False, offset=0, limit=None,
                         select=None, sort=None, search=None, pid=None, protocol=None, tx_queue=None,
                         state=None, process=None, q=None):
    """ Get ports info of an agent

    :param agent_id: Agent ID
    :param pretty: Show results in human-readable format
    :param wait_for_complete: Disable timeout response
    :param offset: First element to return in the collection
    :param limit: Maximum number of elements to return
    :param select: Select which fields to return (separated by comma)
    :param sort: Sorts the collection by a field or fields (separated by comma). Use +/- at the beginning to list in
    ascending or descending order.
    :param search: Looks for elements with the specified string
    :param pid: Filters by pid
    :param protocol: Filters by protocol
    :param tx_queue: Filters by tx_queue
    :param state: Filters by state
    :param process: Filters by process
    :param q: Query to filter results by.
    :return: Data
    """
    filters = {
        'pid': pid,
        'protocol': protocol,
        'tx_queue': tx_queue,
        'state': state,
        'process': process
    }
    # Add nested fields to kwargs filters
    # (dotted names cannot be function parameters, so read them directly).
    nested = ['local.ip', 'local.port', 'remote.ip']
    for field in nested:
        filters[field] = request.query.get(field, None)
    f_kwargs = {
        'agent_list': [agent_id],
        'offset': offset,
        'limit': limit,
        'select': select,
        'sort': parse_api_param(sort, 'sort'),
        'search': parse_api_param(search, 'search'),
        'filters': filters,
        'element_type': 'ports',
        'q': q
    }
    # Dispatch to the master node's syscollector module.
    dapi = DistributedAPI(
        f=syscollector.get_item_agent,
        f_kwargs=remove_nones_to_dict(f_kwargs),
        request_type='distributed_master',
        is_async=False,
        wait_for_complete=wait_for_complete,
        logger=logger,
        rbac_permissions=request['token_info']['rbac_policies'])
    data = raise_if_exc(await dapi.distribute_function())
    return web.json_response(data=data, status=200, dumps=prettify if pretty else dumps)
async def list_tasks(self, request):
    """Return the scheduler's schedule, round-tripped through JSON."""
    schedule_json = json.loads(repr(self.scheduler.schedule))
    return web.json_response(schedule_json)
async def get_network_interface_info(request, agent_id, pretty=False, wait_for_complete=False, offset=0,
                                     limit=None, select=None, sort=None, search=None, name=None, adapter=None,
                                     state=None, mtu=None, q=None):
    """ Get network interface info of an agent

    :param agent_id: Agent ID
    :param pretty: Show results in human-readable format
    :param wait_for_complete: Disable timeout response
    :param offset: First element to return in the collection
    :param limit: Maximum number of elements to return
    :param select: Select which fields to return (separated by comma)
    :param sort: Sorts the collection by a field or fields (separated by comma). Use +/- at the beginning to list in
    ascending or descending order.
    :param search: Looks for elements with the specified string
    :param name: Name of the network interface
    :param adapter: Filters by adapter
    :param state: Filters by state
    :param mtu: Filters by mtu
    :param q: Query to filter results by.
    :return: Data
    """
    filters = {
        'adapter': adapter,
        # 'type' would shadow the builtin as a parameter name, so it is
        # read straight from the query string.
        'type': request.query.get('type', None),
        'state': state,
        'name': name,
        'mtu': mtu
    }
    # Add nested fields to kwargs filters
    nested = [
        'tx.packets', 'rx.packets', 'tx.bytes', 'rx.bytes',
        'tx.errors', 'rx.errors', 'tx.dropped', 'rx.dropped'
    ]
    for field in nested:
        filters[field] = request.query.get(field, None)
    f_kwargs = {
        'agent_list': [agent_id],
        'offset': offset,
        'limit': limit,
        'select': select,
        'sort': parse_api_param(sort, 'sort'),
        'search': parse_api_param(search, 'search'),
        'filters': filters,
        'element_type': 'netiface',
        'q': q
    }
    # Dispatch to the master node's syscollector module.
    dapi = DistributedAPI(
        f=syscollector.get_item_agent,
        f_kwargs=remove_nones_to_dict(f_kwargs),
        request_type='distributed_master',
        is_async=False,
        wait_for_complete=wait_for_complete,
        logger=logger,
        rbac_permissions=request['token_info']['rbac_policies'])
    data = raise_if_exc(await dapi.distribute_function())
    return web.json_response(data=data, status=200, dumps=prettify if pretty else dumps)
async def get_users(request):
    """
    Return a list of users.

    ---
    description: Returns a list of users.
    tags:
        - Users
    produces:
        - application/json
    parameters:
        - name: page
          description: page number
          in: query
          required: false
          type: integer
        - name: page_size
          description: page size
          in: query
          required: false
          type: integer
        - name: name
          description: query to filter username
          in: query
          required: false
          type: string
        - name: email
          description: query to filter email
          in: query
          required: false
          type: string
        - name: admin
          description: return only admin if true
          in: query
          required: false
          type: boolean
    produces:
        - text/json
    responses:
        "200":
            description: successful
            schema:
                type: object
                properties:
                    total_result_count:
                        type: integer
                    results:
                        type: array
                        items:
                            type: object
                            properties:
                                id:
                                    type: integer
                                username:
                                    type: string
                                is_admin:
                                    type: boolean
        "400":
            description: invalid input where given
    """
    name = request.GET.getone("name", "")
    email = request.GET.getone("email", "")
    admin = request.GET.getone("admin", "false")
    query = request.cirrina.db_session.query(User)
    if admin.lower() == "true":
        query = query.filter(User.is_admin)
    # Case-insensitive substring match on username / email.
    if name:
        query = query.filter(User.username.ilike("%{}%".format(name)))
    if email:
        query = query.filter(User.email.ilike("%{}%".format(email)))
    query = query.order_by(User.username)
    # Count before pagination so the total reflects the whole result set.
    data = {"total_result_count": query.count()}
    query = paginate(request, query)
    users = query.all()
    data["results"] = [{
        "id": user.id,
        "username": user.username,
        "email": user.email,
        "is_admin": user.is_admin
    } for user in users]
    return web.json_response(data)
async def search_bikes(self, request):
    """Search public bikes, scoped by the requester's email."""
    params = extract_search_query_parameters(request.rel_url.query)
    email = extract_email_from_request(request)
    criteria = {**params, "is_public": True, "email": email}
    bikes, total = await self.dao_search_bikes(criteria)
    return web.json_response({"items": bikes, "total": total})
async def search_bike_types(self, request):
    """Search bike types matching the query-string parameters."""
    criteria = extract_search_query_parameters(request.rel_url.query)
    items, total = await self.dao_search_bike_types(criteria)
    return web.json_response({"items": items, "total": total})
async def get_packages_info(request, agent_id, pretty=False, wait_for_complete=False, offset=0,
                            limit=None, select=None, sort=None, search=None, vendor=None,
                            name=None, architecture=None, version=None, q=None):
    """Get packages info of an agent.

    :param agent_id: Agent ID
    :param pretty: Show results in human-readable format
    :param wait_for_complete: Disable timeout response
    :param offset: First element to return in the collection
    :param limit: Maximum number of elements to return
    :param select: Select which fields to return (separated by comma)
    :param sort: Sorts the collection by a field or fields (separated by comma). Use +/- at the
                 beginning to list in ascending or descending order.
    :param search: Looks for elements with the specified string
    :param vendor: Filters by vendor
    :param name: Filters by name
    :param architecture: Filters by architecture
    :param version: Filters by version
    :param q: Query to filter results by.
    :return: Data
    """
    # The 'format' filter only arrives via the raw query string; all other
    # filters come in as named parameters.
    f_kwargs = {
        'agent_list': [agent_id],
        'offset': offset,
        'limit': limit,
        'select': select,
        'sort': parse_api_param(sort, 'sort'),
        'search': parse_api_param(search, 'search'),
        'filters': {
            'vendor': vendor,
            'name': name,
            'architecture': architecture,
            'format': request.query.get('format', None),
            'version': version,
        },
        'element_type': 'packages',
        'q': q,
    }

    # Dispatch through the distributed API; None-valued kwargs are stripped
    # before the call so framework defaults apply.
    dapi = DistributedAPI(
        f=syscollector.get_item_agent,
        f_kwargs=remove_nones_to_dict(f_kwargs),
        request_type='distributed_master',
        is_async=False,
        wait_for_complete=wait_for_complete,
        logger=logger,
        rbac_permissions=request['token_info']['rbac_policies'])
    data = raise_if_exc(await dapi.distribute_function())

    return web.json_response(data=data, status=200, dumps=prettify if pretty else dumps)
def test_content_type_is_application_json_by_default(self):
    """Without an explicit content_type, application/json is used."""
    response = json_response('')
    assert response.content_type == 'application/json'
async def handle_webhook_trigger_update(hass, webhook_id, request):
    """Handle trigger update webhook callback.

    Refreshes the service and acknowledges the webhook; the request
    payload itself is not inspected.
    """
    await update_service(None)
    ack = {"success": "ok"}
    return web.json_response(ack)
async def ajax_empty(self, status):
    """Return a minimal JSON body carrying only the given status."""
    body = {"status": status}
    return web.json_response(body)
def test_data_and_body_raises_value_error(self):
    """Supplying both data and body must raise ValueError with the documented message."""
    with pytest.raises(ValueError) as excinfo:
        json_response(data='foo', body=b'bar')
    assert excinfo.value.args[0] == (
        'only one of data, text, or body should be specified')
def test_passing_text_only(self):
    """A text= payload is passed through to the response verbatim."""
    payload = json.dumps('jaysawn')
    resp = json_response(text=payload)
    assert payload == resp.text
async def wallet_set_public_did(request: web.BaseRequest):
    """
    Request handler for setting the current public DID.

    Args:
        request: aiohttp request object

    Returns:
        The updated DID info

    """
    context: AdminRequestContext = request["context"]
    session = await context.session()
    wallet = session.inject(BaseWallet, required=False)
    if not wallet:
        raise web.HTTPForbidden(reason="No wallet available")
    # The DID to promote comes from the query string; it is mandatory.
    did = request.query.get("did")
    if not did:
        raise web.HTTPBadRequest(reason="Request query must include DID")

    # Multitenancy setup
    multitenant_mgr = session.inject(MultitenantManager, required=False)
    wallet_id = session.settings.get("wallet.id")

    try:
        ledger = session.inject(BaseLedger, required=False)
        if not ledger:
            reason = "No ledger available"
            if not session.settings.get_value("wallet.type"):
                reason += ": missing wallet-type?"
            raise web.HTTPForbidden(reason=reason)

        # The DID must already be written to the ledger before it can
        # become the public DID for this wallet.
        async with ledger:
            if not await ledger.get_key_for_did(did):
                raise web.HTTPNotFound(
                    reason=f"DID {did} is not posted to the ledger")

        did_info = await wallet.get_local_did(did)
        info = await wallet.set_public_did(did)
        if info:
            # Publish endpoint if necessary
            endpoint = did_info.metadata.get("endpoint")
            if not endpoint:
                # No endpoint stored locally — fall back to the configured
                # default and record it on the wallet DID first.
                endpoint = session.settings.get("default_endpoint")
                await wallet.set_did_endpoint(info.did, endpoint, ledger)
            async with ledger:
                await ledger.update_endpoint_for_did(info.did, endpoint)

            # Add multitenant relay mapping so implicit invitations are still routed
            if multitenant_mgr and wallet_id:
                await multitenant_mgr.add_wallet_route(
                    wallet_id, info.verkey, skip_if_exists=True)

    except WalletNotFoundError as err:
        # DID not found in the local wallet.
        raise web.HTTPNotFound(reason=err.roll_up) from err
    except (LedgerError, WalletError) as err:
        raise web.HTTPBadRequest(reason=err.roll_up) from err

    return web.json_response({"result": format_did_info(info)})
def test_content_type_is_overrideable(self):
    """An explicit content_type argument overrides the default."""
    custom_type = 'application/vnd.json+api'
    resp = json_response({'foo': 42}, content_type=custom_type)
    assert resp.content_type == custom_type