def alert(params: http.QueryParams):
    """Dispatch SMS / WeChat alerts for one monitoring event.

    Required query params: ``endpoint``, ``exp_id``, ``metric``, ``tpl_id``
    and ``tags``.  Returns ``{"error": ...}`` when required params are
    missing; otherwise a dict with the delivery results for ``"sms"`` and
    ``"wechat"``.  Alerts recognised as known noise are answered with the
    sentinel value ``"misstatement"`` instead of being delivered.
    """
    # Explicit validation instead of `assert`, which is stripped under -O.
    # `tags` is validated too: the previous code read params["tags"]
    # unconditionally, so a missing value crashed with KeyError.
    required = ("endpoint", "exp_id", "metric", "tpl_id", "tags")
    if any(key not in params.keys() for key in required):
        return {"error": "endpoint, exp_id, metric, tpl_id and tags must be in params"}

    endpoint = params["endpoint"].strip()
    metric = params["metric"].strip()
    tags = params["tags"].strip().replace(":", "=")
    tpl_id = int(params["tpl_id"])
    exp_id = int(params["exp_id"])
    user_infos = dbapi.get_user_contact_by_tpl_id(tpl_id, exp_id)

    # Sentinel response for alerts that are suppressed as noise.
    misstatement = {"sms": "misstatement", "wechat": "misstatement"}

    # Filter the current 415 errors for now; restore after the next release.
    if tags in ("api=__serv__,errcode=415", "api=/dangdang/api/config,errcode=415"):
        return misstatement
    # Filter the current 400 errors for now, until the root cause is found.
    if tags in ("api=__serv__,errcode=400", "api=/dangdang/api/log,errcode=400"):
        return misstatement
    # zhulong / 7k7k alerts are temporarily muted.
    if "zhulong" in tags or "7k7k" in tags:
        return misstatement

    metric = metric + "/" + tags
    if "errcode=404" in metric:
        return misstatement

    events = dbapi.get_infos_by_endpoint_metric_time(endpoint, metric, interval=10)
    if not events:
        return misstatement
    event_info = events[0]

    # user row layout (from dbapi): index 2 = phone, index 3 = wechat id.
    phones = ",".join(user[2] for user in user_infos)
    wechats = ",".join(user[3] for user in user_infos)

    # SMS fires only when the same alert occurred 3+ times within 10 minutes;
    # WeChat is notified in either case.
    if event_info["count"] >= 3:
        return {
            "sms": send_sms(event_info, phones),
            "wechat": send_wechat(event_info, wechats),
        }
    return {"sms": "misstatement", "wechat": send_wechat(event_info, wechats)}
def list_player_games(auth_player: Player, player_slug: str, filters: http.QueryParams) -> dict:
    """Return one page of a player's games, filtered and ordered.

    Supported meta query params (consumed, not passed to the DB): ``order``
    (asc/desc), ``order_by`` (field name), ``page`` (1-based).  All remaining
    params are forwarded to MongoEngine as filters.

    Raises:
        BadRequest: when a filter key touches the player field.
    """
    limit = 10  # fixed page size
    dict_filters = {}
    for key, value in filters.items():
        # Clients must not filter on the player field itself.
        if "player" in key:
            raise BadRequest("Cannot filter on player.")
        # MongoEngine-style filters (keys containing "__") carry their value
        # as JSON; single quotes are tolerated and normalised first.
        if "__" in key:
            value = ujson.loads(value.replace("'", '"'))
        dict_filters[key] = value

    order = dict_filters.pop("order", "asc")
    order_by = dict_filters.pop("order_by", "created_at")
    page = int(dict_filters.pop("page", 1))
    offset = (page - 1) * limit  # removed stray debug print(offset)

    ordering = f"{'-' if order == 'desc' else ''}{order_by}"
    games = (Game.objects(**dict_filters)
             .order_by(ordering)
             .skip(offset)
             .limit(limit)
             .all())
    game_count = Game.objects(**dict_filters).count()

    return {
        "total": game_count,
        "page": page,
        "last_page": ceil(game_count / limit),
        "games": [mongo_to_dict(g) for g in games],
    }
def resolve(self, parameter: Parameter, query_params: http.QueryParams) -> http.QueryParam:
    """Resolve a handler parameter from the request's query string.

    Falls back to the parameter's declared default when the query param is
    absent.  Raises AssertionError when the param is missing and there is
    no default — raised explicitly (instead of via `assert`) so the check
    still runs under `python -O`, while keeping the exception type callers
    already handle.
    """
    name = parameter.name
    if name not in query_params and parameter.default == inspect._empty:
        raise AssertionError(f"Query Param: {name} not found!")
    return http.QueryParam(query_params.get(name, parameter.default))
async def reservations_list(session: Session, query_params: http.QueryParams) -> typing.List[ReservationType]:
    """List reservations, optionally narrowed by date query params.

    When given, ``start_date`` keeps reservations starting no later than it,
    and ``end_date`` keeps reservations ending no earlier than it — i.e.
    reservations that span the requested window.
    """
    filters = dict(query_params)
    query = session.query(Reservation)

    lower = filters.get('start_date')
    if lower:
        query = query.filter(Reservation.start_date <= lower)

    upper = filters.get('end_date')
    if upper:
        query = query.filter(Reservation.end_date >= upper)

    return [
        ReservationType(ReservationSerializer().dump(row).data)
        for row in query.all()
    ]
def uquery(params: http.QueryParams) -> list:
    """Filter the user file by query params and return matching records.

    Each line of the user file is a ':'-separated record; field 1 (the
    password field) is dropped.  A record matches when every supplied query
    param equals the corresponding field named in ``userf``.

    Raises:
        NotFound: when the user file cannot be opened (or the config key
            is missing).
        BadRequest: when no query params were supplied.
    """
    try:
        fopen = open(fields["fpuser"], "r")
    except (KeyError, OSError):
        # Narrowed from a bare `except:`: missing config key or bad path.
        b = exceptions.NotFound()
        b.default_detail = "Bad user file path"
        raise b

    # `with` guarantees the handle is closed — the original leaked it,
    # including on the BadRequest path below.
    with fopen:
        if not params._list:
            raise exceptions.BadRequest()  # empty query
        lines = fopen.read().splitlines()

    retlist = []
    for line in lines:
        f = line.split(":")
        f.pop(1)  # drop the password field
        for i, ufield in enumerate(f):
            # A supplied (truthy) param that differs rejects the record.
            if params.get(userf[i]) and params.get(userf[i]) != ufield:
                break
            if i == len(f) - 1:  # survived every filter: keep the record
                retlist.append(dict(zip(userf, f)))
    return retlist
def node_metadata_exchange(self, request: Request, query_params: QueryParams):
    """Exchange node metadata with a learner over REST.

    If the learner already reports our fleet-state checksum, reply with a
    signed snapshot immediately; otherwise decode the announced nodes from
    the request body, schedule verification for each previously unknown
    node, and finally return the full node listing.
    """
    # If these nodes already have the same fleet state, no exchange is necessary.
    learner_fleet_state = query_params.get('fleet')
    if learner_fleet_state == self._node_tracker.checksum:
        self.log.debug(
            "Learner already knew fleet state {}; doing nothing.".format(
                learner_fleet_state))
        headers = {'Content-Type': 'application/octet-stream'}
        payload = self._node_tracker.snapshot()
        # Response body is the stamp signature prepended to the snapshot.
        signature = self._stamp(payload)
        # NOTE(review): HTTP 204 ("No Content") is returned *with* a body
        # here — many clients/proxies drop 204 bodies; confirm intended.
        return Response(bytes(signature) + payload,
                        headers=headers,
                        status_code=204)

    nodes = self._node_class.batch_from_bytes(
        request.body,
        federated_only=self.federated_only,  # TODO: 466
    )

    # TODO: This logic is basically repeated in learn_from_teacher_node and remember_node.  Let's find a better way.  555
    for node in nodes:
        if node in self._node_tracker:
            continue  # TODO: 168 Check version and update if required.

        # NOTE(review): this closure captures the loop variable `node` by
        # reference (late binding).  If @crosstown_traffic defers execution
        # past the loop, every deferred handler may see the *last* node —
        # confirm crosstown_traffic's scheduling semantics.
        @crosstown_traffic()
        def learn_about_announced_nodes():
            try:
                certificate_filepath = node.get_certificate_filepath(
                    certificates_dir=self._certificate_dir
                )
                # TODO: integrate with recorder?
                node.save_certificate_to_disk(
                    directory=self._certificate_dir, force=True)
                node.verify_node(
                    self.network_middleware,
                    accept_federated_only=self.federated_only,  # TODO: 466
                    certificate_filepath=certificate_filepath)
            except node.SuspiciousActivity:
                # TODO: Account for possibility that stamp, rather than interface, was bad.
                # NOTE(review): the "{}" placeholder is never .format()-ed
                # before logging — the raw template is what gets logged.
                message = "Suspicious Activity: Discovered node with bad signature: {}. " \
                          " Announced via REST."  # TODO: Include data about caller?
                self.log.warn(message)
                self._suspicious_activity_tracker['vladimirs'].append(
                    node
                )  # TODO: Maybe also record the bytes representation separately to disk?
            except Exception as e:
                self.log.critical(str(e))
                raise  # TODO
            else:
                self.log.info("Previously unknown node: {}".format(
                    node.checksum_public_address))
                self._node_recorder(node)

    # TODO: What's the right status code here?  202?  Different if we already knew about the node?
    return self.all_known_nodes(request)
def query_argument(self, name: ParamName, query_params: http.QueryParams, coerce: ParamAnnotation) -> typing.Any:
    """Fetch query param `name` and coerce it to the annotated type.

    A missing param (None) or a value already of the target type is
    returned as-is.  A failed coercion raises ValidationError whose
    detail is keyed by the param name.
    """
    raw = query_params.get(name)
    if raw is None or isinstance(raw, coerce):
        return raw
    try:
        return coerce(raw)
    except exceptions.TypeSystemError as exc:
        detail = {name: exc.detail}
    except (TypeError, ValueError) as exc:
        detail = {name: str(exc)}
    raise exceptions.ValidationError(detail=detail)
def gquery(params: http.QueryParams) -> list:
    """Filter the group file by query params and return matching records.

    Each line of the group file is a ':'-separated record; field 1 (the
    password field) is dropped.  The final field is always the member list
    and is split on ','; a ``member`` query param matches when every
    requested member appears in that list.

    Raises:
        NotFound: when the group file cannot be opened (or the config key
            is missing).
        BadRequest: when no query params were supplied.
    """
    try:
        fopen = open(fields["fpgroup"], "r")
    except (KeyError, OSError):
        # Narrowed from a bare `except:`: missing config key or bad path.
        b = exceptions.NotFound()
        b.default_detail = "Bad group file path"
        raise b

    # `with` guarantees the handle is closed — the original leaked it,
    # including on the BadRequest path below.
    with fopen:
        if not params._list:
            raise exceptions.BadRequest()  # empty query
        lines = fopen.read().splitlines()

    retlist = []
    for line in lines:
        f = line.split(":")
        f.pop(1)  # drop the password field
        for i, gfield in enumerate(f):
            if i == len(f) - 1:
                # Last element is always the member list.
                f[-1] = f[-1].split(",")
                if params._dict.get("member"):
                    qmems = params.get_list("member")  # requested members
                    if len(qmems) > len(f[-1]):
                        break
                    # Every requested member must be present in the group.
                    if len(set(qmems) & set(f[-1])) != len(qmems):
                        break
                retlist.append(dict(zip(groupf, f)))
            # NOTE(review): this generic field filter runs *after* the append
            # on the final iteration (unlike uquery, which filters first) —
            # preserved as-is; confirm whether that ordering is intended.
            if params.get(groupf[i]) and params.get(groupf[i]) != gfield:
                break
    return retlist
def empty(self, name: ParamName, kwargs: KeywordArgs, query_params: http.QueryParams) -> str:
    """
    Resolve an unannotated handler parameter for an HTTP request.

    URL keyword arguments (as matched by the router) take precedence;
    otherwise the value comes from the query string, yielding None when
    the parameter is absent from both.

    Args:
        name: The name of the parameter.
        kwargs: The URL keyword arguments, as returned by the router.
        query_params: The query parameters of the incoming HTTP request.

    Returns:
        The value that should be used for the handler function.
    """
    try:
        return kwargs[name]
    except KeyError:
        return query_params.get(name)
def scalar_type(self, name: ParamName, kwargs: KeywordArgs, query_params: http.QueryParams, coerce: ParamAnnotation) -> typing.Any:
    """
    Resolve a `str`, `int`, `float`, or `bool` annotated handler parameter.

    URL keyword arguments take precedence over query parameters.  A missing
    value (None) or one already of the target type passes through unchanged.
    A failed coercion of a URL argument raises NotFound; a failed coercion
    of a query parameter raises ValidationError keyed by the param name.

    Args:
        name: The name of the parameter.
        kwargs: The URL keyword arguments, as returned by the router.
        query_params: The query parameters of the incoming HTTP request.
        coerce: The type of the parameter.

    Returns:
        The value that should be used for the handler function.
    """
    is_url_arg = name in kwargs
    value = kwargs[name] if is_url_arg else query_params.get(name)

    if value is None or isinstance(value, coerce):
        return value

    try:
        return coerce(value)
    except exceptions.TypeSystemError as exc:
        detail = {name: exc.detail}
    except (TypeError, ValueError) as exc:
        detail = {name: str(exc)}

    if is_url_arg:
        raise exceptions.NotFound()
    raise exceptions.ValidationError(detail=detail)
def get_places_bbox(bbox, es: Elasticsearch, indices: IndexNames, settings: Settings, query_params: http.QueryParams):
    """Fetch POIs inside a bounding box described by the query parameters.

    Query params are validated through PlacesQueryParam; a validation
    failure is logged and surfaced as BadRequest.

    NOTE(review): the `bbox` argument is unused here — the effective bbox
    comes from the validated query params; confirm the parameter is
    required by the routing layer.
    """
    raw_params = dict(query_params)
    # `raw_filter` may repeat; collect every occurrence, not just the last.
    if 'raw_filter' in query_params:
        raw_params['raw_filter'] = query_params.get_list('raw_filter')

    try:
        params = PlacesQueryParam(**raw_params)
    except ValidationError as e:
        logger.warning(f"Validation Error: {e.json()}")
        raise BadRequest(detail={"message": e.errors()})

    bbox_places = fetch_bbox_places(
        es,
        indices,
        categories=params.raw_filter,
        bbox=params.bbox,
        max_size=params.size,
    )

    places_list = [
        POI.load_place(p['_source'], params.lang, settings, params.verbosity)
        for p in bbox_places
    ]
    return {"places": places_list}
def get_queryparam(name: ParamName, queryparams: http.QueryParams):
    """Return the query parameter `name`, or None when it is absent."""
    value = queryparams.get(name)
    return value
def get_query_params(query_params: http.QueryParams) -> http.Response:
    """Echo every query parameter (repeats included) back in the response."""
    # flat=False keeps a list of values per key instead of only the first.
    params_dict = query_params.to_dict(flat=False)
    return http.Response({'query_params': params_dict})