def DELETE(self):
  """ Delete models for multiple instances

  ::

      DELETE /_instances

  DELETE data:

  ::

      [
          "{region}/{namespace}/{instanceId}",
          ...
      ]

  Returns:

  ::

      {
          "result": "success"
      }

  :raises InvalidRequestResponse: if the request body is not valid JSON or
      is empty
  :raises web.notfound: if one or more instances had no models to delete
  """
  try:
    instances = utils.jsonDecode(web.data())
  except ValueError:
    # jsonDecode raises ValueError on malformed JSON; the original bare
    # except also hid programming errors — catch only the decode failure.
    raise InvalidRequestResponse({"result": "Invalid request"})

  if not instances:
    raise InvalidRequestResponse(
      {"result": ("Missing instances in DELETE"
                  " request")})

  deleted = []
  # NOTE: the original re-checked `if instances:` here, which is always true
  # after the guard above — the redundant check has been removed.
  for server in instances:
    with web.ctx.connFactory() as conn:
      modelIds = repository.listMetricIDsForInstance(conn, server)
    if modelIds:
      for modelId in modelIds:
        ModelHandler.deleteModel(modelId)
      # An instance counts as deleted only if it actually had models
      deleted.append(server)

  if instances == deleted:
    self.addStandardHeaders()
    return utils.jsonEncode({"result": "success"})

  # Report exactly which requested instances could not be deleted
  raise web.notfound(
    "Not able to delete %s" %
    utils.jsonEncode(list(set(instances) - set(deleted))))
def createModel(cls, modelSpec=None):
  """ Start monitoring a metric (or import one) described by modelSpec.

  NOTE MER-3479: this code path is presently incorrectly used for two
  purposes: importing of all types of metrics (not desirable; there should
  be a separate endpoint or an import-specific flag in this endpoint for
  importing that facilitates slightly different behavior, such as
  suppressing certain errors to allow for re-import in case of transient
  error part way through the prior import).

  :param modelSpec: datasource-specific model specification dict; must be
      truthy
  :returns: metric row for the monitored/imported metric
  :raises InvalidRequestResponse: if modelSpec is missing or rejected by
      the adapter
  """
  if not modelSpec:
    # Metric data is missing
    log.error(
      "Data is missing in request, raising BadRequest exception")
    raise InvalidRequestResponse({"result": "Metric data is missing"})

  # TODO MER-3479: import using import-specific endpoint
  # NOTE: pending MER-3479, this is presently a hack for exercising
  # the adapter import API: a custom-datasource spec carrying "data" is
  # treated as an import request.
  importing = (modelSpec.get("datasource") == "custom"
               and "data" in modelSpec)

  try:
    adapter = createDatasourceAdapter(modelSpec["datasource"])
    try:
      # TODO: Maybe initialize transaction and commit here
      metricId = (adapter.importModel(modelSpec) if importing
                  else adapter.monitorMetric(modelSpec))
    except app_exceptions.MetricAlreadyMonitored as exc:
      # Already monitored is not an error; reuse the existing metric
      metricId = exc.uid

    with web.ctx.connFactory() as conn:
      return repository.getMetric(conn, metricId)
  except (ValueError, app_exceptions.MetricNotSupportedError) as exc:
    raise InvalidRequestResponse({"result": repr(exc)})
def PUT(self, modelId=None):
  """ Create Model

  ::

      POST /_models

  Data: Use the metric as returned by the datasource metric list.

  For example, create a custom model, include the following data in the
  POST request (uid is the same for the metric and model):

  ::

      {
          "uid": "2a123bb1dd4d46e7a806d62efc29cbb9",
          "datasource": "custom",
          "min": 0.0,
          "max": 5000.0
      }

  The "min" and "max" options are optional.

  :raises NotAllowedResponse: if a modelId was supplied in the URL (this
      handler creates models; it does not operate on a known model)
  :raises InvalidRequestResponse: if the body is not valid JSON or fails
      schema validation
  :raises web.badrequest: if the request body is empty
  """
  if modelId:
    # ModelHandler is overloaded to handle both single-model requests, and
    # multiple-model requests. As a result, if a user makes a POST, or PUT
    # request, it's possible that the request can be routed to this handler
    # if the url pattern matches. This specific POST handler is not meant
    # to operate on a known model, therefore, raise an exception, and return
    # a `405 Method Not Allowed` response.
    raise NotAllowedResponse({"result": "Not supported"})

  data = web.data()
  if data:
    try:
      if isinstance(data, basestring):
        request = utils.jsonDecode(data)
      else:
        request = data
    except ValueError as e:
      response = "InvalidArgumentsError(): " + repr(e)
      raise InvalidRequestResponse({"result": response})

    if not isinstance(request, list):
      request = [request]

    for nativeMetric in request:
      try:
        validate(nativeMetric, _CUSTOM_MODEL_CREATION_SCHEMA)
      except ValidationError as e:
        # Catch ValidationError if validation fails
        # InvalidRequestResponse produces an HTTP 400 error code
        response = "InvalidArgumentsError(): " + repr(e)
        raise InvalidRequestResponse({"result": response})
  else:
    # Metric data is missing
    log.error(
      "Data is missing in request, raising BadRequest exception")
    raise web.badrequest("Metric data is missing")

  try:
    self.addStandardHeaders()
    metricRowList = self.createModels(data)
    metricDictList = [formatMetricRowProxy(metricRow)
                      for metricRow in metricRowList]
    response = utils.jsonEncode(metricDictList)
    raise web.created(response)
  except web.HTTPError as ex:
    # Raw string fixes the invalid "\s" escape in the original pattern
    # (byte-identical regex at runtime, no SyntaxWarning on modern Python)
    if re.match(r"([45][0-9][0-9])\s?", web.ctx.status):
      # Log 400-599 status codes as errors, ignoring 200-399
      log.error(str(ex) or repr(ex))
    raise
  except Exception as ex:
    log.exception("PUT Failed")
    raise web.internalerror(str(ex) or repr(ex))
def GET(self, metricName):
  """ Get Tweet Data

  ::

      GET /_tweets/{metric}?from={fromTimestamp}&to={toTimestamp}

  Parameters:

  :param from: (required) return records from this timestamp
  :type from: timestamp
  :param to: (required) return records up to this timestamp
  :type to: timestamp
  :param sortOrder: Sort order ("asc" or "desc")
  :type sortOrder: str
  :param sortBy: "sort by" field ("agg_ts" or "created_at")
  :type sortBy: str

  Returns:

  ::

      {
          "data": [...],
          "names": ["uid", "created_at", "text", "username", "userid"]
      }

  Streams msgpack-framed tuples instead of JSON when the client sends an
  "application/octet-stream" Accept header.
  """
  params = dict(urlparse.parse_qsl(web.ctx.env["QUERY_STRING"]))

  fromTimestamp = params.get("from")
  if not fromTimestamp:
    raise InvalidRequestResponse({"result": "Invalid `from` value"})

  toTimestamp = params.get("to")
  if not toTimestamp:
    raise InvalidRequestResponse({"result": "Invalid `to` value"})

  # Map the requested sort order onto the matching sqlalchemy helper
  orderingByName = {"asc": asc, "desc": desc}
  try:
    direction = orderingByName[params.get("sortOrder", "desc").lower()]
  except KeyError:
    raise InvalidRequestResponse(
      {"result": "Invalid `sortOrder` value"})

  orderByField = params.get("sortBy", "created_at")
  if orderByField not in ("agg_ts", "created_at"):
    raise InvalidRequestResponse({"result": "Invalid `sortBy` value"})

  fields = [tweets.c.uid,
            tweets.c.created_at,
            samples.c.agg_ts,
            tweets.c.text,
            tweets.c.username,
            tweets.c.userid]

  # First element is the literal marker "names", then the column names
  names = ("names",) + tuple(col.name for col in fields)

  with g_connFactory() as conn:
    joined = samples.join(tweets, samples.c.msg_uid == tweets.c.uid)
    query = (select(fields)
             .select_from(joined)
             .where(samples.c.metric == metricName)
             .where(fromTimestamp <= samples.c.agg_ts)
             .where(samples.c.agg_ts <= toTimestamp))
    result = conn.execute(query.order_by(direction(orderByField)))

  if "application/octet-stream" in web.ctx.env.get("HTTP_ACCEPT", ""):
    # Binary streaming path: one msgpack frame per row
    packer = msgpack.Packer()
    self.addStandardHeaders(content_type="application/octet-stream")
    web.header("X-Accel-Buffering", "no")

    yield packer.pack(names)
    for row in result:
      yield packer.pack((row.uid,
                         calendar.timegm(row.created_at.timetuple()),
                         calendar.timegm(row.agg_ts.timetuple()),
                         row.text,
                         row.username,
                         row.userid))
  else:
    # JSON path: materialize all rows into a single response document
    payload = {
      "names": ["uid", "created_at", "agg_ts", "text", "username",
                "userid"],
      "data": [(row.uid,
                row.created_at.strftime("%Y-%m-%d %H:%M:%S"),
                row.agg_ts.strftime("%Y-%m-%d %H:%M:%S"),
                row.text,
                row.username,
                row.userid) for row in result]
    }
    self.addStandardHeaders()
    yield utils.jsonEncode(payload)