def main():
    """Print result buckets for an existing job, optionally polling for more.

    Fetches every bucket recorded so far (filtered by the anomaly-score and
    normalized-probability thresholds from the command line) and prints them.
    If --continue was requested, keeps polling every POLL_INTERVAL_SECS for
    buckets written after the last one seen.
    """
    setupLogging()
    args = parseArguments()
    job_id = args.jobid

    # Create the REST API client
    engine_client = EngineApiClient(args.host, BASE_URL, args.port)

    # Get all the buckets up to now
    logging.info("Get result buckets for job " + job_id)
    (http_status_code, response) = engine_client.getAllBuckets(job_id,
        include_records=False,
        anomaly_score_filter_value=args.anomalyScore,
        normalized_probability_filter_value=args.normalizedProbability)
    if http_status_code != 200:
        print (http_status_code, json.dumps(response))
        return

    printHeader()
    printBuckets(response)

    if args.continue_poll:
        # Resume from the bucket after the last one printed.
        # BUG FIX: next_bucket_id was previously left as None when the job
        # had no buckets yet, so the poll below passed the literal string
        # "None" as start_date. Default to 1 (start of the epoch range,
        # since bucket ids are used as dates here) to poll from the start.
        if len(response) > 0:
            next_bucket_id = int(response[-1]['id']) + 1
        else:
            next_bucket_id = 1

        while True:
            # Wait POLL_INTERVAL_SECS then query for any new buckets
            time.sleep(POLL_INTERVAL_SECS)

            (http_status_code, response) = engine_client.getBucketsByDate(
                job_id=job_id,
                start_date=str(next_bucket_id),
                end_date=None,
                include_records=False,
                anomaly_score_filter_value=args.anomalyScore,
                normalized_probability_filter_value=args.normalizedProbability)
            if http_status_code != 200:
                print (http_status_code, json.dumps(response))
                break

            printBuckets(response)
            if len(response) > 0:
                next_bucket_id = int(response[-1]['id']) + 1
def main():
    """End-to-end demo: create a job, upload a CSV data file, close the
    job, then print each result bucket as a CSV line on stdout.

    Exits early (printing the HTTP status and response body) if any API
    call returns an unexpected status code.
    """
    setupLogging()
    args = parseArguments()

    # Create the REST API client
    engine_client = EngineApiClient(args.host, BASE_URL, args.port)

    # Job configuration: hourly buckets, one metric detector on
    # responsetime split by airline, reading comma-delimited input.
    job_config = '{"analysisConfig" : {\
"bucketSpan":3600,\
"detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}] },\
"dataDescription" : {"fieldDelimiter":",", "timeField":"time", "timeFormat":"yyyy-MM-dd HH:mm:ssX"} }'

    logging.info("Creating job")
    (http_status_code, response) = engine_client.createJob(job_config)
    if http_status_code != 201:
        print(http_status_code, json.dumps(response))
        return

    job_id = response['id']
    logging.info("Uploading data to " + job_id)
    # BUG FIX: the file handle was previously bound to the name `file`
    # (shadowing the builtin) and never closed; `with` guarantees closure.
    with open(args.file, 'rb') as data_file:
        (http_status_code, response) = engine_client.upload(job_id, data_file)
    if http_status_code != 202:
        print(http_status_code, json.dumps(response))
        return

    logging.info("Closing job " + job_id)
    (http_status_code, response) = engine_client.close(job_id)
    if http_status_code != 202:
        print(http_status_code, json.dumps(response))
        return

    logging.info("Get result buckets for job " + job_id)
    (http_status_code, response) = engine_client.getAllBuckets(job_id)
    if http_status_code != 200:
        print(http_status_code, json.dumps(response))
    else:
        # BUG FIX: header typo "Probablility" -> "Probability"
        print("Date,Anomaly Score,Max Normalized Probability")
        for bucket in response:
            print("{0},{1},{2}".format(bucket['timestamp'],
                bucket['anomalyScore'], bucket['maxNormalizedProbability']))
def main():
    """End-to-end demo: create a job, upload a CSV data file, close the
    job, then print each result bucket as a CSV line on stdout.

    Exits early (printing the HTTP status and response body) if any API
    call returns an unexpected status code.
    """
    setupLogging()
    args = parseArguments()

    # Create the REST API client
    engine_client = EngineApiClient(args.host, BASE_URL, args.port)

    # Job configuration: hourly buckets, one metric detector on
    # responsetime split by airline, reading comma-delimited input.
    job_config = '{"analysisConfig" : {\
"bucketSpan":3600,\
"detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}] },\
"dataDescription" : {"fieldDelimiter":",", "timeField":"time", "timeFormat":"yyyy-MM-dd HH:mm:ssX"} }'

    logging.info("Creating job")
    (http_status_code, response) = engine_client.createJob(job_config)
    if http_status_code != 201:
        print (http_status_code, json.dumps(response))
        return

    job_id = response['id']
    logging.info("Uploading data to " + job_id)
    # BUG FIX: the file handle was previously bound to the name `file`
    # (shadowing the builtin) and never closed; `with` guarantees closure.
    with open(args.file, 'rb') as data_file:
        (http_status_code, response) = engine_client.upload(job_id, data_file)
    if http_status_code != 202:
        print (http_status_code, json.dumps(response))
        return

    logging.info("Closing job " + job_id)
    (http_status_code, response) = engine_client.close(job_id)
    if http_status_code != 202:
        print (http_status_code, json.dumps(response))
        return

    logging.info("Get result buckets for job " + job_id)
    (http_status_code, response) = engine_client.getAllBuckets(job_id)
    if http_status_code != 200:
        print (http_status_code, json.dumps(response))
    else:
        # BUG FIX: header typo "Probablility" -> "Probability"
        print("Date,Anomaly Score,Max Normalized Probability")
        for bucket in response:
            print("{0},{1},{2}".format(bucket['timestamp'],
                bucket['anomalyScore'], bucket['maxNormalizedProbability']))
def main():
    """Print every bucket of an existing job as CSV, then poll forever.

    Fetches all buckets recorded so far, then wakes every
    POLL_INTERVAL_SECS to fetch and print any buckets written after the
    last one seen. Stops only on an API error.
    """
    setupLogging()
    args = parseArguments()
    host = args.host
    port = args.port
    base_url = BASE_URL
    job_id = args.jobid

    # Create the REST API client
    engine_client = EngineApiClient(host, base_url, port)

    # Get all the buckets up to now
    logging.info("Get result buckets for job " + job_id)
    (http_status_code, response) = engine_client.getAllBuckets(job_id)
    if http_status_code != 200:
        print (http_status_code, json.dumps(response))
        return

    print("Date,BucketId,AnomalyScore")
    for bucket in response:
        print("{0},{1},{2}".format(bucket['timestamp'], bucket['id'],
            bucket['anomalyScore']))

    # BUG FIX: next_bucket_id was previously left as None when the job had
    # no buckets yet, so the poll below passed the literal string "None"
    # as start_date. Default to 1 (start of the epoch range, since bucket
    # ids are used as dates here) to poll from the start.
    if len(response) > 0:
        next_bucket_id = int(response[-1]['id']) + 1
    else:
        next_bucket_id = 1

    # Wait POLL_INTERVAL_SECS then query for any new buckets
    while True:
        time.sleep(POLL_INTERVAL_SECS)

        (http_status_code, response) = engine_client.getBucketsByDate(
            job_id=job_id, start_date=str(next_bucket_id), end_date=None)
        if http_status_code != 200:
            print (http_status_code, json.dumps(response))
            break

        for bucket in response:
            print("{0},{1},{2}".format(bucket['timestamp'], bucket['id'],
                bucket['anomalyScore']))

        if len(response) > 0:
            next_bucket_id = int(response[-1]['id']) + 1