def main():

    setupLogging()

    args = parseArguments()
    job_id = args.jobid

    # Create the REST API client
    engine_client = EngineApiClient(args.host, BASE_URL, args.port)

    # Get all the buckets up to now
    logging.info("Get result buckets for job " + job_id)
    (http_status_code, response) = engine_client.getAllBuckets(job_id, 
        include_records=False, 
        anomaly_score_filter_value=args.anomalyScore,
        normalized_probability_filter_value=args.normalizedProbability)

    if http_status_code != 200:
        print (http_status_code, json.dumps(response))
        return

    printHeader()
    printBuckets(response)

    if args.continue_poll:

        if len(response) > 0:
            next_bucket_id = int(response[-1]['id']) + 1
        else:
            # No buckets yet - bucket ids start at 1 (passing None would be
            # stringified to "None" in the query below)
            next_bucket_id = 1
        
        while True:
            # Wait POLL_INTERVAL_SECS then query for any new buckets
            time.sleep(POLL_INTERVAL_SECS)

            (http_status_code, response) = engine_client.getBucketsByDate(job_id=job_id, 
                start_date=str(next_bucket_id), end_date=None, 
                include_records=False,         
                anomaly_score_filter_value=args.anomalyScore,
                normalized_probability_filter_value=args.normalizedProbability)

            if http_status_code != 200:
                print (http_status_code, json.dumps(response))
                break

            printBuckets(response)
            
            if len(response) > 0:
                next_bucket_id = int(response[-1]['id']) + 1
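The example above delegates output to printHeader() and printBuckets(), which are defined elsewhere in the source file. A minimal sketch of what they might look like, assuming the same CSV columns the next example prints inline:

def printHeader():
    # Hypothetical helper - the column names are taken from the inline
    # prints in the following example.
    print("Date,BucketId,AnomalyScore")


def printBuckets(buckets):
    # Hypothetical helper - 'buckets' is the list of bucket dicts returned
    # by the Engine API client.
    for bucket in buckets:
        print("{0},{1},{2}".format(bucket['timestamp'], bucket['id'],
                                   bucket['anomalyScore']))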
def main():

    setupLogging()

    args = parseArguments()
    host = args.host
    port = args.port
    base_url = BASE_URL
    job_id = args.jobid

    # Create the REST API client
    engine_client = EngineApiClient(host, base_url, port)

    # Get all the buckets up to now
    logging.info("Get result buckets for job " + job_id)
    (http_status_code, response) = engine_client.getAllBuckets(job_id)
    if http_status_code != 200:
        print (http_status_code, json.dumps(response))
        return

    print "Date,BucketId,AnomalyScore"
    for bucket in response:
        print "{0},{1},{2}".format(bucket['timestamp'], bucket['id'], bucket['anomalyScore'])
    
    if len(response) > 0:
        next_bucket_id = int(response[-1]['id']) + 1
    else:
        # No buckets yet - bucket ids start at 1 (passing None would be
        # stringified to "None" in the query below)
        next_bucket_id = 1

    # Wait POLL_INTERVAL_SECS then query for any new buckets
    while True:
        time.sleep(POLL_INTERVAL_SECS)

        (http_status_code, response) = engine_client.getBucketsByDate(job_id=job_id, 
            start_date=str(next_bucket_id), end_date=None)
        if http_status_code != 200:
            print (http_status_code, json.dumps(response))
            break

        for bucket in response:
            print "{0},{1},{2}".format(bucket['timestamp'], bucket['id'], bucket['anomalyScore'])
        
        if len(response) > 0:
            next_bucket_id = int(response[-1]['id']) + 1
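Both examples read command-line options from parseArguments(), which is not shown. Below is a sketch of an argparse-based version covering the attributes the examples read (host, port, jobid, anomalyScore, normalizedProbability, continue_poll, file, duration); the flag names and defaults are assumptions and may differ from the real script.

import argparse

def parseArguments():
    # Hypothetical implementation - only the attribute names come from the
    # examples; the flags and defaults are assumptions.
    parser = argparse.ArgumentParser(description='Engine API example client')
    parser.add_argument('--host', default='localhost', help='Engine API host')
    parser.add_argument('--port', type=int, default=8080, help='Engine API port')
    parser.add_argument('--anomalyScore', type=float, default=None,
                        help='filter buckets by anomaly score')
    parser.add_argument('--normalizedProbability', type=float, default=None,
                        help='filter buckets by normalized probability')
    parser.add_argument('--continue-poll', dest='continue_poll',
                        action='store_true',
                        help='keep polling for new result buckets')
    parser.add_argument('--duration', type=int, default=0,
                        help='hours of data to generate (<= 0 means until now)')
    parser.add_argument('--file', help='CSV file to replay')
    parser.add_argument('jobid', nargs='?', help='the job to query')
    return parser.parse_args()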
Example #3
def main():
    args = parseArguments()

    start_date = datetime(2014, 5, 18, 0, 0, 0, 0, UtcOffset())
    # interval between the generated timestamps for the records
    interval = timedelta(seconds=300)

    if args.duration <= 0:
        end_date = datetime.now(UtcOffset())
    else:
        duration = timedelta(hours=args.duration)
        end_date = start_date + duration

    job_config = '{\
        "analysisConfig" : {\
            "bucketSpan":3600,\
            "detectors" :[\
                {"fieldName":"In Discards","byFieldName":"host"},\
                {"fieldName":"In Octets","byFieldName":"host"},\
                {"fieldName":"Out Discards","byFieldName":"host"},\
                {"fieldName":"Out Octets","byFieldName":"host"} \
            ]\
        },\
        "dataDescription" : {\
            "fieldDelimiter":",",\
            "timeField":"time",\
            "timeFormat":"yyyy-MM-dd\'T\'HH:mm:ssXXX"\
        }\
    }'

    engine_client = EngineApiClient(args.host, BASE_URL, args.port)
    (http_status_code, response) = engine_client.createJob(job_config)
    if http_status_code != 201:
        print (http_status_code, json.dumps(response))
        return

    job_id = response['id']
    print 'Job created with Id = ' + job_id

    # get the csv header (the first record generated)
    record_generator = generateRecords(args.file, start_date, interval, end_date)
    header = ','.join(next(record_generator))
    header += '\n'

    count = 0
    try:
        # for the results
        next_bucket_id = 1
        print
        print "Date,Anomaly Score,Max Normalized Probability"

        data = header
        for record in record_generator:
            # format as csv and append new line
            csv = ','.join(record) + '\n'
            data += csv
            # print data

            count += 1
            if count == 100:
                (http_status_code, response) = engine_client.upload(job_id, data)
                if http_status_code != 202:
                    print (http_status_code, json.dumps(response))
                    break

                # get the latest results...
                (http_status_code, response) = engine_client.getBucketsByDate(job_id=job_id,
                    start_date=str(next_bucket_id), end_date=None)
                if http_status_code != 200:
                    print (http_status_code, json.dumps(response))
                    break

                # and print them
                for bucket in response:
                    print "{0},{1},{2}".format(bucket['timestamp'],
                        bucket['anomalyScore'], bucket['maxNormalizedProbability'])

                if len(response) > 0:
                    next_bucket_id = int(response[-1]['id']) + 1

                # must send the header every time
                data = header
                count = 0

            # sleep a little while (optional this can be removed)
            #time.sleep(0.1)

    except KeyboardInterrupt:
        print "Keyboard interrupt, closing job..."

    (http_status_code, response) = engine_client.close(job_id)
    if http_status_code != 202:
        print (http_status_code, json.dumps(response))
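This example feeds the job from generateRecords(), a generator defined elsewhere that yields the CSV header first and then individual records. One way it could be written, assuming it replays the rows of the input file with the 'time' field rewritten to timestamps stepped by 'interval' until 'end_date' is reached:

import csv

def generateRecords(filename, start_date, interval, end_date):
    # Hypothetical implementation - the cyclic replay and the timestamp
    # rewriting are assumptions based on how the examples consume the generator.
    with open(filename, 'rb') as csv_file:   # 'rb' for the csv module on Python 2
        reader = csv.reader(csv_file)
        header = next(reader)
        yield header                         # first item is the CSV header

        time_index = header.index('time')    # time field named in the job config
        rows = [row for row in reader]

    if not rows:
        return

    record_time = start_date
    i = 0
    while record_time < end_date:
        row = list(rows[i % len(rows)])      # replay the file's rows cyclically
        row[time_index] = record_time.strftime('%Y-%m-%dT%H:%M:%S+00:00')
        yield row
        record_time += interval
        i += 1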
Example #4
def main():
    args = parseArguments()

    start_date = datetime(2014, 5, 18, 0, 0, 0, 0, UtcOffset())
    # interval between the generated timestamps for the records
    interval = timedelta(seconds=300)

    if args.duration <= 0:
        end_date = datetime.now(UtcOffset())
    else:
        duration = timedelta(hours=args.duration)
        end_date = start_date + duration

    job_config = '{\
        "analysisConfig" : {\
            "bucketSpan":3600,\
            "detectors" :[\
                {"fieldName":"In Discards","byFieldName":"host"},\
                {"fieldName":"In Octets","byFieldName":"host"},\
                {"fieldName":"Out Discards","byFieldName":"host"},\
                {"fieldName":"Out Octets","byFieldName":"host"} \
            ]\
        },\
        "dataDescription" : {\
            "fieldDelimiter":",",\
            "timeField":"time",\
            "timeFormat":"yyyy-MM-dd\'T\'HH:mm:ssXXX"\
        }\
    }'

    engine_client = EngineApiClient(args.host, BASE_URL, args.port)
    (http_status_code, response) = engine_client.createJob(job_config)
    if http_status_code != 201:
        print(http_status_code, json.dumps(response))
        return

    job_id = response['id']
    print 'Job created with Id = ' + job_id

    # get the csv header (the first record generated)
    record_generator = generateRecords(args.file, start_date, interval,
                                       end_date)
    header = ','.join(next(record_generator))
    header += '\n'

    count = 0
    try:
        # for the results
        next_bucket_id = 1
        print
        print "Date,Bucket ID,Anomaly Score,Max Normalized Probability"

        data = header
        for record in record_generator:
            # format as csv and append new line
            csv = ','.join(record) + '\n'
            data += csv
            # print data

            count += 1
            if count == 100:
                (http_status_code,
                 response) = engine_client.upload(job_id, data)
                if http_status_code != 202:
                    print(http_status_code, json.dumps(response))
                    break

                # get the latest results...
                (http_status_code, response) = engine_client.getBucketsByDate(
                    job_id=job_id,
                    start_date=str(next_bucket_id),
                    end_date=None)
                if http_status_code != 200:
                    print(http_status_code, json.dumps(response))
                    break

                # and print them
                for bucket in response:
                    print "{0},{1},{2},{3}".format(
                        bucket['timestamp'], bucket['id'],
                        bucket['anomalyScore'],
                        bucket['maxNormalizedProbability'])

                if len(response) > 0:
                    next_bucket_id = int(response[-1]['id']) + 1

                # must send the header every time
                data = header
                count = 0

            # sleep a little while (optional this can be removed)
            #time.sleep(0.1)

    except KeyboardInterrupt:
        print "Keyboard interrupt, closing job..."

    (http_status_code, response) = engine_client.close(job_id)
    if http_status_code != 202:
        print(http_status_code, json.dumps(response))
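The generated timestamps are made timezone-aware with UtcOffset(), which is also defined outside these snippets. A minimal fixed-UTC tzinfo that would satisfy this usage (the real class may carry a configurable offset):

from datetime import timedelta, tzinfo

class UtcOffset(tzinfo):
    # Hypothetical stand-in: a zero-offset (UTC) timezone.
    def utcoffset(self, dt):
        return timedelta(0)

    def tzname(self, dt):
        return 'UTC'

    def dst(self, dt):
        return timedelta(0)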