Example no. 1
0
def main(args):
    if "-h" in args or len(args) < 2:
        print 'Usage: python %s {compression factor} {input file path}' % (args[0])
        exit()
    
    compression_factor = int(args[1])    
        
    input_file_path = args[2]        
    
    motes = {} # holds last reading for each mote
    
    tr = TraceReader(input_file_path)
    tw = TraceWriter("%s_compressed.%s" % (os.path.basename(input_file_path).split(".")[0], tr.file_type), tr.arff_attributes)
    
    try:
        for timestamp, mote_id, counter, temperature in tr.read():            
            mote = motes.get(mote_id, None)
            if mote is not None:
                if len(mote['temp_buffer']) >= compression_factor:
                    avg_temp = sum(mote['temp_buffer']) / len(mote['temp_buffer'])
                    initial_timestamp = mote['timestamp_buffer'][0]
                    
                    tw.write((initial_timestamp, mote_id, avg_temp))
                    
                    del mote['temp_buffer'][:]
                    del mote['timestamp_buffer'][:]
            else:
                motes[mote_id] = {'temp_buffer': [], 'timestamp_buffer': []}
                
            motes[mote_id]['temp_buffer'].append(temperature)
            motes[mote_id]['timestamp_buffer'].append(timestamp)    
    finally:
        tw.close()
Example no. 2
0
    def __init__(self, trace_path, lag):
        """Open the trace at *trace_path* and discard its first lag-1 readings."""
        reader = TraceReader(trace_path)
        self.trace_gen = reader.read()

        # Fast-forward the generator so it is offset by the lag window.
        skipped = 0
        while skipped < lag - 1:
            self.trace_gen.next()
            skipped += 1

        self.training_delay = 0
        self.last_value = 0
Example no. 3
0
 def __init__(self, trace_path, lag):
     """Open the trace at *trace_path* and skip its first lag-1 readings."""
     oracle_trace = TraceReader(trace_path)

     self.trace_gen = oracle_trace.read()
     # Advance the generator so it is offset by the lag window.
     for _i in xrange(lag - 1):
         self.trace_gen.next()
     self.training_delay = 0
     self.last_value = 0
Example no. 4
0
def main(args):
    filename = '/home/giulio/Dropbox/Projeto Sensores/experiments/temperatura/sala_servidores/samples_20_02_13_15h05m47s.agg'
    motes_to_ignore = []
    tr = TraceReader(filename, motes_to_ignore)

    mote_first_data = {}
    mote_last_data = {}
    line_count = 0

    for data in tr.read():
        mote_id = data[1]
        if mote_id not in mote_first_data.iterkeys():
            mote_first_data[mote_id] = data
        mote_last_data[mote_id] = data
        line_count += 1

    tr.reset()

    # replicate all the first temps to the smallest timestamp
    min_timestamp = min(data[0] for data in mote_first_data.itervalues())

    for data in mote_first_data.itervalues():
        if data[0] != min_timestamp:
            data = list(data)
            data[0] = min_timestamp
            print " ".join(str(d) for d in data)

    # print all data except last line
    for data in tr.read():
        print " ".join(str(d) for d in data)
        line_count -= 1
        if line_count == 1:
            break

    max_timestamp = max(data[0] for data in mote_last_data.itervalues())

    for data in mote_last_data.itervalues():
        if data[0] != max_timestamp:
            data = list(data)
            data[0] = max_timestamp
            print " ".join(str(d) for d in data)
Example no. 5
0
def main(args):
    filename = '/home/giulio/Dropbox/Projeto Sensores/experiments/temperatura/sala_servidores/samples_20_02_13_15h05m47s.agg'
    motes_to_ignore = []
    tr = TraceReader(filename, motes_to_ignore)
    
    mote_first_data = {}
    mote_last_data = {}
    line_count = 0
    
    for data in tr.read():
        mote_id = data[1]
        if mote_id not in mote_first_data.iterkeys():
            mote_first_data[mote_id] = data
        mote_last_data[mote_id] = data
        line_count += 1
    
    tr.reset()
    
    # replicate all the first temps to the smallest timestamp
    min_timestamp = min(data[0] for data in mote_first_data.itervalues())
        
    for data in mote_first_data.itervalues():
        if data[0] != min_timestamp:
            data = list(data)
            data[0] = min_timestamp
            print " ".join(str(d) for d in data)
    
    # print all data except last line
    for data in tr.read():
        print " ".join(str(d) for d in data)
        line_count -= 1
        if line_count == 1:
            break
    
    max_timestamp = max(data[0] for data in mote_last_data.itervalues())
    
    for data in mote_last_data.itervalues():
        if data[0] != max_timestamp:
            data = list(data)
            data[0] = max_timestamp
            print " ".join(str(d) for d in data)
Example no. 6
0
def main():
    filename = r"D:\Giulio\My Dropbox\Projeto Sensores\experiments\temperatura\sala_servidores\samples_04_10_12_12h28m36s.arff"
    #filename = r"D:\Giulio\workspace2\SensorMonitor\output\arff\temps_25_05_12_15h09m48s.arff"    
    splits = 1
    
    #detectors = [CUSUMDetector(anomaly_threshold=0.01, L=0.0, alpha=0.6) for _ in xrange(splits)]    
    detectors = [TSBitmaps(lag_window=8, lead_window=8, anomaly_threshold=0.355, N=400, n=100, alphabet="abcd") for _ in xrange(splits)]
    splitter = SPIRITSplitter(detectors)
    
    tr = TraceReader(filename, supress_repetitions=False, auto_timestamps=False, suppress_rapid_changes=False)
    for data in tr.read():
        anomalies = splitter.update(data)
        if anomalies:
            print data['timestamp'] - 600, data['timestamp'] + 600#, anomalies
Example no. 7
0
def main(args):
    if "-h" in args or len(args) < 2:
        print 'Usage: python %s {compression factor} {input file path}' % (
            args[0])
        exit()

    compression_factor = int(args[1])

    input_file_path = args[2]

    motes = {}  # holds last reading for each mote

    tr = TraceReader(input_file_path)
    tw = TraceWriter(
        "%s_compressed.%s" %
        (os.path.basename(input_file_path).split(".")[0], tr.file_type),
        tr.arff_attributes)

    try:
        for timestamp, mote_id, counter, temperature in tr.read():
            mote = motes.get(mote_id, None)
            if mote is not None:
                if len(mote['temp_buffer']) >= compression_factor:
                    avg_temp = sum(mote['temp_buffer']) / len(
                        mote['temp_buffer'])
                    initial_timestamp = mote['timestamp_buffer'][0]

                    tw.write((initial_timestamp, mote_id, avg_temp))

                    del mote['temp_buffer'][:]
                    del mote['timestamp_buffer'][:]
            else:
                motes[mote_id] = {'temp_buffer': [], 'timestamp_buffer': []}

            motes[mote_id]['temp_buffer'].append(temperature)
            motes[mote_id]['timestamp_buffer'].append(timestamp)
    finally:
        tw.close()
Example no. 8
0
def main(args):
    """Benchmark a forecasting filter against a sensor trace and print errors.

    Usage: python <script> {trace path} [-s {weights path} {original trace path}]

    Fills a (lag+1)-deep buffer of readings, feeds the oldest buffered value
    to the forecaster while pairing its prediction with the newest
    observation, then computes and prints the configured error metrics.

    NOTE(review): relies on module-level names not visible in this chunk
    (TIME_MULT, MULTIVARIATE, MODE, RMSE, get_data, calc_errors,
    calc_spirit_errors, MultivariateFilter, PerceptronBase, filters,
    TraceReader, deque) -- confirm against the rest of the file.
    """
    if len(args) < 2:
        print "Usage: python %s {trace path} [-s (spirit mode) {weights path} {original trace path}]" % (args[0])
        exit()
    
    input_path = args[1]
    weights_path = None
    original_trace_path = None
    
    # SPIRIT mode scores predictions against a reconstructed signal, which
    # requires the projection weights and the original (pre-SPIRIT) trace.
    spirit_mode = '-s' in args
    if spirit_mode:
        assert not MULTIVARIATE
        
        spirit_mode_index = args.index('-s')
        weights_path = args[spirit_mode_index + 1]
        original_trace_path = args[spirit_mode_index + 2]
     
    tr = TraceReader(input_path)
    
    lag = int(5 * TIME_MULT) # forecasting horizon, in time units (seconds)
    errors_to_calculate = (filters.MSE, filters.MAE)
    
    # Alternative filter configurations kept for experimentation:
    #filter_args = {"alpha": 9 * 1e-1, "beta": 2 * 1e-2, "k": lag}
    #filter_args = {"alpha": 4 * 1e-1}
    #filter_args = {"num_last_measures" : 7, "learning_rate" : 1e-4, "lag": lag, "dataset_size": 1}
    #filter_args = {"dataset_size": 2 * TIME_MULT, "lag": lag, "order": 2}
    #filter_args = {"dataset_size": sys.maxint, "order": 1, "lag": lag, "optimize": False, \
    #               "c": 0, "phi": [1]}
    filter_args = {"lag": lag, "window_size": 3 * TIME_MULT}
    #filter_args = {}
    #filter_cls = filters.ExpAvg
    #filter_cls = filters.HoltFilter
    filter_cls = filters.AdaptableHoltFilter
    #filter_cls = filters.DummyFilter
    #filter_cls = filters.SigmoidPerceptron
    #filter_cls = filters.HardLearningLinearPerceptron
    #filter_cls = filters.LinearPerceptron
    #filter_cls = filters.MultiLayerPerceptron
    #filter_cls = filters.RollingPerceptron
    #filter_cls = filters.LazyRollingPerceptron
    #filter_cls = filters.LazyLinearPerceptron
    #filter_cls = filters.FiniteDiffPerceptron
    #filter_cls = filters.DiffPerceptron
    #filter_cls = filters.Oracle
    #filter_cls = filters.Bote
    #filter_cls = filters.SmoothingBote
    #filter_cls = filters.AdaptableARFilter
    #filter_cls = filters.ARFilter
    
    # The Oracle filter reads the future straight from the trace itself.
    if filter_cls == filters.Oracle:             
        filter_args = {'trace_path': input_path, "lag": lag}
    
    if MULTIVARIATE:
        forecaster = MultivariateFilter(filter_cls, filter_args, len(tr.arff_attributes) - 1) # ignore timestamp
    else:
        forecaster = filter_cls(**filter_args)    
    
    mote_id = "mote_239"
    # Skip at least the lag plus the filter's warm-up before scoring errors,
    # but never less than the desired fixed offset.
    offset = lag + forecaster.training_delay
    desired_offset = 12 * TIME_MULT
    offset = max(offset, desired_offset)
        
    print "# INFO"
    print "# Lag: %d" % (lag, )
    print "# Errors to calculate: %s" % (", ".join(filters.ERROR_TO_STR[x] for x in errors_to_calculate), )
    print "# Forecaster: %s" % (forecaster, )
    print "# Offset: %d" % (offset, )
        
    start_time = time.time()
    
    results = []  # (observation, prediction) pairs, in trace order
    
    print "Creating buffer..."
    # creates a buffer that consists of the length of the lag window
    data_gen = tr.read()
    
    # fill buffer
    data_buffer = deque(maxlen=lag + 1)
    for _i in xrange(lag + 1):
        data = data_gen.next() # next raw reading; get_data extracts the value(s) of interest
        data_buffer.append(get_data(data, mote_id, tr, multivariate=MULTIVARIATE))
    
    print "Making predictions..."            
    # store observations and predictions
    count = 0
    for data in data_gen:
        # Oldest buffered value feeds the forecaster; the newest buffered
        # value is the observation its prediction is compared against.
        observation = data_buffer[-1]
        prediction = forecaster.apply(data_buffer[0])
        results.append((observation, prediction))        
        data_buffer.append(get_data(data, mote_id, tr, multivariate=MULTIVARIATE))
        count += 1
        if count % 500 == 0 and isinstance(forecaster, PerceptronBase):
            print "%8d: %s" % (count, forecaster.debug()), len(forecaster.data)
    
    '''
    for fc in forecaster.filters:
        fc.optimize_parameters(fc._data, fc._model)
        print [val / 1e+11 for val in fc._model.itervalues()]
    '''
    
    print "Calculating errors..."
    timeseries_length = len(results)
    
    if spirit_mode:
        errors = calc_spirit_errors(timeseries_length, errors_to_calculate, lag, results, weights_path, original_trace_path)
    else:
        errors = calc_errors(timeseries_length, errors_to_calculate, results, multivariate=MULTIVARIATE, offset=offset)
    
        
    print "# RESULTS"    
    # create averages                
    for k in xrange(len(errors_to_calculate)):
        assert len(errors[k]) == timeseries_length
        
        # MODE selects how per-step errors are aggregated past the offset.
        # NOTE(review): avg_error is unbound if MODE is neither 'avg' nor
        # 'max' -- assumed to be enforced elsewhere in the file.
        if MODE == 'avg':
            avg_error = sum(errors[k][offset:]) / (timeseries_length - offset)
        elif MODE == 'max':
            avg_error = max(errors[k][offset:])
                    
        if errors_to_calculate[k] == filters.MSE and RMSE:
            avg_error = math.sqrt(avg_error)
            print "(RMSE mode)"
        print "%s: %f" % (filters.ERROR_TO_STR[errors_to_calculate[k]], avg_error)
    
    print "\nElapsed: %f secs" % (time.time() - start_time)