Example #1
    def setup(self):
        '''
        Find emergencies and do some preprocessing.
        '''
        trace_reader = TraceReader(self.trace_path)
        trace_gen = trace_reader.parse()

        # Fills the observation list and finds the trace_start and trace_end
        data = trace_gen.next()
        self.trace_start = data['timestamp'] + self.trace_start_offset  # actual trace start
        self.observations.append(self.data_facade(data, self.mote_id))
        for data in trace_gen:
            self.observations.append(self.data_facade(data, self.mote_id))
            self.trace_end = data['timestamp']

        # Fills the hidden_vars list
        if self.spirit_mode:
            with open(self.hidden_vars_path) as file_obj:
                self.mote_ids = file_obj.readline()[1:].split()
                self.hidden_vars = [data[1] for data in file_iter(file_obj)]
            with open(self.weights_path) as file_obj:
                self.weights = [data[1:] for data in file_iter(file_obj)]

        # Finds all emergencies
        self.emergencies = list(self.confirmation_trigger(
            iter(self.observations),
            offset=self.trace_start - self.trace_start_offset))
        self.emergencies = trim_emergencies(self.trace_start, self.emergencies,
                                            self.trace_end)
        self.emergencies = join_emergencies(self.emergencies, self.delta)

        assert len(self.emergencies) > 0
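
Example #1 also calls a file_iter helper that is not part of the snippet. A minimal sketch, under the assumption that each line of the hidden-variable and weight files holds whitespace-separated numeric columns, might look like this (the project's real parser may differ):

# Hypothetical sketch of the file_iter helper used above; assumes
# whitespace-separated numeric columns on every line.
def file_iter(file_obj):
    for line in file_obj:
        fields = line.split()
        if fields:
            yield [float(field) for field in fields]
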
Example #2
from collections import OrderedDict

# TraceReader, TraceWriter and Sensor come from the surrounding project.

def main(args):
    if len(args) < 2:
        print "Usage: python %s {filename} [-compress {factor}]"
        exit()

    filename = args[1]
    # -compress {factor} is optional; without it every sample is written as-is.
    compression_factor = int(args[3]) if len(args) > 3 else 1
    tr = TraceReader(filename)
    tw = TraceWriter('output.arff', tr.arff_attributes)

    sensors = None
    data_keys = None
    data_keys_len = 0
    out_data = OrderedDict()
    count = 1

    for data in tr.parse():
        # First sample: create one Sensor per data column (everything except
        # 'timestamp') and zero the accumulators in out_data.
        if not sensors:
            sensors = [Sensor() for _i in xrange(len(data) - 1)]
            data_keys = data.keys()
            data_keys_len = len(data_keys)
            for key in data_keys:
                out_data[key] = 0.0

        # Accumulate emulated readings for every non-timestamp column.
        for i in xrange(1, data_keys_len):
            key = data_keys[i]
            out_data[key] += sensors[i - 1].emulate(data[key])

        # Every compression_factor samples, emit one averaged row whose
        # timestamp is placed roughly in the middle of the window
        # (assuming unit-spaced samples).
        if count % compression_factor == 0:
            out_data['timestamp'] = data['timestamp'] - compression_factor / 2.

            for i in xrange(1, data_keys_len):
                out_data[data_keys[i]] /= compression_factor

            tw.write(out_data)

            for key in data_keys:
                out_data[key] = 0.0

        count += 1

    tw.close()
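
Example #2 is meant to run as a standalone script. A minimal entry point, assuming the snippet is saved as compress_trace.py (a hypothetical name) next to the project's TraceReader, TraceWriter and Sensor classes, could look like this:

# Hypothetical driver; the module name and file layout are assumptions.
if __name__ == '__main__':
    import sys
    # e.g. python compress_trace.py sensor_trace.txt -compress 10
    main(sys.argv)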