Example No. 1
overall_sheet = book.add_sheet('overall')
overall_list_title = (['dataset', 'hidden units', '#week'] +
                      overall_performance_index)
overall_list_row = 0
for c, title in enumerate(overall_list_title):
    overall_sheet.write(0, c, str(title))
dataset_list_title = ['activities'] + per_class_performance_index
# Go through all bosch datasets
datasets = ['b1']
for datafile in datasets:
    feature_filename = 'feature_' + datafile + '.pkl'
    # Looking for processed feature data
    if os.path.exists(feature_filename):
        # Pickled features are binary: open in 'rb' mode
        feature_file = open(feature_filename, mode='rb')
        feature_dict = pickle.load(feature_file)
        feature = AlFeature()
        feature.load_from_dict(feature_dict)
    else:
        feature = load_casas_from_file(datafile,
                                       normalize=True,
                                       per_sensor=True,
                                       ignore_other=False)
        feature_file = open(feature_filename, mode='wb')
        pickle.dump(feature.export_to_dict(), feature_file, protocol=-1)
    feature_file.close()
    num_samples = feature.x.shape[0]
    train_index = []
    test_index = []
    x_tensor = theano.shared(np.asarray(feature.x,
                                        dtype=theano.config.floatX),
                             borrow=True)
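
All three train/test examples on this page repeat the same caching idiom: look for a pickled feature file, load it if present, otherwise build the features and pickle them. Below is a minimal sketch of that idiom factored into a reusable helper; the name load_or_build_feature and the build_fn callback are hypothetical, not part of the actlearn API:

import os
import pickle

from actlearn.data.AlFeature import AlFeature


def load_or_build_feature(datafile, build_fn):
    # Reuse cached features when available; otherwise build and cache them.
    feature_filename = 'feature_' + datafile + '.pkl'
    if os.path.exists(feature_filename):
        with open(feature_filename, mode='rb') as feature_file:
            feature = AlFeature()
            feature.load_from_dict(pickle.load(feature_file))
    else:
        feature = build_fn(datafile)
        with open(feature_filename, mode='wb') as feature_file:
            pickle.dump(feature.export_to_dict(), feature_file, protocol=-1)
    return feature

Using with blocks guarantees the file handle is closed even if loading fails, which the open/close pattern in the snippets does not.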
Example No. 2
def test_model(model, x, y, num_classes):
    # Hypothetical signature: the original def line is cut off in this snippet.
    assert type(model) == StackedDenoisingAutoencoder
    x_tensor = theano.shared(np.asarray(x, dtype=theano.config.floatX),
                             borrow=True)
    result = model.classify(x_tensor)
    predicted_y = result[0]
    confusion_matrix = get_confusion_matrix(num_classes=num_classes,
                                            label=y,
                                            predicted=predicted_y)
    return confusion_matrix


if __name__ == '__main__':
    # Set current directory to local directory
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    # Go through all bosch datasets
    datasets = ['b1']
    for datafile in datasets:
        feature_filename = 'feature_' + datafile + '.pkl'
        # Looking for processed feature data
        if os.path.exists(feature_filename):
            feature_file = open(feature_filename, mode='rb')  # binary mode for pickle
            feature_dict = pickle.load(feature_file)
            feature = AlFeature()
            feature.load_from_dict(feature_dict)
        else:
            feature = load_casas_from_file(datafile, datafile + '.translate')
            feature_file = open(feature_filename, mode='wb')  # binary mode for pickle
            pickle.dump(feature.export_to_dict(), feature_file, protocol=-1)
        feature_file.close()
        run_test(feature)
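
Once the confusion matrix is back, an overall accuracy can be read off its diagonal. A minimal sketch, assuming get_confusion_matrix returns an array-like indexed as [true label, predicted label] (the layout is an assumption; it is not shown in the snippet):

import numpy as np


def overall_accuracy(confusion_matrix):
    # Correct predictions sit on the diagonal when rows are true labels
    # and columns are predicted labels.
    cm = np.asarray(confusion_matrix, dtype=float)
    return np.trace(cm) / cm.sum()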
Example No. 3
import os
import pickle

from actlearn.data.AlFeature import AlFeature
from actlearn.utils.event_bar_plot import event_bar_plot
from actlearn.utils.AlResult import AlResult
# load_casas_from_file and get_boundary are project helpers used below;
# their import lines are not shown in this snippet.

if __name__ == '__main__':
    # Set current directory to local directory
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    # Go through all bosch datasets
    datasets = ['b1', 'b2', 'b3']
    for datafile in datasets:
        feature_filename = 'feature_' + datafile + '.pkl'
        # Looking for processed feature data
        if os.path.exists(feature_filename):
            feature_file = open(feature_filename, mode='rb')  # binary mode for pickle
            feature_dict = pickle.load(feature_file)
            feature = AlFeature()
            feature.load_from_dict(feature_dict)
        else:
            feature = load_casas_from_file(datafile,
                                           normalize=False,
                                           per_sensor=False)
            feature_file = open(feature_filename, mode='wb')  # binary mode for pickle
            pickle.dump(feature.export_to_dict(), feature_file, protocol=-1)
        feature_file.close()
        num_samples = feature.x.shape[0]
        train_index = []
        test_index = []
        week_array = get_boundary(feature, period='week')
        learning_result_fname = 'dt_learning_' + datafile + '.pkl'
        learning_result = AlResult(result_name='%s decision tree' % datafile,
                                   data_fname=datafile,
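
The snippet cuts off inside the AlResult call, but the surrounding variables suggest a week-by-week evaluation, with get_boundary returning event indices at week boundaries. A minimal sketch of growing train/test indices from such boundaries (the return format of get_boundary is an assumption, not shown here):

# Hypothetical: week_array[w] is assumed to be the index of the first
# event in week w. Train on all earlier events, test on week w itself.
for w in range(1, len(week_array) - 1):
    train_index = list(range(0, week_array[w]))
    test_index = list(range(week_array[w], week_array[w + 1]))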
Example No. 4
def load_casas_from_file(data_filename, translation_filename=None,
                         dataset_dir='../datasets/bosch/',
                         normalize=True, per_sensor=True, ignore_other=False):
    """
    Load CASAS Data From File
    :param data_filename:
    :param translation_filename:
    :param dataset_dir:
    :param normalize:
    :param per_sensor:
    :param ignore_other:
    :return:
    """
    # Initialize AlData Structure
    data = AlData()
    # Load Translation File
    if translation_filename is not None:
        data.load_sensor_translation_from_file(dataset_dir + translation_filename)
    # Load Data File
    data.load_data_from_file(dataset_dir + data_filename)
    # Some basic statistical calculations
    data.calculate_window_size()
    data.calculate_mostly_likely_activity_per_sensor()
    # Print out data summary
    data.print_data_summary()
    # Configure Features
    feature = AlFeature()
    # Pass Activity and Sensor Info to AlFeature
    feature.populate_activity_list(data.activity_info)
    feature.populate_sensor_list(data.sensor_info)
    # feature.DisableActivity('Other_Activity')
    # Use a window of one event and register the features
    feature.featureWindowNum = 1
    feature.add_feature(AlFeatureSensorCount(normalize=normalize))
    feature.add_feature(AlFeatureWindowDuration(normalize=normalize))
    feature.add_feature(AlFeatureEventHour(normalize=normalize))
    feature.add_feature(AlFeatureEventSensor(per_sensor=per_sensor))
    feature.add_feature(AlFeatureLastDominantSensor(per_sensor=per_sensor))
    feature.add_feature(AlFeatureEventSecond(normalize=normalize))
    feature.add_feature(AlFeatureSensorElapseTime(normalize=normalize))
    # Select whether disable other activity or not
    if ignore_other:
        feature.disable_activity('Other_Activity')
    # Print Feature Summary
    feature.print_feature_summary()
    # Calculate Features
    feature.populate_feature_array(data.data)
    # Return features data
    return feature


if __name__ == '__main__':
    # Set current directory to local directory
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    # Loading Log configuration
    logging.config.fileConfig('../log/log.cfg')
    # Dataset directory (matches the default used by load_casas_from_file)
    dataset_dir = '../datasets/bosch/'
    # Initialize AlData Structure
    data = AlData()
    # Load Translation File
    data.load_sensor_translation_from_file(dataset_dir + 'b1.translate')
    # Load Data File
    data.load_data_from_file(dataset_dir + 'b1')
    # Some basic statistical calculations
    data.calculate_window_size()
    data.calculate_mostly_likely_activity_per_sensor()
    # Print out data summary
    data.print_data_summary()
    # Configure Features
    feature = AlFeature()
    # Pass Activity and Sensor Info to AlFeature
    feature.populate_activity_list(data.activity_info)
    feature.populate_sensor_list(data.sensor_info)
    # feature.DisableActivity('Other_Activity')
    # Use a window of one event and register the features
    feature.featureWindowNum = 1
    feature.add_feature(AlFeatureSensorCount(normalize=False))
    feature.add_feature(AlFeatureWindowDuration(normalize=False))
    feature.add_feature(AlFeatureEventHour(normalize=False))
    feature.add_feature(AlFeatureEventSensor(per_sensor=False))
    feature.add_feature(AlFeatureLastDominantSensor(per_sensor=False))
    feature.add_feature(AlFeatureEventSecond(normalize=False))
    feature.add_feature(AlFeatureSensorElapseTime(normalize=False))
    # Print Feature Summary
    feature.print_feature_summary()
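
Putting the pieces together, a minimal usage sketch of load_casas_from_file as it is called in the examples above (assumes the b1 data and translation files exist under ../datasets/bosch/):

feature = load_casas_from_file('b1', 'b1.translate',
                               normalize=True,
                               per_sensor=True,
                               ignore_other=False)
print(feature.x.shape[0])  # number of sensor events, as used in the examples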