Пример #1
0
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    # Build the attention variant of the model, fit it on the EEG-2
    # dataset, then report test-set performance.
    model = generate_model_2()

    train_model(model, DATASET_INDEX, dataset_prefix='eeg2_attention',
                epochs=500, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='eeg2_attention',
                   batch_size=128)
Пример #2
0
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    # Train on the Arabic voice dataset, then score the fitted model.
    model = generate_model_2()

    train_model(model, DATASET_INDEX, dataset_prefix='arabic_voice_',
                epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='arabic_voice_',
                   batch_size=128)
Пример #3
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training is intentionally disabled here; the script only scores a
    # previously trained checkpoint.
    # train_model(model, DATASET_INDEX, dataset_prefix='kick_vs_punch_', epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='kick_vs_punch_',
                   batch_size=128)
Пример #4
0
    # add load model code here to fine-tune

    return model

def squeeze_excite_block(input):
    '''Apply a squeeze-and-excitation block to a 1D feature map.

    Args:
        input: input tensor; assumed channels-last, i.e.
            (batch, timesteps, channels) — TODO confirm against callers

    Returns:
        A Keras tensor: `input` rescaled channel-wise by learned
        attention weights in (0, 1).
    '''
    # Channels-last layout (TF backend), so the channel count is the
    # trailing dimension.  NOTE(review): `_keras_shape` is the legacy
    # Keras 2.x attribute — presumably a pre-tf.keras stack; verify.
    channel_count = input._keras_shape[-1]

    # Squeeze: collapse the temporal axis to one descriptor per channel.
    squeezed = GlobalAveragePooling1D()(input)
    squeezed = Reshape((1, channel_count))(squeezed)

    # Excite: bottleneck MLP (reduction ratio 16) that emits a sigmoid
    # gate per channel.
    gates = Dense(channel_count // 16, activation='relu',
                  kernel_initializer='he_normal', use_bias=False)(squeezed)
    gates = Dense(channel_count, activation='sigmoid',
                  kernel_initializer='he_normal', use_bias=False)(gates)

    # Scale: reweight the original features channel-by-channel.
    return multiply([input, gates])


if __name__ == "__main__":
    model = generate_model_2()

    # Training step left disabled — evaluation only.
    # train_model(model, DATASET_INDEX, dataset_prefix='walk_vs_run_', epochs=1000, batch_size=128)

    evaluate_model(model,
                   DATASET_INDEX,
                   dataset_prefix='walk_vs_run_',
                   batch_size=128)
Пример #5
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training step left disabled — evaluate a pre-trained model only.
    # train_model(model, DATASET_INDEX, dataset_prefix='action_3d', epochs=600, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='action_3d',
                   batch_size=128)
Пример #6
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training is disabled; this run only evaluates an existing model.
    # train_model(model, DATASET_INDEX, dataset_prefix='ht_sensor', epochs=600, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='ht_sensor',
                   batch_size=128)
Пример #7
0
    import json
    ''' Train portion '''
    scores = []

    for i in range(10):
        K.clear_session()

        print("Begin iteration %d" % (i + 1))
        print("*" * 80)
        print()

        model = generate_model()  # change to generate_model_2()
        #train_model(model, DATASET_INDEX, dataset_prefix='character_attention', dataset_fold_id=(i + 1), epochs=600, batch_size=128)
        score = evaluate_model(model,
                               DATASET_INDEX,
                               dataset_prefix='character_attention',
                               dataset_fold_id=(i + 1),
                               batch_size=128)
        scores.append(score)

    with open('data/character/scores.json', 'w') as f:
        json.dump({'scores': scores}, f)
    ''' evaluate average score '''
    with open('data/character/scores.json', 'r') as f:
        results = json.load(f)

    scores = results['scores']
    avg_score = sum(scores) / len(scores)
    print("Scores : ", scores)
    print("Average score over 10 epochs : ", avg_score)
Пример #8
0
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    # Baseline (no-attention) model: train on the daily-sport dataset,
    # then evaluate.
    model = generate_model()

    train_model(model, DATASET_INDEX,
                dataset_prefix='daily_sport_no_attention',
                epochs=500, batch_size=128)

    evaluate_model(model, DATASET_INDEX,
                   dataset_prefix='daily_sport_no_attention',
                   batch_size=128)
Пример #9
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training step left disabled — evaluation only.
    # train_model(model, DATASET_INDEX, dataset_prefix='net_flow_', epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='net_flow_',
                   batch_size=128)
Пример #10
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training is disabled here; score a previously trained checkpoint.
    # train_model(model, DATASET_INDEX, dataset_prefix='cmu_subject_16_', epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='cmu_subject_16_',
                   batch_size=128)
Пример #11
0
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    # Fit the attention model on the movement-AAL dataset and report
    # its test performance.
    model = generate_model_2()

    train_model(model, DATASET_INDEX, dataset_prefix='movement_aal',
                epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='movement_aal',
                   batch_size=128)
Пример #12
0
    import json
    ''' Train portion '''
    scores = []

    for i in range(10):
        K.clear_session()

        print("Begin iteration %d" % (i + 1))
        print("*" * 80)
        print()

        model = generate_model()  # change to generate_model_2()
        #train_model(model, DATASET_INDEX, dataset_prefix='ck', dataset_fold_id=(i + 1), epochs=600, batch_size=128)
        score = evaluate_model(model,
                               DATASET_INDEX,
                               dataset_prefix='ck',
                               dataset_fold_id=(i + 1),
                               batch_size=128)
        scores.append(score)

    with open('data/CK/scores.json', 'w') as f:
        json.dump({'scores': scores}, f)
    ''' evaluate average score '''
    with open('data/CK/scores.json', 'r') as f:
        results = json.load(f)

    scores = results['scores']
    avg_score = sum(scores) / len(scores)
    print("Scores : ", scores)
    print("Average score over 10 epochs : ", avg_score)
Пример #13
0
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    # Baseline model: train on the gesture-phase dataset, then evaluate.
    model = generate_model()

    train_model(model, DATASET_INDEX, dataset_prefix='gesture_phase',
                epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='gesture_phase',
                   batch_size=128)
Пример #14
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training step left disabled — evaluation only.
    # train_model(model, DATASET_INDEX, dataset_prefix='character_trajectories_', epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX,
                   dataset_prefix='character_trajectories_',
                   batch_size=128)
Пример #15
0
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    # Train the attention model on the ozone dataset, then evaluate it.
    model = generate_model_2()

    train_model(model, DATASET_INDEX, dataset_prefix='ozone',
                epochs=600, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='ozone',
                   batch_size=128)
Пример #16
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training is disabled; only evaluate an already-trained model.
    # train_model(model, DATASET_INDEX, dataset_prefix='shapes_random_', epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='shapes_random_',
                   batch_size=128)
Пример #17
0
    # add load model code here to fine-tune

    return model

def squeeze_excite_block(input):
    '''Apply a squeeze-and-excitation block to a 1D feature map.

    Args:
        input: input tensor; assumed channels-last, i.e.
            (batch, timesteps, channels) — TODO confirm against callers

    Returns:
        A Keras tensor: `input` rescaled channel-wise by learned
        attention weights in (0, 1).
    '''
    # Channels-last layout (TF backend), so the channel count is the
    # trailing dimension.  NOTE(review): `_keras_shape` is the legacy
    # Keras 2.x attribute — presumably a pre-tf.keras stack; verify.
    channel_count = input._keras_shape[-1]

    # Squeeze: collapse the temporal axis to one descriptor per channel.
    squeezed = GlobalAveragePooling1D()(input)
    squeezed = Reshape((1, channel_count))(squeezed)

    # Excite: bottleneck MLP (reduction ratio 16) that emits a sigmoid
    # gate per channel.
    gates = Dense(channel_count // 16, activation='relu',
                  kernel_initializer='he_normal', use_bias=False)(squeezed)
    gates = Dense(channel_count, activation='sigmoid',
                  kernel_initializer='he_normal', use_bias=False)(gates)

    # Scale: reweight the original features channel-by-channel.
    return multiply([input, gates])


if __name__ == "__main__":
    model = generate_model_2()

    # Training step left disabled — evaluation only.
    # train_model(model, DATASET_INDEX, dataset_prefix='u_wave_', epochs=1000, batch_size=128)

    evaluate_model(model,
                   DATASET_INDEX,
                   dataset_prefix='u_wave_',
                   batch_size=128)
Пример #18
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training is disabled; score a previously trained checkpoint.
    # train_model(model, DATASET_INDEX, dataset_prefix='occupancy_detect', epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='occupancy_detect',
                   batch_size=128)
Пример #19
0
    # add load model code here to fine-tune

    return model

def squeeze_excite_block(input):
    '''Apply a squeeze-and-excitation block to a 1D feature map.

    Args:
        input: input tensor; assumed channels-last, i.e.
            (batch, timesteps, channels) — TODO confirm against callers

    Returns:
        A Keras tensor: `input` rescaled channel-wise by learned
        attention weights in (0, 1).
    '''
    # Channels-last layout (TF backend), so the channel count is the
    # trailing dimension.  NOTE(review): `_keras_shape` is the legacy
    # Keras 2.x attribute — presumably a pre-tf.keras stack; verify.
    channel_count = input._keras_shape[-1]

    # Squeeze: collapse the temporal axis to one descriptor per channel.
    squeezed = GlobalAveragePooling1D()(input)
    squeezed = Reshape((1, channel_count))(squeezed)

    # Excite: bottleneck MLP (reduction ratio 16) that emits a sigmoid
    # gate per channel.
    gates = Dense(channel_count // 16, activation='relu',
                  kernel_initializer='he_normal', use_bias=False)(squeezed)
    gates = Dense(channel_count, activation='sigmoid',
                  kernel_initializer='he_normal', use_bias=False)(gates)

    # Scale: reweight the original features channel-by-channel.
    return multiply([input, gates])


if __name__ == "__main__":
    model = generate_model_2()

    # Training step left disabled — evaluation only.
    # train_model(model, DATASET_INDEX, dataset_prefix='japanese_vowels_', epochs=1000, batch_size=128)

    evaluate_model(model,
                   DATASET_INDEX,
                   dataset_prefix='japanese_vowels_',
                   batch_size=128)
Пример #20
0
        filters: number of output filters
        k: width factor

    Returns: a keras tensor
    '''
    filters = input._keras_shape[-1]  # channel_axis = -1 for TF

    se = GlobalAveragePooling1D()(input)
    se = Reshape((1, filters))(se)
    se = Dense(filters // 16,
               activation='relu',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = Dense(filters,
               activation='sigmoid',
               kernel_initializer='he_normal',
               use_bias=False)(se)
    se = multiply([input, se])
    return se


if __name__ == "__main__":
    model = generate_model_2()

    # Training is disabled; only evaluate an already-trained model.
    # train_model(model, DATASET_INDEX, dataset_prefix='pendigits_', epochs=1000, batch_size=128)

    evaluate_model(model, DATASET_INDEX, dataset_prefix='pendigits_',
                   batch_size=128)