# NOTE(review): this span looks like the dedent-mangled interior of a larger
# function -- the `if` sits at column 0 while its body is at column 8 and the
# matching `else:` at column 4, so this fragment does not parse as-is.
# Choose backbone + pretrained-weight paths: lightweight mini-Xception
# (64x64 input) vs. VGG-Face (224x224 input).
if is_light_net:
        MODEL = 'mini_xception'
        EGA_path = 'train_weights/demo_weights/MTL/VGG/EGA/11_0.59-0.96-0.78.hdf5'
        EPGA_path = 'train_weights/CONFUSION/ferplus-adience-aflw/EPGA-minixception/net-augmentation_false_dropout_false_bn_false_0.005-pesudo-naive_true_distilled_false_pesudo_false_interpolation_false_0.9_0.5-lr-5_1_1_freezing_false_32_.01-0.4715-0.6918-0.6539-0.3392.hdf5'
        target_size = (64, 64)
        # NOTE(review): EPA_path is never assigned in this branch, so the
        # final `else` below would raise NameError when is_light_net is true.
    else:
        MODEL = 'vggFace'
        # NOTE(review): absolute /home/... path will only resolve on the
        # original author's machine -- verify before use.
        EGA_path = '/home/yanhong/Downloads/next_step/HY_MTL/train_weights/CONFUSION/ferplus-adience/EmotionAgenderNetVGGFace_vgg16/10_0.51-0.89-0.7.hdf5'
        EPA_path = 'train_weights/CONFUSION/expw-aflw-adience/EPA_VGGFace_vgg16/2_.01-0.63-0.78-0.83.hdf5'
        EPGA_path = 'train_weights/CONFUSION/fer2013-adience-aflw/EPGA-VGGFace_vgg16/net-augmentation_false_dropout_false_bn_false_0.005-pesudo-naive_true_distilled_false_pesudo_false_interpolation_false_0.9_0.5-lr-5_1_1_freezing_false_32_.01-0.6412-0.7285-0.9398-0.7813.hdf5'
        target_size = (224, 224)
        # EPGA_path='train_weights/CONFUSION/ferplus-adience-aflw/EPGA-VGGFace_vgg16/net-augmentation_false_dropout_false_bn_false_0.005-pesudo-naive_true_distilled_false_pesudo_false_interpolation_false_0.9_0.5-lr-5_1_1_freezing_false_32_.01-0.8091-0.7566-0.9489-0.7716.hdf5'

    # Build exactly one multi-task network and load its weights, selected by
    # the is_EGA / is_EPGA flags (names suggest E=emotion, P=pose, G=gender,
    # A=age -- TODO confirm against Net's definition).
    if is_EGA:
        EGA_multi_model = Net(MODEL, 1, 4, is_dropout, 8, 5, 8, 2)
        EGA_multi_model.load_weights(EGA_path)
    elif is_EPGA:
        # NOTE(review): argument shape differs from the other two branches
        # (no is_dropout, trailing False, False, None) -- verify this matches
        # Net's signature rather than a copy/paste slip.
        EPGA_multi_model = Net(MODEL, 1, 12, 7, 5, 8, 2, False, False, None)
        EPGA_multi_model.load_weights(EPGA_path)

    else:
        EPA_multi_model = Net(MODEL, 1, 9, is_dropout, 7, 5, 8, 2)
        EPA_multi_model.load_weights(EPA_path)
# NOTE(review): EPGA_multi_model is bound only when the is_EPGA branch above
# executed; under is_EGA or the EPA fallback this line raises NameError.
EPGA_multi_model.summary()
# Per-attribute histories of recent predictions -- presumably used to smooth
# the displayed emotion / gender / pose / age labels across frames (confirm
# against the frame loop that appends to these).
emotion_window, gender_window, pose_window, age_window = [], [], [], []

# loading images
# --- extraction artifact: "Beispiel #2" separator (score: 0) from the code
# aggregator this file was scraped from; a second, unrelated example fragment
# begins below, with its opening `if is_light_net:` header lost ---
        # NOTE(review): this fragment begins mid-branch -- the opening
        # `if is_light_net:` (or similar) header for the `else:` below is
        # missing above this point, so the span does not parse as-is.
        # Single-task mini-Xception weight paths (emotion / pose / age /
        # gender), each trained on its own dataset.
        # gender_path='train_weights/GenderNetminixception/imdb/freezing_true-drouout_false_1__05-0.95.hdf5'
        emotion_path = 'train_weights/EmotionNetminixception/fer2013/fer2013mini_XCEPTION.95-0.66.hdf5'
        pose_path = 'train_weights/PoseNetminixception/aflw/freezing_true-drouout_false_2-202-0.72.hdf5'
        age_path='train_weights/AgeNetEmotionNetminixception/adience/freezing_true-drouout_false_1__11-0.49.hdf5'
        gender_path = 'train_weights/GenderNetminixception/imdb/freezing_true-drouout_false_1__05-0.95.hdf5'
    else:
        MODEL='vggFace'
        # NOTE(review): '1' values look like unfinished placeholders -- the
        # load_weights calls below would fail on them in this branch.
        emotion_path='1'
        gender_age_path='1'
        age_path='1'
        pose_path='1'
    # One single-task Net per attribute; the third argument (0/10/5/1)
    # presumably selects the task head -- TODO confirm against Net.
    emotion_model=Net(MODEL,1,0,7,5,8,2,False,False,None)
    gender_model=Net(MODEL,1,10,7,5,8,2,False,False,None)
    pose_model=Net(MODEL,1,5,7,5,8,2,False,False,None)
    age_model=Net(MODEL,1,1,7,5,8,2,False,False,None)
    emotion_model.load_weights(emotion_path)
    gender_model.load_weights(gender_path)
    age_model.load_weights(age_path)
    pose_model.load_weights(pose_path)
    # NOTE(review): (64,64) is unconditional here, yet the vggFace branch of
    # the other fragment uses (224,224) -- verify this is intended.
    target_size = (64,64)

# NOTE(review): the `if` partner of this `else:` is not visible in this
# fragment, and the fragment ends mid-branch -- incomplete as captured.
# Multi-task (EGA / EPA) weight paths, again keyed on is_light_net.
else:
    if is_light_net:
        MODEL='mini_xception'
        EGA_path='train_weights/demo_weights/MTL/VGG/EGA/11_0.59-0.96-0.78.hdf5'
        # NOTE(review): empty EPA_path -- any later load_weights(EPA_path)
        # in this branch would fail.
        EPA_path='' 
    else:
        MODEL='vggFace'
        # EGA_path='/home/yanhong/Downloads/next_step/HY_MTL/train_weights/CONFUSION/ferplus-adience/EmotionAgenderNetVGGFace_vgg16/10_0.51-0.89-0.7.hdf5'
        EGA_path='train_weights/demo_weights/MTL/VGG/EGA/naive_false-distilled_true-pesudo_false-threshold_0.0.01-0.8882-0.9651-0.8109.hdf5'
        EPA_path='train_weights/CONFUSION/expw-aflw-adience/EPA_VGGFace_vgg16/based_STL_true-freezing_true-pesudo_true-pesudo_selection_true-threshold_0.90_.01-0.61-0.80-0.76.hdf5'