Example #1
0
 def DATA(train_with, test_with, data_path, mode):
     """Load the TRAIN, VALID and TEST splits under the requested noise regimes.

     Args:
         train_with: 'clean' or 'noisy' — noise regime for the training split.
         test_with: 'clean' or 'noisy' — noise regime for the valid/test splits.
         data_path: dataset root handed through to the loader functions.
         mode: loader-specific selector (semantics defined by the loaders).

     Returns:
         (trX, trY, vaX, vaY, teX, teY).
         Per the original note: X arrays are [sample, 120(f), 99?(t)], Y arrays
         are [sample,] — TODO confirm against the loaders.

     Raises:
         ValueError: if train_with or test_with is neither 'clean' nor 'noisy'.
     """
     def _clean(split):
         # Clean data is always fetched explicitly by noise name.
         return load_specific_noisy_data(data_path,
                                         is_training=split,
                                         mode=mode,
                                         noise_name='clean')

     if train_with == 'clean':
         trX, trY = _clean('TRAIN')
     elif train_with == 'noisy':
         trX, trY = load_random_noisy_data(data_path, 'TRAIN', mode)
     else:
         raise ValueError('should set "--train_with" value')

     if test_with == 'clean':
         vaX, vaY = _clean('VALID')
         teX, teY = _clean('TEST')
     elif test_with == 'noisy':
         vaX, vaY = load_random_noisy_data(data_path, 'VALID', mode)
         teX, teY = load_random_noisy_data(data_path, 'TEST', mode)
     else:
         raise ValueError('should set "--test_with" value')

     return trX, trY, vaX, vaY, teX, teY
Example #2
0
 def DATA(is_training, train_with, test_with, data_path, mode):
     """Load data for a training run ('TRAIN') or an evaluation run ('TEST').

     Args:
         is_training: 'TRAIN' (returns train+valid) or 'TEST' (returns test).
         train_with: 'clean' for clean-only training data, or 'noisy' for the
             concatenation of clean + white_noise + pink_noise sets.
         test_with: 'clean' or 'noisy' — noise regime for the validation split.
         data_path: dataset root handed through to the loader functions.
         mode: loader-specific selector (semantics defined by the loaders).

     Returns:
         'TRAIN' -> (trX, trY, vaX, vaY); 'TEST' -> (teX, teY).
         Per the original note: X arrays are [sample, 120(f), 99?(t)], Y
         arrays [sample,] — TODO confirm against the loaders.

     Raises:
         ValueError: on any unrecognized is_training / train_with / test_with
             value.
     """
     if is_training == 'TRAIN':
         if train_with == 'clean':
             trX, trY = load_specific_noisy_data(data_path,
                                                 is_training='TRAIN',
                                                 mode=mode,
                                                 noise_name='clean')
         elif train_with == 'noisy':
             # 'noisy' training data = clean + white + pink sets concatenated
             # along the sample axis; shapes printed per part as before.
             parts_X, parts_Y = [], []
             for noise in ('clean', 'white_noise', 'pink_noise'):
                 X, Y = load_specific_noisy_data(data_path,
                                                 is_training='TRAIN',
                                                 mode=mode,
                                                 noise_name=noise)
                 print(str(X.shape), str(Y.shape))
                 parts_X.append(X)
                 parts_Y.append(Y)
             trX = np.concatenate(parts_X, axis=0)
             trY = np.concatenate(parts_Y, axis=0)
         else:
             raise ValueError('should set "--train_with" value')
         if test_with == 'clean':
             vaX, vaY = load_specific_noisy_data(data_path,
                                                 is_training='VALID',
                                                 mode=mode,
                                                 noise_name='clean')
         elif test_with == 'noisy':
             vaX, vaY = load_random_noisy_data(data_path, 'VALID', mode)
         else:
             raise ValueError('should set "--test_with" value')
         # Keep only index 1 of the last axis and restore a singleton
         # channel dimension (assumes rank-4 input — TODO confirm loaders).
         trX = np.expand_dims(trX[:, :, :, 1], axis=3)
         vaX = np.expand_dims(vaX[:, :, :, 1], axis=3)
         print(str(trX.shape), str(trY.shape), str(vaX.shape),
               str(vaY.shape))
         return (trX, trY, vaX, vaY)
     elif is_training == 'TEST':
         teX, teY = load_specific_noisy_data(data_path,
                                             is_training='TEST',
                                             mode=mode,
                                             noise_name='clean')
         teX = np.expand_dims(teX[:, :, :, 1], axis=3)
         print(str(teX.shape), str(teY.shape))
         return (teX, teY)  #shape [sample, 120(f), 99?(t)], [sample,]
     else:
         # Fix: the original fell off the end and silently returned None for
         # an invalid is_training; fail loudly, matching the message used
         # elsewhere in this codebase.
         raise ValueError('Wrong is_training value')
Example #3
0
            # Guard branch (condition is above this fragment — presumably a
            # missing-weights check; confirm against the full file).
            raise ValueError('No weights are provided.')
        else:
            # Append-mode CSV log: one header per invocation, then one result
            # row per test condition.
            test_result = save_path + '/test_result.csv'
            fd_test_result = open(test_result,'a')
            fd_test_result.write('test on epoch '+str(args.keep)+'SNR'+str(args.SNR)+' dimension:'+str(args.dimension)+'\n')
            fd_test_result.write('test_mode,label30_acc,label21_acc\n')
            # clean test
            print('*'*30 + 'clean exp' + '*'*30)    
            label30_acc, label21_acc = test(multi_model, data=data,args=args)
            fd_test_result.write('clean,'+str(label30_acc)+','+str(label21_acc)+'\n')
            fd_test_result.flush()
            # Six noisy conditions; data source depends on args.test_by.
            # NOTE(review): the banner prints 'Noisy i+2' but the CSV row is
            # labeled 'noisy i' — the two indices disagree; confirm intended.
            for i in range(6):
                print('*'*30 + 'Noisy '+ str(i+2) +' exp' + '*'*30)    
                if args.test_by=='noise':teX, teY = load_specific_noisy_data(args.data_path, 'TEST', args.mode, args.feature_len, noise_list[i]);cprint(noise_list[i],'red')
                elif args.test_by=='echo':teX, teY = load_specific_noisy_data(args.data_path, 'TEST', args.mode, args.feature_len, 'echo');cprint('echo','red')
                else: teX, teY = load_random_noisy_data(args.data_path,'TEST',args.mode, args.feature_len, SNR=args.SNR)
                # Dimension() replaced the older channel-slice (kept commented).
                teX = Dimension(teX,args.dimension)#teX = np.expand_dims(teX[:,:,:,1],axis=3)
                data = (teX, teY)
                label30_acc, label21_acc = test(multi_model, data=data,args=args)
                fd_test_result.write('noisy'+str(i)+','+str(label30_acc)+','+str(label21_acc)+'\n')
                # Flush after each row so partial results survive a crash.
                fd_test_result.flush()
            fd_test_result.close()
    else:
        raise ValueError('Wrong is_training value')#'could not find %c in %s' % (ch,str)) 
# Code end
# For not decreasing issue: https://github.com/XifengGuo/CapsNet-Keras/issues/48




Example #4
0
        else:
            # Append-mode CSV log: one header per invocation, then one result
            # row per test condition.
            test_result = save_path + '/test_result.csv'
            fd_test_result = open(test_result, 'a')
            fd_test_result.write('test on epoch ' + str(args.keep) + 'SNR' +
                                 str(args.SNR) + '\n')
            fd_test_result.write('test_mode,label30_acc,label21_acc\n')
            # clean test
            print('*' * 30 + 'clean exp' + '*' * 30)
            label30_acc, label21_acc = test(multi_model, data=data, args=args)
            fd_test_result.write('clean,' + str(label30_acc) + ',' +
                                 str(label21_acc) + '\n')
            fd_test_result.flush()
            # Five noisy conditions, all drawn with the same args.SNR.
            # NOTE(review): banner prints 'Noisy i+2' but the CSV row says
            # 'noisy i' — the two indices disagree; confirm intended.
            for i in range(5):
                print('*' * 30 + 'Noisy ' + str(i + 2) + ' exp' + '*' * 30)
                teX, teY = load_random_noisy_data(args.data_path,
                                                  'TEST',
                                                  args.mode,
                                                  SNR=args.SNR)
                # Keep channel 1 of the last axis, restoring a singleton
                # channel dim (assumes rank-4 teX — TODO confirm loader).
                teX = np.expand_dims(teX[:, :, :, 1], axis=3)
                data = (teX, teY)
                label30_acc, label21_acc = test(multi_model,
                                                data=data,
                                                args=args)
                fd_test_result.write('noisy' + str(i) + ',' +
                                     str(label30_acc) + ',' +
                                     str(label21_acc) + '\n')
                # Flush per row so partial results survive a crash.
                fd_test_result.flush()
            fd_test_result.close()
    else:
        raise ValueError(
            'Wrong is_training value')  #'could not find %c in %s' % (ch,str))
# Code end
Example #5
0
                            # Reduce/reshape features per the configured
                            # dimension setting (du.Dimension semantics are
                            # defined elsewhere — confirm in data utils).
                            teX = du.Dimension(teX, args.dimension)
                            acc = test(multi_model,
                                       data=(teX, teY),
                                       args=args,
                                       matrix_name='/ConfusionMatrix_' +
                                       args.ex_name + '_noisy' + str(snr),
                                       label_names=label_names)
                            # csv write
                            fd_test_result.write('noisy' + str(snr) + ',' +
                                                 str(acc) + '\n')

                            # Second pass: clean data mixed with noise at the
                            # same SNR, logged under its own confusion matrix.
                            cprint('Test with Clean + SNR' + str(snr), 'red')
                            teX, teY = du.load_random_noisy_data(
                                args.data_path,
                                'TEST',
                                args.mode,
                                args.feature_len,
                                SNR=snr,
                                open_set=args.open_set,
                                open_labels=open_labels)
                            #teX = np.expand_dims(teX[:,:,:,1],axis=3)
                            teX = du.Dimension(teX, args.dimension)
                            acc = test(multi_model,
                                       data=(teX, teY),
                                       args=args,
                                       matrix_name='/ConfusionMatrix_' +
                                       args.ex_name + '_mixed' + str(snr),
                                       label_names=label_names)
                            # csv write
                            fd_test_result.write('mixed' + str(snr) + ',' +
                                                 str(acc) + '\n')
                        # NOTE(review): bare 'except:' (body continues past
                        # this fragment) swallows all exceptions including
                        # KeyboardInterrupt — narrow it if possible.
                        except: