# ------------------------------------------------------------------
# Model construction: CNN feature extractor + first MLP block.
#
# NOTE(review): the original file contained two concatenated variants of
# this section (an older one, and a DataParallel-aware one that started
# mid-dict) — almost certainly a bad merge that left the file
# syntactically invalid.  This is the reconstructed, coherent version,
# keeping the newer variant's behavior:
#   * CNN_net.out_dim is captured BEFORE wrapping in nn.DataParallel,
#     because the wrapper proxies the module and would hide the
#     attribute (it would need CNN_net.module.out_dim afterwards);
#   * np.load(..., allow_pickle=True), required since NumPy 1.16.3 to
#     load a pickled object array.
#
# Relies on names defined earlier in this file: wlen, fs, the cnn_* and
# fc_* hyperparameters, IS_DATA_PARALLEL, DEVICE_IDS, device,
# class_dict_file, and the CNN / MLP model classes — confirm upstream.
# ------------------------------------------------------------------

# Feature-extractor CNN configuration (hyperparameters parsed upstream).
CNN_arch = {
    'input_dim': wlen,
    'fs': fs,
    'cnn_N_filt': cnn_N_filt,
    'cnn_len_filt': cnn_len_filt,
    'cnn_max_pool_len': cnn_max_pool_len,
    'cnn_use_laynorm_inp': cnn_use_laynorm_inp,
    'cnn_use_batchnorm_inp': cnn_use_batchnorm_inp,
    'cnn_use_laynorm': cnn_use_laynorm,
    'cnn_use_batchnorm': cnn_use_batchnorm,
    'cnn_act': cnn_act,
    'cnn_drop': cnn_drop,
}

CNN_net = CNN(CNN_arch)

# Capture the flattened output size before any DataParallel wrapping —
# after wrapping, plain attribute access on the wrapper would fail.
CNN_net_out_dim = CNN_net.out_dim

if IS_DATA_PARALLEL:
    CNN_net = nn.DataParallel(CNN_net, device_ids=DEVICE_IDS)
CNN_net.cuda(device)

# Label dictionary (class lookup table stored as a pickled 0-d object
# array). allow_pickle=True is mandatory for this on NumPy >= 1.16.3.
lab_dict = np.load(class_dict_file, allow_pickle=True).item()

# First fully-connected block, fed by the CNN's flattened features.
DNN1_arch = {
    'input_dim': CNN_net_out_dim,
    'fc_lay': fc_lay,
    'fc_drop': fc_drop,
    'fc_use_batchnorm': fc_use_batchnorm,
    'fc_use_laynorm': fc_use_laynorm,
    'fc_use_laynorm_inp': fc_use_laynorm_inp,
    'fc_use_batchnorm_inp': fc_use_batchnorm_inp,
    'fc_act': fc_act,
}

DNN1_net = MLP(DNN1_arch)