def eval_models(proceed_step):
    """Instantiate the configured network and put it into evaluation mode.

    Resolves evaluation parameters for *proceed_step*, runs a series of
    pre-flight suitability checks (returning early when any fails), sets up
    a split-specific log file, then builds the selected model and calls its
    ``eval()`` method.
    """
    params = init_save_dirs(get_recursive_params(proceed_step))

    # Guard clauses: bail out as soon as the configuration is unusable.
    if not is_initial_params_suitable(params):
        return
    if params.fusion_levels and not is_suitable_level_fusion(params):
        return
    if params.load_features and not is_cnn_rnn_features_available(params, cnn=0):
        return
    if params.data_type != DataTypes.RGBD and not is_cnn_rnn_features_available(params, cnn=1):
        return

    log_path = (params.log_dir + proceed_step + '/' + get_timestamp() + '_'
                + str(params.trial) + '-' + params.net_model + '_'
                + params.data_type + '_split_' + str(params.split_no) + '.log')
    init_logger(log_path, params)

    # Dispatch table replacing the if/elif chain; ResNet-50/101 share one class.
    model_classes = {
        Models.AlexNet: AlexNet,
        Models.VGGNet16: VGG16Net,
        Models.ResNet50: ResNet,
        Models.ResNet101: ResNet,
        Models.DenseNet121: DenseNet,
    }
    model_cls = model_classes.get(params.net_model)
    if model_cls is None:
        print('{}{}Unsupported model selection! Please check your model choice in arguments!{}'
              .format(PrForm.BOLD, PrForm.RED, PrForm.END_FORMAT))
        return
    model = model_cls(params)
    model.eval()
def extract_fixed_features():
    """Run fixed CNN feature extraction with the configured parameters.

    Resolves extraction parameters, returns early when the initial parameter
    check fails, initializes a timestamped log file, then delegates to
    ``fixed_extraction``.
    """
    params = init_save_dirs(get_extraction_params())
    if not is_initial_params_suitable(params):
        return

    log_path = (params.log_dir + params.proceed_step + '/' + get_timestamp() + '_'
                + params.net_model + '_' + params.data_type + '_cnn_extraction.log')
    init_logger(log_path, params)
    fixed_extraction(params)
def finetune_model():
    """Fine-tune the configured network on the selected data split.

    Resolves fine-tuning parameters, returns early when the initial parameter
    check fails, initializes a trial/split-specific log file, then delegates
    to ``process_finetuning``.
    """
    params = init_save_dirs(get_finetune_params())
    if not is_initial_params_suitable(params):
        return

    log_path = (params.log_dir + params.proceed_step + '/' + get_timestamp() + '_'
                + str(params.trial) + '-' + params.net_model + '_'
                + params.data_type + '_split_' + str(params.split_no) + '.log')
    init_logger(log_path, params)
    process_finetuning(params)