"Dataset not registered. Please create a method to read it") db = Dataset(path, dataset, decode=False) print("Dumping " + dataset + " data set to file...") cPickle.dump(db, open(dataset + '_db.p', 'wb')) else: print("Loading data from " + dataset + " data set...") db = cPickle.load(open(dataset + '_db.p', 'rb')) nb_samples = len(db.targets) print("Number of samples: " + str(nb_samples)) if feature_extract: f_global = functions.feature_extract(db.data, nb_samples=nb_samples, dataset=dataset) else: print("Loading features from file...") f_global = cPickle.load(open(dataset + '_features.p', 'rb')) y = np.array(db.targets) y = to_categorical(y, num_classes=globalvars.nb_classes) if speaker_independence: k_folds = len(db.test_sets) splits = zip(db.train_sets, db.test_sets) print("Using speaker independence %s-fold cross validation" % k_folds) else: k_folds = 10 sss = StratifiedShuffleSplit(n_splits=k_folds,
globalvars.dataset = dataset
globalvars.nb_classes = nb_classes

if load_data:
    ds = Dataset(path=path, dataset=dataset)
    print("Writing " + dataset + " data set to file...")
    cPickle.dump(ds, open(dataset + '_db.p', 'wb'))
else:
    print("Loading data from " + dataset + " data set...")
    ds = cPickle.load(open(dataset + '_db.p', 'rb'))

if feature_extract:
    functions.feature_extract(ds.data, nb_samples=len(ds.targets), dataset=dataset)

try:
    # hyperparameter search: hyperas drives hyperopt's TPE over create_model/get_data
    trials = Trials()
    best_run, best_model = optim.minimize(model=create_model,
                                          data=get_data,
                                          algo=tpe.suggest,
                                          max_evals=6,
                                          trials=trials)

    U_train, X_train, Y_train, U_test, X_test, Y_test = get_data()

    # track the best-scoring run among the models evaluated during the search
    best_model_idx = 1
    best_score = 0.0
    for i in range(1, (globalvars.globalVar + 1)):
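optim.minimize relies on hyperas's calling convention: the data function returns the arrays that hyperas passes to the model function, and the model function must return a dict containing 'loss', 'status', and 'model'. In the script above, get_data() returns six arrays (attention inputs, features, and labels for train and test); the skeleton below is a deliberately simplified, generic sketch of that same contract, not this project's create_model.

import numpy as np
from hyperas import optim
from hyperas.distributions import choice
from hyperopt import Trials, STATUS_OK, tpe
from keras.models import Sequential
from keras.layers import Dense


def get_data():
    # toy data standing in for the real feature/label arrays
    x_train = np.random.rand(100, 10)
    y_train = np.random.randint(0, 2, size=(100, 1))
    x_test = np.random.rand(20, 10)
    y_test = np.random.randint(0, 2, size=(20, 1))
    return x_train, y_train, x_test, y_test


def create_model(x_train, y_train, x_test, y_test):
    # {{choice(...)}} is hyperas's search-space syntax; optim.minimize
    # substitutes a concrete value before this function is executed
    model = Sequential()
    model.add(Dense({{choice([32, 64, 128])}}, activation='relu', input_shape=(10,)))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    _, acc = model.evaluate(x_test, y_test, verbose=0)
    # hyperopt minimizes 'loss', so return the negated accuracy
    return {'loss': -acc, 'status': STATUS_OK, 'model': model}


if __name__ == '__main__':
    trials = Trials()
    best_run, best_model = optim.minimize(model=create_model,
                                          data=get_data,
                                          algo=tpe.suggest,
                                          max_evals=6,
                                          trials=trials)
    print("Best hyperparameters found:", best_run)

Because hyperas rewrites the source of these two functions, the search-space expressions are only expanded when optim.minimize runs, so the code has to live in a regular .py file (or pass notebook_name when used from a notebook).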
parser.add_option('-c', '--nb_classes', dest='nb_classes', type='int', default=7)

(options, args) = parser.parse_args(sys.argv)

wav_path = options.wav_path
load_data = options.load_data
feature_extract = options.feature_extract
model_path = options.model_path
nb_classes = options.nb_classes

globalvars.nb_classes = nb_classes

# read the input audio at 16 kHz
y, sr = librosa.load(wav_path, sr=16000)
wav = AudioSegment.from_file(wav_path)

if feature_extract:
    f = functions.feature_extract((y, sr), nb_samples=1, dataset='prediction')
else:
    print("Loading features from file...")
    f = cPickle.load(open('prediction_features.p', 'rb'))

# constant initial values for the attention parameters fed as the model's first input
u = np.full((f.shape[0], globalvars.nb_attention_param),
            globalvars.attention_init_value, dtype=np.float64)

# load model
model = load_model(model_path)

# prediction
results = model.predict([u, f], batch_size=128, verbose=1)

for result in results:
    print(result)
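model.predict returns one row of class probabilities per sample, so the loop above prints raw softmax vectors. The small follow-up sketch below (not part of the original script) maps each vector to its most probable class; the label names are hypothetical placeholders, since the real mapping depends on how the training corpus encoded its emotion classes.

import numpy as np

# hypothetical 7-class label order; replace with the dataset's actual encoding
label_names = ['class_%d' % i for i in range(7)]


def decode_predictions(results):
    # pick the most probable class for every softmax vector
    for result in results:
        idx = int(np.argmax(result))
        print("%s (p=%.3f)" % (label_names[idx], float(result[idx])))

Calling decode_predictions(results) after the predict step would then print a readable label and its probability instead of the bare vector.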