def validate(data_type, model, seq_length=60, saved_model=None,
             class_limit=None, image_shape=None):
    """Evaluate a saved model on the test split.

    Prints the Keras aggregate metrics, then a per-sample pass over the
    test set producing a confusion matrix, a classification report, and
    per-class correct/total counts.

    Args:
        data_type: feature type passed through to the data generators.
        model: model-architecture name understood by ResearchModels.
        seq_length: number of frames per sequence.
        saved_model: path to saved weights, or None.
        class_limit: optional cap on number of classes.
        image_shape: optional frame shape; DataSet's default is used
            when None.
    """
    batch_size = 16

    # Build the dataset; only pass image_shape when the caller gave one
    # so DataSet's own default applies otherwise.
    if image_shape is None:
        data = DataSet(
            seq_length=seq_length,
            class_limit=class_limit
        )
    else:
        data = DataSet(
            seq_length=seq_length,
            class_limit=class_limit,
            image_shape=image_shape
        )

    val_generator = data.frame_generator(batch_size, 'test', data_type)

    # Load the model ONCE and reuse it for both passes (the original
    # constructed ResearchModels twice for no reason).
    rm = ResearchModels(len(data.classes), model, seq_length, saved_model)

    # Aggregate evaluation via Keras.
    # NOTE(review): `val_samples` is Keras-1 API while
    # `use_multiprocessing` is Keras-2 — confirm against the installed
    # Keras version (Keras 2 expects `steps=` here).
    results = rm.model.evaluate_generator(
        generator=val_generator,
        val_samples=15,
        use_multiprocessing=True,
        workers=1)

    print(results)
    print(rm.model.metrics_names)

    # Per-class bookkeeping sized to the real class count instead of a
    # hard-coded 14.
    num_classes = len(data.classes)
    scores = np.zeros(num_classes)  # correct predictions per class
    total = np.zeros(num_classes)   # samples seen per class

    # Accumulate labels over the WHOLE test set. The original never
    # appended to these lists and computed the confusion matrix from
    # only the last batch, so the reported metrics were wrong.
    val_trues = []
    val_preds = []
    for X, y in data.gen_test('test', data_type):
        results = rm.model.predict(X)
        predicted = np.argmax(results, axis=-1)
        # y is one-hot; recover the integer class index.
        true_label = np.where(np.array(y) == 1)[1]
        print(true_label)

        total[true_label[0]] += 1
        if predicted[0] == true_label[0]:
            scores[true_label[0]] += 1

        val_trues.append(int(true_label[0]))
        val_preds.append(int(predicted[0]))

    # Print the full multiclass confusion matrix. The original unpacked
    # `.ravel()` into (tn, fp, fn, tp), which raises ValueError for any
    # problem with more than two classes.
    print('Confusion Matrix')
    print("\n *** Confusion matrix**")
    print(confusion_matrix(val_trues, val_preds))
    print('\n****************')

    print(classification_report(val_trues, val_preds))
    print(scores)
    print('\n****************')
    print(total)