Example #1
def main_realtime_traj_dict():
    #slices = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/traj_org_by_ID_10fps.txt')

    skeleton_data_in_tasks_and_time_slices = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/skeleton_data_in_tasks_time_slices_30fps.txt')#'C:/Users/dario.dotti/Desktop/data_recordings_master/master_skeleton_data_in_tasks_time_slices_30fps.txt')

    ### Statistics on data ###
    # matrix_dist = []
    # for participant in skeleton_data_in_tasks_and_time_slices:
    #
    #     max_dist,max_step = get_distances_between_points(participant)
    #
    #     matrix_dist.append(max_dist)
    #
    # matrix_dist = np.array(matrix_dist).reshape(-1,1)
    # hs.determine_number_k_kMeans(matrix_dist)
    # visualize_cluster(matrix_dist)


    max_dist = 140
    max_step = 15

    final_matrix = []
    final_orig_points = []
    final_matrix_realcoord = []
    for participant in skeleton_data_in_tasks_and_time_slices:
        pass  # (loop body truncated in the source snippet)


def main_laban_posture_RAW():
    ## load features from specific tasks
    # features_participants_orig_1 = data_organizer.load_matrix_pickle(
    #     'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/experiment_upperBody_pathPlanning/RAWpostureUpperBody_path_features_2sec_skeletonF_task45.txt')#'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/experiment_upperBody_pathPlanning/pca_RAWpostureUpperBody_path_features.txt')
    # features_participants_orig_2 = data_organizer.load_matrix_pickle(
    #     'C:/Users/dario.dotti/Desktop/data_recordings_master/data_personality/RAWpostureUpperBody_path_features_master_2sec_skeletonF_task45.txt') #'C:/Users/dario.dotti/Desktop/data_recordings_master/data_personality/pca_RAWpostureUpperBody_path_features_master.txt')
    # features_participants_orig = features_participants_orig_1 + features_participants_orig_2
    # del features_participants_orig[44]
    ## load features from all tasks
    features_participants_orig = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/experiment_upperBody_pathPlanning/RAWpostureUpperBody_path_features_2sec_skeletonF_task0123.txt')

    ##

    ## split the feature vector again to see which parts are more informative ##
    features_participants_3 = np.concatenate(features_participants_orig,axis=0)
    #hot_keyA = features_participants_3[:, :30]
    sk_f = features_participants_3[:, 30:72]
    #posture = features_participants_3[:,72:]
    #print posture.shape
    # #
    # start = 0
    # f_participant = []
    # for i_p in range(0, len(features_participants_orig)):
    #     f_participant.append(posture[start:(start + len(features_participants_orig[i_p]))])
    #     start += len(features_participants_orig[i_p])

    ##

    ## visualize distribution of skeleton features ##
    labels_angles = dist_skeleton_angles_features(sk_f)


def create_cluster_labels_participant_task(raw_features, AE_weights_level_2):

    cluster_model = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/head_joint_id1/10_cluster_model_layer2_new.txt'
    )

    final_labels = []
    for participant in raw_features:
        participant_label = []

        for task in participant:
            task_label = []

            for f_vector in task:
                hd = np.dot(f_vector, AE_weights_level_2[0][0])
                act = sigmoid_function(hd + AE_weights_level_2[1])

                label = cluster_model.predict(act.reshape((1, -1)))
                task_label.append(label[0])

            participant_label.append(task_label)

        final_labels.append(participant_label)

    data_organizer.save_matrix_pickle(
        final_labels,
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/head_joint_id1/10clusters_labels_l2_participants_tasks_new.txt'
    )
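

# sigmoid_function is used above but not defined in this snippet; a minimal
# sketch, assuming the standard logistic activation:
def sigmoid_function(x):
    # element-wise logistic: maps pre-activations into (0, 1)
    return 1.0 / (1.0 + np.exp(-x))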
Example #4
def experiment_as():

    as_matrix_6_tasks = np.array(
        data_organizer.load_matrix_pickle(
            'C:/Users/dario.dotti/Documents/bow_experiment_data/AS_activation_6_labels.txt'
        )).tolist()
    as_matrix_5_tasks_transformed = np.array(
        data_organizer.load_matrix_pickle(
            'C:/Users/dario.dotti/Documents/bow_experiment_data/AS_activation_5_labels_transformed.txt'
        )).tolist()

    as_matrix = np.vstack((as_matrix_6_tasks, as_matrix_5_tasks_transformed))
    #as_matrix = np.array(as_matrix_6_tasks)

    #as_classification_experiment(as_matrix)

    print as_matrix.shape
    return as_matrix
def main_demo_pecs():
    ##get raw data for displaying
    task_skeleton_data = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/pecs_data_review/skeletons_repetitive_behavior_02082017.txt'
    )

    ##HOT features
    HOT_16_subject_6_tasks = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/pecs_data_review/HOT_repetitive_behavior_02082017.txt'
    )

    ##BOW computed on HOT
    # bow_data = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/bow_experiment_data/test_PECS/BOW_3_kmeans_16subject_2sec_without_outlier.txt')
    # labels_bow_data = data_organizer.load_matrix_pickle(
    #     'C:/Users/dario.dotti/Documents/bow_experiment_data/test_PECS/BOW_3_kmeans_labels_16subject_2sec_without_outlier.txt')
    bow_data = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/bow_experiment_data/BOW_30_kmeans_16subject_2sec.txt'
    )
    labels_bow_data = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/bow_experiment_data/BOW_30_kmeans_labels_16subject_2sec.txt'
    )

    lr = LogisticRegression()
    lr.fit(bow_data, np.ravel(labels_bow_data))
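    ##the fit above trains a logistic-regression classifier on the pickled
    ##bag-of-words histograms; np.ravel flattens the (n, 1) label column into
    ##the 1-D shape scikit-learn expects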

    ##cluster data
    # cluster_model = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/bow_experiment_data/test_PECS/cl_30_kmeans_model_2secWindow_without_outliers.txt')
    # labels_cluster = data_organizer.load_matrix_pickle(
    #     'C:/Users/dario.dotti/Documents/bow_experiment_data/test_PECS/cluster_3_kmeans_word__without_outliers.txt')
    cluster_model = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/bow_experiment_data/cl_30_kmeans_model_2secWindow_newVersion.txt'
    )
    labels_cluster = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/bow_experiment_data/cluster_30_kmeans_word_newVersion.txt'
    )
    key_labels = map(lambda x: x[0], labels_cluster)

    ##queues shared between the processes
    q = multiprocessing.Queue()
    current_time_shared = multiprocessing.Queue()

    ##launch the different processes at the same time
    display_joints_traj = multiprocessing.Process(
        target=draw_joints_and_tracks,
        args=(task_skeleton_data, current_time_shared))
    display_confidence_classifier = multiprocessing.Process(
        target=plot_classifier_confidence,
        args=(HOT_16_subject_6_tasks, cluster_model, key_labels, lr, q))

    ##start the processes
    display_joints_traj.start()
    display_confidence_classifier.start()

    ##call plot initializer
    basic_plot()
    update_figures_in_threads(q)

    ##launch main window loop
    window.geometry('800x700')
    window.mainloop()
def main_laban_posture_ID():
    features_participants_orig = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/experiment_upperBody_pathPlanning/IDpostureUpperBody_path_features.txt')

    #features_participants = np.concatenate(features_participants_orig, axis=0)
    #n_minute = features_participants[:,:30]
    #posture_l = features_participants[:,30:]

    ## compare the first n minutes ##
    n_minute = [np.array(p[:20]).reshape((1,-1)) for p in features_participants_orig]
    n_minute = np.concatenate(n_minute,axis=0)
    n_minute_similarity = np.concatenate([cdist(p[:20].reshape((1,-1)), n_minute, 'cosine') for p in features_participants_orig],axis=0)
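    ##note: cdist with metric='cosine' returns cosine *distance*
    ##(1 - cosine similarity), despite the "similarity" variable name above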
    ##


    Z = hierarchy.linkage(n_minute, method='average', metric='cosine')
    #check whether the linkage preserves the original pairwise distances:
    #c is the cophenetic correlation coefficient (values near 1 mean good preservation)
    c, coph_dists = cophenet(Z, pdist(n_minute))
    #print c
    y_tr = hierarchy.fcluster(Z, 0.2, criterion="distance")
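    ##criterion="distance" cuts the dendrogram so that observations in the
    ##same flat cluster are never merged above cosine distance 0.2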
Example #7
def visualize_cluster_pred():
    with open('C:/Users/dario.dotti/Documents/content_6labels.txt', 'r') as f:
        images_name = f.read().split('\n')

    pred = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/cl_prediction_2secWindow_band03.txt')
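    ##note: np.where(pred == k) below assumes pred is a numpy array; if the
    ##pickle stores a plain list, convert it first with pred = np.asarray(pred)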

    class_counter = Counter(pred)
    print class_counter

    for k in class_counter.keys()[:20]:
        print k
        index = np.where(pred == k)[0]
        for i in index[:20]:
            path = 'C:/Users/dario.dotti/Documents/time_windows_HOT/' + images_name[i].split(' ')[3]
            img = cv2.imread(path)

            cv2.imshow('ciao', img)
            cv2.waitKey(0)
Example #8
def main_pecs_data():
    ##get raw data for displaying
    task_skeleton_data = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/pecs_data_review/skeletons_repetitive_behavior_08082017.txt'
    )
    ##if the data contains multiple skeletons, sort them chronologically here
    sort_skeletons(task_skeleton_data)

    my_room = np.zeros((424, 512, 3), dtype=np.uint8)
    my_room += 255
    list_poly = img_processing.divide_image(my_room)

    #draw_joints_and_tracks(task_skeleton_data,my_room)
    #return 0

    skeleton_data_in_time_slices = org_data_in_timeIntervals(
        task_skeleton_data, [0, 0, 2])
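    ##the [0, 0, 2] argument presumably encodes the window length (here
    ##2-second slices, matching the "2secWindow" file names); this reading is
    ##an assumption, not verified against org_data_in_timeIntervals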

    HOT_data, patient_ID = histograms_of_oriented_trajectories(
        list_poly, skeleton_data_in_time_slices)
    data_organizer.save_matrix_pickle(
        HOT_data,
        'C:/Users/dario.dotti/Documents/pecs_data_review/HOT_repetitive_behavior_08082017.txt'
    )
Example #9
                ##down
                feature_img[int(p[1]), int(p[0]) + 1] = 0.99
                feature_img[int(p[1]), int(p[0]) + 2] = 0.99


        # cv2.imshow('feature_img',feature_img)
        # cv2.waitKey(0)

        if len(imgs) > 0:
            imgs = np.vstack((imgs, feature_img.reshape((1, -1))))
        else:
            imgs = feature_img.reshape((1, -1))


    return imgs


AE_weights_level_1 = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/deep_AE_900_225_weights_008hd1_noNoise.txt')
hd_weights = AE_weights_level_1[0][0]
bias_1_level1 = AE_weights_level_1[1]

#pca = decomposition.PCA(n_components=100)  # 2-dimensional PCA whiten=True, svd_solver='randomized'
#pca = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/clustering_posture_5sec/100pca_deep900225AE_5sec_data.txt')
#cluster_model = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/clustering_posture_5sec/linearSVM_agglomerative15c_5sec_100pca.txt')


def compute_hot_f(xs,ys):
    orientation_intervals = [[range(0, 45)], [range(45, 90)], [range(90, 135)],
                             [range(135, 180)], [range(180, 225)], [range(225, 270)],
                             [range(270, 315)], [range(315, 360)]]
    magnitude_intervals = [[range(0, 4)], [range(4, 10)], [range(10, 200)]]

    hot_matrix = np.zeros((len(orientation_intervals), len(magnitude_intervals)))
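

# compute_hot_f is truncated above. A minimal sketch of the usual
# histogram-of-oriented-trajectories binning step (an assumption about the
# missing body, not the original code; compute_hot_f_sketch is a hypothetical
# helper): each displacement between consecutive trajectory points votes into
# one of 8 orientation bins x 3 magnitude bins.
def compute_hot_f_sketch(xs, ys):
    hot_matrix = np.zeros((8, 3))
    magnitude_edges = np.array([0, 4, 10, 200])  # mirrors magnitude_intervals above
    for i in xrange(1, len(xs)):
        dx, dy = xs[i] - xs[i - 1], ys[i] - ys[i - 1]
        angle = np.degrees(np.arctan2(dy, dx)) % 360  # orientation in [0, 360)
        mag = np.hypot(dx, dy)
        o_bin = min(int(angle // 45), 7)              # 8 bins of 45 degrees
        m_bin = int(np.digitize(mag, magnitude_edges)) - 1
        if 0 <= m_bin < 3:                            # ignore magnitudes >= 200
            hot_matrix[o_bin, m_bin] += 1
    return hot_matrix.reshape((1, -1))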
Example #10
def experiment_video():

    #video_classification_experiments()
    #as_classification_experiment()

    HOT_matrix_5_tasks = np.array(
        data_organizer.load_matrix_pickle(
            'C:/Users/dario.dotti/Documents/bow_experiment_data/test_PECS/hot_spatial_grid_4x4x3_5_tasks_2secWindow_without_outliers.txt'
        )).tolist()
    HOT_matrix_6_tasks = np.array(
        data_organizer.load_matrix_pickle(
            'C:/Users/dario.dotti/Documents/hot_spatial_grid_4x4x3_6_tasks_2secWindow.txt'
        )).tolist()  # 'C:/Users/dario.dotti/Documents/bow_experiment_data/test_PECS/hot_spatial_grid_4x4x3_6_tasks_2secWindow_without_outliers.txt'
    #HOT_matrix_6_tasks = np.array(data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/bow_experiment_data/hot_spatial_grid_4x4x3_6_tasks_2secWindow.txt')).tolist()
    #HOT_matrix_5_tasks = np.array(data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/bow_experiment_data/hot_spatial_grid_4x4x3_5_tasks_2secWindow.txt')).tolist()

    length_task3 = [45, 51, 33, 51, 62]

    ##split task 3 of the 5-task matrix to obtain 6 tasks, then merge the two matrices
    for n_subj, subject in enumerate(HOT_matrix_5_tasks):

        new_subject = []

        for n_task, task in enumerate(subject):

            if n_task == 3:

                new_subject.append(task[:length_task3[n_subj]])

                new_subject.append(task[length_task3[n_subj]:])
            else:
                new_subject.append(task)
        HOT_matrix_6_tasks.append(new_subject)

    #data_organizer.save_matrix_pickle(HOT_matrix_6_tasks,'C:/Users/dario.dotti/Documents/bow_experiment_data/final_HOT_matrix_6_tasks.txt')

    ##transform matrix for clustering
    HOT_matrix_for_cluster = []

    for s in xrange(0, len(HOT_matrix_6_tasks)):
        for t in xrange(0, len(HOT_matrix_6_tasks[s])):

            for time_slice in xrange(0, len(HOT_matrix_6_tasks[s][t])):

                if len(HOT_matrix_for_cluster) > 0:
                    HOT_matrix_for_cluster = np.vstack(
                        (HOT_matrix_for_cluster,
                         HOT_matrix_6_tasks[s][t][time_slice]))
                else:
                    HOT_matrix_for_cluster = HOT_matrix_6_tasks[s][t][
                        time_slice]

    print np.array(HOT_matrix_for_cluster).shape
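
    ##note: growing HOT_matrix_for_cluster with repeated np.vstack is
    ##quadratic in the number of time slices; an equivalent one-shot version
    ##(a sketch) collects the rows in a list first:
    # rows = [HOT_matrix_6_tasks[s][t][ts]
    #         for s in xrange(len(HOT_matrix_6_tasks))
    #         for t in xrange(len(HOT_matrix_6_tasks[s]))
    #         for ts in xrange(len(HOT_matrix_6_tasks[s][t]))]
    # HOT_matrix_for_cluster = np.vstack(rows)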

    # #Clustering Meanshift
    ##video_clustering_fit(concatenated_matrix,'C:/Users/dario.dotti/Documents/cl_model_2secWindow_band03.txt')

    #cluster_model = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/bow_experiment_data/cl_model_2secWindow_band03.txt')
    #labels = cluster_model.predict(HOT_matrix_for_cluster)

    # #Clustering KMeans
    # determine_number_k_kMeans(HOT_matrix_for_cluster)
    # cluster_model = KMeans(n_clusters=30, n_jobs=-1)
    # cluster_model.fit(HOT_matrix_for_cluster)
    # data_organizer.save_matrix_pickle(cluster_model,
    #                                   'C:/Users/dario.dotti/Documents/bow_experiment_data/cl_30_kmeans_model_2secWindow_newVersion.txt')
    # data_organizer.save_matrix_pickle(cluster_model,
    #                                   'C:/Users/dario.dotti/Documents/cl_30_kmeans_model_2secWindow_newVersion.txt')

    cluster_model = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/bow_experiment_data/test_PECS/cl_30_kmeans_model_2secWindow_without_outliers.txt'
    )
    labels = cluster_model.predict(HOT_matrix_for_cluster)

    labels_counter = Counter(labels).most_common(30)
    #data_organizer.save_matrix_pickle(labels_counter,'C:/Users/dario.dotti/Documents/cluster_3_kmeans_word__without_outliers.txt')

    matrix_training, labels, tasks_dict = test_hist_task(
        cluster_model, labels_counter, HOT_matrix_6_tasks)

    return matrix_training, labels, tasks_dict
Example #11
from sklearn.metrics import classification_report,precision_recall_fscore_support,accuracy_score
from sklearn.cluster import KMeans
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split
from pomegranate import *


import data_organizer
import img_processing
import AE_rec



feature_p_1 = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Desktop/Hier_AE_deliverable/head_joint_id1/feature_matrix_participant_task_l2_new_realCoordinates.txt')
feature_p_2 = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Desktop/Hier_AE_deliverable/head_joint_id1/feature_matrix_participant_master_task_l2_new_realCoordinates.txt')


feature_p = feature_p_1[:19]+feature_p_2[:27]
real_coord = feature_p_1[19:]+feature_p_2[27:]
list_poly = img_processing.divide_image(np.zeros((414,512),dtype=np.uint8))


space_features = []
for i_p in xrange(0,len(real_coord)):
    for i_task in xrange(0,len(real_coord[i_p])):
        for i_slice in  xrange(0,len(real_coord[i_p][i_task])):
            votes = np.zeros((16, 3))


def main_laban_posture_ID():
    cl_model_posture = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_2_data_recording/posture/kmeans_50_posture_allTasks.txt')

    features_participants_orig = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/experiment_upperBody_pathPlanning/RAWpostureUpperBody_path_features_2sec_skeletonF_ALLTASKS.txt')  # 'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/experiment_upperBody_pathPlanning/pca_RAWpostureUpperBody_path_features.txt')
    # features_participants_orig_2 = data_organizer.load_matrix_pickle(
    #     'C:/Users/dario.dotti/Desktop/data_recordings_master/data_personality/RAWpostureUpperBody_path_features_master_2sec_skeletonF_ALLTASKS.txt')  # 'C:/Users/dario.dotti/Desktop/data_recordings_master/data_personality/pca_RAWpostureUpperBody_path_features_master.txt')
    # features_participants_orig = features_participants_orig_1 + features_participants_orig_2

    l_per_participant = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_2_data_recording/clusters_on_pearson_corr_personality_scores.txt')



    ## split the feature vector again to see which parts are more informative ##
    features_participants_3 = np.concatenate(features_participants_orig, axis=0)
    # hot_keyA = features_participants_3[:, :30]
    skeleton_angles = features_participants_3[:, 30:72]
    posture = features_participants_3[:, 72:]
    print posture.shape

    ## feature and label vectors with all data ##
    start = 0
    labels_data = np.zeros((posture.shape[0], 1), dtype=int)
    for i_p in xrange(0, len(features_participants_orig)):
        for i in xrange(0, len(features_participants_orig[i_p])):
            labels_data[start + i, 0] = int(l_per_participant[i_p])

        start += len(features_participants_orig[i_p])

    labels_data = labels_data.ravel()
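    ##predict a posture-cluster id (0..49) for every 2-sec window with the
    ##pretrained 50-cluster k-means model loaded above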
    y_tr = cl_model_posture.predict(posture)


    ## visualize posture distribution ##
    hist = np.zeros((3, 50))
    for pers_label in xrange(1, 4):
        for i_l in xrange(0, len(y_tr)):
            if labels_data[i_l] == pers_label: hist[(pers_label - 1), y_tr[i_l]] += 1
    fig = plt.figure()
    ax = fig.add_subplot(111)

    width = 0.25
    ind = np.arange(0, 50)

    ##normalized heights
    rect1 = plt.bar(ind, hist[0,:]/len(np.where(labels_data==1)[0]), width)
    rect2 = plt.bar(ind+width, hist[1, :]/len(np.where(labels_data==2)[0]), width, color='red')
    rect3 = plt.bar(ind+(width*2), hist[2, :]/len(np.where(labels_data==3)[0]), width, color='green')

    ax.legend((rect1[0], rect2[0], rect3[0]), ('class1', 'class2','class3'), fontsize=11)
    ax.set_xticks(ind + width)
    ax.set_xticklabels(ind)
    plt.show()
    ###

    ## check posture differences using the raw skeleton angles ##
    for pers_label in xrange(1,4):
        print pers_label
        angles_per_cl = []
        for i_l in xrange(0,len(y_tr)):
            if labels_data[i_l] == pers_label:
                angles_per_cl.append(skeleton_angles[i_l])

        ## show the mean per joint ##
        ## reshape to (7 joints * n_windows) rows x 6 angle statistics
        angles_per_cl = np.array(angles_per_cl).reshape((7 * len(angles_per_cl), 6))

        for i_joint in xrange(7):
            joint_stat = []
            for i_value in xrange(i_joint,angles_per_cl.shape[0],7):
                joint_stat.append(angles_per_cl[i_value])
            print np.mean(np.array(joint_stat),axis=0)
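        ## equivalent vectorized form (a sketch): the strided loop above is a
        ## per-joint mean over windows, i.e.
        # print np.array(angles_per_cl).reshape((-1, 7, 6)).mean(axis=0)

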
def main_laban_posture_RAW():
    features_participants_orig_1 = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/experiment_upperBody_pathPlanning/RAWpostureUpperBody_path_features_2sec_skeletonF.txt')#'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/experiment_upperBody_pathPlanning/pca_RAWpostureUpperBody_path_features.txt')
    features_participants_orig_2 = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Desktop/data_recordings_master/data_personality/RAWpostureUpperBody_path_features_master_2sec_skeletonF.txt') #'C:/Users/dario.dotti/Desktop/data_recordings_master/data_personality/pca_RAWpostureUpperBody_path_features_master.txt')
    features_participants_orig = features_participants_orig_1 + features_participants_orig_2

    ##

    ## split the feature vector again to see which parts are more informative ##
    #features_participants_3 = np.concatenate(features_participants_orig,axis=0)
    # hot = features_participants_3[:, :24]
    # key_areas = features_participants_3[:,24:30]
    #posture = features_participants_3[:,30:130]

    # #
    # start = 0
    # f_participant = []
    # for i_p in range(0, len(features_participants_orig)):
    #     f_participant.append(posture[start:(start + len(features_participants_orig[i_p]))])
    #     start += len(features_participants_orig[i_p])

    ##

    ##concatenate features to form n_minute feature vectors
    # t = 3
    # feature_p_n_minute=[]
    # for p in features_participants_orig:
    #     n_minute_p = []
    #     for n_slice in range(0, len(p) - (t-1), t/2):
    #         n_minute_p.append(p[n_slice:(n_slice + t)].reshape((1,-1)))
    #     feature_p_n_minute.append(np.concatenate(n_minute_p,axis=0))

    feature_p_n_minute = features_participants_orig
    n_minute = np.concatenate(feature_p_n_minute,axis=0)

    #print n_minute.shape

    #pca_on_data(n_minute)

    ### clustering on data ###
    #hs.determine_number_k_kMeans(n_minute)

    #Z = hierarchy.linkage(n_minute, method='average', metric='euclidean')
    # check whether the linkage preserves the original pairwise distances
    #c, coph_dists = cophenet(Z, pdist(n_minute))
    #print c
    #y_tr = hierarchy.fcluster(Z, 5,criterion="distance") #cosine = 0.5
    #
    # ##print y_tr
    #print Counter(y_tr)
    # data_organizer.save_matrix_pickle(y_tr,'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_2_data_recording/label_features_clustering.txt')


    #y_tr = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_2_data_recording/label_features_clustering.txt')
    ##

    #### classification on data #######
    l_per_participant = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_2_data_recording/clusters_on_pearson_corr_personality_scores.txt')

    extrovert_label_per_p =[2, 0, 1, 0, 2, 2, 1, 1, 2, 1, 2, 1, 0, 1, 1, 1, 0, 2, 2, 2, 2, 1, 1, 2, 2, 0, 2, 0, 1, 0, 0, 1, 1, 2, 1, 0, 2, 1, 0, 1, 0, 0, 1, 2, 0, 1]
    consc_label_per_p = [2, 2, 1, 2, 2, 2, 2, 1, 0, 2, 2, 0, 1, 0, 1, 2, 1, 0, 2, 0, 1, 0, 2, 1, 2, 1, 0, 1, 0, 2, 0, 2, 2, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 2]
    neurotic_label_per_p = [0, 2, 1, 1, 1, 2, 2, 1, 0, 0, 2, 2, 2, 1, 2, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 1, 1, 1, 2, 2, 2, 0, 1, 1, 0, 1, 0]


    ## baseline classifications
    raw_feature_classification(n_minute, feature_p_n_minute, l_per_participant)
Example #14
def main_demo():

    ##get raw data for displaying
    body_joints = video_traj.xml_parser(
        'C:/Users/dario.dotti/Documents/pilot_abnormal_behavior_indoor/joints/subject7_points.xml'
    )
    ##HOT features organized per subjects and tasks
    HOT_16_subject_6_tasks = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_demo/HOT_matrix_16_subject_6_tasks.txt'
    )

    ##BOW computed on HOT
    bow_data = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_demo/BOW_16subject_2sec.txt')
    labels_bow_data = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_demo/BOW_labels_16subject_2sec.txt'
    )

    lr = LogisticRegression()
    lr.fit(bow_data, np.ravel(labels_bow_data))

    ##cluster data
    cluster_model = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/cl_model_2secWindow_band03.txt')
    labels_cluster = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_demo/cluster_labels.txt')
    labels_cluster_counter = Counter(labels_cluster).most_common(40)
    keys_labels = map(lambda x: x[0], labels_cluster_counter)

    ##load binary data for displaying
    entrance_door_str = ambient_sensors.org_data_ID(
        'C:/Users/dario.dotti/Documents/pilot_abnormal_behavior_indoor/binary/18-10-16_sensors_subject7.txt'
    )['entrance']

    entrance_door = []
    signal_entrance_door = []
    #convert the entrance-door timestamps from strings to datetime objects
    for i, s in enumerate(entrance_door_str):

        date = s.split(' ')[1]
        time = s.split(' ')[2].split('-')

        entrance_door.append(
            dt.strptime(date + ' ' + time[0] + ':' + time[1] + ':' + time[2],
                        '%y-%m-%d %H:%M:%S'))
        signal_entrance_door.append(s.split(' ')[0])

    subj = 3  #n_subject order: 4,5,6,7,10,11,12,13,14,19,20,15,3,16,17,18
    task = 0  #n_task order: confusion: 0,1,2; repetitive: 3; house_activity: 4,5

    ##queues shared between the processes
    q = multiprocessing.Queue()
    current_time_shared = multiprocessing.Queue()

    ##launch the different processes at the same time
    display_joints_traj = multiprocessing.Process(
        target=draw_joints_and_tracks, args=(body_joints, current_time_shared))
    display_confidence_classifier = multiprocessing.Process(
        target=plot_classifier_confidence,
        args=(HOT_16_subject_6_tasks[subj][task], cluster_model, keys_labels,
              lr, q))
    display_ambient_sensor = multiprocessing.Process(
        target=show_binary_sensor,
        args=(entrance_door, signal_entrance_door, q, current_time_shared))

    display_joints_traj.start()
    display_confidence_classifier.start()
    display_ambient_sensor.start()

    ##call plot initializer
    basic_plot()
    update_figures_in_threads(q)

    ##launch main window loop
    window.geometry('800x700')
    window.mainloop()
Example #15
import video_traj
import hierarchical_ae_learning_methods as hs
import AE_rec





# AE_weights_level_1 = data_organizer.load_matrix_pickle(
#         'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/ae/head_joint_id1/144weights_l1_hd1002.txt')
#
# cluster_model_l1 = data_organizer.load_matrix_pickle(
#         'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/head_joint_id1/20_cluster_model_layer1.txt')
AE_weights_level_1 = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Desktop/Hier_AE_deliverable/ae/head_joint_id1/144weights_l1_hd1002.txt')

cluster_model_l1 = data_organizer.load_matrix_pickle(
    'C:/Users/dario.dotti/Desktop/Hier_AE_deliverable/40_cluster_model_layer2_new.txt')

def encode_features_using_AE_layer1_cluster_activation(feature_matrix,layer):

    ##visually check the reconstruction
    #AE_rec.AE_reconstruction_level1(feature_matrix, AE_weights_level_1)


    for test_traj in feature_matrix:
        if sum(test_traj) != 0:

            ##compute AE reconstruction
            pass  # (remainder of the loop truncated in the source snippet)


def main():

    ###### Layer 1 #########
    #raw_features = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/posture_shoulder_arms_3fps.txt')
    AE_weights_level_1 = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Desktop/Hier_AE_deliverable/ae/head_joint_id1/144weights_l1_hd1002.txt'
    )  #'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/deep_AE_900_225_weights_008hd1_noNoise.txt')#625_weights_008hd1

    # ############
    #
    #data_new = [f for part in raw_features for task in part for f in task]
    #data_new= []
    #map(lambda part: map(lambda task: data_new.append(task), part),raw_features)

    ####Visually check weights
    plot_layers_l1(AE_weights_level_1[0], tied_weights=True)
    plt.show()

    # ##Visually check the reconstruction
    #AE_reconstruction_level1(data_new,AE_weights_level_1)
    #
    # ##create activation matrix
    #matrix_activations_data_l1 = hid_unit_activation_allLayers(data_new, AE_weights_level_1)

    #data_organizer.save_matrix_pickle(matrix_activations_data_l1,'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/matrix_act_400_weights.txt')
    #matrix_activations_data_l1 = data_organizer.load_matrix_pickle('C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/matrix_act_625_weights_6fps.txt')
    matrix_activations_data_l1 = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/posture_data/upperBody/matrix_deep_act_225_weights.txt'
    )

    # ####Clustering on the activation matrix
    cluster_activation_allLayers(matrix_activations_data_l1,
                                 AE_weights_level_1, [],
                                 n_layer=1)
    return 0
    # ################

    #### Layer 2 temporal reconstruction####

    raw_features = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/head_joint_id1/feature_matrix_participant_task_l2_new.txt'
    )
    AE_weights_level_1 = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/ae/head_joint_id1/144weights_l1_hd1002.txt'
    )
    original_points_grid_level_2 = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/head_joint_id1/orig_points_participant_task_l2_new.txt'
    )
    AE_weights_level_2 = data_organizer.load_matrix_pickle(
        'C:/Users/dario.dotti/Documents/data_for_personality_exp/after_data_cleaning/ae/head_joint_id1/169weights_l2_001_new.txt'
    )

    data_new = [f for part in raw_features for task in part for f in task]
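    ##the comprehension above flattens the participant -> task -> window
    ##nesting into one flat list of layer-2 feature vectors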

    ## Visualize weights layer 2 temporal##
    # AE_showWeights_level2_temporalExperiment(AE_weights_level_2, AE_weights_level_1)
    # original_points_grid_level_2_new = [f for part in original_points_grid_level_2 for task in part for f in task]
    # reconstruction_AE_weights_level2_temporal(data_new, original_points_grid_level_2_new, AE_weights_level_1, AE_weights_level_2)

    #### Get activations and cluster ####
    #matrix_activations_data_l2 = hid_unit_activation_allLayers(data_new,AE_weights_level_2)
    #cluster_activation_allLayers(matrix_activations_data_l2, AE_weights_level_1, AE_weights_level_2,n_layer=2 )

    #####

    create_cluster_labels_participant_task(raw_features, AE_weights_level_2)