Code Example #1
File: correlate_signals.py   Project: sunh20/pyESig2
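# NOTE: this excerpt begins mid-function. mvmt_corr and sound_corr appear to
# hold, per cluster, the correlation with the movement and sound signals, and
# the surrounding if/else re-picks a channel when the two selections collide
# (compare the fuller excerpt in Code Example #4).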
                sound_corr_temp == np.nanmax(sound_corr_temp))[0][0]
        else:
            mvmt_corr_temp = copy.deepcopy(mvmt_corr)
            mvmt_corr_temp[sound_channel] = 0
            mvmt_channel = np.where(
                mvmt_corr_temp == np.nanmax(mvmt_corr_temp))[0][0]
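    # The "rest" channel is the cluster whose movement/sound correlations,
    # averaged together, are lowest.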
    average = np.mean(np.vstack([mvmt_corr, sound_corr]), axis=0)

    rest_channel = np.where(average == np.nanmin(average))[0][0]

    # plt.plot(sound_sections, label="actual sound")
    # plt.plot(mvmt_sections, label="actual mvmt")
    # plt.plot(cluster_sections[:,mvmt_channel], label="cluster mvmt")
    # plt.plot(cluster_sections[:,sound_channel], label="cluster sound")
    # plt.plot(cluster_sections[:,rest_channel], label="cluster rest")
    # plt.legend()
    # plt.show()
    #
    # pdb.set_trace()
    print str(mvmt_channel) + str(mvmt_corr[mvmt_channel])
    print str(sound_channel) + str(sound_corr[sound_channel])
    print str(rest_channel) + str(average[rest_channel])
    return {'Mvmt': mvmt_channel, 'Sound': sound_channel, 'Rest': rest_channel}


if __name__ == "__main__":
    if not (len(sys.argv) == 4):
        raise varError("Arguments should be <Sound File> <Movement File>\
                         <Cluster Result File>")
    correlate(*sys.argv[1:])
Code Example #2
                    else:
                        cluster_centers.append([])
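                # Persist the cluster centers for this subject/date/level.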
                pickle.dump(cluster_centers, open(save_loc + "\\" + sbj_id + "_" +
                                                  str(date) + "_" + str(level) + "_centers.p", "wb"))



            # Plot Cluster Figures
            plt.figure(figsize=(20,10))
            for l in xrange(5):
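                # condense_time appears to aggregate the level-l cluster
                # assignments into per-window counts (the commented-out plot
                # below labels the x-axis in minutes).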
                condensed = condense_time(cluster_result[l, :], l, 16)
                index = np.argsort(condensed[:,:].sum(axis=0))

                # for c in index[-min(2**(l+1),5):]:
                #      plt.plot(np.arange(0,condensed.shape[0]*10,10),condensed[:,c])
                #
                # plt.xlabel("Time(Minute)")
                # plt.ylabel("Cluster Count")
                # #plt.xticks(np.arange(0,condensed.shape[0]*10,10))
                # plt.show()
                pickle.dump(condensed, open(save_loc + "\\" + sbj_id + "_" + str(date) + "_" + str(l) + ".p", "wb"))
            pickle.dump(cluster_result, open(save_loc + "\\" + sbj_id + "_" + str(date) + ".p", "wb"))


if __name__ == "__main__":
    if not (len(sys.argv) == 5):
        raise varError("Arguments should be <Subject ID> <Dates> <Features directory>\
                          <Save directory>")
    sys.argv[2] = sys.argv[2].split()
    hier_cluster_main(*sys.argv[1:])
Code Example #3
File: extract_labels.py   Project: Frikster/pyESig2
        "Speaking", "Multiple_people", "Sleeping", "Eating",
        "Listening.Watching_Media", "Listening.Listening_to_family_member",
        "Listening.Listening_to_staff", "Rest"
    ]
    tracks_reduced = ["Mvmt", "Sound", "Rest", "Other"]
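    # Scan up to 800 numbered clips; only clips this labeller actually
    # annotated (matching rows in the label table) are processed.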
    for i in xrange(800):
        file_num = str(i).zfill(4)
        sbj_indexes = np.where(
            np.array(labels.filename) == file_num + '-' + labeller)[0]

        if sbj_indexes.shape[0] > 0:

            vid_length = getLength(vid_folder + "\\" + sbj_id + "_" +
                                   str(day) + "\\" + sbj_id + "_" + str(day) +
                                   "_" + file_num + ".avi")
            result = convert_reduced_labels_to_array(labels, sbj_indexes,
                                                     vid_length, tracks,
                                                     tracks_reduced)
            pickle.dump(
                result,
                open(
                    dst_folder + "\\" + sbj_id + "_" + str(day) + "_" +
                    file_num + "_" + labeller + ".p", "wb"))


if __name__ == "__main__":
    if not (len(sys.argv) == 7):
        raise varError("Arguments should be <Subject ID> <Day><Label File>\
                         <Video directory> <Save directory>")
    extract_reduced_labels(*sys.argv[1:])
Code Example #4
File: correlate_signals.py   Project: Frikster/pyESig2
            sound_corr_temp[mvmt_channel] = 0
            sound_channel = np.where(sound_corr_temp == np.nanmax(sound_corr_temp))[0][0]
        else:
            mvmt_corr_temp = copy.deepcopy(mvmt_corr)
            mvmt_corr_temp[sound_channel] = 0
            mvmt_channel = np.where(mvmt_corr_temp == np.nanmax(mvmt_corr_temp))[0][0]
    average = np.mean(np.vstack([mvmt_corr, sound_corr]), axis=0)

    rest_channel = np.where(average == np.nanmin(average))[0][0]

    # plt.plot(sound_sections, label="actual sound")
    # plt.plot(mvmt_sections, label="actual mvmt")
    # plt.plot(cluster_sections[:,mvmt_channel], label="cluster mvmt")
    # plt.plot(cluster_sections[:,sound_channel], label="cluster sound")
    # plt.plot(cluster_sections[:,rest_channel], label="cluster rest")
    # plt.legend()
    # plt.show()
    #
    # pdb.set_trace()
    print str(mvmt_channel) + str(mvmt_corr[mvmt_channel])
    print str(sound_channel) + str(sound_corr[sound_channel])
    print str(rest_channel) + str(average[rest_channel])
    return {'Mvmt': mvmt_channel, 'Sound': sound_channel, 'Rest': rest_channel}

if __name__ == "__main__":
    if not (len(sys.argv) == 4):
        raise varError("Arguments should be <Sound File> <Movement File>\
                         <Cluster Result File>")
    correlate(*sys.argv[1:])
Code Example #5
File: extract_labels.py   Project: Frikster/pyESig2
def extract_labeller_reduced_labels(sbj_id, day, src_folder, vid_folder, dst_folder, labeller):
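    # Extract this labeller's reduced labels for one subject/day: read the
    # ':'-separated annotation export, and for each clip the labeller
    # annotated, convert the labels to an array (using the clip's video
    # length) and pickle it.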

    labels = pd.read_csv(src_folder + sbj_id + "_" + str(day) + ".txt", sep=':', dtype=str)

    tracks = ["Laughing", "Movement.Head", "Movement.Other",
              "Movement.arm", "Speaking", "Multiple_people",
              "Sleeping","Eating", "Listening.Watching_Media",
              "Listening.Listening_to_family_member",
              "Listening.Listening_to_staff", "Rest"]
    tracks_reduced = ["Mvmt", "Sound", "Rest", "Other"]
    for i in xrange(800):
        file_num = str(i).zfill(4)
        sbj_indexes = np.where(np.array(labels.filename) == file_num + '-' + labeller)[0]

        if sbj_indexes.shape[0] > 0:

            vid_length = getLength(vid_folder + "\\" + sbj_id + "_" + str(day) +
                                   "\\" + sbj_id + "_" + str(day) + "_" +
                                   file_num + ".avi")
            result = convert_reduced_labels_to_array(labels, sbj_indexes,
                                                     vid_length, tracks,
                                                     tracks_reduced)
            pickle.dump(result, open(dst_folder + "\\" + sbj_id + "_" + str(day) +
                                     "_" + file_num + "_" + labeller + ".p", "wb"))


if __name__ == "__main__":
    if not (len(sys.argv) == 7):
        raise varError("Arguments should be <Subject ID> <Day> <Label File>\
                         <Video directory> <Save directory>")
    extract_reduced_labels(*sys.argv[1:])