Example #1
        # NOTE: the snippet starts mid-statement; the two lines below are apparently
        # the tail of the part_loss computation, whose opening is cut off above.
            label.unsqueeze(1).repeat(1, PROPOSAL_NUM).view(-1)).view(
                batch_size, PROPOSAL_NUM)
        raw_loss = creterion(raw_logits, label)
        concat_loss = creterion(concat_logits, label)
        rank_loss = model.ranking_loss(top_n_prob, part_loss)
        partcls_loss = creterion(
            part_logits.view(batch_size * PROPOSAL_NUM, -1),
            label.unsqueeze(1).repeat(1, PROPOSAL_NUM).view(-1))

        total_loss = raw_loss + rank_loss + concat_loss + partcls_loss
        total_loss.backward()
        raw_optimizer.step()
        part_optimizer.step()
        concat_optimizer.step()
        partcls_optimizer.step()
        progress_bar(i, len(trainloader), 'train')

        _, concat_predict = torch.max(concat_logits, 1)
        total += batch_size
        train_correct += torch.sum(concat_predict.data == label.data)

    print(float(train_correct) / total)
    with open('./model.pkl', 'wb') as f:
        pickle.dump(net, f)
    if epoch % SAVE_FREQ == 0:  #and epoch > 20:
        train_loss = 0
        train_correct = 0
        total = 0
        net.eval()
        auc_label_lst = []
        auc_pred_lst = []
        people_lst = []
# NOTE: the snippet dedents here; what follows is the per-epoch evaluation pass,
# shown at module-level indentation.
total = 0
net.eval()

for i, data in enumerate(trainloader):
    with torch.no_grad():
        img, label = data[0].cuda(), data[1].cuda()
        batch_size = img.size(0)
        _, concat_logits, _, _, _ = net(img)
        # calculate loss
        concat_loss = creterion(concat_logits, label)
        # calculate accuracy
        _, concat_predict = torch.max(concat_logits, 1)
        total += batch_size
        train_correct += torch.sum(concat_predict.data == label.data)
        train_loss += concat_loss.item() * batch_size
        progress_bar(i, len(trainloader), 'eval on train set')

train_acc = float(train_correct) / total
train_loss = train_loss / total
print(
    'train set loss: {:.3f} and train set acc: {:.3f} total sample: {}'.format(
        train_loss, train_acc, total))

# evaluate on test set
test_loss = 0
test_correct = 0
total = 0
for i, data in enumerate(testloader):
    with torch.no_grad():
        img, label = data[0].cuda(), data[1].cuda()
        batch_size = img.size(0)
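        # --- Hedged completion: the snippet is cut off here. The test-set loop
        # --- presumably mirrors the train-set evaluation above; this sketch is
        # --- an assumption, not the original code.
        _, concat_logits, _, _, _ = net(img)
        concat_loss = creterion(concat_logits, label)
        _, concat_predict = torch.max(concat_logits, 1)
        total += batch_size
        test_correct += torch.sum(concat_predict.data == label.data)
        test_loss += concat_loss.item() * batch_size
        progress_bar(i, len(testloader), 'eval on test set')

test_acc = float(test_correct) / total
test_loss = test_loss / total
print(
    'test set loss: {:.3f} and test set acc: {:.3f} total sample: {}'.format(
        test_loss, test_acc, total))
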
Example #3
            partcls_optimizer.step()
            #progress_bar(i, len(trainloader), 'train')

            # calculate accuracy
            if phase == 'train':
                _, concat_predict = torch.max(concat_logits, 1)
                total += batch_size
                train_correct += torch.sum(concat_predict.data == label.data)
                train_loss += concat_loss.item() * batch_size
            else:
                _, concat_predict = torch.max(concat_logits, 1)
                total += batch_size
                test_correct += torch.sum(concat_predict.data == label.data)
                test_loss += concat_loss.item() * batch_size

            progress_bar(i, len(image_dataloader[phase]),
                         'eval {} set'.format(phase))

        if phase == 'train':
            train_acc = float(train_correct) / total
            train_loss = train_loss / total
        else:
            test_acc = float(test_correct) / total
            test_loss = test_loss / total

        # report the metrics for the phase that was just evaluated (the original
        # always printed the train metrics, even for the test phase)
        phase_loss = train_loss if phase == 'train' else test_loss
        phase_acc = train_acc if phase == 'train' else test_acc
        _print(
            'epoch:{} - {} loss: {:.6f} and {} acc: {:.6f} total sample: {}'.
            format(epoch, phase, phase_loss, phase, phase_acc, total))

        for scheduler in schedulers:
            scheduler.step()
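# The four optimizers and the `schedulers` list stepped in the fragment above are
# created elsewhere. A minimal, hypothetical sketch of one way to set them up,
# assuming the model is named `net` as in the other examples and using plain SGD
# plus MultiStepLR; the submodule names, learning rates and milestones below are
# illustrative assumptions, not taken from the original code.
import torch.optim as optim
from torch.optim.lr_scheduler import MultiStepLR

raw_optimizer = optim.SGD(net.pretrained_model.parameters(), lr=0.001,
                          momentum=0.9, weight_decay=1e-4)
part_optimizer = optim.SGD(net.proposal_net.parameters(), lr=0.001,
                           momentum=0.9, weight_decay=1e-4)
concat_optimizer = optim.SGD(net.concat_net.parameters(), lr=0.001,
                             momentum=0.9, weight_decay=1e-4)
partcls_optimizer = optim.SGD(net.partcls_net.parameters(), lr=0.001,
                              momentum=0.9, weight_decay=1e-4)
schedulers = [MultiStepLR(opt, milestones=[60, 100], gamma=0.1)
              for opt in (raw_optimizer, part_optimizer,
                          concat_optimizer, partcls_optimizer)]
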
def calculate_fixation_per_aoi(input_file, output_file):
    """
    Calculate Fixation features per AOI.

    :param input_file: Fixation CSV file
    :param output_file: Fixation per AOI file    
    """
    print("Calculating Blinking Features per AOI...")
    pd_dataframe = pd.read_csv(input_file, sep=",", index_col=False)
    # print("Finished loading in \"{}\" file".format(input_file))

    overwrite = "y"
    # Export to csv file
    # If file exists and user does not want to overwrite, do nothing
    # if (os.path.exists(output_file)):
    #    overwrite = input("File \"{}\" exists. Would you like to overwrite? (Y/N): ".format(input_file).replace("\\", "/"))

    if overwrite.lower() == "y":
        userID = 0

        fixation_count = [[0 for i in range(NUMBER_OF_PAGES)]
                          for j in range(NUMBER_OF_AOI_TYPES)]
        valid_pupil_count = [[0 for i in range(NUMBER_OF_PAGES)]
                             for j in range(NUMBER_OF_AOI_TYPES)]

        fixation_duration_total = [[0 for i in range(NUMBER_OF_PAGES)]
                                   for j in range(NUMBER_OF_AOI_TYPES)]
        longest_fixation_duration = [[0 for i in range(NUMBER_OF_PAGES)]
                                     for j in range(NUMBER_OF_AOI_TYPES)]

        saccade_length_total = [[0 for i in range(NUMBER_OF_PAGES)]
                                for j in range(NUMBER_OF_AOI_TYPES)]
        saccade_absolute_angle_total = [[0 for i in range(NUMBER_OF_PAGES)]
                                        for j in range(NUMBER_OF_AOI_TYPES)]
        saccade_relative_angle_total = [[0 for i in range(NUMBER_OF_PAGES)]
                                        for j in range(NUMBER_OF_AOI_TYPES)]

        refixation_total = [[0 for i in range(NUMBER_OF_PAGES)]
                            for j in range(NUMBER_OF_AOI_TYPES)]

        left_pupil_difference_total = [[0 for i in range(NUMBER_OF_PAGES)]
                                       for j in range(NUMBER_OF_AOI_TYPES)]
        right_pupil_difference_total = [[0 for i in range(NUMBER_OF_PAGES)]
                                        for j in range(NUMBER_OF_AOI_TYPES)]

        df = pd.read_csv(input_file)
        new_df = pd.DataFrame()

        num_rows = len(pd_dataframe.index)
        count = 0
        # print("Iterating over every row:")
        starttime = time.time()

        for index, row in df.iterrows():
            userID = row['userID']
            page = row['Page']
            aoi_type = row['AOI_TYPE']
            numbered_aoi = AOI_STRING_TO_AOI_NUMBER[aoi_type]

            #Number of fixations in the AOI
            fixation_count[numbered_aoi][page] += 1

            #Fixation Duration
            fixation_duration = row['FPOGD']
            fixation_duration_total[numbered_aoi][page] += fixation_duration

            #Longest Fixation Duration
            longest_fixation_duration[numbered_aoi][page] = max(
                longest_fixation_duration[numbered_aoi][page],
                fixation_duration)

            #Saccade Length
            saccade_length = row['Saccade_length']
            saccade_length_total[numbered_aoi][page] += saccade_length

            #Saccade Absolute Angle
            saccade_absolute_angle = row['Saccade_absolute_angle']
            saccade_absolute_angle_total[numbered_aoi][
                page] += saccade_absolute_angle

            #Saccade Relative Angle
            saccade_relative_angle = row['Saccade_relative_angle']
            saccade_relative_angle_total[numbered_aoi][
                page] += saccade_relative_angle

            #Refixation
            refixation = row['Is_Refixation']
            refixation_total[numbered_aoi][page] += refixation

            if row['LPMMV'] == 1 and row['RPMMV'] == 1:
                valid_pupil_count[numbered_aoi][page] += 1

                left_pupil_difference = row['Left_Difference']
                left_pupil_difference_total[numbered_aoi][
                    page] += left_pupil_difference

                right_pupil_difference = row['Right_Difference']
                right_pupil_difference_total[numbered_aoi][
                    page] += right_pupil_difference

            curtime = time.time()
            elapsed_time = curtime - starttime
            count += 1
            progress = utils.progress_bar(count, num_rows, elapsed_time)
            print(progress, end="\r")

        print("")

        ID_LIST = []
        AOI_TYPE_LIST = []
        PAGE_LIST = []

        FIXATION_COUNT_LIST = []

        TOTAL_FIXATION_DURATION_LIST = []
        AVERAGE_FIXATION_DURATION_LIST = []
        LONGEST_FIXATION_DURATION_LIST = []

        TOTAL_SACCADE_LENGTH_LIST = []
        AVERAGE_SACCADE_LENGTH_LIST = []

        TOTAL_SACCADE_ABSOLUTE_ANGLE_LIST = []
        AVERAGE_SACCADE_ABSOLUTE_ANGLE_LIST = []

        TOTAL_SACCADE_RELATIVE_ANGLE_LIST = []
        AVERAGE_SACCADE_RELATIVE_ANGLE_LIST = []

        TOTAL_REFIXATION_LIST = []
        REFIXATION_RATIO_LIST = []

        TOTAL_LEFT_EYE_DIFERENCE_LIST = []
        AVERAGE_LEFT_EYE_DIFERENCE_LIST = []

        TOTAL_RIGHT_EYE_DIFERENCE_LIST = []
        AVERAGE_RIGHT_EYE_DIFERENCE_LIST = []

        for i in range(NUMBER_OF_PAGES):
            for j in range(NUMBER_OF_AOI_TYPES):
                ID_LIST.append(userID)
                AOI_TYPE_LIST.append(AOI_NUMBER_TO_AOI_STRING[j])
                PAGE_LIST.append(i)

                FIXATION_COUNT_LIST.append(fixation_count[j][i])

                TOTAL_FIXATION_DURATION_LIST.append(
                    fixation_duration_total[j][i])
                LONGEST_FIXATION_DURATION_LIST.append(
                    longest_fixation_duration[j][i])

                TOTAL_SACCADE_LENGTH_LIST.append(saccade_length_total[j][i])

                TOTAL_SACCADE_ABSOLUTE_ANGLE_LIST.append(
                    saccade_absolute_angle_total[j][i])

                TOTAL_SACCADE_RELATIVE_ANGLE_LIST.append(
                    saccade_relative_angle_total[j][i])

                TOTAL_REFIXATION_LIST.append(refixation_total[j][i])

                if fixation_count[j][i] == 0:
                    AVERAGE_FIXATION_DURATION_LIST.append(0)
                    AVERAGE_SACCADE_LENGTH_LIST.append(0)
                    AVERAGE_SACCADE_ABSOLUTE_ANGLE_LIST.append(0)
                    AVERAGE_SACCADE_RELATIVE_ANGLE_LIST.append(0)
                    REFIXATION_RATIO_LIST.append(0)
                else:
                    AVERAGE_FIXATION_DURATION_LIST.append(
                        fixation_duration_total[j][i] / fixation_count[j][i])
                    AVERAGE_SACCADE_LENGTH_LIST.append(
                        saccade_length_total[j][i] / fixation_count[j][i])
                    AVERAGE_SACCADE_ABSOLUTE_ANGLE_LIST.append(
                        saccade_absolute_angle_total[j][i] /
                        fixation_count[j][i])
                    AVERAGE_SACCADE_RELATIVE_ANGLE_LIST.append(
                        saccade_relative_angle_total[j][i] /
                        fixation_count[j][i])
                    REFIXATION_RATIO_LIST.append(refixation_total[j][i] /
                                                 fixation_count[j][i])

                TOTAL_LEFT_EYE_DIFERENCE_LIST.append(
                    left_pupil_difference_total[j][i])

                TOTAL_RIGHT_EYE_DIFERENCE_LIST.append(
                    right_pupil_difference_total[j][i])

                if valid_pupil_count[j][i] == 0:
                    AVERAGE_LEFT_EYE_DIFERENCE_LIST.append(0)
                    AVERAGE_RIGHT_EYE_DIFERENCE_LIST.append(0)
                else:
                    AVERAGE_LEFT_EYE_DIFERENCE_LIST.append(
                        left_pupil_difference_total[j][i] /
                        valid_pupil_count[j][i])
                    AVERAGE_RIGHT_EYE_DIFERENCE_LIST.append(
                        right_pupil_difference_total[j][i] /
                        valid_pupil_count[j][i])

        fixation_duration_variance = [[0 for i in range(NUMBER_OF_PAGES)]
                                      for j in range(NUMBER_OF_AOI_TYPES)]
        saccade_length_variance = [[0 for i in range(NUMBER_OF_PAGES)]
                                   for j in range(NUMBER_OF_AOI_TYPES)]
        saccade_absolute_angle_variance = [[0 for i in range(NUMBER_OF_PAGES)]
                                           for j in range(NUMBER_OF_AOI_TYPES)]
        saccade_relative_angle_variance = [[0 for i in range(NUMBER_OF_PAGES)]
                                           for j in range(NUMBER_OF_AOI_TYPES)]
        left_pupil_difference_variance = [[0 for i in range(NUMBER_OF_PAGES)]
                                          for j in range(NUMBER_OF_AOI_TYPES)]
        right_pupil_difference_variance = [[0 for i in range(NUMBER_OF_PAGES)]
                                           for j in range(NUMBER_OF_AOI_TYPES)]
        SD_FIXATION_DURATION_LIST = []
        SD_SACCADE_LENGTH_LIST = []
        SD_SACCADE_ABSOLUTE_ANGLE_LIST = []
        SD_SACCADE_RELATIVE_ANGLE_LIST = []
        SD_LEFT_EYE_DIFERENCE_LIST = []
        SD_RIGHT_EYE_DIFERENCE_LIST = []

        count = 0
        for index, row in df.iterrows():
            page = row['Page']
            aoi_type = row['AOI_TYPE']
            numbered_aoi = AOI_STRING_TO_AOI_NUMBER[aoi_type]

            if fixation_count[numbered_aoi][page] == 0:
                fixation_duration_variance[numbered_aoi][page] = 0
                saccade_length_variance[numbered_aoi][page] = 0
                saccade_absolute_angle_variance[numbered_aoi][page] = 0
                saccade_relative_angle_variance[numbered_aoi][page] = 0
            else:
                #Fixation Duration
                fixation_duration = row['FPOGD']
                fixation_duration_average = fixation_duration_total[
                    numbered_aoi][page] / fixation_count[numbered_aoi][page]
                fixation_duration_variance[numbered_aoi][page] += pow(
                    fixation_duration - fixation_duration_average, 2)

                #Saccade Length
                saccade_length = row['Saccade_length']
                saccade_length_average = saccade_length_total[numbered_aoi][
                    page] / fixation_count[numbered_aoi][page]
                saccade_length_variance[numbered_aoi][page] += pow(
                    saccade_length - saccade_length_average, 2)

                #Saccade Absolute Angle
                saccade_absolute_angle = row['Saccade_absolute_angle']
                saccade_absolute_angle_average = saccade_absolute_angle_total[
                    numbered_aoi][page] / fixation_count[numbered_aoi][page]
                saccade_absolute_angle_variance[numbered_aoi][page] += pow(
                    saccade_absolute_angle - saccade_absolute_angle_average, 2)

                #Saccade Relative Angle
                saccade_relative_angle = row['Saccade_relative_angle']
                saccade_relative_angle_average = saccade_relative_angle_total[
                    numbered_aoi][page] / fixation_count[numbered_aoi][page]
                saccade_relative_angle_variance[numbered_aoi][page] += pow(
                    saccade_relative_angle - saccade_relative_angle_average, 2)

                if row['LPMMV'] == 1 and row['RPMMV'] == 1:
                    if valid_pupil_count[numbered_aoi][page] == 0:
                        left_pupil_difference_variance[numbered_aoi][page] = 0
                        right_pupil_difference_variance[numbered_aoi][page] = 0
                    else:
                        left_pupil_difference = row['Left_Difference']
                        left_pupil_difference_average = left_pupil_difference_total[
                            numbered_aoi][page] / valid_pupil_count[
                                numbered_aoi][page]
                        left_pupil_difference_variance[numbered_aoi][
                            page] += pow(
                                left_pupil_difference -
                                left_pupil_difference_average, 2)

                        right_pupil_difference = row['Right_Difference']
                        right_pupil_difference_average = right_pupil_difference_total[
                            numbered_aoi][page] / valid_pupil_count[
                                numbered_aoi][page]
                        right_pupil_difference_variance[numbered_aoi][
                            page] += pow(
                                right_pupil_difference -
                                right_pupil_difference_average, 2)

            curtime = time.time()
            elapsed_time = curtime - starttime
            count += 1
            progress = utils.progress_bar(count, num_rows, elapsed_time)
            print(progress, end="\r")

        print("")
        for i in range(NUMBER_OF_PAGES):
            for j in range(NUMBER_OF_AOI_TYPES):
                if fixation_count[j][i] == 0:
                    SD_FIXATION_DURATION_LIST.append(0)
                    SD_SACCADE_LENGTH_LIST.append(0)
                    SD_SACCADE_ABSOLUTE_ANGLE_LIST.append(0)
                    SD_SACCADE_RELATIVE_ANGLE_LIST.append(0)
                else:
                    SD_FIXATION_DURATION_LIST.append(
                        math.sqrt(fixation_duration_variance[j][i] /
                                  fixation_count[j][i]))
                    SD_SACCADE_LENGTH_LIST.append(
                        math.sqrt(saccade_length_variance[j][i] /
                                  fixation_count[j][i]))
                    SD_SACCADE_ABSOLUTE_ANGLE_LIST.append(
                        math.sqrt(saccade_absolute_angle_variance[j][i] /
                                  fixation_count[j][i]))
                    SD_SACCADE_RELATIVE_ANGLE_LIST.append(
                        math.sqrt(saccade_relative_angle_variance[j][i] /
                                  fixation_count[j][i]))

                if valid_pupil_count[j][i] == 0:
                    SD_LEFT_EYE_DIFERENCE_LIST.append(0)
                    SD_RIGHT_EYE_DIFERENCE_LIST.append(0)
                else:
                    SD_LEFT_EYE_DIFERENCE_LIST.append(
                        math.sqrt(left_pupil_difference_variance[j][i] /
                                  valid_pupil_count[j][i]))
                    SD_RIGHT_EYE_DIFERENCE_LIST.append(
                        math.sqrt(right_pupil_difference_variance[j][i] /
                                  valid_pupil_count[j][i]))

        new_df['userID'] = ID_LIST
        new_df['Page'] = PAGE_LIST
        new_df['AOI_TYPE'] = AOI_TYPE_LIST
        new_df['Total_Fixation_Count'] = FIXATION_COUNT_LIST
        new_df['Total_Fixation_Duration'] = TOTAL_FIXATION_DURATION_LIST
        new_df['Average_Fixation_Duration'] = AVERAGE_FIXATION_DURATION_LIST
        new_df['Fixation_Duration_SD'] = SD_FIXATION_DURATION_LIST
        new_df['Longest_Fixation_Duration'] = LONGEST_FIXATION_DURATION_LIST
        new_df['Total_Refixation_Count'] = TOTAL_REFIXATION_LIST
        new_df['Refixation_Ratio'] = REFIXATION_RATIO_LIST
        new_df['Total_Saccade_Length'] = TOTAL_SACCADE_LENGTH_LIST
        new_df['Average_Saccade_Length'] = AVERAGE_SACCADE_LENGTH_LIST
        new_df['Saccade_Length_SD'] = SD_SACCADE_LENGTH_LIST
        new_df[
            'Total_Saccade_Absolute_Angle'] = TOTAL_SACCADE_ABSOLUTE_ANGLE_LIST
        new_df[
            'Average_Saccade_Absolute_Angle'] = AVERAGE_SACCADE_ABSOLUTE_ANGLE_LIST
        new_df['Saccade_Absolute_Angle_SD'] = SD_SACCADE_ABSOLUTE_ANGLE_LIST
        new_df[
            'Total_Saccade_Relative_Angle'] = TOTAL_SACCADE_RELATIVE_ANGLE_LIST
        new_df[
            'Average_Saccade_Relative_Angle'] = AVERAGE_SACCADE_RELATIVE_ANGLE_LIST
        new_df['Saccade_Relative_Angle_SD'] = SD_SACCADE_RELATIVE_ANGLE_LIST
        new_df['Total_Left_Eye_Difference'] = TOTAL_LEFT_EYE_DIFERENCE_LIST
        new_df['Average_Left_Eye_Difference'] = AVERAGE_LEFT_EYE_DIFERENCE_LIST
        new_df['Left_Eye_Difference_SD'] = SD_LEFT_EYE_DIFERENCE_LIST
        new_df['Total_Right_Eye_Difference'] = TOTAL_RIGHT_EYE_DIFERENCE_LIST
        new_df[
            'Average_Right_Eye_Difference'] = AVERAGE_RIGHT_EYE_DIFERENCE_LIST
        new_df['Right_Eye_Difference_SD'] = SD_RIGHT_EYE_DIFERENCE_LIST

        new_df.to_csv(output_file, index=False)
        print("Finished exporting to {}".format(output_file).replace(
            "\\", "/"))
    else:
        print("Exiting...")
Example #5
        concat_loss = creterion(concat_logits, label)
        rank_loss = model.ranking_loss(top_n_prob, part_loss)
        partcls_loss = creterion(
            part_logits.view(batch_size * PROPOSAL_NUM, -1),
            label.unsqueeze(1).repeat(1, PROPOSAL_NUM).view(-1),
        )

        total_loss = raw_loss + rank_loss + concat_loss + partcls_loss
        total_loss.backward()

        raw_optimizer.step()
        part_optimizer.step()
        concat_optimizer.step()
        partcls_optimizer.step()

        progress_bar(i, len(trainloader), "train")

    if epoch % SAVE_FREQ == 0:
        train_loss = 0
        train_correct = 0
        total = 0
        net.eval()
        for i, data in enumerate(trainloader):
            with torch.no_grad():
                img, label = data[0].cuda(), data[1].cuda()
                batch_size = img.size(0)
                _, concat_logits, _, _, _ = net(img)
                # calculate loss
                concat_loss = creterion(concat_logits, label)
                # calculate accuracy
                _, concat_predict = torch.max(concat_logits, 1)
Example #6
        val_loss = 0
        val_correct = 0
        total = 0
        for i, data in enumerate(valloader):
            with torch.no_grad():
                img, label = data[0].cuda(), data[1].cuda()
                batch_size = img.size(0)
                _, concat_logits, _, _, _ = net(img)
                # calculate loss
                concat_loss = creterion(concat_logits, label)
                # calculate accuracy
                _, concat_predict = torch.max(concat_logits, 1)
                total += batch_size
                val_correct += torch.sum(concat_predict.data == label.data)
                val_loss += concat_loss.item() * batch_size
                progress_bar(i, len(valloader), 'eval val set')

        val_acc = float(val_correct) / total
        val_loss = val_loss / total
        _print(
            'epoch:{} - val loss: {:.3f} and val acc: {:.3f} val sample: {}'.
            format(epoch, val_loss, val_acc, total))

        # prediction on test set
        print('begin to predict')
        epoch_result = {}
        prediction_epoch = []
        for i, data in enumerate(testloader):
            with torch.no_grad():
                img = data.cuda()
                _, concat_logits, _, _, _ = net(img)
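                # --- Hedged completion: the snippet ends here. One plausible way to
                # --- collect the predictions into the prediction_epoch list declared
                # --- above; the exact post-processing of the original code is not shown.
                _, concat_predict = torch.max(concat_logits, 1)
                prediction_epoch.extend(concat_predict.cpu().tolist())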
def calculate_blink_per_aoi(input_file, output_file):
    """
    Calculate Blinking features per AOI.

    :param input_file: Blinking CSV file
    :param output_file: Blinking per AOI file    
    """
    print("Calculating Blinking Features per AOI...")
    pd_dataframe = pd.read_csv(input_file, sep=",", index_col=False)
    # print("Finished loading in \"{}\" file".format(input_file))

    overwrite = "y"
    # Export to csv file
    # If file exists and user does not want to overwrite, do nothing
    # if (os.path.exists(output_file)):
    #    overwrite = input("File \"{}\" exists. Would you like to overwrite? (Y/N): ".format(input_file).replace("\\", "/"))

    if overwrite.lower() == "y":

        blink_count = [[0 for i in range(NUMBER_OF_PAGES)] for j in range(NUMBER_OF_AOI_TYPES)]
        blink_duration_total = [[0 for i in range(NUMBER_OF_PAGES)] for j in range(NUMBER_OF_AOI_TYPES)]

        df = pd.read_csv(input_file)
        new_df = pd.DataFrame()

        num_rows = len(pd_dataframe.index)
        count = 0
        # print("Iterating over every row:")
        starttime = time.time()

        for index, row in df.iterrows():   
            page = row['Page'] 
            aoi_type = row['AOI_TYPE']
            numbered_aoi = AOI_STRING_TO_AOI_NUMBER[aoi_type]
            blink_duration = row['BKDUR']

            blink_count[numbered_aoi][page] += 1
            blink_duration_total[numbered_aoi][page] += blink_duration

            curtime = time.time()
            elapsed_time = curtime - starttime
            count += 1
            progress = utils.progress_bar(count, num_rows, elapsed_time)
            print(progress, end="\r")

        print("")

        AOI_TYPE_LIST = []
        PAGE_LIST = []
        BLINK_COUNT_LIST = []
        TOTAL_BLINK_DURATION_LIST = []
        AVERAGE_BLINK_DURATION_LIST = []

        for i in range(NUMBER_OF_PAGES):
            for j in range(NUMBER_OF_AOI_TYPES):
                AOI_TYPE_LIST.append(AOI_NUMBER_TO_AOI_STRING[j])
                PAGE_LIST.append(i)
                BLINK_COUNT_LIST.append(blink_count[j][i])
                TOTAL_BLINK_DURATION_LIST.append(blink_duration_total[j][i])
                if blink_count[j][i] != 0:
                    AVERAGE_BLINK_DURATION_LIST.append(blink_duration_total[j][i]/blink_count[j][i])
                else:
                    AVERAGE_BLINK_DURATION_LIST.append(0)

        new_df['Page'] = PAGE_LIST
        new_df['AOI_TYPE'] = AOI_TYPE_LIST
        new_df['Blink_Count'] = BLINK_COUNT_LIST
        new_df['Total_Blink_Duration'] = TOTAL_BLINK_DURATION_LIST  
        new_df['Average_Blink_Duration'] = AVERAGE_BLINK_DURATION_LIST  

        new_df.to_csv(output_file, index=False)
        print("Finished exporting to {}".
              format(output_file).replace("\\", "/"))
    else:
        print("Exiting...")
Example #8
def add_label(input_file, task_file):
    """
    Add userID, AOI and Page labels to the CSV file.

    :param input_file: The large, uncollapsed gaze CSV file
    :param task_file: CSV file with per-task enter/exit timestamps
    """
    overwrite = "y"
    # Export to csv file
    # If file exists and user does not want to overwrite, do nothing
    # if (os.path.exists(input_file)):
    #    overwrite = input("File \"{}\" exists. Would you like to overwrite? (Y/N): ".format(input_file).replace("\\", "/"))

    if overwrite.lower() != "y":
        print("Exiting...")
        return

    print("Adding AOI and Page Label...")
    pd_dataframe = pd.read_csv(input_file, sep=",", index_col=False)
    # print("Finished loading in \"{}\" file".format(input_file))
    
    time_file = pd.read_csv(task_file, sep=",", index_col=False)
    enter_time = []
    time_file['TIME_IN'] = pd.to_datetime(time_file['createdAt'], format='%Y-%m-%d %H:%M:%S.%f')
    time_file['TIME_OUT'] = pd.to_datetime(time_file['updatedAt'], format='%Y-%m-%d %H:%M:%S.%f')

    time_column_name = pd_dataframe.columns.tolist()[3]
    time_column_name = time_column_name.split(" ")[1][:-1]
    start_hour, start_minute, start_second = time_column_name.split(":")
    start_time = int(start_hour) * 3600 + int(start_minute) * 60 + int(float(start_second))

    userID = 0

    for index, row in time_file.iterrows():
        userID = row['userID']
        if row['Task'] > 0:
            time_row = row['TIME_IN']
            time_in_second = time_row.hour * 3600 + time_row.minute * 60 + time_row.second
            enter_time.append(time_in_second - start_time)
        if row['Task'] == 30:
            time_row = row['TIME_OUT']
            time_in_second = time_row.hour * 3600 + time_row.minute * 60 + time_row.second                        
            enter_time.append(time_in_second - start_time)            

    overwrite = "y"
    # Export to csv file
    if overwrite.lower() == "y":

        df = pd.read_csv(input_file)
        
        num_rows = len(pd_dataframe.index)
        count = 0
        # print("Iterating over every row:")
        starttime = time.time()

        ID_List = []
        Page_List = []
        AOI_Type_List = []

        for index, row in df.iterrows():
            ID_List.append(userID)

            x_coord = row['X_Coordinate'] 
            y_coord = row['Y_Coordinate']

            if x_coord < 630 or x_coord > 1290:
                AOI_Type_List.append("NOT_AOI")
            else:
                if x_coord >= 630 and x_coord <= 1290 and y_coord >= 95 and y_coord <= 165:
                    AOI_Type_List.append("QUERY")
                elif x_coord >= 640 and x_coord <= 1280:
                    if y_coord >= top_y[1] and y_coord <= bot_y[1]:
                        AOI_Type_List.append("RESULT1")
                    elif y_coord >= top_y[2] and y_coord <= bot_y[2]:
                        AOI_Type_List.append("RESULT2")
                    elif y_coord >= top_y[3] and y_coord <= bot_y[3]:
                        AOI_Type_List.append("RESULT3")   
                    elif y_coord >= top_y[4] and y_coord <= bot_y[4]:
                        AOI_Type_List.append("RESULT4")   
                    elif y_coord >= top_y[5] and y_coord <= bot_y[5]:
                        AOI_Type_List.append("RESULT5")   
                    elif y_coord >= top_y[6] and y_coord <= bot_y[6]:
                        AOI_Type_List.append("RESULT6")   
                    else:
                        AOI_Type_List.append("NOT_AOI")
                else:
                    AOI_Type_List.append("NOT_AOI")

            timestamp = row[3]

            if timestamp < enter_time[0] or timestamp > enter_time[len(enter_time) - 1]:
                Page_List.append(0)
            else:
                i = len(enter_time) - 1
                while(i >= 0):
                    if timestamp >= enter_time[i]:
                        Page_List.append(i+1)
                        break
                    i -= 1
                    
            curtime = time.time()
            elapsed_time = curtime - starttime
            count += 1
            progress = utils.progress_bar(count, num_rows, elapsed_time)
            print(progress, end="\r")

        print("")
        df['userID'] = ID_List
        df['AOI_TYPE'] = AOI_Type_List        
        df['Page'] = Page_List
        df.to_csv(input_file, index=False)
        print("Finished exporting to {}".
              format(input_file).replace("\\", "/"))
    else:
        print("Exiting...")