Example #1
    def totalDistAndVar(self, groups, dist, varFactor=0):
        # Relies on module-level imports of itertools.combinations and a
        # var function (e.g. numpy.var), plus the self.pairDist helper.

        row_num = len(groups)
        combins = list(combinations(range(row_num), 2))

        # Running total of the pairwise distances between groups
        total_dist = 0

        dist_list = []

        for pair in combins:
            # Select the two group rows whose IDs (first column) form this pair
            temp_dist_df = groups[groups.iloc[:, 0].isin(pair)]

            # Pairwise distance between the two groups, using self.pairDist
            temp_dist = self.pairDist(temp_dist_df.iloc[0][1:],
                                      temp_dist_df.iloc[1][1:], dist)

            dist_list.append(temp_dist)

            # Add to the running total
            total_dist = total_dist + temp_dist

        # Variance of the pairwise distances, optionally folded into the total
        dist_var = var(dist_list)
        total_dist = total_dist + varFactor * dist_var

        return dist_var, total_dist
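
A minimal standalone sketch of the same idea, for illustration only: the group
layout, the Euclidean stand-in for self.pairDist, and the varFactor value are
assumptions, not taken from the class above.

# Hypothetical standalone sketch of "total pairwise distance + variance penalty".
from itertools import combinations

import numpy as np
import pandas as pd

def pair_dist(a, b):
    # Stand-in for self.pairDist: plain Euclidean distance
    return np.linalg.norm(np.asarray(a, dtype=float) - np.asarray(b, dtype=float))

groups = pd.DataFrame({
    "group_id": [0, 1, 2],
    "x": [0.0, 3.0, 0.0],
    "y": [0.0, 4.0, 8.0],
})

dists = [pair_dist(groups.iloc[i, 1:], groups.iloc[j, 1:])
         for i, j in combinations(range(len(groups)), 2)]

var_factor = 0.5  # assumed weighting, mirrors varFactor above
total = sum(dists) + var_factor * np.var(dists)
print(np.var(dists), total)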
Example #2
def CalculateVariances(data, classification, averages, count):
    # Gather each attribute's values for the scales in the given class and
    # return the per-attribute variances; averages and count are accepted by
    # the signature but not used here.

    variancesLW = []
    variancesLD = []
    variancesRW = []
    variancesRD = []
    for scale in data:
        if scale.classification == classification:
            variancesLW.append(scale.left_weight)
            variancesLD.append(scale.left_distance)
            variancesRW.append(scale.right_weight)
            variancesRD.append(scale.right_distance)

    return [
        var(variancesLW),
        var(variancesLD),
        var(variancesRW),
        var(variancesRD)
    ]
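
A quick way to exercise the function above, assuming its definition is in scope
and that var is numpy.var; the Scale record type and the sample rows are made
up for illustration.

# Hypothetical driver for CalculateVariances (defined above).
from collections import namedtuple

from numpy import var

Scale = namedtuple(
    "Scale",
    "classification left_weight left_distance right_weight right_distance")

data = [
    Scale("L", 5, 4, 1, 2),
    Scale("L", 4, 3, 2, 1),
    Scale("R", 1, 2, 5, 4),
]

print(CalculateVariances(data, "L", averages=None, count=None))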
Example #3
def CalculateVariances(data, species, averages, count):
    # Same pattern as above, for iris measurements: gather each attribute's
    # values for the given species and return the per-attribute variances;
    # averages and count are unused here.

    variancesSL = []
    variancesSW = []
    variancesPL = []
    variancesPW = []
    for iris in data:
        if iris.species == species:
            variancesSL.append(iris.sepal_length)
            variancesSW.append(iris.sepal_width)
            variancesPL.append(iris.petal_length)
            variancesPW.append(iris.petal_width)

    return [
        var(variancesSL),
        var(variancesSW),
        var(variancesPL),
        var(variancesPW)
    ]
Example #4
def cdm(x, y, z):
    """
    Multilabel data.
    :param x: feature matrix (samples x features)
    :param y: label matrix (samples x labels)
    :param z: unused
    :return: correlation distance matrix (features x labels)
    """
    cdm = []  # final correlation distance matrix
    for i in range(len(x[0])):
        fea = x[:, i]  # i-th feature column

        meanf = sum(fea) / len(fea)  # mean of the feature

        corr = []  # correlation distances for this feature
        for j in range(len(y[0])):
            lab = y[:, j]  # j-th label column

            meanl = sum(lab) / len(lab)  # mean of the label

            # Product of the two standard deviations (assumes a two-argument
            # var(data, mean) helper rather than numpy.var)
            varx = (var(fea, meanf) * var(lab, meanl)) ** 0.5

            # Covariance between the feature and the label
            covx = np.cov(fea, lab)[0, 1]

            # Correlation distance: 1 - Pearson correlation
            # (covariance divided by the product of standard deviations)
            corr.append(1 - (covx / varx))

        cdm.append(corr)  # row of correlation distances for this feature

    return cdm
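
A small driver for cdm, to be run after the definition above. Since numpy.var
does not take a precomputed mean as its second argument, the sketch supplies a
hypothetical two-argument var(data, mean) helper; the toy feature and label
arrays are made up.

# Hypothetical setup for cdm: a two-argument var helper and toy multilabel data.
import numpy as np

def var(data, mean):
    # Population variance around a precomputed mean
    return sum((d - mean) ** 2 for d in data) / len(data)

x = np.array([[0.1, 1.0],
              [0.4, 0.8],
              [0.9, 0.2],
              [1.2, 0.1]])           # 4 samples x 2 features
y = np.array([[0.0, 1.0],
              [0.0, 1.0],
              [1.0, 0.0],
              [1.0, 0.0]])           # 4 samples x 2 labels

print(cdm(x, y, None))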
Example #5
import statistics

# Sample data; statistics.variance gives the sample variance and
# statistics.pvariance the population variance.
data = [2, 4, 4, 4, 5, 5, 7, 9]
x = statistics.variance(data)
y = statistics.pvariance(data)
print(x, y)
Example #6
    def test_sd_var2_without_variance(self):
        self.assertEqual(
            Variance("Var2", get_var=False).apply(self.df)["Var2_SD"],
            [sqrt(var(self.df["Var2"]))])
Example #7
    def test_variance_var2_without_sd(self):
        self.assertEqual(
            Variance("Var2", get_sd=False).apply(self.df)["Var2_Var"],
            [var(self.df["Var2"])])
Example #8
def test_projection(idx):
    '''
    Load a point cloud, the corresponding depth map, and the calibration
    matrices, then check that projecting the point cloud into the image
    plane agrees with the depth map.
    '''
    pc = load_pc(
        "/home/chenziwe/robotics/egoDepth/datasets/kitti/2011_09_26_drive_0022_sync/velodyne_points/data/"
        + '{:010d}'.format(idx) + ".bin")
    vcR, vcT = load_calib(
        "/home/chenziwe/robotics/egoDepth/datasets/kitti/2011_09_26_drive_0022_sync/calib/calib_velo_to_cam.txt"
    )
    vc = padRT(vcR, vcT)
    proj = load_proj(
        "/home/chenziwe/robotics/egoDepth/datasets/kitti/2011_09_26_drive_0022_sync/calib/calib_cam_to_cam.txt"
    )
    depth = load_depth(
        "/home/chenziwe/robotics/egoDepth/datasets/kitti/2011_09_26_drive_0022_sync/depth/image_02/"
        + '{:010d}'.format(idx) + ".png")
    img = load_img(
        "/home/chenziwe/robotics/egoDepth/datasets/kitti/2011_09_26_drive_0022_sync/image/data/"
        + '{:010d}'.format(idx) + ".png")
    '''
    visualize_pc(pc)
    cv2.imshow('lol',depth/100.0)
    cv2.waitKey(0)
    neg_pc=list()
    pos_pc=list()
    zero_pc=list()
    neg_count=0
    pos_count=0
    zero_count=0
    for i in range(pc.shape[0]):
        if pc[i][0]<0:
            neg_count+=1
            neg_pc.append(pc[i])
        elif pc[i][0]>0:
            pos_count+=1
            pos_pc.append(pc[i])
        else:
            zero_count+=1
            zero_pc.append(pc[i])
    print("neg",neg_count)
    print("pos",pos_count)
    print("zero",zero_count)
    neg_pc=np.stack(neg_pc)
    pos_pc=np.stack(pos_pc)
    zero_pc=np.stack(zero_pc)
    visualize_pc(neg_pc)
    visualize_pc(pos_pc)
    visualize_pc(zero_pc)
    '''

    pc_c = list()
    pc = euc2hom(pc)  # homogeneous coordinates, n x 4
    h, w = depth.shape[0], depth.shape[1]
    for i in range(pc.shape[0]):
        # Transform the point into the camera frame
        pc[i] = np.matmul(vc, pc[i])
        if pc[i][2] > 0:
            # Keep only points in front of the camera: project into the image
            # plane and carry the depth along in the third component
            cp = pc[i]
            ip = np.matmul(proj, cp)
            d = ip[2]
            ip = ip / ip[2]
            ip[2] = d
            pc_c.append(ip)
    pc_c = np.stack(pc_c)
    print(pc_c.shape)
    # Count the valid (non-zero) pixels in the ground-truth depth map
    count = 0
    for r in depth:
        for e in r:
            if e > 0:
                count += 1
    print(count)
    print(np.min(depth))
    #pc_corig=pc_c.copy()
    #pc_c=hom2euc(pc_c) # nx2

    # Compare each projected point's depth against the depth map in a 5x5
    # neighbourhood around its pixel and record the differences
    count = 0
    diffs = list()
    depth2 = np.zeros_like(depth)
    for i in range(pc_c.shape[0]):
        p = pc_c[i]
        #print(p)
        x, y = int(round(p[0])), int(round(p[1]))
        #print(x,y)
        if 0 <= x and 0 <= y and x < w and y < h:
            depth2[y, x] = p[2]
        if 2 <= x and 2 <= y and x < w - 2 and y < h - 2:
            if np.max(depth[y - 2:y + 3, x - 2:x + 3]) == 0:
                continue
            #dep=np.sum(depth[y-1:y+2,x-1:x+2])/float(np.count_nonzero(depth[y-1:y+2,x-1:x+2]))
            d = np.min(np.abs(depth[y - 2:y + 3, x - 2:x + 3] - p[2]))
            #if abs(d)>2 and abs(d-p[2])<0.001:
            if abs(d) > 5:
                count += 1
                depth2[y, x] = 512
                #print(d,p[2],depth[y-2:y+3,x-2:x+3])
            diffs.append(d)
            #print("diff:",abs(d))
        #print()
    print(len(diffs))
    print(count)
    print(min(diffs))
    print(max(diffs))
    print(mean(diffs))
    print(var(diffs))
    cv2.imshow('img', img)
    cv2.imshow('lol2', depth2 / 200.0)
    cv2.waitKey(0)
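
The heart of this test is the standard pinhole projection: move each LiDAR
point into the camera frame with the velodyne-to-camera extrinsics, apply the
camera projection matrix, and divide by depth. A compact sketch of just that
step, with placeholder matrices standing in for padRT(vcR, vcT) and
load_proj(...):

# Minimal pinhole-projection sketch; the extrinsic and projection matrices and
# the sample points are placeholders, not values from the KITTI calibration files.
import numpy as np

vc = np.eye(4)                            # stand-in for padRT(vcR, vcT)
proj = np.array([[700.0,   0.0, 600.0, 0.0],
                 [  0.0, 700.0, 180.0, 0.0],
                 [  0.0,   0.0,   1.0, 0.0]])  # stand-in 3x4 projection matrix

pts = np.array([[10.0, 1.0, 20.0, 1.0],   # homogeneous points, n x 4
                [-2.0, 0.5,  5.0, 1.0]])

pixels = []
for p in pts:
    cam = np.matmul(vc, p)                # into the camera frame
    if cam[2] > 0:                        # keep points in front of the camera
        ip = np.matmul(proj, cam)
        depth = ip[2]
        ip = ip / ip[2]                   # perspective divide
        pixels.append((ip[0], ip[1], depth))
print(pixels)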
Example #9
    # Colour-map each tensor in the batch and stack the results
    for t in tensor:
        ret.append(convert_to_colormap(t).unsqueeze(0))
    ret = torch.cat(ret)
    return ret


if __name__ == '__main__':
    #download_kitti()
    #process_kitti_raw()
    #process_kitti_integ()
    #visualize_depth("/home/chenziwe/robotics/egoDepth/datasets/kitti/2011_09_26_drive_0022_sync/depth/image_02/0000000136.png")
    #load_img("/home/chenziwe/robotics/egoDepth/datasets/kitti/2011_09_26_drive_0022_sync/image/data/0000000136.png")
    #diffs=load_timediff("/home/chenziwe/robotics/egoDepth/datasets/kitti/2011_09_26_drive_0022_sync/image/timestamps.txt")
    # Collect per-frame x/y/z differences and report their means and variances
    dx = list()
    dy = list()
    dz = list()
    for i in range(100):
        i += 250  # offset the frame index
        x, y, z = get_diff_means(i, i, velocity=False)
        dx.append(x)
        dy.append(y)
        dz.append(z)
    print(mean(dx), mean(dy), mean(dz))
    print(var(dx), var(dy), var(dz))
    #integ_sequence('2011_09_26_drive_0022')
    #x,y,z=get_diff_means(0,400,velocity=True)
    #test_projection(5)
    #test_reverse(5)
Example #10
def coeffvar(data_frame):
    # Coefficient of variation: standard deviation divided by the mean
    return (var(data_frame)**(1/2))/mean(data_frame)
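
For example, assuming var and mean are numpy's (and that the coeffvar
definition above is in scope), the function can be applied to a pandas column;
the sample data here is made up.

# Hypothetical usage of coeffvar (defined above).
import pandas as pd
from numpy import mean, var

df = pd.DataFrame({"a": [1.0, 2.0, 3.0, 4.0],
                   "b": [10.0, 10.5, 9.5, 10.0]})
print(coeffvar(df["a"]))  # larger relative spread
print(coeffvar(df["b"]))  # smaller relative spread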