def split_set(Sset, k):
    """Split `Sset` into two subsets on feature `k`, using the threshold and
    direction returned by `themean`."""
    const, dire = themean(Sset, k)
    if dire == 0:
        Z_ind = np.where(Sset.x[:, k] <= const)[0]   # rows that go to subset 0
        One_ind = np.where(Sset.x[:, k] > const)[0]  # rows that go to subset 1
    else:
        Z_ind = np.where(Sset.x[:, k] >= const)[0]
        One_ind = np.where(Sset.x[:, k] < const)[0]
    # Keep the complementary rows (and labels) for each subset.
    Sset_0feat = np.delete(Sset.x, One_ind, 0)
    Sset_0label = np.delete(Sset.label, One_ind)
    Sset_1feat = np.delete(Sset.x, Z_ind, 0)
    Sset_1label = np.delete(Sset.label, Z_ind)
    return (theSet(Sset_0feat, Sset_0label), theSet(Sset_1feat, Sset_1label))
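The helpers `theSet` and `themean` are defined elsewhere in the original project. To make the snippet runnable in isolation, here is a minimal sketch with hypothetical stand-ins (a namedtuple container and a mean-based threshold):

import numpy as np
from collections import namedtuple

# Hypothetical stand-ins, not the original helpers.
theSet = namedtuple("theSet", ["x", "label"])

def themean(Sset, k):
    # Assumption: threshold is the mean of feature k, direction is always 0.
    return Sset.x[:, k].mean(), 0

data = theSet(np.random.rand(10, 3), np.random.randint(0, 2, 10))
left, right = split_set(data, k=1)
print(len(left.label), len(right.label))  # sizes of the two subsets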
Example #3
File: depth.py  Project: ai-cv/rq
def createMedianMask(disparityMap, validDepthMask, rect=None):
    """Return a mask selecting the median layer, plus shadows."""
    if rect is not None:
        x, y, w, h = rect
        disparityMap = disparityMap[y:y + h, x:x + w]
        validDepthMask = validDepthMask[y:y + h, x:x + w]
    median = np.median(disparityMap)
    return np.where((validDepthMask == 0) | (abs(disparityMap - median) < 12),
                    1.0, 0.0)
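A possible way to call `createMedianMask`, using synthetic arrays in place of real stereo output (the values below are placeholders, not from the original project):

import numpy as np

disparityMap = np.random.randint(0, 64, size=(120, 160)).astype(np.float32)
validDepthMask = (disparityMap > 0).astype(np.uint8)

mask = createMedianMask(disparityMap, validDepthMask, rect=(40, 30, 80, 60))
print(mask.shape)  # (60, 80); 1.0 keeps a pixel, 0.0 rejects it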
    def work_on_detected(self):
        # Compute the centroid of all lit pixels in the "laser" channel.
        cpy = np.copy(self.channels["laser"])

        indices = np.transpose(np.where(cpy > 0))  # (N, 2) array of (y, x) coordinates
        if indices.size:
            sum_y, sum_x = np.sum(indices, axis=0)
            self.ave_y, self.ave_x = sum_y / len(indices), sum_x / len(indices)

            # print("average x:", self.ave_x, "average y:", self.ave_y)
            self.manager.update_player_place((self.ave_x, self.ave_y))
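For reference, the same centroid can be computed more directly; a minimal equivalent, assuming `cpy` is the 2-D laser channel above:

ys, xs = np.nonzero(cpy)
if ys.size:
    ave_y, ave_x = ys.mean(), xs.mean()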
Example #5
def deskew(image_path):
    # Load the image and convert it to grayscale.
    image = cv2.imread(image_path)
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    gray = cv2.bitwise_not(gray)
    thresh = cv2.threshold(gray, 0, 255,
                           cv2.THRESH_BINARY | cv2.THRESH_OTSU)[1]

    coords = np.column_stack(np.where(thresh > 0))
    angle = cv2.minAreaRect(coords)[-1]

    # the `cv2.minAreaRect` function returns values in the
    # range [-90, 0); as the rectangle rotates clockwise the
    # returned angle trends to 0 -- in this special case we
    # need to add 90 degrees to the angle
    if angle < -45:
        angle = -(90 + angle)

    # otherwise, just take the inverse of the angle to make
    # it positive
    else:
        angle = -angle

    # rotate the image to deskew it
    (h, w) = image.shape[:2]
    center = (w // 2, h // 2)
    M = cv2.getRotationMatrix2D(center, angle, 1.0)
    rotated = cv2.warpAffine(image,
                             M, (w, h),
                             flags=cv2.INTER_CUBIC,
                             borderMode=cv2.BORDER_REPLICATE)
    cv2.putText(rotated, "Angle: {:.2f} degrees".format(angle), (10, 30),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)

    # show the output image
    print("[INFO] angle: {:.3f}".format(angle))
    cv2.imshow("Input", image)
    cv2.imshow("Rotated", rotated)
# Write one .clr color-map file per entry in `rgb` (defined earlier in the script).
for x, color in enumerate(rgb):
    with open(
            'G:/My Drive/Projects/ifl_corridors/rp5k130k25k/annual_loss/tcaloss_'
            + str(x + 1) + '.clr', 'w') as f:
        f.write('%s\n' % ' '.join(color))

#%%
# Reclassify South America rasters (32, 33)
# Read raster
for n in range(1, 19, 1):
    rname = 'G:/My Drive/Projects/ifl_corridors/rp5k130k25k/annual_loss/sa_rtile_lybytc2000_' + str(
        n) + '_90m_33.tif'
    with rasterio.open(rname) as r:
        r_meta = r.profile
        a = r.read(1)
        a = np.where(a > 0, 1, 0)
    r_meta['dtype'] = 'int16'
    with rasterio.open(
            'G:/My Drive/Projects/ifl_corridors/rp5k130k25k/annual_loss/sa_rtile_lybytc2000_'
            + str(n) + '_90m_33_rcl.tif', 'w', **r_meta) as dst:
        dst.write(a.astype('int16'), 1)

#%%
# Reclassify Africa rasters (25, 26, 31, 32)
# Read raster
for n in range(1, 19, 1):
    rname = 'G:/My Drive/Projects/ifl_corridors/rp5k130k25k/annual_loss/af_rtile_lybytc2000_' + str(
        n) + '_90m_33.tif'
    with rasterio.open(rname) as r:
        r_meta = r.profile
        a = r.read(1)
        a = np.where(a > 0, 1, 0)
    r_meta['dtype'] = 'int16'
    with rasterio.open(
            'G:/My Drive/Projects/ifl_corridors/rp5k130k25k/annual_loss/af_rtile_lybytc2000_'
            + str(n) + '_90m_33_rcl.tif', 'w', **r_meta) as dst:
        dst.write(a.astype('int16'), 1)
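The South America and Africa loops differ only in their filename prefix, so they could be collapsed into a single helper. A sketch under that assumption (the `reclassify_tiles` name and `prefix` parameter are not from the original script):

def reclassify_tiles(prefix, n_tiles=18):
    base = 'G:/My Drive/Projects/ifl_corridors/rp5k130k25k/annual_loss/'
    for n in range(1, n_tiles + 1):
        rname = base + prefix + '_rtile_lybytc2000_' + str(n) + '_90m_33.tif'
        with rasterio.open(rname) as r:
            r_meta = r.profile
            a = np.where(r.read(1) > 0, 1, 0)  # binarize: any loss -> 1
        r_meta['dtype'] = 'int16'
        out = base + prefix + '_rtile_lybytc2000_' + str(n) + '_90m_33_rcl.tif'
        with rasterio.open(out, 'w', **r_meta) as dst:
            dst.write(a.astype('int16'), 1)

reclassify_tiles('sa')  # South America
reclassify_tiles('af')  # Africa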
Example #7
# The boards A and B (2-D NumPy arrays of the numbers 1..30) and the player
# names name_1 / name_2 are defined earlier in the original script.
import numpy as np
from random import randint

k = []

# Draw the numbers 1..30 in a random order without repeats.
while len(k) != 30:
    m = randint(1, 30)
    if m not in k:
        k.append(m)
print(
    "----------------------------------------------------------------------------"
)
for l in range(0, 30):
    choice = int(input("Enter 1 to draw next number:"))
    if choice == 1:
        print("The Number is:{}".format(k[l]))
        if k[l] in A:
            # Zero out the board cell holding the drawn number.
            rows, cols = np.where(A == k[l])
            A[rows[0], cols[0]] = 0

        print("{}:{}".format(A, name_1))
        print('----------------------')
        if k[l] in B:
            rows, cols = np.where(B == k[l])
            B[rows[0], cols[0]] = 0

        print("{}:{}".format(B, name_2))
        print('----------------------')
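The boards `A` and `B` and the player names are created elsewhere in the original script; a hypothetical setup that would satisfy the snippet above (shapes and names are assumptions):

import numpy as np

name_1, name_2 = "Player 1", "Player 2"
# Each board holds every number 1..30 exactly once, arranged 5 x 6.
A = np.random.permutation(np.arange(1, 31)).reshape(5, 6)
B = np.random.permutation(np.arange(1, 31)).reshape(5, 6)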
Example #8
def blurringkernel(shape, T, a, b):
    """Frequency-domain motion-blur transfer function
    H(u, v) = (T / q) * sin(q) * exp(-1j * q), where q = pi * (u * a + v * b)."""
    xx, yy = shape
    # Centered frequency grid (zero frequency in the middle of the array).
    x, y = np.ogrid[(-xx / 2):(xx / 2), (-yy / 2):(yy / 2)]
    q = np.pi * (x * a + y * b)
    # Guard against division by zero: as q -> 0 the expression tends to T,
    # so replace exact zeros with a tiny epsilon.
    q[q == 0] = np.finfo(float).eps
    return (T / q) * np.sin(q) * np.exp(-1j * q)
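One way such a kernel is typically applied, assuming the spectrum is centered with `fftshift` so it lines up with the grid above (the image and parameter values are placeholders):

import numpy as np

img = np.random.rand(256, 256)                      # placeholder grayscale image
H = blurringkernel(img.shape, T=1.0, a=0.1, b=0.1)  # example parameters
F = np.fft.fftshift(np.fft.fft2(img))               # centered spectrum
blurred = np.real(np.fft.ifft2(np.fft.ifftshift(F * H)))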
Example #9
    def query(self, n, model, train_dataset, pool_dataset, budget=10000):
        """Core-set query: binary-search the covering radius with a
        facility-location MIP (Gurobi), then select `n` pool samples with
        k-center greedy on the embedding distances."""
        device = model.state_dict()['softmax.bias'].device

        full_dataset = ConcatDataset([pool_dataset, train_dataset])
        pool_len = len(pool_dataset)

        self.embeddings = self.get_embeddings(model, device, full_dataset)

        # Calc distance matrix
        num_images = self.embeddings.shape[0]
        dist_mat = self.calc_distance_matrix(num_images)

        # We need to get k centers start with greedy solution
        upper_bound = gb.UB
        lower_bound = upper_bound / 2.0
        max_dist = upper_bound

        _x, _y = np.where(dist_mat <= max_dist)
        _distances = dist_mat[_x, _y]
        subset = [0]  # seed the facility-location model with a single center
        model = solve_fac_loc(_x, _y, subset, num_images, budget)
        # model.setParam( 'OutputFlag', False )
        x, y, z = model.__data
        delta = 1e-7

        while upper_bound - lower_bound > delta:
            print("State", upper_bound, lower_bound)
            current_radius = (upper_bound + lower_bound) / 2.0

            violate = np.where(_distances > current_radius)  # indices of distances that exceed the current radius

            new_max_d = np.min(_distances[_distances >= current_radius])
            new_min_d = np.max(_distances[_distances <= current_radius])

            print("If it succeeds, new max is:", new_max_d, new_min_d)

            for v in violate[0]:
                x[_x[v], _y[v]].UB = 0  # forbid assignments whose distance exceeds the radius

            model.update()
            model.optimize()

            if model.getAttr(gb.GRB.Attr.Status) == gb.GRB.INFEASIBLE:
                failed = True
                print("Infeasible")
            elif sum([z[i].X for i in range(len(z))]) > 0:
                failed = True
                print("Failed")
            else:
                failed = False

            if failed:
                lower_bound = max(current_radius, new_max_d)
                # failed so put edges back
                for v in violate[0]:
                    x[_x[v], _y[v]].UB = 1
            else:
                print("solution found", current_radius, lower_bound, upper_bound)
                upper_bound = min(current_radius, new_min_d)
                model.write("s_{}_solution_{}.sol".format(budget, current_radius))

        idxs_labeled = np.arange(start=pool_len, stop=pool_len + len(train_dataset))

        # Perform kcenter greedy
        self.update_distances(idxs_labeled, idxs_labeled, only_new=False, reset_dist=True)
        sel_ind = []
        for _ in range(n):
            ind = np.argmax(self.min_distances)  # Get sample with highest distance
            assert ind not in idxs_labeled, "Core-set picked index already labeled"
            self.update_distances([ind], idxs_labeled, only_new=True, reset_dist=False)
            sel_ind.append(ind)

        assert len(set(sel_ind)) == len(sel_ind), "Core-set picked duplicate samples"

        remaining_ind = list(set(np.arange(pool_len)) - set(sel_ind))

        return sel_ind, remaining_ind
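The helpers `get_embeddings`, `calc_distance_matrix`, `solve_fac_loc`, and `update_distances` belong to the surrounding class and are not shown here. Purely as an illustration of the k-center bookkeeping, `update_distances` is often written along the following lines; the signature and attribute names are assumptions, not the original code:

import numpy as np
from sklearn.metrics import pairwise_distances

def update_distances(self, centers, labeled_idxs, only_new=True, reset_dist=False):
    # Hypothetical sketch: track, for every embedded sample, the distance to
    # its nearest already-chosen center in self.min_distances.
    if reset_dist:
        self.min_distances = None
    if only_new:
        centers = [c for c in centers if c not in labeled_idxs]
    if len(centers) == 0:
        return
    dist = pairwise_distances(self.embeddings, self.embeddings[centers])
    new_min = np.min(dist, axis=1)
    if self.min_distances is None:
        self.min_distances = new_min
    else:
        self.min_distances = np.minimum(self.min_distances, new_min)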