Example #1
class test_vectorization_compiled:
    def setUp(self):
        self.data = np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]])
        self.kdtree = KDTree(self.data)

    def test_single_query(self):
        d, i = self.kdtree.query([0, 0, 0])
        assert isinstance(d, float)
        assert isinstance(i, int)

    def test_vectorized_query(self):
        d, i = self.kdtree.query(np.zeros((2, 4, 3)))
        assert_equal(np.shape(d), (2, 4))
        assert_equal(np.shape(i), (2, 4))

    def test_single_query_multiple_neighbors(self):
        s = 23
        kk = self.kdtree.n + s
        d, i = self.kdtree.query([0, 0, 0], k=kk)
        assert_equal(np.shape(d), (kk,))
        assert_equal(np.shape(i), (kk,))
        assert np.all(~np.isfinite(d[-s:]))
        assert np.all(i[-s:] == self.kdtree.n)

    def test_vectorized_query_multiple_neighbors(self):
        s = 23
        kk = self.kdtree.n + s
        d, i = self.kdtree.query(np.zeros((2, 4, 3)), k=kk)
        assert_equal(np.shape(d), (2, 4, kk))
        assert_equal(np.shape(i), (2, 4, kk))
        assert np.all(~np.isfinite(d[:, :, -s:]))
        assert np.all(i[:, :, -s:] == self.kdtree.n)
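A minimal standalone sketch (not part of the example above) showing the query behaviour these tests assert: scalar results for a single point, batch-shaped results for a batch of points, and missing neighbours padded with an infinite distance and the out-of-range index n when k exceeds the number of stored points.

import numpy as np
from scipy.spatial import KDTree

data = np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1],
                 [1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]])
tree = KDTree(data)

# single point, single neighbour: scalar distance and index
d, i = tree.query([0, 0, 0])
print(d, i)                      # 0.0 0

# batch of query points: result shapes mirror the batch shape
d, i = tree.query(np.zeros((2, 4, 3)))
print(d.shape, i.shape)          # (2, 4) (2, 4)

# asking for more neighbours than points: the tail is padded with
# inf distances and the out-of-range index tree.n
d, i = tree.query([0, 0, 0], k=tree.n + 3)
print(np.isinf(d[-3:]).all(), (i[-3:] == tree.n).all())  # True True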
Example #2
class test_small(ConsistencyTests):
    def setUp(self):
        self.data = np.array([[0,0,0],
                              [0,0,1],
                              [0,1,0],
                              [0,1,1],
                              [1,0,0],
                              [1,0,1],
                              [1,1,0],
                              [1,1,1]])
        self.kdtree = KDTree(self.data)
        self.n = self.kdtree.n
        self.m = self.kdtree.m
        np.random.seed(1234)
        self.x = np.random.randn(3)
        self.d = 0.5
        self.k = 4

    def test_nearest(self):
        assert_array_equal(
                self.kdtree.query((0,0,0.1), 1),
                (0.1,0))

    def test_nearest_two(self):
        assert_array_equal(
                self.kdtree.query((0,0,0.1), 2),
                ([0.1,0.9],[0,1]))
Example #3
class FeaturesQuery(object):

    def __init__(self, fgroup, coord_modes=['lm']):
        self.features1 = fgroup.features
        self.nf1 = len(self.features1) 
        coords1 = np.array([f.get_coord(mode=mode) for mode in coord_modes for f in self.features1])
        if len(self.features1) > 0:
            self.kdtree = KDTree(coords1)

    def get_features(self):
        return set(self.features1)

    def find(self, feature2, tol=np.inf):
        if len(self.features1) == 0:
            return []
        d, i = self.kdtree.query(feature2.get_coord(), k=len(self.features1),
                                 distance_upper_bound=tol)
        if len(self.features1) == 1:
            if d < tol:
                return self.features1
            return []
        i = i[d < tol]
        return nputils.uniq([self.features1[k % self.nf1] for k in i])

    def get_match(self, fgroup2, tol=2):
        if len(self.features1) == 0:
            return FeaturesMatch()
        coords2 = np.array([f.get_coord() for f in fgroup2.features])
        d, i = self.kdtree.query(coords2, k=1, distance_upper_bound=tol)
        i1 = i[d < tol]
        i2 = np.arange(len(coords2))[d < tol]
        features1 = [self.features1[k % self.nf1] for k in i1]
        features2 = [fgroup2.features[k] for k in i2]

        return FeaturesMatch(features1=features1, features2=features2)
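The filtering idiom in FeaturesQuery.find relies on how scipy pads misses when distance_upper_bound is set: unmatched slots get an infinite distance and the index len(data), so a plain d < tol mask drops them. A minimal standalone sketch of that pattern, with made-up 2-D coordinates rather than the feature objects above:

import numpy as np
from scipy.spatial import KDTree

coords = np.array([[0.0, 0.0], [5.0, 5.0], [9.0, 9.0]])
tree = KDTree(coords)
tol = 1.5

# query every stored point against a probe, capping the search radius
d, i = tree.query([0.2, 0.1], k=len(coords), distance_upper_bound=tol)
print(d)            # [0.22..., inf, inf]
print(i)            # [0 3 3]  -> misses are flagged with index == len(coords)

# the same d < tol filter used in FeaturesQuery.find keeps only real hits
print(i[d < tol])   # [0]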
Example #4
 def calc_nearest_neighbor_distances(self, detail_level):
     rf = self.get_detail_level_results(detail_level)
     if rf.is_event_list_column_name('eNN_dist') and \
             rf.is_event_list_column_name('sNN_dist') and \
             rf.is_event_list_column_name('oNN_dist'):
         return rf
     cons_points = numpy.array([e.position() for e in rf.consumers()])
     prod_points = numpy.array([e.position() for e in rf.producers()])
     if self.wrapping:
         cons_points = wrap_points(cons_points, self.side_length, is_3d=self.is_3d)
         prod_points = wrap_points(prod_points, self.side_length, is_3d=self.is_3d)
     cons_tree = KDTree(cons_points)
     prod_tree = KDTree(prod_points)
     for e in rf.events:
         c_dist = cons_tree.query(e.position(), k=2)[0]
         p_dist= prod_tree.query(e.position(), k=2)[0]
         if (e.vars['mark'] == 2):
             e.vars['sNN_dist'] = c_dist[1]
             e.vars['oNN_dist'] = p_dist[0]
         else:
             e.vars['sNN_dist'] = p_dist[1]
             e.vars['oNN_dist'] = c_dist[0]
         e.vars['eNN_dist'] = min(e.vars['sNN_dist'], e.vars['oNN_dist'])
     rf.add_event_list_column_name('eNN_dist')
     rf.add_event_list_column_name('oNN_dist')
     rf.add_event_list_column_name('sNN_dist')
     #rf.eventList.update_text()
     rf.write()
     return rf
Example #5
def query_tree3(table_j, table_ks, table_h, max_sep=0.0001):
    """
    For three pandas catalogues, matched on the ra/dec columns X_WORLD/Y_WORLD as defined in read_sex
    """

    tree2 = KDTree(list(zip(table_ks['X_WORLD'].ravel(), table_ks['Y_WORLD'].ravel())))
    tree3 = KDTree(list(zip(table_h['X_WORLD'].ravel(), table_h['Y_WORLD'].ravel())))

    idxs1 = []
    idxs2 = []
    idxs3 = []
    dups2 = []
    dups3 = []

    for i in range(len(table_j)):
        d2, icoords2 = tree2.query((table_j['X_WORLD'][i], table_j['Y_WORLD'][i]), distance_upper_bound=max_sep)
        d3, icoords3 = tree3.query((table_j['X_WORLD'][i], table_j['Y_WORLD'][i]), distance_upper_bound=max_sep)
        if (d2 != np.inf) & (d3 != np.inf):
            if icoords2 in idxs2:
                dups2.append(icoords2)
            if icoords3 in idxs3:
                dups3.append(icoords3)

            idxs1.append(i)
            idxs2.append(icoords2)
            idxs3.append(icoords3)

    if (len(dups2) > 0) | (len(dups3) > 0):
        raise Exception('WARNING: duplicates found')

    return np.array((idxs1, idxs2, idxs3))
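The per-row loop above can usually be replaced by a single vectorized query, since KDTree.query accepts an array of points and applies distance_upper_bound to all of them at once. A hedged sketch with toy catalogues (the X_WORLD/Y_WORLD column names are carried over from the example; the data is invented):

import numpy as np
import pandas as pd
from scipy.spatial import KDTree

# toy catalogues standing in for table_j / table_ks (column names assumed)
table_j = pd.DataFrame({'X_WORLD': [10.0, 10.5, 11.0], 'Y_WORLD': [-1.0, -1.5, -2.0]})
table_ks = pd.DataFrame({'X_WORLD': [10.00004, 11.5], 'Y_WORLD': [-1.00003, -2.5]})

tree = KDTree(np.column_stack((table_ks['X_WORLD'], table_ks['Y_WORLD'])))
max_sep = 0.0001

# one vectorized query instead of a Python loop over table_j rows
d, idx = tree.query(np.column_stack((table_j['X_WORLD'], table_j['Y_WORLD'])),
                    distance_upper_bound=max_sep)
matched = np.flatnonzero(np.isfinite(d))      # rows of table_j with a counterpart
print(matched, idx[matched])                  # [0] [0]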
Example #6
def compute_lfs(datadict, k=10):
    from scipy.spatial import KDTree

    # collect all ma_coords that are not NaN
    ma_coords = np.concatenate([datadict['ma_coords_in'], datadict['ma_coords_out']])
    ma_coords = ma_coords[~np.isnan(ma_coords).any(axis=1)]

    # build kd-tree of ma_coords to compute lfs
    pykdtree = KDTree(ma_coords)
    if k > 1:
        datadict['lfs'] = np.sqrt(np.median(pykdtree.query(datadict['coords'], k)[0], axis=1))
    else:
        datadict['lfs'] = np.sqrt(pykdtree.query(datadict['coords'], k)[0])
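A standalone sketch of the median-of-k-distances idea that compute_lfs implements, using synthetic arrays in place of the datadict produced by the surrounding pipeline:

import numpy as np
from scipy.spatial import KDTree

rng = np.random.default_rng(0)
ma_coords = rng.random((200, 3))          # stand-in for the medial-axis points
coords = rng.random((5, 3))               # surface points to evaluate

tree = KDTree(ma_coords)
k = 10

# query(...)[0] is the (n_query, k) distance matrix; take the median over k
dists = tree.query(coords, k)[0]
lfs = np.sqrt(np.median(dists, axis=1))
print(lfs.shape)                          # (5,)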
Example #7
def plot_lsh_performance_for_bins_projs(n_samples, len_x, k_neighbours, bins_list, projs_list):

    d = create_data(n_samples, len_x)
    q = create_data(1, len_x)[0]

    fig = plt.figure(figsize=(10,10))
    ax1 = fig.add_subplot(221)
    ax2 = fig.add_subplot(222)
    ax3 = fig.add_subplot(223)
    ax4 = fig.add_subplot(224)

    total,c = len(bins_list)*len(projs_list),1
    for p in projs_list:
        bOR, bAND, tOR, tAND = [], [], [], []
        for b in bins_list:
            if c % max(total // 10, 1) == 0:
                print("%d%%" % (c * 100. / total), end=" ")
            c+=1

            brOR, brAND, trOR, trAND = [], [], [], []
            for i in range(10):
                q = (np.random.random(size=(len_x))-0.5)*.7
                kt    = KDTree(d)
                ktr   = kt.query(q,k_neighbours)
                drref = np.max(ktr[0])
                lsh = DiscretizedLSH(d,p,b)
                ts = time(); lshrAND = lsh.search_AND(q,k_neighbours); teAND = time()-ts
                ts = time(); lshrOR = lsh.search_OR(q,k_neighbours); teOR = time()-ts
                brAND.append(np.mean([dratio(d[i],q,drref) for i in lshrAND]))
                brOR.append(np.mean([dratio(d[i],q,drref) for i in lshrOR]))
                trAND.append(teAND)
                trOR.append(teOR)
            bOR.append(np.mean(brOR))
            bAND.append(np.mean(brAND))
            tOR.append(np.mean(trOR))
            tAND.append(np.mean(trAND))
            
        ax1.plot(bins_list, bOR, label="np "+str(p))
        ax2.plot(bins_list, bAND, label="np "+str(p))
        ax3.plot(bins_list, tOR, label="np "+str(p))
        ax4.plot(bins_list, tAND, label="np "+str(p))
    ax1.set_xlabel("bin width")
    ax1.set_ylabel("distance ratio")
    ax1.set_title("lsh OR (%d,%d) k=%d"%(n_samples, len_x, k_neighbours))
    ax1.legend()

    ax2.set_xlabel("bin width")
    ax2.set_ylabel("distance ratio")
    ax2.set_title("lsh AND (%d,%d) k=%d"%(n_samples, len_x, k_neighbours))
    ax2.legend()
    
    ax3.set_xlabel("bin width")
    ax3.set_ylabel("query time")
    ax3.set_title("lsh OR (%d,%d) k=%d"%(n_samples, len_x, k_neighbours))
    ax3.legend()

    ax4.set_xlabel("bin width")
    ax4.set_ylabel("query time")
    ax4.set_title("lsh AND (%d,%d) k=%d"%(n_samples, len_x, k_neighbours))
    ax4.legend()
Example #8
def plot_lsh_performance_for_datasizes (row_list, col_list, k_neighbours, bin_width, projs):

    fig = plt.figure(figsize=(10,10))
    ax1 = fig.add_subplot(221)
    ax2 = fig.add_subplot(222)
    ax3 = fig.add_subplot(223)
    ax4 = fig.add_subplot(224)

    total,c = len(row_list)*len(col_list),1

    for col in col_list:
        bOR, bAND, tOR, tAND = [], [], [], []
        for row in row_list:
            
            if c % max(total // 10, 1) == 0:
                print("%d%%" % (c * 100. / total), end=" ")
            c+=1

            brOR, brAND, trOR, trAND = [], [], [], []
            for i in range(10):
                d = create_data(row, col)
                q = create_data(1, col)[0]
                kt    = KDTree(d)
                ktr   = kt.query(q,k_neighbours)
                drref = np.max(ktr[0])
                lsh = DiscretizedLSH(d,projs,bin_width)
                ts = time(); lshrAND = lsh.search_AND(q,k_neighbours); teAND = time()-ts
                ts = time(); lshrOR = lsh.search_OR(q,k_neighbours); teOR = time()-ts
                brAND.append(np.mean([dratio(d[i],q,drref) for i in lshrAND]))
                brOR.append(np.mean([dratio(d[i],q,drref) for i in lshrOR]))
                trAND.append(teAND)
                trOR.append(teOR)
            bOR.append(np.mean(brOR))
            bAND.append(np.mean(brAND))
            tOR.append(np.mean(trOR))
            tAND.append(np.mean(trAND))
            
        ax1.plot(row_list, bOR, label="cols "+str(col))
        ax2.plot(row_list, bAND, label="cols "+str(col))
        ax3.plot(row_list, tOR, label="cols "+str(col))
        ax4.plot(row_list, tAND, label="cols "+str(col))
    ax1.set_xlabel("rows")
    ax1.set_ylabel("distance ratio")
    ax1.set_title("lsh OR bin_w=%.3f, projs=%d, k=%d"%(bin_width, projs, k_neighbours))
    ax1.legend()

    ax2.set_xlabel("rows")
    ax2.set_ylabel("distance ratio")
    ax2.set_title("lsh AND bin_w=%.3f, projs=%d, k=%d"%(bin_width, projs, k_neighbours))
    ax2.legend()
    
    ax3.set_xlabel("rows")
    ax3.set_ylabel("query time")
    ax3.set_title("lsh OR bin_w=%.3f, projs=%d, k=%d"%(bin_width, projs, k_neighbours))
    ax3.legend()

    ax4.set_xlabel("rows")
    ax4.set_ylabel("query time")
    ax4.set_title("lsh AND bin_w=%.3f, projs=%d, k=%d"%(bin_width, projs, k_neighbours))
    ax4.legend()
Example #9
def treequerycalc(df, k):
    
    points = list(zip(df.X, df.Y)) # make a list of X-Y pairs (lists) from df.X and df.Y
    tree = KDTree(points)
    [nndist, indices] = tree.query(points, k)

    return nndist, indices
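A small usage sketch, assuming treequerycalc (and its KDTree import) from the example above is in scope; with k > 1 the first distance column is zero because each point is its own nearest neighbour:

import numpy as np
import pandas as pd

# tiny frame with the X/Y columns treequerycalc expects
df = pd.DataFrame({'X': [0.0, 1.0, 5.0], 'Y': [0.0, 0.0, 0.0]})

nndist, indices = treequerycalc(df, k=2)
print(nndist[:, 0])    # [0. 0. 0.]  distance to itself
print(nndist[:, 1])    # [1. 1. 4.]  distance to the true nearest neighbour
print(indices[:, 1])   # [1 0 1]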
Example #10
  def get_query(self, data_path, i, j):
    '''
    '''

    print('Probing', i, j)

    #
    # create kd tree
    #
    data = self._kd_tree_data[data_path]
    data_without_key = [v[:2] for v in data]
    kd_tree = KDTree(data_without_key)

    # now look up the closest point to i,j
    query_result = kd_tree.query([float(i), float(j)])
    print('Found', query_result[1])


    tile = data[query_result[1]]
    print('Found tile', tile)
    
    # # now look up this border point in out bbox-to-tile dict
    # tile = self._bbox_to_tile[data_path][str(point[0])+'-'+str(point[1])]

    # print 'Finding tile', tile

    return json.dumps(tile[2])
Example #11
def warpCloud( xyc, sourceGridPoints, targetGridPoints, warpQuality=9 ):
   
    sourceTree = KDTree(sourceGridPoints, leafsize=10)
    warpedXYC = []  
    for c in xyc:
        nearestEdge = sourceTree.query(c,k=warpQuality)
        nx = 0.0
        ny = 0.0
        ws = 0.0
        for i in range(warpQuality):
            p = targetGridPoints[nearestEdge[1][i]]
            w = nearestEdge[0][i]
            if w == 0.0:
                nx = p[0]
                ny = p[1]
                ws = 1.0
                break
            else:
                w = 1.0 / w
                nx += w * p[0]
                ny += w * p[1]
                ws += w

        warpedXYC.append([nx/ws,ny/ws])

    warpedXYC = np.array(warpedXYC)
    return warpedXYC
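A usage sketch for warpCloud, assuming the function above is in scope; for a target grid that is a pure translation of the source grid, a point sitting exactly on a source node maps to the corresponding translated node:

import numpy as np

# a 5x5 source grid and a target grid shifted by (0.5, 0.25)
xs, ys = np.meshgrid(np.arange(5.0), np.arange(5.0))
sourceGridPoints = np.column_stack((xs.ravel(), ys.ravel()))
targetGridPoints = sourceGridPoints + np.array([0.5, 0.25])

xyc = np.array([[2.0, 2.0], [1.3, 3.7]])   # points to warp
warped = warpCloud(xyc, sourceGridPoints, targetGridPoints, warpQuality=4)
print(warped.shape)                         # (2, 2)
print(warped[0])                            # ~[2.5, 2.25] for a pure translation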
Example #12
def search_headgroup_loss(filename, x_dim):
    from scipy.spatial import KDTree
    mz_tol = 0.005
    rt_tol = 1
    tlc_dataset = TLCdataset(filename, x_dim=x_dim)
    tlc_dataset.get_features_from_mean_spectrum(ppm=6.)
    feature_array = [[f[0], f[1]] for f in tlc_dataset.feature_list]
    kdtree = KDTree(feature_array)
    neutral_losses_rt = {}
    neutral_losses_i = {}
    for head_group in neutral_losses_pos:
        neutral_losses_rt[head_group]=[]
        neutral_losses_i[head_group] = []
    for f in tlc_dataset.feature_list:
        for head_group in neutral_losses_pos.keys():
            neutral_loss_mz = f[0]-neutral_losses_pos[head_group]
            dist, idx = kdtree.query([neutral_loss_mz, f[1]])
            match = feature_array[idx]
            # keep the match only if it is close in both m/z and retention time
            if np.abs(match[0] - neutral_loss_mz) < mz_tol and np.abs(match[1] - f[1]) < rt_tol:
                neutral_losses_rt[head_group].append(f[1])
                neutral_losses_i[head_group].append(f[2])
    plt.figure()
    for head_group in neutral_losses_rt:
        h = np.histogram(neutral_losses_rt[head_group], bins=tlc_dataset.x_pos, weights=neutral_losses_i[head_group])[0]
        plt.plot(h, label=head_group)
    plt.legend()
    plt.show()
Example #13
def process_image(image_name, colours, thumbs, thumbsize):
    # KD Tree for Nearest-Neighbour
    kdt = KDTree(colours)

    # open image and make it smaller
    img = Image.open(image_name).convert('RGBA')
    img.thumbnail((MAX_RES, MAX_RES))
    img_data = np.array(img)

    # initialise new image
    new_shape = (
        img_data.shape[0] * thumbsize,
        img_data.shape[1] * thumbsize,
        img_data.shape[2]
    )
    new_image = np.zeros(new_shape, dtype=float)

    # populate new image
    for x in range(img_data.shape[0]):
        for y in range(img_data.shape[1]):
            _, nn_idx = kdt.query(img_data[x, y, :], k=3)
            thumb_img = thumbs[random.choice(nn_idx)] # TODO: change to distance-weighted probabilities
            x_offset = int((thumbsize - thumb_img.shape[0]) / 2)
            y_offset = int((thumbsize - thumb_img.shape[1]) / 2)
            x1 = (x * thumbsize) + x_offset
            x2 = x1 + thumb_img.shape[0]
            y1 = (y * thumbsize) + y_offset
            y2 = y1 + thumb_img.shape[1]
            new_image[x1:x2, y1:y2, :] = thumb_img
    return new_image
Example #14
def nearest_neighbors(X, k, phi):
  """
  Construct pairwise weights by finding the k nearest neighbors to each point
  and assigning a Gaussian-based distance.

  In particular, w_{i,j} = exp(-phi ||x_i - x_j||_2^2) if j is one of i's k
  closest neighbors.

  If j is one of i's closest neighbors and i is one of j's closest members, the
  edge will only appear once with i < j.

  Parameters
  ----------
  X : [n_samples, n_dim] array
  k : int
      number of neighbors for each sample in X
  phi : float
      non-negative scalar dictating how much weight to assign between pairs of points
  """
  from scipy.spatial import KDTree

  tree = KDTree(X)

  weights = []
  for i, x in enumerate(X):
    distances, indices = tree.query(x, k+1)
    for (d, j) in list(zip(distances, indices))[1:]:
      d = np.exp( -phi * d * d )
      if d == 0: continue
      if i < j:
        weights.append( (i, j, d) )
      else:
        weights.append( (j, i, d) )
  weights = sorted(weights, key=lambda r: (r[0], r[1]))
  return unique_rows(np.asarray(weights))
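The function above depends on a unique_rows helper that is not shown. A purely illustrative stand-in plus a usage call, assuming nearest_neighbors and numpy as np are in scope:

import numpy as np

def unique_rows(a):
    # minimal stand-in for the helper used above: drop duplicate (i, j, w) rows
    return np.unique(a, axis=0)

rng = np.random.default_rng(1)
X = rng.random((20, 2))
w = nearest_neighbors(X, k=3, phi=10.0)
print(w.shape)        # (n_edges, 3) rows of (i, j, weight) with i < j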
Example #15
class Palette(object):
    def __init__(self, images):
        self.cache = {}
        self.kdtree = KDTree([v["color"] for v in images.values()])

        self.pmap = {}
        for (img_path, i) in images.items():
            color = tuple(i["color"])
            if color not in self.pmap:
                self.pmap[color] = deque()
            self.pmap[color].append(i["thumbnail"])

    def get_closest_color(self, c):
        dist, idx = self.kdtree.query(c)
        return tuple(self.kdtree.data[idx])

    def get_image(self, color):
        variants = self.pmap[color]
        # Try to use different images
        iname = variants.popleft()
        variants.append(iname)

        img = self.cache.get(iname)
        if not img:
            img = Image.open(iname)
            self.cache[iname] = img
        return img

    def get_closest_image(self, c):
        nc = self.get_closest_color(c)
        return self.get_image(nc)
Example #16
def nearest_neighbors(n,points,qpoints=None,return_distances=False,slow=False):
    extra = 0
    if qpoints is None:
        qpoints=points
        if len(points)>1:
            extra=1
        elif return_distances:
            return array([]),array([])
        else:
            return array([])
        #end if
    #end if
    if n>len(qpoints)-extra:
        print('nearest_neighbors Error: requested more than the total number of neighbors\n  maximum is: {0}\n  you requested: {1}\nexiting.'.format(len(qpoints)-extra,n))
        exit()
    #end if
    slow = slow or scipy_unavailable
    if not slow:
        kt = KDTree(points)
        dist,ind = kt.query(qpoints,n+extra)
    else:
        dtable,order = distance_table(points,qpoints,ordering=2)
        dist = dtable[:,0:n+extra]
        ind  = order[:,0:n+extra]
    #end if
    if extra==0 and n==1 and not slow:
        nn = atleast_2d(ind).T
    else:
        nn = ind[:,extra:]
    #end if
    if not return_distances:
        return nn
    else:
        return nn,dist
Example #17
def fit_mesh(head_mesh,scan_mesh,base,tofit_verts,init_coefs = None,constrained = False,regul = None,niter = 1):
    if isinstance(tofit_verts,str):
        tofit_verts = np.loadtxt(tofit_verts,'int')
    if isinstance(base,str):
        base = TargetBase(prefix = prefix)
        
    verts = base.vert_list
    ntargs = len(base.targets)

    m = head_mesh.copy()
    nverts = len(scan_mesh)
    
    if init_coefs is not None:
        init_target = base.combine_targets(init_coefs)
        m[verts] += init_target
    
    kd = KDTree(scan_mesh)
    
    for iter in range(niter):
        target = np.zeros((nverts,3))
        dist,indx = kd.query(m[tofit_verts])
        target[tofit_verts] = scan_mesh[indx] - m[tofit_verts]
        target = target[verts]
        if constrained :
            coefs = base.compute_combinaison_safe(target,rcond,regul = regul)
            proj = base.combine_targets(coefs)
        else :
            proj = base.project_target(target)
        if niter > 1 : m[verts]+=(1./niter)*(niter)**(float(iter)/(niter-1)) * proj
        else : m[verts]+= proj

    return m-head_mesh
Example #18
File: ICP.py  Project: mindis/CV2
def ICP(base, target):
    geoCenterTarget = np.mean(target, axis=1)
    R = np.eye(3)
    t = np.array([0,0,0])
    
    KDindex = KDTree(target.T)
    lastDistance = np.array([1,1,1])
    
    sample = base
    for i in range(150):
        transformedSample = np.dot(sample.T, R) + t

        I = KDindex.query(transformedSample)
        idx = I[1]
        
        distance = np.sqrt(np.sum(I[0])**2 / transformedSample.shape[0])
        
        if np.sum(np.abs(lastDistance - distance)) < 1e-7:
            break

        lastDistance = distance
        
        geoCenterTransformedSample = transformedSample.mean(axis=0)
        geoCenterTarget = target[:, idx].T.mean(axis = 0)
        
        #Normalize
        A = np.dot( (transformedSample - geoCenterTransformedSample).T, (target[:, idx].T - geoCenterTarget) )
        
        U, s, V = np.linalg.svd(A)
        R = np.dot(U,V)
        geoCentersample = sample.mean(axis=1)
        t = geoCenterTarget - np.dot(geoCentersample, R)
    
    return R, t, distance
Example #19
    def generate_capitals(self, map_obj):
        kdtree = KDTree([center.point for center in map_obj.centers])
        centers_index = {}

        for center in map_obj.centers:
            centers_index[key(center.point)] = center

        height = self.cell_size
        width = math.sqrt(3) / 2 * height
        row = 0
        x = height / 2

        while x < 1:
            if row % 2 == 0:
                y = width
            else:
                y = width / 2

            while y < 1:
                _, index = kdtree.query([x, y])
                point = kdtree.data[index]
                center = centers_index[key(point)]

                # Do not put capitals close to one another
                is_neighbor_capital = any([neighbor.region for neighbor in center.neighbors])

                if not center.water and not is_neighbor_capital:
                    center.region = Region(center)
                    map_obj.regions.append(center.region)

                y += width

            row += 1
            x += (height * 3 / 4)
Example #20
    def get_velocity(self, longitude, latitude, height, date):
        """
        Find plume velocity that is closest to the given location and time.

        :type longitude: float
        :param longitude: Longitude of the point of interest.
        :type latitude: float
        :param latitude: Latitude of the point of interest.
        :type height: float
        :param height: Altitude in meters of the point of interest.
        :type date: str
        :param date: Date of interest formatted according to the ISO8601
            standard.
        """
        from scipy.spatial import KDTree
        from spectroscopy.util import parse_iso_8601
        _d = parse_iso_8601(date)
        _ts = calendar.timegm((_d.utctimetuple()))
        # find nearest point
        _t = np.atleast_2d(self.time).T
        # TODO: this needs to be changed to account for regular and irregular
        # grids
        a = np.append(self.position, _t, axis=1)
        tree = KDTree(a, leafsize=a.shape[0] + 1)
        point = [longitude, latitude, height, _ts]
        distances, ndx = tree.query([point], k=1)
        vx = self.vx[ndx[0]]
        vx_error = self.vx_error[ndx[0]]
        vy = self.vy[ndx[0]]
        vy_error = self.vy_error[ndx[0]]
        vz = self.vz[ndx[0]]
        vz_error = self.vz_error[ndx[0]]
        time = self.time[ndx[0]]
        lon, lat, hght = self.position[ndx[0], :]
        return (lon, lat, hght, time, vx, vx_error, vy, vy_error, vz, vz_error)
Example #21
class Regression(object):
  """
  Performs kNN regression
  """

  def __init__(self):
    self.k = 5
    self.metric = np.mean
    self.kdtree = None
    self.houses = None
    self.values = None

  def set_data(self, houses, values):
    """
    Sets houses and values data
    :param houses: pandas.DataFrame with houses parameters
    :param values: pandas.Series with houses values
    """
    self.houses = houses
    self.values = values
    self.kdtree = KDTree(self.houses)

  def regress(self, query_point):
    """
    Calculates predicted value for house with particular parameters
    :param query_point: pandas.Series with house parameters
    :return: house value
    """
    _, indexes = self.kdtree.query(query_point, self.k)
    value = self.metric(self.values.iloc[indexes])
    if np.isnan(value):
      raise Exception('Unexpected result')
    else:
      return value
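A usage sketch for Regression, assuming the class above plus numpy and pandas are available; the prediction is the mean value of the k = 5 nearest houses:

import numpy as np
import pandas as pd

# toy "houses": two features, value roughly follows the first feature
houses = pd.DataFrame({'sqft': [50, 60, 70, 80, 90, 100],
                       'rooms': [2, 2, 3, 3, 4, 4]})
values = pd.Series([100, 120, 140, 160, 180, 200])

model = Regression()
model.set_data(houses, values)

query = pd.Series({'sqft': 72, 'rooms': 3})
print(model.regress(query))   # mean of the 5 nearest house values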
Example #22
class RtreeKDTree(object):
    """ wrap scipy KDTree spatial index to look as much like Rtree class
    as possible
    """
    def __init__(self,stream,interleaved=False):
        """ stream: an iterable, returning tuples of the form (id,[xmin,xmax,ymin,ymax],object)
        for now, requires that xmin==xmax, and ymin==ymax

        For now, only interleaved=False is supported.
        """
        if interleaved:
            raise Exception,"No support for interleaved index.  You must use xxyy ordering"
        
        it = iter(stream)

        self.points = []
        self.data = []
        for fid,xxyy,obj in it:
            if xxyy[0]!=xxyy[1] or xxyy[2]!=xxyy[3]:
                raise Exception,"No support in kdtree for finite sized objects"
            self.points.append([xxyy[0],xxyy[2]])
            self.data.append( (fid,obj) )

        self.refresh_tree()
    def refresh_tree(self):
        self.kdt = KDTree(self.points)
            
    def nearest(self, rect, count):
        xy = [rect[0],rect[2]]
        dists,results = self.kdt.query( xy,k=count )
        
        return [self.data[r][0] for r in results]
    
    def intersects(self,xxyy):
        """ This should be made compatible with the regular RTree call...
        """
        raise Exception,"Intersects is not implemented in scipy.KDTree"
        return []
    
    def insert(self, feat_id, rect=None ):
        if rect is None:
            raise Exception("Not sure what inserting an empty rectangle is supposed to do...")
        if rect[0]!=rect[1] or rect[2]!=rect[3]:
            raise Exception("No support in kdtree for finite sized objects")
        self.points.append( [rect[0],rect[2]] )
        self.data.append( [feat_id,None] )

        self.refresh_tree()

    def feat_id_to_index(self,feat_id):
        for i in range(len(self.data)):
            if self.data[i][0] == feat_id:
                return i
        raise Exception,"feature id not found"
    
    def delete(self, feat_id, rect ):
        index = self.feat_id_to_index(feat_id)
        del self.data[index]
        del self.points[index]
        self.refresh_tree()
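A usage sketch for RtreeKDTree, assuming the class above; the stream yields (id, [xmin, xmax, ymin, ymax], obj) tuples with degenerate boxes. Note that count must be at least 2 here, because KDTree.query returns a scalar index for k=1, which nearest() does not unwrap:

# point features expressed as degenerate xxyy rectangles, as the class requires
stream = [(10, [0.0, 0.0, 0.0, 0.0], 'a'),
          (11, [1.0, 1.0, 0.0, 0.0], 'b'),
          (12, [5.0, 5.0, 5.0, 5.0], 'c')]

index = RtreeKDTree(stream, interleaved=False)
print(index.nearest([0.9, 0.9, 0.1, 0.1], count=2))   # [11, 10]

index.insert(13, rect=[0.85, 0.85, 0.15, 0.15])
print(index.nearest([0.9, 0.9, 0.1, 0.1], count=2))   # [13, 11]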
Example #23
	def cluster(self, data):
		# maps data point to cluster index; optimizes stopping criteria
		data_length = len(data)
		cluster_map = [-1] * data_length
		# start off with a few random cluster centers;
		# this is the Forgy method and usually OK for standard k-means.
		current_cluster_centers = data[np.random.choice(data_length, size=self.k, replace=False), :]

		# repeat the loop until the cluster assignments do not change anymore
		at_least_one_assignment_changed = True
		
		while at_least_one_assignment_changed:
			# consult a KD tree for performance improvement
			cluster_center_tree = KDTree(current_cluster_centers, leafsize=5)
			_, data_membership  = cluster_center_tree.query(data)

			# update stopping condition
			# manual loop instead of equality operator as the latter always checks all elements, ...
			at_least_one_assignment_changed = False
			for i in range(data_length):
				if cluster_map[i] == data_membership[i]: continue
				at_least_one_assignment_changed = True
				break
			
			# and finally update the cluster centers to be the centroids of the voronoi subsets
			cluster_map = data_membership
			current_cluster_centers = [np.median(data[cluster_map == i, :], axis=0) for i in range(self.k)]
		
		return Clusters(data = data, 
						membership = cluster_map, 
						cluster_centers = current_cluster_centers)
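A standalone sketch of the assignment step this method delegates to the KDTree: one vectorized query labels every data point with its nearest cluster centre (toy blobs, not the Clusters container used above):

import numpy as np
from scipy.spatial import KDTree

rng = np.random.default_rng(42)
data = np.vstack([rng.normal(0.0, 0.3, size=(50, 2)),
                  rng.normal(5.0, 0.3, size=(50, 2))])
centers = np.array([[0.0, 0.0], [5.0, 5.0]])

# one vectorized query assigns every point to its closest centre
_, membership = KDTree(centers, leafsize=5).query(data)
print(np.bincount(membership))   # [50 50] with this seed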
Example #24
    def to_mask(self, lons_2d_grid, lats_2d_grid):

        """

        :param lons_2d_grid:
        :param lats_2d_grid:
        :return: the mask of the subregion corresponding to the grid with the upper right and lower left points from self
        """
        x_g, y_g, z_g = lat_lon.lon_lat_to_cartesian(lons_2d_grid.flatten(), lats_2d_grid.flatten())
        ktree = KDTree(list(zip(x_g, y_g, z_g)))

        ll_x, ll_y, ll_z = lat_lon.lon_lat_to_cartesian(self.lleft_lon, self.lleft_lat)
        ur_x, ur_y, ur_z = lat_lon.lon_lat_to_cartesian(self.uright_lon, self.uright_lat)


        i_g, j_g = np.indices(lons_2d_grid.shape)

        i_g_flat, j_g_flat = i_g.flatten(), j_g.flatten()


        _, ind_ll = ktree.query((ll_x, ll_y, ll_z), k=1)
        _, ind_ur = ktree.query((ur_x, ur_y, ur_z), k=1)


        i_ll, j_ll = i_g_flat[ind_ll], j_g_flat[ind_ll]
        i_ur, j_ur = i_g_flat[ind_ur], j_g_flat[ind_ur]

        res = np.zeros_like(lons_2d_grid, dtype=bool)
        res[i_ll:i_ur + 1, j_ll: j_ur + 1] = 1

        return res, (i_ll, j_ll), (i_ur, j_ur)
Example #25
class kNN:
    """A k-Nearest-Neighbours classifier (implemented using KDTrees)"""

    def __init__(self, k = 1):
        self._k = k # number of nearest neighbours

    def train(self, X, y):
        # A KDTree is a datastructure that is aware of the spatial
        # distribution of its elements and can be used for efficient
        # nearest neighbour queries
        self._T = KDTree(X)
        self._y = y
        self._K = np.max(y) + 1 # number of classes

    def classify(self, x):
        # query the KDTree
        dist, ind = self._T.query(x, k=self._k)

        # for k == 1, KDTree.query returns only a value, not a list
        if self._k == 1:
            return self._y[ind]

        # count occurences of each class
        count = np.zeros(self._K)
        for i in ind:
            count[self._y[i]] += 1
        if np.size(count[count == np.max(count)]) == 1:
            # if there is a single class with the most nearest
            # neighbours, return it
            return np.argmax(count)
        else:
            # if there are multiple classes with nearest neighbours,
            # return the one with smallest mean distance
            cls = np.argwhere(count == np.max(count)).flatten()
            return cls[np.argmin([np.mean(dist[self._y[ind] == cl]) for cl in cls])]
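A usage sketch for the kNN classifier above on a toy two-class problem, assuming numpy and the class are in scope:

import numpy as np

X = np.array([[0.0, 0.0], [0.1, 0.2], [0.2, 0.1],
              [5.0, 5.0], [5.1, 4.9], [4.9, 5.2]])
y = np.array([0, 0, 0, 1, 1, 1])

clf = kNN(k=3)
clf.train(X, y)
print(clf.classify([0.15, 0.1]))   # 0
print(clf.classify([5.05, 5.0]))   # 1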
Example #26
def kdtreeApproach(intersections, tripLocations):
    #counts is a dictionary that has as keys the intersection index in the intersections list
    #and as values the number of trips in the tripLocation list which has the key as the closest
    #intersection.
    counts = {}
    startTime = time.time()
    tree = KDTree(intersections)
    points = tree.query(tripLocations,k = 1)
    indeces = points[1]
    counts = Counter(indeces)
    #key = indeces.keys()
    #value = key[indeces.values().index(max(indeces.values()))]
    #print value
    #print intersections[value]
    #print counts.keys()
    #busy = indeces.keys().index(value.index(max(value)))
    #print busy
    # NOTE: the kdtree is built above and queried once for all trip locations;
    #       Counter then tallies how many trips fall closest to each intersection.
    endTime = time.time()
    print('The kdtree computation took', (endTime - startTime), 'seconds')
    return counts
Example #27
class PolygonMap(object):
    def __init__(self, seed=None, seed_size=100, relax_iter=4):
        """
        """
        if seed is None:
            seed = np.random.random((seed_size, 2))
        self.points = relax(seed, relax_iter)
        self.polymap = Voronoi(self.points)
        self.dist_map = KDTree(self.points)
        self.poly_graph = nx.Graph()
        self.corner_graph = nx.Graph()
        for p in range(len(self.polymap.points)):
            f = PolyFeature(idx=p, parent=self, center=self.points[p])
            self.poly_graph.add_node(p, feature=f)

        for e in self.polymap.ridge_points:
            self.poly_graph.add_edge(*e.tolist())

        for c in range(len(self.polymap.vertices)):
            self.corner_graph.add_node(c)

        for r in self.polymap.ridge_vertices:
            self.corner_graph.add_edge(*r)

    def feature(self, x, y):
        """:param x, y 2-D coordinate."""
        dist, idx = self.dist_map.query([x, y])
        return self.poly_graph.nodes[idx]['feature']
Example #28
def kdtree(ref_ra,ref_dec, ra,dec, k=1, dsmax=1./3600,verb=True):
    '''computes distance (deg separation) = sqrt{(dec1-dec2)^2 + cos[0.5*(dec1+dec2)]^2*(ra1-ra2)^2}
    returns three arrays:
    1) indices of the reference data that matched
    2) indices of the comparison data that matched
    3) the separations (deg) of those matches
    UNITS: degrees for all
    '''
    print "use johan_tree() instead, for certain ra,dec finds <<< sources than astrometry spherematch"
    raise ValueError
    assert(len(ref_ra) == len(ref_dec))
    assert(len(ra) == len(dec))
    #index tree
    tree = KDTree(np.transpose([dec.copy(),np.cos(dec.copy()*np.pi/180)*ra.copy()])) #data has shape NxK, N points and K dimensions (K=2 for ra,dec) 
    #look in tree for each references point, return k=1 NN
    ds, i_tree = tree.query(np.transpose([ref_dec.copy(),np.cos(ref_dec.copy()*np.pi/180)*ref_ra.copy()]), k=k) #id for each data.query source that is NN of each input source
    #make sense of results
    i_ref,i_other={},{}
    i_ref['match']= np.arange(len(ref_ra))[ds<=dsmax]
    i_ref['nomatch']= np.arange(len(ref_ra))[ds>dsmax]
    i_other['match']= i_tree[ds<=dsmax]
    i_other['nomatch']= i_tree[ds>dsmax]
    assert(len(ref_ra[i_ref['match']]) == len(ra[i_other['match']]))
    if verb:
        print("%d/%d Refs matched, %d/%d Refs unmatched" %
              (i_ref['match'].size, len(ref_ra), i_ref['nomatch'].size, len(ref_ra)))
    return np.array(i_ref['match']),np.array(i_other['match']),np.array(ds[ds<=dsmax])
Example #29
 def _pointwiseF(self):
     d = np.array(self.rcentroids)
     tree = KDTree(d)
     # get the 4 nearest neighbors (obviously the first one is the point itself)
     nn = tree.query(d,k=4)[1]
     N = len(self.rcentroids)
     X = []
     x = []
     for i in range(N):
         W = np.zeros((3,3),float)
         w = np.zeros((3,3),float)
         for j in range(3):
             W[j,:] = self.rcentroids[nn[i,j+1]]-self.rcentroids[i]
             w[j,:] = self.dcentroids[nn[i,j+1]]-self.dcentroids[i]
         X.append(W)
         x.append(w)
     #Solve the linear system for each pointwise F
     for i in range(N):
         W = X[i]
         w = x[i]
         A = np.zeros((9,9),float)
         A[0,0:3] = W[0,:]
         A[1,3:6] = W[0,:]
         A[2,6:9] = W[0,:]
         A[3,0:3] = W[1,:]
         A[4,3:6] = W[1,:]
         A[5,6:9] = W[1,:]
         A[6,0:3] = W[2,:]
         A[7,3:6] = W[2,:]
         A[8,6:9] = W[2,:]
         b = w.flatten().T
         F = np.linalg.solve(A,b)
         self.localFs.append(F.reshape(3,3))
Example #30
def main(in_file: Path, target_grid_file: Path, out_dir: Path=None):

    if out_dir is not None:
        out_dir.mkdir(exist_ok=True)
        out_file = out_dir / (in_file.name + "_interpolated")
    else:
        out_file = in_file.parent / (in_file.name + "_interpolated")

    if out_file.exists():
        print(f"Skipping {in_file}, output already exists ({out_file})")
        return

    with xarray.open_dataset(target_grid_file) as ds_grid:
        lons, lats = ds_grid["lon"][:].values, ds_grid["lat"][:].values

        xt, yt, zt = lat_lon.lon_lat_to_cartesian(lons.flatten(), lats.flatten())

        with xarray.open_dataset(in_file) as ds_in:

            lons_s, lats_s = ds_in["lon"][:].values, ds_in["lat"][:].values
            xs, ys, zs = lat_lon.lon_lat_to_cartesian(lons_s.flatten(), lats_s.flatten())

            ktree = KDTree(list(zip(xs, ys, zs)))
            dists, inds = ktree.query(list(zip(xt, yt, zt)), k=1)




            # resample to daily
            ds_in_r = ds_in.resample(t="1D", keep_attrs=True).mean()


            ds_out = xarray.Dataset()
            for vname, var in ds_grid.variables.items():
                ds_out[vname] = var[:]

            ds_out["t"] = ds_in_r["t"][:]


            for vname, var in ds_in_r.variables.items():

                assert isinstance(var, xarray.Variable)

                var = var.squeeze()

                # only interested in (t, x, y) fields
                if var.ndim != 3:
                    print(f"skipping {vname}")
                    continue

                if vname.lower() not in ["t", "time", "lon", "lat"]:
                    print(f"Processing {vname}")
                    var_interpolated = [var[ti].values.flatten()[inds].reshape(lons.shape) for ti in range(var.shape[0])]
                    ds_out[vname] = xarray.DataArray(
                        var_interpolated, dims=("t", "x", "y"),
                        attrs=var.attrs,
                    )

            ds_out.to_netcdf(out_file)
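A standalone sketch of the nearest-neighbour regridding step performed above: build a KDTree over the source grid in Cartesian coordinates, query the target grid once, and reuse the flat indices for every field. The inline unit-sphere conversion stands in for lat_lon.lon_lat_to_cartesian, which is not shown here:

import numpy as np
from scipy.spatial import KDTree

def lon_lat_to_xyz(lon, lat):
    # unit-sphere conversion standing in for lat_lon.lon_lat_to_cartesian
    lon, lat = np.radians(lon), np.radians(lat)
    return np.cos(lat) * np.cos(lon), np.cos(lat) * np.sin(lon), np.sin(lat)

# coarse source grid and finer target grid
src_lon, src_lat = np.meshgrid(np.linspace(0, 10, 6), np.linspace(40, 50, 6))
tgt_lon, tgt_lat = np.meshgrid(np.linspace(0, 10, 21), np.linspace(40, 50, 21))

xs, ys, zs = lon_lat_to_xyz(src_lon.ravel(), src_lat.ravel())
xt, yt, zt = lon_lat_to_xyz(tgt_lon.ravel(), tgt_lat.ravel())

tree = KDTree(np.column_stack((xs, ys, zs)))
_, inds = tree.query(np.column_stack((xt, yt, zt)), k=1)

field_src = np.sin(np.radians(src_lon)) * src_lat        # any 2-D source field
field_tgt = field_src.ravel()[inds].reshape(tgt_lon.shape)
print(field_tgt.shape)                                    # (21, 21)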
Example #31
class AckermannController:
    def __init__(self):
        rospy.init_node("controller", anonymous=True)
        self.time = rospy.get_time()

        # retrieve ros parameters
        self.max_speed = rospy.get_param("~max_speed")
        self.time_step = rospy.get_param("~time_step")
        self.traj_steps = rospy.get_param("~plan_steps")
        ctrl_freq = rospy.get_param("~ctrl_freq")
        rolename = rospy.get_param("~rolename")
        self.steer_prev = 0.0

        # state information
        self.stateReady = False
        self.state = odom_state()

        # path tracking information
        self.pathReady = False
        self.path = np.zeros(shape=(self.traj_steps, 2))  # absolute position
        self.path_tree = KDTree(self.path)
        self.vel_path = np.zeros(shape=(self.traj_steps, 2))

        self.steer_cache = None

        # # PID controller parameter
        self.pid_str_prop = rospy.get_param("~str_prop")
        self.pid_str_deriv = rospy.get_param("~str_deriv")

        # subscribers, publishers
        rospy.Subscriber("/carla/" + rolename + "/odometry", Odometry,
                         self.odom_cb)
        rospy.Subscriber("MSLcar0/command/trajectory", MultiDOFJointTrajectory,
                         self.desired_waypoints_cb)
        self.command_pub = rospy.Publisher("/carla/" + rolename +
                                           "/ackermann_cmd",
                                           AckermannDrive,
                                           queue_size=10)
        # self.vehicle_cmd_pub = rospy.Publisher(
        #     "/carla/" + rolename + "/vehicle_control_cmd",
        #     CarlaEgoVehicleControl,
        #     queue_size=10
        # )
        self.tracking_pt_viz_pub = rospy.Publisher("tracking_point_mkr",
                                                   Marker,
                                                   queue_size=10)
        self.ctrl_timer = rospy.Timer(rospy.Duration(1.0 / ctrl_freq),
                                      self.timer_cb)

    def desired_waypoints_cb(self, msg):

        for i, pt in enumerate(msg.points):
            self.path[i, 0] = pt.transforms[0].translation.x
            self.path[i, 1] = pt.transforms[0].translation.y
            self.vel_path[i, 0] = pt.velocities[0].linear.x
            self.vel_path[i, 1] = pt.velocities[0].linear.y

        self.path_tree = KDTree(self.path)
        if not self.pathReady:
            self.pathReady = True

    def odom_cb(self, msg):
        self.state.update_vehicle_state(msg)
        if not self.stateReady:
            self.stateReady = True

    def timer_cb(self, event):
        cmd_msg = AckermannDrive()
        current_time = rospy.get_time()
        # delta_t = current_time - self.time
        delta_t = self.time_step
        # delta_t = 0.05
        self.time = current_time
        jerk = 2.0

        # cmd_msg.header.stamp = rospy.Time.now()
        cmd_msg.steering_angle = 0.0
        cmd_msg.steering_angle_velocity = 0.0
        cmd_msg.speed = 0.0
        cmd_msg.acceleration = 0.0
        cmd_msg.jerk = jerk

        if self.pathReady and self.stateReady:
            pos_x, pos_y = self.state.get_position()

            # pick target w/o collision avoidance, closest point on the traj and one point ahead
            _, idx = self.path_tree.query([pos_x, pos_y])
            # Target point for steering
            if idx < self.traj_steps - 3:
                target_pt = self.path_tree.data[idx + 3, :]
                # str_idx = idx + 2
            else:
                target_pt = self.path_tree.data[-1, :]
                # str_idx = self.vel_path.shape[0] - 1
                print("CONTROLLER: at the end of the desired waypoits!!!")
            # Target point for velocity
            if idx < self.traj_steps:
                target_vel = self.vel_path[idx, :]
                # str_idx = idx + 2
            else:
                target_vel = self.vel_path[-1, :]
                # str_idx = self.vel_path.shape[0] - 1

            target_speed = np.linalg.norm(target_vel)

            speed_diff = target_speed - self.state.get_speed()
            # pos_diff = target_pt - self.state.get_position()
            # acceleration = cmd_msg.acceleration + 2.0*(
            #     pos_diff/self.time_step**2.0 - speed_diff/self.time_step
            # )
            acceleration = abs(speed_diff) / (2.0 * delta_t)
            cmd_msg.acceleration = np.min([1.5, acceleration])
            steer = self.compute_ackermann_steer(target_pt)

            steer_diff = abs(steer - self.steer_prev)
            if steer_diff >= 0.3:
                print("      0.3")
                steer = self.steer_prev
            elif steer_diff >= 0.05:
                print("          0.05")
                acceleration = 0.0
                target_speed = target_speed / 5.0

            self.steer_prev = steer

            if self.state.get_speed() - target_speed > 0.0:
                # cmd_msg.acceleration = acceleration - 5
                acceleration = -100.0
                cmd_msg.speed = target_speed / 5.0
                jerk = 1000.0
            elif target_speed - self.state.get_speed() > 0.2:
                acceleration = np.min([3.0, acceleration])
            else:
                acceleration = 0.0

            cmd_msg.steering_angle = steer
            cmd_msg.speed = target_speed
            cmd_msg.acceleration = acceleration
            cmd_msg.jerk = jerk

            # Print out some debugging information
            if abs(target_speed - self.max_speed) > 2.0:
                # print("Posit diff:", pos_diff)
                print("Speed diff:", speed_diff)
                print("Current speed:", self.state.get_speed())
                print("Desired speed:", target_speed)
                print("Desired accel:", acceleration)
                print("Desired jerk:", jerk)
                print("Speed (sent):", cmd_msg.speed)
                print("Accel (sent):", cmd_msg.acceleration)
                print("Jerk  (sent):", cmd_msg.jerk)
                print("delta t:", delta_t)

            # for visualization purposes and debuging control node
            mk_msg = Marker()
            mk_msg.header.stamp = rospy.Time.now()
            mk_msg.header.frame_id = 'map'
            mk_msg.pose.position.x = target_pt[0]
            mk_msg.pose.position.y = target_pt[1]
            mk_msg.type = Marker.CUBE
            mk_msg.scale.x = 3
            mk_msg.scale.y = 3
            mk_msg.scale.z = 3
            mk_msg.color.a = 1.0
            mk_msg.color.r = 1
            mk_msg.color.b = 1
            self.tracking_pt_viz_pub.publish(mk_msg)

        # self.vehicle_cmd_pub.publish(vehicle_cmd_msg)
        self.command_pub.publish(cmd_msg)

    def compute_ackermann_steer(self, target_pt):
        pos_x, pos_y, yaw = self.state.get_pose()
        if np.linalg.norm([target_pt[0] - pos_x, target_pt[1] - pos_y]) < 1:
            print("target point too close!!!!!!!!")
            if self.steer_cache:
                return self.steer_cache
            else:
                return 0.0

        egoOri = np.array([np.cos(yaw), np.sin(yaw), 0])
        rel_pos = np.array([target_pt[0] - pos_x, target_pt[1] - pos_y, 0])
        rel_pos_norm = np.linalg.norm(rel_pos)

        rel_pos_unit = rel_pos / np.linalg.norm(rel_pos)
        rot = np.cross(rel_pos_unit, egoOri)
        steer = -rot[2] * self.pid_str_prop

        if self.steer_cache is not None:
            d_steer = (steer - self.steer_cache)
        else:
            d_steer = 0.0

        # print("steer, d_steer:", steer, d_steer)

        steer = steer + d_steer * self.pid_str_deriv

        self.steer_cache = steer
        return steer
Example #32
class Partitioner(object):
    """
    Universe of Discourse partitioner. Split data on several fuzzy sets
    """
    def __init__(self, **kwargs):
        """
        Universe of Discourse partitioner scheme. Split data on several fuzzy sets
        """
        self.name = kwargs.get('name', "")
        """partitioner name"""
        self.partitions = kwargs.get('npart', 10)
        """The number of universe of discourse partitions, i.e., the number of fuzzy sets that will be created"""
        self.sets = {}
        self.membership_function = kwargs.get('func', Membership.trimf)
        """Fuzzy membership function (pyFTS.common.Membership)"""
        self.setnames = kwargs.get('names', None)
        """list of partitions names. If None is given the partitions will be auto named with prefix"""
        self.prefix = kwargs.get('prefix', 'A')
        """prefix of auto generated partition names"""
        self.transformation = kwargs.get('transformation', None)
        """data transformation to be applied on data"""
        self.indexer = kwargs.get('indexer', None)
        self.variable = kwargs.get('variable', None)
        """In a multivariate context, the variable that contains this partitioner"""
        self.type = kwargs.get('type', 'common')
        """The type of fuzzy sets that are generated by this partitioner"""
        self.ordered_sets = None
        """A ordered list of the fuzzy sets names, sorted by their middle point"""
        self.kdtree = None
        """A spatial index to help in fuzzyfication"""

        if kwargs.get('preprocess', True):

            data = kwargs.get('data', [None])

            if self.indexer is not None:
                ndata = self.indexer.get_data(data)
            else:
                ndata = data

            if self.transformation is not None:
                ndata = self.transformation.apply(ndata)
            else:
                ndata = data

            if self.indexer is not None:
                ndata = self.indexer.get_data(ndata)

            _min = np.nanmin(ndata)
            if _min == -np.inf:
                ndata[ndata == -np.inf] = 0
                _min = np.nanmin(ndata)

            self.min = float(_min * 1.1 if _min < 0 else _min * 0.9)

            _max = np.nanmax(ndata)
            self.max = float(_max * 1.1 if _max > 0 else _max * 0.9)

            self.sets = self.build(ndata)

            self.partitions = len(self.sets)

            if self.ordered_sets is None and self.setnames is not None:
                self.ordered_sets = self.setnames[:len(self.sets)]
            else:
                self.ordered_sets = FuzzySet.set_ordered(self.sets)

            del (ndata)

    def extractor(self, x):
        """Extract a single primitive type from an structured instance"""
        return x

    def build(self, data):
        """
        Perform the partitioning of the Universe of Discourse

        :param data:  training data
        :return: 
        """
        pass

    def get_name(self, counter):
        """
        Find the name of the fuzzy set given its counter id.

        :param counter: The number of the fuzzy set
        :return: String
        """
        return self.prefix + str(
            counter) if self.setnames is None else self.setnames[counter]

    def lower_set(self):
        """
        Return the fuzzy set on lower bound of the universe of discourse.

        :return: Fuzzy Set
        """
        return self.sets[self.ordered_sets[0]]

    def upper_set(self):
        """
        Return the fuzzy set on upper bound of the universe of discourse.

        :return: Fuzzy Set
        """
        return self.sets[self.ordered_sets[-1]]

    def build_index(self):
        points = []

        #self.index = {}

        for ct, key in enumerate(self.ordered_sets):
            fset = self.sets[key]
            points.append([fset.lower, fset.centroid, fset.upper])
            #self.index[ct] = fset.name

        import sys
        sys.setrecursionlimit(100000)

        self.kdtree = KDTree(points)

        sys.setrecursionlimit(1000)

    def fuzzyfy(self, data, **kwargs):
        """
        Fuzzyfy the input data according to this partitioner fuzzy sets.

        :param data: input value to be fuzzyfied
        :keyword alpha_cut: the minimal membership value to be considered on fuzzyfication (only for mode='sets')
        :keyword method: the fuzzyfication method (fuzzy: all fuzzy memberships, maximum: only the maximum membership)
        :keyword mode: the fuzzyfication mode (sets: return the fuzzy sets names, vector: return a vector with the membership
        values for all fuzzy sets, both: return a list with tuples (fuzzy set, membership value) )

        :returns a list with the fuzzyfied values, depending on the mode
        """

        if isinstance(data, (tuple, list, np.ndarray)):
            ret = []
            for inst in data:
                mv = self.fuzzyfy(inst, **kwargs)
                ret.append(mv)
            return ret

        alpha_cut = kwargs.get('alpha_cut', 0.)
        mode = kwargs.get('mode', 'sets')
        method = kwargs.get('method', 'fuzzy')

        nearest = self.search(data, type='index')
        mv = np.zeros(self.partitions)

        try:
            for ix in nearest:
                tmp = self[ix].membership(data)
                mv[ix] = tmp if tmp >= alpha_cut else 0.
        except:
            print(ix)

        ix = np.ravel(np.argwhere(mv > 0.))

        if ix.size == 0:
            mv[self.check_bounds(data)] = 1.

        if method == 'fuzzy' and mode == 'vector':
            return mv
        elif method == 'fuzzy' and mode == 'sets':
            try:
                ix = np.ravel(np.argwhere(mv > 0.))
                sets = [
                    self.ordered_sets[i] for i in ix if i < self.partitions
                ]
                return sets
            except Exception as ex:
                return None
        elif method == 'maximum' and mode == 'sets':
            mx = max(mv)
            ix = np.ravel(np.argwhere(mv == mx))
            return self.ordered_sets[ix[0]]
        elif mode == 'both':
            ix = np.ravel(np.argwhere(mv > 0.))
            sets = [(self.ordered_sets[i], mv[i]) for i in ix]
            return sets

    def defuzzyfy(self, values, mode='both'):
        if not isinstance(values, list):
            values = [values]

        num = []
        den = []
        for val in values:
            fset = val[0]
            mv = val[1]
            if mode == 'both':
                num.append(self.sets[fset].centroid * mv)
                den.append(mv)
            elif mode == 'sets':
                num.append(self.sets[fset].centroid)
            elif mode == 'vector':
                num.append(self.sets[self.ordered_sets[fset]].centroid * mv)
                den.append(mv)
            else:
                raise Exception('Unknown defuzzification mode')

        if mode in ('both', 'vector'):
            return np.nansum(num) / np.nansum(den)
        else:
            return np.nanmean(num)

    def check_bounds(self, data):
        """
        Check if the input data is outside the known Universe of Discourse and, if it is, round it to the closest
        fuzzy set.

        :param data: input data to be verified
        :return: the index of the closest fuzzy set when data is outside the universe of discourse, or None if
        the data is inside the UoD.
        """

        if data < self.min:
            return 0
        elif data > self.max:
            return self.partitions - 1

    def search(self, data, **kwargs):
        """
        Perform a search for the nearest fuzzy sets of the point 'data'. This function was designed to work with several
        overlapping fuzzy sets.

        :param data: the value to search for the nearest fuzzy sets
        :param type: the return type: 'index' for the fuzzy set indexes or 'name' for fuzzy set names.
        :param results: the number of nearest fuzzy sets to return
        :return: a list with the nearest fuzzy sets
        """

        if self.kdtree is None:
            self.build_index()

        type = kwargs.get('type', 'index')
        results = kwargs.get('results', 3)

        _, ix = self.kdtree.query([data, data, data], results)

        if type == 'name':
            return [self.ordered_sets[k] for k in sorted(ix)]
        else:
            return sorted(ix)

    def plot(self, ax, rounding=0):
        """
        Plot the partitioning using the Matplotlib axis ax

        :param ax: Matplotlib axis
        """
        ax.set_title(self.name)
        ax.set_ylim([0, 1.1])
        ax.set_xlim([self.min, self.max])
        ticks = []
        x = []
        for key in self.sets.keys():
            s = self.sets[key]
            if s.type == 'common':
                self.plot_set(ax, s)
            elif s.type == 'composite':
                for ss in s.sets:
                    self.plot_set(ax, ss)
            ticks.append(str(round(s.centroid, rounding)) + '\n' + s.name)
            x.append(s.centroid)
        ax.xaxis.set_ticklabels(ticks)
        ax.xaxis.set_ticks(x)

    def plot_set(self, ax, s):
        """
        Plot an isolate fuzzy set on Matplotlib axis

        :param ax: Matplotlib axis
        :param s: Fuzzy Set
        """
        if s.mf == Membership.trimf:
            ax.plot([s.parameters[0], s.parameters[1], s.parameters[2]],
                    [0, s.alpha, 0])
        elif s.mf in (Membership.gaussmf, Membership.bellmf, Membership.sigmf):
            tmpx = np.linspace(s.lower, s.upper, 100)
            tmpy = [s.membership(kk) for kk in tmpx]
            ax.plot(tmpx, tmpy)
        elif s.mf == Membership.trapmf:
            ax.plot(s.parameters, [0, s.alpha, s.alpha, 0])
        elif s.mf == Membership.singleton:
            ax.plot([s.parameters[0], s.parameters[0]], [0, s.alpha])

    def __str__(self):
        """
        Return a string representation of the partitioner, the list of fuzzy sets and their parameters

        :return:
        """
        tmp = self.name + ":\n"
        for key in self.sets.keys():
            tmp += str(self.sets[key]) + "\n"
        return tmp

    def __len__(self):
        """
        Return the number of partitions

        :return: number of partitions
        """
        return self.partitions

    def __getitem__(self, item):
        """
        Return a fuzzy set by its order or its name.

        :param item: If item is an integer then it represents the fuzzy set index (order), if it was a string then
        it represents the fuzzy set name.
        :return: the fuzzy set
        """
        if isinstance(item, (int, np.integer)):
            if item < 0 or item >= self.partitions:
                raise ValueError(
                    "The fuzzy set index must be between 0 and {}.".format(
                        self.partitions))
            return self.sets[self.ordered_sets[item]]
        elif isinstance(item, str):
            if item not in self.sets:
                raise ValueError(
                    "The fuzzy set with name {} does not exist.".format(item))
            return self.sets[item]
        else:
            raise ValueError(
                "The parameter 'item' must be an integer or a string and the value informed was {} of type {}!"
                .format(item, type(item)))

    def __iter__(self):
        """
        Iterate over the fuzzy sets, ordered by its midpoints.

        :return: An iterator over the fuzzy sets.
        """
        for key in self.ordered_sets:
            yield self.sets[key]
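A standalone sketch of the indexing trick used by build_index and search: each fuzzy set is stored as its (lower, centroid, upper) triple and a scalar x is queried as [x, x, x], so the sets whose support is closest to x come back first (toy triangular sets, not the pyFTS classes):

import numpy as np
from scipy.spatial import KDTree

# (lower, centroid, upper) of five toy triangular fuzzy sets
sets = np.array([[0, 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6]], dtype=float)
tree = KDTree(sets)

x = 2.7
# a scalar is lifted to (x, x, x) so distance reflects closeness to the whole support
_, ix = tree.query([x, x, x], k=3)
print(sorted(ix))    # indices of the 3 fuzzy sets nearest to x, e.g. [1, 2, 3]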
Example #33
def estimate_overall_ref_model(colmap_folder, min_track_len, min_tri_angle,
                               max_tri_angle, image_path):

    print('Loading COLMAP data')
    scene_manager = SceneManager(colmap_folder)
    scene_manager.load_cameras()
    scene_manager.load_images()

    images = [
        'frame0859.jpg', 'frame0867.jpg', 'frame0875.jpg', 'frame0883.jpg',
        'frame0891.jpg'
    ]

    _L = np.array([])
    _r = np.array([])
    _ndotl = np.array([])

    for image_name in images:

        image_id = scene_manager.get_image_id_from_name(image_name)
        image = scene_manager.images[image_id]
        camera = scene_manager.get_camera(image.camera_id)

        # image pose
        R = util.quaternion_to_rotation_matrix(image.qvec)

        print('Loading 3D points')
        scene_manager.load_points3D()
        scene_manager.filter_points3D(min_track_len,
                                      min_tri_angle=min_tri_angle,
                                      max_tri_angle=max_tri_angle,
                                      image_list=set([image_id]))

        points3D, points2D = scene_manager.get_points3D(image_id)
        points3D = points3D.dot(R.T) + image.tvec[np.newaxis, :]

        # need to remove redundant points
        # http://stackoverflow.com/questions/16970982/find-unique-rows-in-numpy-array
        points2D_view = np.ascontiguousarray(points2D).view(
            np.dtype((np.void, points2D.dtype.itemsize * points2D.shape[1])))
        _, idx = np.unique(points2D_view, return_index=True)

        points2D, points3D = points2D[idx], points3D[idx]

        # further rule out any points too close to the image border (for bilinear
        # interpolation)
        mask = ((points2D[:, 0] < camera.width - 1) &
                (points2D[:, 1] < camera.height - 1))
        points2D, points3D = points2D[mask], points3D[mask]

        points2D_image = points2D.copy()  # coordinates in image space
        points2D = np.hstack((points2D, np.ones((points2D.shape[0], 1))))
        points2D = points2D.dot(np.linalg.inv(
            camera.get_camera_matrix()).T)[:, :2]

        print(len(points3D), 'total points')

        # load image
        #image_file = scene_manager.image_path + image.name
        image_file = image_path + image.name
        im_rgb = skimage.img_as_float(
            skimage.io.imread(image_file))  # color image
        L = skimage.color.rgb2lab(im_rgb)[:, :, 0] * 0.01
        # can't have exactly black pixels, since we divide by L
        L = np.maximum(L, 1e-6)
        # initial values on unit sphere
        x, y = camera.get_image_grid()

        print('Computing nearest neighbors')
        kdtree = KDTree(points2D)
        weights, nn_idxs = kdtree.query(np.c_[x.ravel(), y.ravel()],
                                        KD_TREE_KNN)
        weights = weights.reshape(camera.height, camera.width, KD_TREE_KNN)
        nn_idxs = nn_idxs.reshape(camera.height, camera.width, KD_TREE_KNN)

        # turn distances into weights for the nearest neighbors
        np.exp(-weights, weights)  # in-place

        # create initial surface on unit sphere
        S0 = np.dstack((x, y, np.ones_like(x)))
        S0 /= np.linalg.norm(S0, axis=-1)[:, :, np.newaxis]

        r_fixed = np.linalg.norm(points3D, axis=-1)  # fixed 3D depths

        S = S0.copy()
        z = S0[:, :, 2]
        S, r_ratios = nearest_neighbor_warp(weights, nn_idxs, points2D_image,
                                            r_fixed,
                                            util.generate_surface(camera, z))
        z_est = np.maximum(S[:, :, 2], 1e-6)
        S = util.generate_surface(camera, z_est)
        r = np.linalg.norm(S, axis=-1)
        ndotl = util.calculate_ndotl(camera, S)
        if _L.size == 0:
            _L = L.ravel()
            _r = r.ravel()
            _ndotl = ndotl.ravel()
        else:
            _L = np.append(_L, L.ravel())
            _r = np.append(_r, r.ravel())
            _ndotl = np.append(_ndotl, ndotl.ravel())

    #
    falloff, model_params, residual = fit_reflectance_model(
        sfs.POWER_MODEL, _L, _r, _ndotl, False, 5)
    import pdb
    pdb.set_trace()
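
The per-pixel weighting step above (KDTree.query over the pixel grid followed by an in-place np.exp on the distances) can be exercised in isolation. The sketch below is not part of the original pipeline: the constant K stands in for KD_TREE_KNN, and the points and values are synthetic.

import numpy as np
from scipy.spatial import KDTree

K = 4                                        # analogue of KD_TREE_KNN (assumed value)
points2D = np.random.rand(50, 2) * 10.0      # sparse 2D sample locations
values = np.random.rand(50)                  # quantity known at each sample

x, y = np.meshgrid(np.arange(10.0), np.arange(8.0))   # dense query grid
queries = np.c_[x.ravel(), y.ravel()]

dists, nn_idxs = KDTree(points2D).query(queries, k=K)   # (n_pixels, K) arrays
weights = np.exp(-dists)                                 # closer neighbours weigh more
weights /= weights.sum(axis=1, keepdims=True)            # normalise per pixel
interp = (weights * values[nn_idxs]).sum(axis=1).reshape(x.shape)
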
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.camera_image = None
        self.lights = []
        self.waypoints_2d = None

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        sub4 = rospy.Subscriber('/image_color', Image, self.image_cb)

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)
        self.sim = self.config["sim"]

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.bridge = CvBridge()
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        self.waypoint_tree = None

        self.debug = self.config["debug"]
        if self.debug:
            self.debug_file = open(os.path.join(os.getcwd(), 'debug.csv'), 'w')
            self.debug_file.write(
                'Image,Prediction,Ground Truth,Prediction Time (sec)\n')

        self.light_classifier = TLClassifier(self.sim)
        rospy.spin()
        #self.loop()

    def loop(self):
        rate = rospy.Rate(ROS_RATE)
        while not rospy.is_shutdown():
            #self.image_cb_dummy()############################################################
            rate.sleep()

    def pose_cb(self, msg):
        self.pose = msg

    def waypoints_cb(self, waypoints):
        self.waypoints = waypoints
        if not self.waypoints_2d:
            self.waypoints_2d = [[
                waypoint.pose.pose.position.x, waypoint.pose.pose.position.y
            ] for waypoint in waypoints.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2d)

    def traffic_cb(self, msg):
        self.lights = msg.lights

#rospy.logwarn('traffic_cb called!!!')

    def image_cb_dummy(self):
        self.image_cb(None)

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.has_image = True
        self.camera_image = msg
        #rospy.logwarn('Image dims --- Height: %s, Width: %s', msg.height, msg.width)
        #image = self.bridge.imgmsg_to_cv2(msg, 'rgb8')
        #cv2.imwrite('/home/student/CarND-Capstone-master/imgs/simulator/' + str(rospy.Time.now()) + '.png', image)
        light_wp, state = self.process_traffic_lights()
        '''
        Publish upcoming red lights at camera frequency.
        Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        of times till we start using it. Otherwise the previous stable state is
        used.
        '''
        if self.state != state:
            self.state_count = 0
            self.state = state
        elif self.state_count >= STATE_COUNT_THRESHOLD:
            self.last_state = self.state
            light_wp = light_wp if state == TrafficLight.RED else -1
            self.last_wp = light_wp
            self.upcoming_red_light_pub.publish(Int32(light_wp))
        else:
            self.upcoming_red_light_pub.publish(Int32(self.last_wp))
        self.state_count += 1

    def get_closest_waypoint(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            pose (Pose): position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        #TODO implement
        closest_idx = self.waypoint_tree.query([x, y], 1)[1]
        return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        if (not self.has_image):
            self.prev_light_loc = None
            return False

        cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "rgb8")

        #Get classification
        if self.debug:
            start_time = time.time()
        result = self.light_classifier.get_classification(cv_image)
        if self.debug:
            end_time = time.time()
            elapsed = end_time - start_time
            image_name = str(rospy.Time.now()) + '-' + str(result) + '.png'
            #img_path = os.getcwd() + '/../../../imgs/' + image_name
            #cv2.imwrite(img_path,  cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB))
            self.debug_file.write('{},{},{},{:.2f}\n'.format(
                image_name, result, light.state, elapsed))
        return result

        # ############################################### TESTING ONLY ##################################################################
        #return light.state

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        closest_light = None
        line_wp_idx = None

        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']
        if (self.pose):
            car_wp_idx = self.get_closest_waypoint(self.pose.pose.position.x,
                                                   self.pose.pose.position.y)

            #TODO find the closest visible traffic light (if one exists)
            diff = len(self.waypoints.waypoints)
            for i, light in enumerate(self.lights):
                # Get stop line waypoint index
                line = stop_line_positions[i]
                temp_wp_idx = self.get_closest_waypoint(line[0], line[1])

                # Get closest stop line waypoint index
                d = temp_wp_idx - car_wp_idx
                if d >= 0 and d < diff:
                    diff = d
                    closest_light = light
                    line_wp_idx = temp_wp_idx

        if closest_light:
            state = self.get_light_state(closest_light)
            #rospy.logwarn('Traffic light state: %s', state)
            #rospy.logwarn('Closest traffic light stop line index: %s, state: %s', line_wp_idx, state)
            return line_wp_idx, state

        #self.waypoints = None
        return -1, TrafficLight.UNKNOWN

    def __del__(self):
        if self.debug:
            self.debug_file.close()
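
The publishing rule that the docstring above describes (a new state must repeat STATE_COUNT_THRESHOLD times before it replaces the previously stable one) can be factored into a small helper. This is a sketch of the same idea with assumed constants, not code from the project.

STATE_COUNT_THRESHOLD = 3   # assumed value
RED = 0                     # stand-in for TrafficLight.RED

class StateDebouncer(object):
    """Holds the last stable red-light waypoint until a new state repeats enough times."""

    def __init__(self):
        self.state = None
        self.state_count = 0
        self.last_wp = -1

    def update(self, light_wp, state):
        if self.state != state:
            self.state = state
            self.state_count = 0
        elif self.state_count >= STATE_COUNT_THRESHOLD:
            self.last_wp = light_wp if state == RED else -1
        self.state_count += 1
        return self.last_wp   # value to publish on /traffic_waypoint
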
Example #35
class face_manager: 
    def __init__(self):
        self.name_to_face_vec_list = {}
        self.name_to_face_img_list = {}
        '''
        for example:
            username1:[face_vec1,face_vec2,face_vec3,....]
        '''
        #self.pca_processed_name_to_face_vec_list = {}
        self.kd_tree = None
        self.vec_to_name_dict = {}
        self.vec_list = None
        self.name_list = None
        self.similar_threshold = SIMILAR_THRES
    def build_kdtree(self):
        #step1 build inversed mapping.
        self.vec_list = []
        self.name_list = []
        for name in self.name_to_face_vec_list:
            for vec in self.name_to_face_vec_list[name]:
                self.vec_to_name_dict[tuple(vec)] = name  # tuple keys: raw vectors may not be hashable
                self.vec_list.append(np.array(vec))
                self.name_list.append(name)
        vec_mat = np.array(self.vec_list)
        self.kd_tree = KDTree(vec_mat)

    def add_name_and_img(self,name,cv_img_bgr_origin):
        from Network.facenet.get_face_vec import get_vec_from_cvimg
        # if self.name_to_face_img_list.has_key(name):
        if name in self.name_to_face_img_list.keys():
            self.name_to_face_img_list[name].append(cv_img_bgr_origin)
            self.name_to_face_vec_list[name].append(get_vec_from_cvimg(cv_img_bgr_origin))
        else:
            self.name_to_face_img_list[name] = [cv_img_bgr_origin]
            self.name_to_face_vec_list[name] = [get_vec_from_cvimg(cv_img_bgr_origin)]

    def query_name_by_img(self,cv_img_bgr_origin):
        if self.kd_tree is None:
            self.build_kdtree()
        from Network.facenet.get_face_vec import get_vec_from_cvimg
        vec = get_vec_from_cvimg(cv_img_bgr_origin)
        # query_ball_point returns indices into the vectors the tree was built from
        similar_idxs = self.kd_tree.query_ball_point(vec, self.similar_threshold)
        if len(similar_idxs) == 0:
            return None  # no match
        if len(similar_idxs) == 1:
            return self.name_list[similar_idxs[0]]  # single match: return it
        # several candidates: pick the stored vector closest to the query
        min_dist, min_id = self.similar_threshold + 1, similar_idxs[0]
        for i in similar_idxs:
            dist = np.linalg.norm(self.vec_list[i] - np.array(vec))
            if dist < min_dist:
                min_dist = dist
                min_id = i
        print('similar_idxs', similar_idxs)
        return self.name_list[min_id]
    def query_nearest_by_img(self,cv_img_bgr_origin,return_score=False):
        if self.kd_tree is None:
            self.build_kdtree()
        from Network.facenet.get_face_vec import get_vec_from_cvimg
        time0 = time.time()
        vec = get_vec_from_cvimg(cv_img_bgr_origin)
        time1 = time.time()
        dist,nearest_vec_id = self.kd_tree.query([vec])
        time2 = time.time()
        print ('vec calc time cost:',time1-time0)
        print ('kdtree query time cost:',time2-time1)
        dist,nearest_vec_id = dist[0],nearest_vec_id[0]
        print ('dist square:',dist*dist)
        if dist*dist < self.similar_threshold:
          if return_score:
            return self.name_list[nearest_vec_id],dist*dist
          else:
            return self.name_list[nearest_vec_id]
        else:
          if return_score:
            return None,None
          else:
            return None
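
Worth noting for the class above: scipy's KDTree.query_ball_point returns indices into the data the tree was built from, not the vectors themselves, and its radius argument is an ordinary Euclidean distance. A tiny illustration with made-up points:

import numpy as np
from scipy.spatial import KDTree

pts = np.array([[0.0, 0.0], [1.0, 0.0], [5.0, 5.0]])
tree = KDTree(pts)

idxs = tree.query_ball_point([0.1, 0.0], r=1.5)        # indices within the radius, e.g. [0, 1]
vecs = pts[idxs]                                       # recover the matching vectors
dists = np.linalg.norm(vecs - np.array([0.1, 0.0]), axis=1)
print(idxs, dists)                                     # matches and their distances
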
Example #36
def method12(dists, N):
    tree = KDTree(dists, leafsize=dists.shape[0] + 1)
    distances, ndx = tree.query(0, k=N)
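
Because leafsize is set to dists.shape[0] + 1, every point lands in a single leaf and the query degenerates into a brute-force scan. A self-contained sketch of the same pattern for finding the N rows closest to a query value; the one-column array and the query point are illustrative, not taken from the snippet above.

import numpy as np
from scipy.spatial import KDTree

vals = np.array([[3.2], [0.1], [7.8], [0.5], [2.0]])   # one value per row
N = 3
tree = KDTree(vals, leafsize=vals.shape[0] + 1)        # single leaf, i.e. brute force
distances, ndx = tree.query([0.0], k=N)                # N rows closest to 0.0
print(ndx)                                             # e.g. [1 3 4]
print(vals[ndx])                                       # the matching values
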
Example #37
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.waypoints_2d = None
        self.waypoint_tree = None

        self.stateCount = 0

        self.image_count = 0

        self.state = TrafficLight.RED
        self.last_state = TrafficLight.UNKNOWN

        self.last_wp = -1
        self.state_count = 0

        self.camera_image = None
        self.lights = []

        self.saveImgs = True
        if self.saveImgs:
            if not (os.path.exists("./saveImgs")):
                os.mkdir("./saveImgs")
        self.saveCount = 0

        config_string = rospy.get_param("/traffic_light_config")
        #config_string2 = config_string
        #rospy.loginfo("+++++++++++++++Using simulator+++++++++++++++%s",config_string)
        self.config = yaml.load(config_string)

        #rospy.loginfo("+++++++++++++++Using simulator+++++++++++++++%s",self.config)
        isSite = bool(rospy.get_param("~is_siteP", True))
        if isSite:
            self.usingSimulator = False
            rospy.loginfo("+++++++++++++++Running on site+++++++++++++++")
        else:
            self.usingSimulator = True
            rospy.loginfo("+++++++++++++++Using simulator+++++++++++++++")

        self.usingSystemLightState = 0
        #self.usingSimulator = 0 if self.config['is_site'] else 1
        #self.usingSimulator = bool(rospy.get_param("~is_siteP", False))

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier(self.usingSimulator)
        self.listener = tf.TransformListener()

        self.stop_closest_waypoint = []

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        #sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)
        sub6 = rospy.Subscriber('/image_color',
                                Image,
                                self.image_cb,
                                queue_size=1,
                                buff_size=2 * 52428800)

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.lastTime = rospy.get_time()

        rate = rospy.Rate(200)
        while not rospy.is_shutdown():
            if self.pose and self.waypoints and self.lights:
                #get closest waypoint
                #closest_waypoint_idx = self.get_closest_waypoint_idx()
                #rospy.loginfo('closest_waypoint_index:%s', closest_waypoint_idx)
                #self.publish_waypoints(closest_waypoint_idx)

                self.InitializeImage = True
                light_wp, state = self.process_traffic_lights()
                self.find_traffic_lights(light_wp, state)
                rospy.loginfo(
                    "=============finish initialize image===========")
                self.InitializeImage = False
                break
            rate.sleep()

        rospy.spin()

    def pose_cb(self, msg):
        self.pose = msg

    def waypoints_cb(self, waypoints):
        self.waypoints = waypoints
        if not self.waypoints_2d:
            self.waypoints_2d = [[
                waypoint.pose.pose.position.x, waypoint.pose.pose.position.y
            ] for waypoint in waypoints.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2d)

    def traffic_cb(self, msg):
        self.lights = msg.lights

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.has_image = True

        ThisTime = rospy.get_time()
        #rospy.loginfo("Time elapsed:%s",ThisTime - self.lastTime)
        self.lastTime = ThisTime
        self.image_count = self.image_count + 1

        THRESHOLD_SAMPLE = 1
        #if self.usingSimulator:
        #THRESHOLD_SAMPLE = 1

        if (self.image_count >= THRESHOLD_SAMPLE):
            self.lastTime = rospy.get_time()
            self.image_count = 0
            self.has_image = True
            self.camera_image = msg
            light_wp, state = self.process_traffic_lights()
            if (state != 0) and self.saveImgs:
                iimage = self.bridge.imgmsg_to_cv2(self.camera_image, "rgb8")
                h, w, _ = iimage.shape
                #rospy.loginfo("image width:%s height:%s state:%s",w,h,state)
                if self.usingSimulator:
                    iimage = iimage[0:int(0.7 * h), 0:w]
                else:
                    iimage = iimage[0:int(0.7 * h), 50:int(w - 50)]
                self.saveImags(iimage, state)
            ThisTime = rospy.get_time()
            #rospy.loginfo("Time elapsed:%s, light state:%s",ThisTime - self.lastTime,state)

            self.find_traffic_lights(light_wp, state)

    def saveImags(self, image, state):
        dictTL = {0: "R", 1: "Y", 2: "G", 4: "U"}
        takeImage = image
        if not self.usingSimulator:
            lsImageName = "./saveImgs/image0{0:0>5}.jpg".format(self.saveCount)
            #rospy.loginfo("save image:%s",lsImageName)
            cv2.imwrite(lsImageName, takeImage)
        else:
            lsImageName = "./saveImgs/{0}_image6{1:0>5}.jpg".format(
                dictTL[state], self.saveCount)
            rospy.loginfo("save image:%s", lsImageName)
            cv2.imwrite(lsImageName, takeImage)
        self.saveCount += 1

    def find_traffic_lights(self, light_wp, state):
        '''
        Publish upcoming red lights at camera frequency.
        Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        of times till we start using it. Otherwise the previous stable state is
        used.
        '''
        if state == TrafficLight.YELLOW:
            state = TrafficLight.RED

        if self.InitializeImage:
            self.last_wp = light_wp
            self.upcoming_red_light_pub.publish(Int32(light_wp))
            return

        if self.state != state:
            self.state_count = 0
            self.state = state
        elif self.state_count >= STATE_COUNT_THRESHOLD:
            self.last_state = self.state

            light_wp = light_wp if state == TrafficLight.RED else -1
            self.last_wp = light_wp
            rospy.loginfo("---light changed,wp is:%s state:%s s state:%s",
                          light_wp, state, self.state)
            self.upcoming_red_light_pub.publish(Int32(light_wp))

        else:
            rospy.loginfo("---light remained,wp is:%s state:%s s state:%s",
                          self.last_wp, state, self.state)
            self.upcoming_red_light_pub.publish(Int32(self.last_wp))

        self.state_count += 1

    def get_closest_waypoint(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            pose (Pose): position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        #TODO implement
        closest_idx = self.waypoint_tree.query([x, y], 1)[1]

        # check if the closest waypoint is ahead of or behind the vehicle
        closest_coord = self.waypoints_2d[closest_idx]
        prev_coord = self.waypoints_2d[closest_idx - 1]

        closest_v = np.array(closest_coord)
        prev_v = np.array(prev_coord)
        pos_v = np.array([x, y])

        val = np.dot(closest_v - prev_v, pos_v - closest_v)

        if val > 0:
            closest_idx = (closest_idx + 1) % len(self.waypoints_2d)

        return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        if self.usingSystemLightState > 0:
            if (self.stateCount > 2):
                #rospy.loginfo("light state:{0}".format(light.state))
                self.stateCount = 0

            self.stateCount = self.stateCount + 1
            return light.state

        cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "rgb8")
        #cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)
        # narrow down the searching area of the image

        h, w, _ = cv_image.shape
        if self.usingSimulator:
            cv_image = cv_image[0:int(0.7 * h), 0:w]
        else:
            cv_image = cv_image[0:int(0.7 * h), 0:w]

        #Get classification
        return self.light_classifier.get_classification(cv_image)

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        light = None
        light_wp = None
        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']
        if (self.pose):
            car_position = self.get_closest_waypoint(self.pose.pose.position.x,
                                                     self.pose.pose.position.y)

            #TODO find the closest visible traffic light (if one exists)
            diff = 200

            #for fast get
            if len(self.stop_closest_waypoint) == 0:
                for i, lightP in enumerate(self.lights):
                    line = stop_line_positions[i]
                    self.stop_closest_waypoint.append(
                        self.get_closest_waypoint(line[0], line[1]))

            #rospy.loginfo("len of waypoints:%s  car wp:%s",diff,car_position)
            for i, lightP in enumerate(self.lights):
                tmp_waypoint_idx = self.stop_closest_waypoint[i]
                d = tmp_waypoint_idx - car_position
                if d >= 0 and d < diff:
                    diff = d
                    light = lightP
                    light_wp = tmp_waypoint_idx

        if light:
            rospy.loginfo("car pos:%s closest light idx %s diff:%s",
                          car_position, light_wp, diff)
            if self.InitializeImage:
                #for first image latency
                state = 0
            else:
                state = self.get_light_state(light)

            return light_wp, state

        #self.waypoints = None
        return -1, TrafficLight.UNKNOWN
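
The dot-product test inside get_closest_waypoint above is the part that differs from the other detectors: the vector from the previous waypoint to the closest one approximates the track direction, and a positive dot product with the vector from the closest waypoint to the car means that waypoint is already behind the vehicle, so the next index is taken. A standalone sketch of that check (names and data are illustrative):

import numpy as np

def next_waypoint_ahead(waypoints_2d, closest_idx, car_xy):
    """Return closest_idx, bumped by one when that waypoint lies behind the car."""
    closest_v = np.array(waypoints_2d[closest_idx])
    prev_v = np.array(waypoints_2d[closest_idx - 1])
    pos_v = np.array(car_xy)
    # positive value: the car has passed the closest waypoint along the track direction
    if np.dot(closest_v - prev_v, pos_v - closest_v) > 0:
        closest_idx = (closest_idx + 1) % len(waypoints_2d)
    return closest_idx

# three waypoints along the x-axis, car just past the second one -> index 2
print(next_waypoint_ahead([[0, 0], [1, 0], [2, 0]], 1, [1.2, 0.1]))
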
Example #38
class TLDetector(object):
    def __init__(self):

        rospy.init_node('tl_detector')
        self.pose = None
        self.waypoints = None
        self.waypoints_2d = None
        self.waypoints_tree = None
        self.camera_image = None
        self.lights = []
        '''
        The permanent "(x, y)" coordinates for each traffic light's stop line are provided by the 
        "config" dictionary, which is imported from the "/traffic_light_config" file
        '''
        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)
        '''
        The node publishes the index of the waypoint for the
        nearest upcoming red light's stop line to a single topic: `/traffic_waypoint`
        '''
        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier(self.config['is_site'])
        self.listener = tf.TransformListener()

        self.state = 4  # TrafficLight.UNKNOWN
        # rospy.logwarn(">> __init__: self.state= {0}".format(self.state))

        self.last_state = 4
        self.last_wp = -1
        self.state_count = 0

        # `/current_pose` can be used to determine the vehicle's location
        sub1 = rospy.Subscriber('/current_pose', PoseStamped,
                                self.current_pose_cb)
        # `/base_waypoints` provides the complete list of waypoints for the course
        sub2 = rospy.Subscriber('/base_waypoints', Lane,
                                self.base_waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.vehicle_traffic_lights_cb)
        '''
        `/image_color` which provides an image stream from the car's camera. 
        These images are used to determine the color of upcoming traffic lights
        '''
        # sub6 = rospy.Subscriber('/image_color', Image, self.image_color_cb, queue_size=1, buff_size=5760000)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_color_cb)

        rospy.spin()

    def current_pose_cb(self, msg):
        self.pose = msg

    def base_waypoints_cb(self, waypoints):
        self.waypoints = waypoints
        if not self.waypoints_2d:
            self.waypoints_2d = [[
                waypoint.pose.pose.position.x, waypoint.pose.pose.position.y
            ] for waypoint in waypoints.waypoints]
            self.waypoints_tree = KDTree(self.waypoints_2d)

    def vehicle_traffic_lights_cb(self, msg):
        self.lights = msg.lights

    def image_color_cb(self, msg):
        """
        Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.has_image = True
        self.camera_image = msg
        light_wp, state = self.process_traffic_lights()
        # rospy.logwarn("state: {0}".format(state))
        # rospy.logwarn("self.state: {0}".format(self.state))
        '''
        Publish upcoming red lights at camera frequency.
        Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        of times till we start using it. Otherwise the previous stable state is
        used.
        '''

        if self.state != state:
            self.state_count = 0
            self.state = state
        elif self.state_count >= STATE_COUNT_THRESHOLD:
            self.last_state = self.state

            if state == TrafficLight.RED:
                light_wp = light_wp
            elif state == TrafficLight.YELLOW:
                light_wp = -light_wp
            elif state == TrafficLight.GREEN:
                light_wp = -2
            else:
                light_wp = -4

            self.last_wp = light_wp
            self.upcoming_red_light_pub.publish(Int32(light_wp))
        else:
            self.upcoming_red_light_pub.publish(Int32(self.last_wp))
        self.state_count += 1

    def get_closest_waypoint(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            x: x position to match a waypoint's "x" to
            y: y position to match a waypoint's "y" to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """

        closest_idx = self.waypoints_tree.query([x, y], 1)[1]
        return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        # For testing just return light state
        # return light.state

        if (not self.has_image):
            self.prev_light_loc = None
            return False

        cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "bgr8")

        #Get classification
        return self.light_classifier.get_classification(cv_image)

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """

        closest_light = None
        line_wp_idx = None

        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']
        if (self.pose):
            car_position_idx = self.get_closest_waypoint(
                self.pose.pose.position.x, self.pose.pose.position.y)

            #TODO find the closest visible traffic light (if one exists)
            diff = len(self.waypoints.waypoints)
            for i, light in enumerate(self.lights):
                # Get stop line waypoint index
                line = stop_line_positions[i]
                temp_wp_idx = self.get_closest_waypoint(line[0], line[1])
                # Find closest stop line waypoint index
                d = temp_wp_idx - car_position_idx
                if d >= 0 and d < diff:
                    diff = d
                    closest_light = light
                    line_wp_idx = temp_wp_idx

        # rospy.logwarn("process_traffic_lights | self.pose: {0}".format(self.pose))
        if closest_light:
            state = self.get_light_state(closest_light)
            # rospy.logwarn("process_traffic_lights | {0}, {1}".format(line_wp_idx, state))
            return line_wp_idx, state

        # self.waypoints = None

        # rospy.logwarn("process_traffic_lights | {0}, {1}".format(-1, TrafficLight.UNKNOWN))
        return -1, TrafficLight.UNKNOWN
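
Unlike the other detectors in this listing, the example above encodes the light colour into the published integer: the stop-line index for red, its negation for yellow, -2 for green and -4 otherwise (a convention that becomes ambiguous for very small indices). Below is a hypothetical decoder on the consumer side, mirroring that convention; it is an assumption about how a subscriber might interpret the value, not project code.

def decode_traffic_waypoint(value):
    """Hypothetical decoder for the sign convention used above (sketch only)."""
    if value == -4:
        return None, 'UNKNOWN'
    if value == -2:
        return None, 'GREEN'
    if value < 0:
        return -value, 'YELLOW'   # negated stop-line waypoint index
    return value, 'RED'           # stop-line waypoint index

print(decode_traffic_waypoint(42))    # (42, 'RED')
print(decode_traffic_waypoint(-42))   # (42, 'YELLOW')
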
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.waypoints_2d = None
        self.waypoint_tree = None
        self.camera_image = None
        self.lights = []

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier()
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        rospy.spin()

    def pose_cb(self, msg):
        self.pose = msg

    def waypoints_cb(self, msg):
        self.waypoints = msg.waypoints
        if not self.waypoints_2d:
            self.waypoints_2d = [[
                waypoint.pose.pose.position.x, waypoint.pose.pose.position.y
            ] for waypoint in self.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2d)

    def traffic_cb(self, msg):
        self.lights = msg.lights

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.has_image = True
        self.camera_image = msg
        light_wp, state = self.process_traffic_lights()
        '''
        Publish upcoming red lights at camera frequency.
        Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        of times till we start using it. Otherwise the previous stable state is
        used.
        '''
        if self.state != state:
            self.state_count = 0
            self.state = state
        elif self.state_count >= STATE_COUNT_THRESHOLD:
            self.last_state = self.state
            light_wp = light_wp if state == TrafficLight.RED else -1
            self.last_wp = light_wp
            self.upcoming_red_light_pub.publish(Int32(light_wp))
        else:
            self.upcoming_red_light_pub.publish(Int32(self.last_wp))
        self.state_count += 1

    def get_closest_waypoint(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            pose (Pose): position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        current_position_coord = [x, y]
        closest_wp_idx = self.waypoint_tree.query(current_position_coord, 1)[1]
        return closest_wp_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        #return light.state
        if (not self.has_image):
            self.prev_light_loc = None
            return False

        cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "bgr8")

        #Get classification
        return self.light_classifier.get_classification(cv_image)

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        light = None
        closest_light_wp_idx = None
        line_wp_index = None

        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']
        if (self.pose):
            car_position_idx = self.get_closest_waypoint(
                self.pose.pose.position.x, self.pose.pose.position.y)

            #TODO find the closest visible traffic light (if one exists)
            diff = len(self.waypoints)
            for i, light in enumerate(self.lights):
                line = stop_line_positions[i]
                temp_wp_idx = self.get_closest_waypoint(line[0], line[1])

                # closest stop line waypoint index ahead of the car
                d = temp_wp_idx - car_position_idx
                if d >= 0 and d < diff:
                    diff = d
                    closest_light_wp_idx = light
                    line_wp_index = temp_wp_idx

        if closest_light_wp_idx:
            state = self.get_light_state(closest_light_wp_idx)
            return line_wp_index, state
        return -1, TrafficLight.UNKNOWN
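
All of these get_closest_waypoint variants lean on the same scipy behaviour: for a single query point with k=1, KDTree.query returns a (distance, index) pair, which is why the code keeps element [1]. A minimal illustration with made-up waypoints:

import numpy as np
from scipy.spatial import KDTree

waypoints_2d = [[0.0, 0.0], [10.0, 0.0], [20.0, 0.0]]
tree = KDTree(np.array(waypoints_2d))

dist, idx = tree.query([9.0, 1.0], 1)        # nearest waypoint to (9, 1)
print(dist, idx)                             # ~1.41, 1
closest_idx = tree.query([9.0, 1.0], 1)[1]   # the pattern used in the callbacks above
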
Example #40
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.camera_image = None
        self.lights = []
        self.waypoints_2d = None
        self.waypoints_tree = None

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        self.detector_config = {}
        self.detector_config["model_anchor"] = rospy.get_param("model_anchor")
        self.detector_config["model_classes"] = rospy.get_param(
            "model_classes")
        self.detector_config["model_path"] = rospy.get_param("model_path")

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier(self.config['is_site'],
                                             self.detector_config)
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0
        self.prev_light_loc = None
        self.has_image = False

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)

        if self.config['is_site']:  # subscribe to raw image
            sub6 = rospy.Subscriber('/image_raw',
                                    Image,
                                    self.image_cb,
                                    queue_size=1,
                                    tcp_nodelay=True)
        else:
            sub6 = rospy.Subscriber('/image_color',
                                    Image,
                                    self.image_cb,
                                    queue_size=1,
                                    tcp_nodelay=True)

        rospy.spin()

    def pose_cb(self, msg):
        self.pose = msg

    def waypoints_cb(self, waypoints):
        self.waypoints = waypoints

        if not self.waypoints_2d:
            self.waypoints_2d = [[
                waypoint.pose.pose.position.x, waypoint.pose.pose.position.y
            ] for waypoint in waypoints.waypoints]
            self.waypoints_tree = KDTree(self.waypoints_2d)

    def traffic_cb(self, msg):
        self.lights = msg.lights

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.has_image = True
        self.camera_image = msg
        light_wp, state = self.process_traffic_lights()
        '''
        Publish upcoming red lights at camera frequency.
        Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        of times till we start using it. Otherwise the previous stable state is
        used.
        '''
        if self.state != state:
            self.state_count = 0
            self.state = state
        elif self.state_count >= STATE_COUNT_THRESHOLD:
            self.last_state = self.state
            light_wp = light_wp if state == TrafficLight.RED else -1
            self.last_wp = light_wp
            self.upcoming_red_light_pub.publish(Int32(light_wp))
        else:
            self.upcoming_red_light_pub.publish(Int32(self.last_wp))
        self.state_count += 1

        self.has_image = False

    '''
    def get_closest_waypoint(self, pose):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            pose (Pose): position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        # TODO implement
        closest_idx = self.waypoints_tree.query([pose.position.x, pose.position.y], 1)[1]
        return closest_idx
    '''

    def get_closest_waypoint(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            x (float): position to match a waypoint to
            y (float): position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        # TODO implement
        closest_idx = self.waypoints_tree.query([x, y], 1)[1]
        return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """

        #return light.state
        if not self.has_image:
            self.prev_light_loc = None
            return TrafficLight.UNKNOWN

        if self.config['is_site']:
            cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "rgb8")
        else:
            cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "rgb8")

        # Get classification
        result = self.light_classifier.get_classification(cv_image)
        return result

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        closest_light = None
        line_wp_idx = None

        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']

        light_state_to_str = {0: "RED", 2: "GREEN", 1: "YELLOW", 4: "UNKNOWN"}

        # TODO: just for testing, remove it
        #if self.config['is_site']:
        #    state = self.get_light_state(closest_light)
        #    rospy.logwarn("Nearest traffic light state: {0}".format(light_state_to_str[int(state)]))
        # TODO: end of testing

        if self.pose and self.waypoints and self.waypoints_tree:
            car_wp_idx = self.get_closest_waypoint(self.pose.pose.position.x,
                                                   self.pose.pose.position.y)

            # TODO find the closest visible traffic light (if one exists)
            diff = len(self.waypoints.waypoints)

            for i, light in enumerate(self.lights):
                # Get stop line waypoint index
                line = stop_line_positions[i]
                temp_wp_idx = self.get_closest_waypoint(line[0], line[1])
                # Find closest stop line waypoint index
                d = temp_wp_idx - car_wp_idx
                if 0 <= d < diff:
                    diff = d
                    closest_light = light
                    line_wp_idx = temp_wp_idx

        if closest_light:
            state = self.get_light_state(closest_light)
            rospy.logwarn("Nearest traffic light state: {0}".format(
                light_state_to_str[state]))
            return line_wp_idx, state

        #self.waypoints = None
        rospy.logwarn("Nearest traffic light state: UNKNOWN")
        return -1, TrafficLight.UNKNOWN
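
Several of these nodes tune the camera subscription to avoid classifying stale frames: queue_size=1 keeps only the newest message in the callback queue, a large buff_size lets a whole image arrive in a single read, and tcp_nodelay disables Nagle buffering. Below is a sketch of that subscription using the topic and callback names from the examples above; the buffer size is an illustrative value.

import rospy
from sensor_msgs.msg import Image

def image_cb(msg):
    # store the frame only; heavy classification runs elsewhere at a fixed rate
    pass

rospy.init_node('tl_detector_sketch')
rospy.Subscriber('/image_color', Image, image_cb,
                 queue_size=1,            # keep only the latest frame
                 buff_size=2 * 52428800,  # large receive buffer (illustrative)
                 tcp_nodelay=True)        # avoid Nagle-induced latency
rospy.spin()
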
Example #41
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.camera_image = None
        self.lights = []
        self.waypoints_2d = None
        self.waypoints_tree = None

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''/vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)
        self.is_sim = not self.config["is_site"]
        self.is_sim = True  # NOTE: hard-coded override of the config value above
        print("Is Sim %d" % self.is_sim)

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.bridge = CvBridge()

        self.light_classifier = TLClassifier(self.is_sim)
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        # rospy.spin()
        self.ros_spin()

    def ros_spin(self):
        rate = rospy.Rate(100)
        while not rospy.is_shutdown():
            '''Publish upcoming red lights at camera frequency.
            Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
            of times till we start using it. Otherwise the previous stable state is
            used.
            '''
            if self.pose is not None and self.waypoints is not None and self.camera_image is not None:
                light_wp, state = self.process_traffic_lights()
                #                 print("Light waypoint Index: ",light_wp, "Traffic Light: ", TrafficLight.RED, state)
                if self.state != state:
                    self.state_count = 0
                    self.state = state
                elif self.state_count >= STATE_COUNT_THRESHOLD:
                    self.last_state = self.state
                    light_wp = light_wp if state == TrafficLight.RED else -1
                    self.last_wp = light_wp
                    self.upcoming_red_light_pub.publish(Int32(light_wp))
                # print(light_wp,self.state_count)
                else:
                    self.upcoming_red_light_pub.publish(Int32(self.last_wp))
                self.state_count += 1
            rate.sleep()

    def pose_cb(self, msg):
        self.pose = msg

    # Extracts the x and y coordinates of a waypoint
    def waypoint_xy(self, waypoint):
        position = waypoint.pose.pose.position
        return [position.x, position.y]

    def waypoints_cb(self, waypoints):
        self.waypoints = waypoints
        if not self.waypoints_2d:
            self.waypoints_2d = [
                self.waypoint_xy(waypoint) for waypoint in waypoints.waypoints
            ]
            self.waypoints_tree = KDTree(self.waypoints_2d)

    def traffic_cb(self, msg):
        self.lights = msg.lights

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.has_image = True
        self.camera_image = msg
        print('get camera image', rospy.Time.now())

    # Commented out to test for lag
    # light_wp, state = self.process_traffic_lights()

    def get_closest_waypoint(self, pose_x, pose_y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            pose (Pose): position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        # Use KDTree to search through waypoints
        closest_idx = self.waypoints_tree.query([pose_x, pose_y], 1)[1]
        return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        if not self.has_image:
            self.prev_light_loc = None
            return False

        cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "bgr8")

        # Get classification
        classification_result = self.light_classifier.get_classification(
            cv_image)
        rospy.loginfo("ground truth: %s, classifier result: %s", light.state,
                      classification_result)
        return classification_result

    # return light.state

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        closest_light = None
        light_wp_idx = None
        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']
        if (self.pose):
            car_wp_idx = self.get_closest_waypoint(self.pose.pose.position.x,
                                                   self.pose.pose.position.y)
            # print(car_wp_idx)
            # TODO find the closest visible traffic light (if one exists)
            diff = len(self.waypoints.waypoints)
            # print(self.pose.pose.position.x,self.pose.pose.position.y, car_wp_idx)

            for i, light in enumerate(self.lights):
                # Get stop line waypoint index
                line = stop_line_positions[i]
                # print(line[0], line[1])
                temp_wp_idx = self.get_closest_waypoint(line[0], line[1])
                # Find Closest stop line waypoint index
                d = temp_wp_idx - car_wp_idx
                if d >= 0 and d < diff:
                    diff = d
                    closest_light = light
                    light_wp_idx = temp_wp_idx

        if closest_light:
            state = self.get_light_state(closest_light)
            # print("Light waypoint Index: ",light_wp_idx, "Traffic Light: ",TrafficLight.RED, state)
            return light_wp_idx, state

        # self.waypoints = None
        return -1, TrafficLight.UNKNOWN
Example #42
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.base_waypoints = None
        self.waypoints_2d = None
        self.waypoint_tree = None
        self.camera_image = None
        self.lights = []

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier()
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        rospy.spin()

    def pose_cb(self, msg):
        self.pose = msg

    def waypoints_cb(self, waypoints):
        if not self.waypoint_tree:
            self.base_waypoints = waypoints
            self.waypoints_2d = [[
                waypoint.pose.pose.position.x, waypoint.pose.pose.position.y
            ] for waypoint in waypoints.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2d)

    def traffic_cb(self, msg):
        self.lights = msg.lights

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.has_image = True
        self.camera_image = msg
        light_wp_idx, state = self.process_traffic_lights()
        '''
        Publish upcoming red lights at camera frequency.
        Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        of times till we start using it. Otherwise the previous stable state is
        used.
        '''
        if self.state != state:
            self.state_count = 0
            self.state = state
        elif self.state_count >= STATE_COUNT_THRESHOLD:
            self.last_state = self.state
            light_wp_idx = light_wp_idx if state == TrafficLight.RED else -1
            self.last_wp = light_wp_idx
            self.upcoming_red_light_pub.publish(Int32(light_wp_idx))
        else:
            self.upcoming_red_light_pub.publish(Int32(self.last_wp))
        self.state_count += 1

    def get_closest_waypoint_idx(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            x (float): x coordinate of the position to match a waypoint to
            y (float): y coordinate of the position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.base_waypoints

        """
        if self.waypoint_tree is None:
            return -1
        else:
            closest_idx = self.waypoint_tree.query([x, y], 1)[1]
            return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        # For testing, just return the light state
        # TODO: remove the next line after testing
        # return light.state

        if (not self.has_image):
            self.prev_light_loc = None
            return False

        cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "bgr8")

        #Get classification
        prob = self.calc_probability_of_traffic_light_in_camera_view(light)
        return self.light_classifier.get_classification(cv_image, prob)

    def calc_probability_of_traffic_light_in_camera_view(self, light):
        light_pos = light.pose.pose.position
        car_pos = self.pose.pose.position

        car_roll, car_pitch, car_yaw = euler_from_quaternion([
            self.pose.pose.orientation.x, self.pose.pose.orientation.y,
            self.pose.pose.orientation.z, self.pose.pose.orientation.w
        ])
        car_w = car_yaw

        car_direction = np.array([np.cos(car_w), np.sin(car_w)])
        light_direction = np.array(
            [light_pos.x - car_pos.x, light_pos.y - car_pos.y])
        cos = np.dot(car_direction, light_direction) / np.linalg.norm(
            car_direction) / np.linalg.norm(light_direction)
        angle = np.arccos(np.clip(cos, -1, 1))

        dx = light_pos.x - car_pos.x
        dy = light_pos.y - car_pos.y
        dist_to_light = np.sqrt(dx**2 + dy**2)

        detect_dist = dist_to_light - 10  # m - if the car is below the traffic light, it will not see the light
        detect_angle = abs(angle)
        prob_dist = (200 - detect_dist) / 200
        prob_angle = (np.radians(45) - detect_angle) / np.radians(45)
        probability = min(max(prob_dist * prob_angle, 0),
                          1) if detect_angle > 0. and detect_dist > 0. else 0.
        # rospy.loginfo("[TLD] probability of traffic light {0} ({1} * {2})".format(probability, prob_dist, prob_angle))
        return probability
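
    # Worked illustration (numbers are assumed, not from the original code): for a
    # light 60 m ahead and 10 degrees off the car's heading, detect_dist = 50, so
    # prob_dist = (200 - 50) / 200 = 0.75 and prob_angle = (45 - 10) / 45 ~= 0.78,
    # giving a returned probability of roughly 0.58.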

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closest to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        closest_light = None
        closest_line_wp_idx = None

        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']
        if not None in (self.pose, self.waypoint_tree):
            car_wp_idx = self.get_closest_waypoint_idx(
                self.pose.pose.position.x, self.pose.pose.position.y)

            #TODO find the closest visible traffic light (if one exists)
            min_diff = len(self.base_waypoints.waypoints)
            for i, light in enumerate(self.lights):
                # Get stop line waypoint index
                line = stop_line_positions[i]
                line_wp_idx = self.get_closest_waypoint_idx(line[0], line[1])
                # Find closest stop line waypoint index
                d = line_wp_idx - car_wp_idx
                if d >= 0 and d < min_diff:
                    min_diff = d
                    closest_light = light
                    closest_line_wp_idx = line_wp_idx
        else:
            rospy.logwarn("[TLD] pose or waypoints is missing")

        if closest_light:
            state = self.get_light_state(closest_light)
            return closest_line_wp_idx, state
        else:
            rospy.logwarn("[TLD] no closest light found")

        return -1, TrafficLight.UNKNOWN
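
# A minimal launch sketch for the node above (an assumption, not part of the original
# example). STATE_COUNT_THRESHOLD is assumed to be a module-level constant, e.g. 3,
# matching its use in image_cb.
STATE_COUNT_THRESHOLD = 3

if __name__ == '__main__':
    try:
        TLDetector()
    except rospy.ROSInterruptException:
        rospy.logerr('Could not start traffic light detector node.')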
Example #43
0
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.camera_image = None
        self.waypoints_2d = None
        self.waypoint_tree = None
        self.has_image = False
        self.lights = []
        self.last_light_state = TrafficLight.UNKNOWN
        self.immediate_light_state = TrafficLight.UNKNOWN
        self.immediate_light_state_count = 0
        self.light_classifier_throttle_time = rospy.get_rostime()

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)

        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray, self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb, queue_size=1)

        self.test_pub = rospy.Publisher('/dummy_image', Image, queue_size=1)
        self.light_pub = rospy.Publisher('/tl_filtered', String, queue_size=1)


        config_string = rospy.get_param("/traffic_light_config")
        # config also contains an "is_site" bool variable to check if the car is running in simulator or site
        self.config = yaml.load(config_string)

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint', Int32, queue_size=1)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier()
        self.light_classifier.find_objects(np.zeros((2,2,3), np.uint8), 0.5, [10.]) # Hit the classifier to get TF through its init
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        self.main_loop()

        #rospy.spin()

    def main_loop(self):
        while not rospy.is_shutdown():
            if(self.has_image):
                light_wp, state = self.process_traffic_lights()

                if self.state != state:
                    self.state_count = 0
                    self.state = state
                elif self.state_count >= STATE_COUNT_THRESHOLD:
                    self.last_state = self.state
                    light_wp = light_wp if state == TrafficLight.RED else -1
                    self.last_wp = light_wp
                    self.upcoming_red_light_pub.publish(Int32(light_wp))
                else:
                    self.upcoming_red_light_pub.publish(Int32(self.last_wp))

                self.state_count += 1

    def pose_cb(self, msg):
        self.pose = msg

    def waypoints_cb(self, waypoints):
        self.waypoints = waypoints
        if not self.waypoints_2d:
            self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2d)  # store in a tree structure for easier access

    def traffic_cb(self, msg):
        self.lights = msg.lights

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.has_image = True
        self.camera_image = msg
        self.test_pub.publish(self.camera_image)


    def get_closest_waypoint(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            x (float): x coordinate of the position to match a waypoint to
            y (float): y coordinate of the position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        # find the x, y position in the waypoints in the KDTree
        closest_idx = self.waypoint_tree.query([x, y], 1)[1]
        return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        # just for testing
        #return light.state

        if(not self.has_image):
            self.prev_light_loc = None
            return False

        #Get classification
        # Throttle the incoming requests
        #if self.light_classifier_throttle_time + rospy.Duration(0.05) < rospy.get_rostime():
            #cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "bgr8")
            #self.last_light_state = self.light_classifier.get_classification(cv_image)
            #self.light_classifier_throttle_time = rospy.get_rostime()
            #return self.last_light_state


        cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "bgr8")
        self.immediate_light_state = self.light_classifier.get_classification(cv_image)
        self.light_classifier_throttle_time = rospy.get_rostime()

        # Filter out noise from the classifier
        if (self.immediate_light_state != self.last_light_state):
            self.immediate_light_state_count += 1
            if (self.immediate_light_state_count > 4):
                #if (self.last_light_state == 1 and self.immediate_light_state != 0):
                #    self.last_light_state =  self.last_light_state
                #else:
                self.last_light_state = self.immediate_light_state
                self.immediate_light_state_count = 0

        self.light_pub.publish(str(self.last_light_state))
        return self.last_light_state

        #else:
        #    return self.last_light_state

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closest to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        closest_light = None
        line_wp_idx = None

        # List of positions that correspond to the line to stop in front of for a given intersection
        stop_line_positions = self.config['stop_line_positions']
        if(self.pose):
            x = self.pose.pose.position.x
            y = self.pose.pose.position.y
            car_wp_idx = self.get_closest_waypoint(x, y)

            #TODO find the closest visible traffic light (if one exists)
            diff = len(self.waypoints.waypoints)
            # iterate through the traffic lights to find the closest
            for i, light in enumerate(self.lights):
                # Get stop line waypoint index
                line = stop_line_positions[i]
                temp_wp_idx = self.get_closest_waypoint(line[0], line[1])
                # Find closest stop line waypoint idx
                d = temp_wp_idx - car_wp_idx
                # linear check to find the closest
                if d >= 0 and d < diff:
                    diff = d
                    closest_light = light
                    line_wp_idx = temp_wp_idx

        if closest_light:
            state = self.get_light_state(closest_light)
            return line_wp_idx, state

        return -1, TrafficLight.UNKNOWN
Example #44
0
    X, Y, THETA = 0, 0.2, 0
    # velocity, wheel base, time step
    V, L, DT = 0.15, 0.4, 0.1
    # initialize the point-mass model
    robot = particle.MODEL(X, Y, THETA,  V, L, DT)

    # configure the incremental PID controller parameters
    pid = incremental_pid.CU(0.8, 0.05, 20)

    # initialize the robot's current position
    robot_state = np.zeros(2)

    for _ in range(COUNT):
        # get the current position
        robot_state[0], robot_state[1] = robot.x, robot.y
        _, ind = refer_tree.query(robot_state)

        # compute the lateral offset from the reference path
        _, dy = refer_path[ind] - robot_state

        # compute the expected x-direction distance to the next path point
        ndx, ndy = refer_path[ind + 1] - refer_path[ind]
        dx = V * np.cos(math.atan2(ndy, ndx)) * robot.dt

        # compute the desired heading angle
        alpha = math.atan2(dy, dx)

        # compute the heading error
        e = alpha - robot.theta

        # update the error
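
    # Note: this fragment is cut off after computing the heading error e, and it uses
    # refer_path / refer_tree without defining them. A plausible setup (an assumption,
    # not shown in the original) builds a dense reference path and its KD-tree once,
    # before the control loop:
    #
    #     refer_path = np.column_stack([np.linspace(0, 10, COUNT),
    #                                   np.sin(np.linspace(0, 10, COUNT))])
    #     refer_tree = KDTree(refer_path)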
Example #45
0
class Simulation(object):
    def __init__(self, path, mag_zeropoint, exposure):
        """
        Parses a Stuff input file. See https://www.astromatic.net/software/stuff
        :param path:
            Path to the .list simulation data
        :param mag_zeropoint:
            Used to compute the flux
        :param exposure:
            Used to compute the flux
        """
        logger.debug(f'Loading stuff list from {path}')
        stars_raw, galaxies_raw = [], []
        with open(path, 'r') as lst:
            for entry in map(str.split, map(str.strip, lst.readlines())):
                if entry[0] == '100':
                    stars_raw.append(entry)
                elif entry[0] == '200':
                    galaxies_raw.append(entry)
                else:
                    raise Exception(f'Unexpected type code {entry[0]}')

        logger.debug(f'Loaded {len(stars_raw)} stars')
        logger.debug(f'Loaded {len(galaxies_raw)} galaxies')

        self.__stars = np.recarray((len(stars_raw), ),
                                   dtype=[('ra', float), ('dec', float),
                                          ('mag', float), ('flux', float)])
        self.__galaxies = np.recarray((len(galaxies_raw), ),
                                      dtype=[('ra', float), ('dec', float),
                                             ('mag', float), ('flux', float),
                                             ('bt_ratio', float),
                                             ('bulge', float),
                                             ('bulge_aspect', float),
                                             ('bulge_angle', float),
                                             ('disk', float),
                                             ('disk_aspect', float),
                                             ('disk_angle', float),
                                             ('redshift', float),
                                             ('type', float)])

        for i, s in enumerate(stars_raw):
            self.__stars[i].ra, self.__stars[i].dec, self.__stars[
                i].mag = float(s[1]), float(s[2]), float(s[3])
        self.__stars.flux = exposure * np.power(
            10, (self.__stars.mag - mag_zeropoint) / -2.5)

        for i, g in enumerate(galaxies_raw):
            self.__galaxies[i].ra, self.__galaxies[i].dec = float(g[1]), float(
                g[2])
            self.__galaxies[i].mag = float(g[3])
            self.__galaxies[i].bt_ratio = float(g[4])
            self.__galaxies[i].bulge, self.__galaxies[
                i].bulge_aspect, self.__galaxies[i].bulge_angle = float(
                    g[5]), float(g[6]), float(g[7])
            self.__galaxies[i].disk, self.__galaxies[
                i].disk_aspect, self.__galaxies[i].disk_angle = float(
                    g[8]), float(g[9]), float(g[10])
            self.__galaxies[i].redshift = float(g[11])
            self.__galaxies[i].type = float(g[12])
        self.__galaxies.flux = exposure * np.power(
            10, (self.__galaxies.mag - mag_zeropoint) / -2.5)

        all_coords = np.column_stack([
            np.append(self.__stars.ra, self.__galaxies.ra),
            np.append(self.__stars.dec, self.__galaxies.dec)
        ])
        self.__kdtree = KDTree(all_coords)
        self.__all_mags = np.append(self.__stars.mag, self.__galaxies.mag)

    @property
    def stars(self):
        return self.__stars

    @property
    def galaxies(self):
        return self.__galaxies

    @property
    def magnitudes(self):
        """
        :return: All the magnitudes on the simulation: first stars, then galaxies.
        """
        return self.__all_mags

    def get_star_count(self):
        """
        :return: Number of stars on the simulation.
        """
        return len(self.__stars)

    def get_galaxy_count(self):
        """
        :return: Number of galaxies on the simulation.
        """
        return len(self.__galaxies)

    def get_closest(self, alpha, delta):
        """
        Find the closest source to the catalog entries. This can be used to cross-relate
        the entries from the Stuff simulation and a catalog
        :param alpha:
            Alpha coordinates
        :param delta:
            Delta coordinates
        :return:
            A numpy array with the columns:
                * distance: distance to closest source
                * catalog: corresponding index on the catalog
                * source: corresponding index on the simulation list
                * magnitude: magnitude of the closest source
                * is_star: True if the closest source is a star
                * is_galaxy: True if the closest source is a galaxy
        """
        assert len(alpha) == len(delta)
        closest = np.recarray((len(alpha), ),
                              dtype=[('distance', float), ('catalog', int),
                                     ('source', int), ('magnitude', float),
                                     ('is_star', bool), ('is_galaxy', bool)])

        closest.catalog[:] = np.arange(0, len(alpha))
        closest.distance[:], closest.source[:] = self.__kdtree.query(
            np.column_stack([alpha, delta]))
        closest.magnitude[:] = self.__all_mags[closest.source]
        closest.is_star[:] = closest.source < self.get_star_count()
        closest.is_galaxy[:] = np.logical_not(closest.is_star[:])
        return closest
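
# A short usage sketch for the class above; the file name, zeropoint and exposure are
# illustrative assumptions, not values from the original example.
sim = Simulation('simulation.list', mag_zeropoint=26.0, exposure=300.0)
catalog_alpha = sim.stars.ra[:10]    # stand-in for catalog coordinates
catalog_delta = sim.stars.dec[:10]
matches = sim.get_closest(catalog_alpha, catalog_delta)
print(matches.distance, matches.magnitude, matches.is_star)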
Example #46
0
def fast_knn(data,
             k=3,
             eps=0,
             p=2,
             distance_upper_bound=np.inf,
             leafsize=10,
             idw_fn=idw.shepards,
             init_impute_fn=mean):
    """ Impute using a variant of the nearest neighbours approach

    Basic idea: Impute array with a passed in initial impute fn (mean impute)
    and then use the resulting complete array to construct a KDTree. Use this
    KDTree to compute nearest neighbours.  After finding `k` nearest
    neighbours, take the weighted average of them. Basically, find the nearest
    row in terms of distance

    This approach is much, much faster than the other implementation (fit+transform
    for each subset) which is almost prohibitively expensive.

    Parameters
    ----------
    data: numpy.ndarray
        2D matrix to impute.

    k: int, optional
        Parameter used for method querying the KDTree class object. Number of
        neighbours used in the KNN query. Refer to the docs for
        [`scipy.spatial.KDTree.query`]
        (https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.KDTree.query.html).

    eps: nonnegative float, optional
        Parameter used for method querying the KDTree class object. From the
        SciPy docs: "Return approximate nearest neighbors; the kth returned
        value is guaranteed to be no further than (1+eps) times the distance to
        the real kth nearest neighbor". Refer to the docs for
        [`scipy.spatial.KDTree.query`]
        (https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.KDTree.query.html).

    p : float, 1<=p<=infinity, optional
        Parameter used for method querying the KDTree class object. Straight from the
        SciPy docs: "Which Minkowski p-norm to use. 1 is the
        sum-of-absolute-values Manhattan distance 2 is the usual Euclidean
        distance infinity is the maximum-coordinate-difference distance". Refer to
        the docs for
        [`scipy.spatial.KDTree.query`]
        (https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.KDTree.query.html).

    distance_upper_bound : nonnegative float, optional
        Parameter used for method querying the KDTree class object. Straight
        from the SciPy docs: "Return only neighbors within this distance. This
        is used to prune tree searches, so if you are doing a series of
        nearest-neighbor queries, it may help to supply the distance to the
        nearest neighbor of the most recent point." Refer to the docs for
        [`scipy.spatial.KDTree.query`]
        (https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.KDTree.query.html).

    leafsize: int, optional
        Parameter used for construction of the `KDTree` class object. Straight from
        the SciPy docs: "The number of points at which the algorithm switches
        over to brute-force. Has to be positive". Refer to the docs for
        [`scipy.spatial.KDTree`](https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.spatial.KDTree.html)
        for more information.

    idw_fn: fn, optional
        Function that takes one argument, a list of distances, and returns weighted percentages. You can define a custom
        one or bootstrap from functions defined in `impy.util.inverse_distance_weighting` which can be using
        functools.partial, for example: `functools.partial(impy.util.inverse_distance_weighting.shepards, power=1)`

    init_impute_fn: fn, optional

    Returns
    -------
    numpy.ndarray
        Imputed data.

    Examples
    --------

        >>> data = np.arange(25).reshape((5, 5)).astype(np.float)
        >>> data[0][2] =  np.nan
        >>> data
        array([[ 0.,  1., nan,  3.,  4.],
               [ 5.,  6.,  7.,  8.,  9.],
               [10., 11., 12., 13., 14.],
               [15., 16., 17., 18., 19.],
               [20., 21., 22., 23., 24.]])
        >> fast_knn(data, k=1) # Weighted average (by distance) of nearest 1 neighbour
        array([[ 0.,  1.,  7.,  3.,  4.],
               [ 5.,  6.,  7.,  8.,  9.],
               [10., 11., 12., 13., 14.],
               [15., 16., 17., 18., 19.],
               [20., 21., 22., 23., 24.]])
        >> fast_knn(data, k=2) # Weighted average of nearest 2 neighbours
        array([[ 0.        ,  1.        , 10.08608891,  3.        ,  4.        ],
               [ 5.        ,  6.        ,  7.        ,  8.        ,  9.        ],
               [10.        , 11.        , 12.        , 13.        , 14.        ],
               [15.        , 16.        , 17.        , 18.        , 19.        ],
               [20.        , 21.        , 22.        , 23.        , 24.        ]])
        >> fast_knn(data, k=3)
        array([[ 0.        ,  1.        , 13.40249283,  3.        ,  4.        ],
               [ 5.        ,  6.        ,  7.        ,  8.        ,  9.        ],
               [10.        , 11.        , 12.        , 13.        , 14.        ],
               [15.        , 16.        , 17.        , 18.        , 19.        ],
               [20.        , 21.        , 22.        , 23.        , 24.        ]])
        >> fast_knn(data, k=5) # There are at most only 4 neighbours. Raises error
        ...
        IndexError: index 5 is out of bounds for axis 0 with size 5

    """
    nan_xy = matrix.nan_indices(data)
    data_c = init_impute_fn(data)
    kdtree = KDTree(data_c, leafsize=leafsize)

    for x_i, y_i in nan_xy:
        distances, indices = kdtree.query(
            data_c[x_i],
            k=k + 1,
            eps=eps,
            p=p,
            distance_upper_bound=distance_upper_bound)
        # Will always return itself in the first index. Delete it.
        distances, indices = distances[1:], indices[1:]
        # Add small constant to distances to avoid division by 0
        distances += 1e-3
        weights = idw_fn(distances)
        # Assign missing value the weighted average of `k` nearest neighbours
        data[x_i][y_i] = np.dot(weights, [data_c[ind][y_i] for ind in indices])
    return data
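
# A small sketch (assumption) of the custom-weighting hook described in the docstring
# above: Shepard's inverse-distance weights with power=1, passed in via functools.partial.
import functools
custom_idw = functools.partial(idw.shepards, power=1)
# imputed = fast_knn(incomplete_data, k=3, idw_fn=custom_idw)
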
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.base_waypoints = None

        self.waypoints_2d = None
        self.waypoint_tree = None

        #the location of stopline
        self.stopline_2d = [[1148.56, 1184.65], [1559.2, 1158.43],
                            [2122.14, 1526.79], [2175.237, 1795.71],
                            [1493.29, 2947.67], [821.96, 2905.8],
                            [161.76, 2303.82], [351.84, 1574.65]]

        self.stopline_tree = KDTree(self.stopline_2d)

        #set the light's initial state not red
        self.red_light = [-1, -1, -1, -1, -1, -1, -1, -1]

        self.lights = []

        rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                         self.traffic_cb)

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.loop()

    def loop(self):
        rate = rospy.Rate(50)
        while not rospy.is_shutdown():
            if self.pose and self.base_waypoints:
                self.publish_redlight_idx()
            rate.sleep()

    def publish_redlight_idx(self):

        #get the closest traffic light's index in stopline_2d
        closest_light_idx = self.get_closest_redlight_idx()

        #get the state of closest light, if it's red light, get it's index in base_waypoint and publish, if not red light publish -1.
        redlight_state = self.red_light[closest_light_idx]

        if redlight_state == 1 and self.waypoint_tree:
            closest_red_light_idx = self.get_closest_waypoint_idx(
                closest_light_idx)
            self.upcoming_red_light_pub.publish(Int32(closest_red_light_idx))

        else:
            self.upcoming_red_light_pub.publish(Int32(-1))

    def pose_cb(self, msg):
        self.pose = msg

    def waypoints_cb(self, waypoints):
        self.base_waypoints = waypoints
        if not self.waypoints_2d:
            self.waypoints_2d = [[
                waypoint.pose.pose.position.x, waypoint.pose.pose.position.y
            ] for waypoint in waypoints.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2d)

    def traffic_cb(self, msg):
        self.lights = msg.lights

        light_state = [trafficlight.state for trafficlight in msg.lights]

        for i, state in enumerate(light_state):
            if state == 0:
                self.red_light[i] = 1
            else:
                self.red_light[i] = -1

    def get_closest_redlight_idx(self):
        #use this function to get the closest traffic light's index in stopline_2d
        x = self.pose.pose.position.x
        y = self.pose.pose.position.y

        closest_idx = self.stopline_tree.query([x, y], 1)[1]

        # check if closest is ahead or behind car
        closest_location = self.stopline_2d[closest_idx]
        pre_closest_location = self.stopline_2d[closest_idx - 1]

        cl_vect = np.array(closest_location)
        pre_vect = np.array(pre_closest_location)
        pos_vect = np.array([x, y])

        if np.dot(cl_vect - pre_vect, pos_vect - cl_vect) > 0:
            closest_idx = (closest_idx + 1) % len(self.stopline_2d)
        return closest_idx

    def get_closest_waypoint_idx(self, closest_light_idx):
        #use this function to get the redlight's index in waypoints_2d
        x = self.stopline_2d[closest_light_idx][0]
        y = self.stopline_2d[closest_light_idx][1]

        closest_idx = self.waypoint_tree.query([x, y], 1)[1]

        # check if closest is ahead or behind car
        closest_waypoint = self.waypoints_2d[closest_idx]
        pre_closest_waypoint = self.waypoints_2d[closest_idx - 1]

        cl_vect = np.array(closest_waypoint)
        pre_vect = np.array(pre_closest_waypoint)
        pos_vect = np.array([x, y])

        if np.dot(cl_vect - pre_vect, pos_vect - cl_vect) > 0:
            closest_idx = (closest_idx + 1) % len(self.waypoints_2d)
        return closest_idx
class WaypointUpdater(object):
    def __init__(self):
        rospy.init_node('waypoint_updater')

        rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)

        self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)

        self.pose = None
        self.stopline_wp_idx = -1
        self.base_waypoints = None
        self.waypoints_2d = None
        self.waypoints_tree = None

        self.loop()

    def loop(self):
        rate = rospy.Rate(50)
        while not rospy.is_shutdown():
            if self.pose and self.base_waypoints:
                self.publish_waypoints()
            rate.sleep()

    def pose_cb(self, msg):
        self.pose = msg

    def waypoints_cb(self, waypoints):
        self.base_waypoints = waypoints
        if not self.waypoints_2d:
            self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]
            self.waypoints_tree = KDTree(self.waypoints_2d)

    def get_closest_waypoint_idx(self):
        x = self.pose.pose.position.x
        y = self.pose.pose.position.y
        closest_idx = self.waypoints_tree.query([x, y], 1)[1]

        closest_coord = self.waypoints_2d[closest_idx]
        prev_coord = self.waypoints_2d[closest_idx - 1]

        cl_vec = np.array(closest_coord)
        prev_vec = np.array(prev_coord)
        pos_vec = np.array([x, y])

        val = np.dot(cl_vec - prev_vec, pos_vec - cl_vec)
        if val > 0:
            closest_idx = (closest_idx + 1) % len(self.waypoints_2d)
        return closest_idx

    def traffic_cb(self, msg):
        self.stopline_wp_idx = msg.data
        pass

    def obstacle_cb(self, msg):
        # TODO: Callback for /obstacle_waypoint message. We will implement it later
        pass

    def get_waypoint_velocity(self, waypoint):
        return waypoint.twist.twist.linear.x

    def set_waypoint_velocity(self, waypoints, waypoint, velocity):
        waypoints[waypoint].twist.twist.linear.x = velocity

    def publish_waypoints(self):
        final_lane = self.generate_lane()
        self.final_waypoints_pub.publish(final_lane)

    def generate_lane(self):
        lane = Lane()
        closest_idx = self.get_closest_waypoint_idx()
        base_waypoints = self.base_waypoints.waypoints[closest_idx:closest_idx + LOOKAHEAD_WPS]

        if self.stopline_wp_idx == -1 or (self.stopline_wp_idx >= (closest_idx + LOOKAHEAD_WPS)):
            lane.waypoints = base_waypoints
        else:
            lane.waypoints = self.decelerate_waypoints(base_waypoints, closest_idx)
        return lane

    def decelerate_waypoints(self, waypoints, closest_idx):
        new_wp = []
        for i, wp in enumerate(waypoints):
            p = Waypoint()
            p.pose = wp.pose
            stop_idx = max(self.stopline_wp_idx - closest_idx - 2, 0)
            dist = self.distance(waypoints, i, stop_idx)
            vel = math.sqrt(2 * MAX_DECEL * dist)
            if vel < 1.0:
                vel = 0.
            p.twist.twist.linear.x = min(vel, wp.twist.twist.linear.x)
            new_wp.append(p)
        return new_wp

    def distance(self, waypoints, wp1, wp2):
        dist = 0
        dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2  + (a.z-b.z)**2)
        for i in range(wp1, wp2+1):
            dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
            wp1 = i
        return dist
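
# The class above references LOOKAHEAD_WPS and MAX_DECEL without defining them in this
# fragment. Typical module-level values for this kind of waypoint updater (an
# assumption, not taken from the original) would be:
#
#     LOOKAHEAD_WPS = 200   # number of waypoints published ahead of the car
#     MAX_DECEL = 0.5       # m/s^2, used to shape the deceleration profile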
Example #49
0
            MPIrank,
            time.time() - tb))

if args.testMode:
    print('TEST MODE')
    sys.exit(1)

###########################################################
### GrC PFs to GoC AD for glutamatergic synapses     ###
###########################################################

vprint('brep.py rank {0} PFs to GoCs'.format(MPIrank))
tb = time.time()
tree = KDTree(PFs[:, 2:])
K = min(len(PFs), K4T)
distKDTree, idxKDTree = tree.query(GoCadend, k=K)

results = np.stack([
    np.repeat(np.arange(len(GoCadend)), K).reshape(len(GoCadend), K),
    PFs[idxKDTree, 1],
    distKDTree,
    idxKDTree,
]).transpose(1, 2, 0)
results = results.reshape(results.shape[0] * results.shape[1],
                          results.shape[2])
results = results[results[:, 2] <= h.PFtoGoCzone]
results = results[np.unique(results[:, :2], axis=0, return_index=True)[1]]
if hasattr(h, "BREP_dendcoeff") and h.BREP_dendcoeff > 0:
    dendLen = h.BREP_dendcoeff * np.linalg.norm(GoCadend[results[:, 0].astype(
        np.int64)] - GoC[GoCadendIdx[results[:, 0].astype(np.int64)]],
                                                axis=1)
Example #50
0
def plot_fig(output, dx, E, B, cutoff, energies, alpha0, text, xlim, ylim):
    fig = plot.figure(figsize=(244.0 / 72, 135.0 / 72))
    ax = fig.add_subplot(1, 1, 1, aspect='equal')
    ax.set_xlim(*xlim)
    ax.set_ylim(*ylim)

    x0 = np.zeros(3)
    for KE in energies:
        for dalpha, linewidth, opacity in zip(np.array(
            (-5 * deg, 0, 5 * deg)), [0.2, 0.4, 0.2], [0.5, 1.0, 0.5]):
            alpha = alpha0 + dalpha
            n0 = np.array((-np.sin(alpha), np.cos(alpha), 0))
            traj = calc_electron_trajectory(n0, x0, KE, E, B, dx, cutoff)
            ax.plot(traj[0] / mm,
                    traj[1] / mm,
                    linewidth=linewidth,
                    color='#0083b8',
                    alpha=opacity)

    alpha = alpha0 - 5 * deg
    n0 = np.array((-np.sin(alpha), np.cos(alpha), 0))
    x1 = calc_electron_trajectory(n0, x0, energies[-1], E, B, 0.5 * mm,
                                  cutoff)[0:2].T
    N = len(x1)
    x1 = x1[N // 4:N - N // 4]

    alpha = alpha0 + 5 * deg
    n0 = np.array((-np.sin(alpha), np.cos(alpha), 0))
    x2 = calc_electron_trajectory(n0, x0, energies[-1], E, B, 0.5 * mm,
                                  cutoff)[0:2].T
    N = len(x2)
    x2 = x2[N // 4:N - N // 4]

    tree = KDTree(x1)
    x2_scan = np.array([tree.query(q) for q in x2])
    i2 = np.argmin(x2_scan.T[0])
    dist, i1 = tree.query(x2[i2])

    x1_f = 1.05 * x1[i1]
    line_length = norm(x1_f)
    x1_0 = 0.6 * line_length * np.array((-np.sin(alpha0), np.cos(alpha0)))
    ax.plot([0, x1_f[0] / mm], [0, x1_f[1] / mm],
            color='#888888',
            linewidth=0.4,
            zorder=-10)
    ax.text(x1_f[0] / mm,
            x1_f[1] / mm,
            r' $\alpha_f = {:.1f}^\circ$'.format(90 -
                                                 np.arctan(x1_f[1] / x1_f[0]) /
                                                 deg),
            fontsize=7,
            verticalalignment='center')
    ax.plot([0, x1_0[0] / mm], [0, x1_0[1] / mm],
            color='#888888',
            linewidth=0.4,
            zorder=-10)
    alpha0_label = r' $\alpha_0 = {:.1f}^\circ$'.format(alpha0 / deg)
    ax.text(x1_0[0] / mm,
            x1_0[1] / mm,
            alpha0_label,
            fontsize=7,
            verticalalignment='center',
            horizontalalignment='right')

    plot.xlabel(r'$x$ (mm)', labelpad=0.0)
    plot.ylabel(r'$z$ (mm)', labelpad=0.0)

    ax.text(0.03,
            0.96,
            text,
            fontsize=7,
            verticalalignment='top',
            transform=ax.transAxes)
    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    output.savefig(fig, transparent=True)
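
# The closest-approach search in plot_fig boils down to: build a KD-tree on one
# trajectory, query it with every point of the other, and keep the pair with the
# smallest distance. A stripped-down sketch with synthetic points (an assumption, not
# part of the original example):
import numpy as np
from scipy.spatial import KDTree

a = np.random.rand(100, 2)          # points of the first trajectory
b = np.random.rand(80, 2) + 0.5     # points of the second trajectory
tree_a = KDTree(a)
d, idx_a = tree_a.query(b)          # nearest point in a for every point of b
j = int(np.argmin(d))               # index into b of the overall closest pair
i = int(idx_a[j])                   # matching index into a
closest_distance = d[j]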
Example #51
0
def scatter_to_regulargrid(xcoords=None,
                           ycoords=None,
                           ncellx=None,
                           ncelly=None,
                           values=None,
                           method='nearest',
                           maskland_dist=None):
    """
    interpolates scatter values (x,y,z) or meshgrids to regular grid

    Parameters
    ----------
    xcoords : array_like, optional
        x coordinates of the scatter points. The default is None.
    ycoords : array_like, optional
        y coordinates of the scatter points. The default is None.
    ncellx : int, optional
        Number of cells of the regular grid in x direction. The default is None.
    ncelly : int, optional
        Number of cells of the regular grid in y direction. The default is None.
    values : array_like, optional
        Scatter values to be interpolated onto the regular grid. The default is None.
    method : str, optional
        Interpolation method passed to scipy.interpolate.griddata. The default is 'nearest'.
    maskland_dist : float, optional
        Distance used to mask out land cells. Value differs per model resolution. For spherical coordinates use a maskland_dist of e.g. 0.01, for cartesian coordinates use a maskland_dist of e.g. 100. The default is None.

    Returns
    -------
    x_grid : numpy.ndarray
        x coordinates of the regular grid (meshgrid).
    y_grid : numpy.ndarray
        y coordinates of the regular grid (meshgrid).
    value_grid : numpy.ndarray
        Interpolated values on the regular grid.

    """
    import numpy as np
    from scipy.interpolate import griddata
    from scipy.spatial import KDTree

    reg_x_vec = np.linspace(np.min(xcoords), np.max(xcoords), ncellx)
    reg_y_vec = np.linspace(np.min(ycoords), np.max(ycoords), ncelly)
    x_grid, y_grid = np.meshgrid(reg_x_vec, reg_y_vec)

    #first replace masked value with nan (mask is not used in griddata)
    try:
        values[values.mask] = np.nan
    except:
        pass
    value_grid = griddata((xcoords, ycoords),
                          values, (x_grid, y_grid),
                          method=method)

    if maskland_dist is not None:
        #Mask out land values with maskland_dist
        #https://stackoverflow.com/questions/10456143/get-only-valid-points-in-2d-interpolation-of-cloud-point-using-scipy-numpy
        tree = KDTree(np.c_[xcoords, ycoords])
        dist, _ = tree.query(np.c_[x_grid.ravel(), y_grid.ravel()], k=1)
        dist = dist.reshape(x_grid.shape)
        value_grid[dist > maskland_dist] = np.nan

    return x_grid, y_grid, value_grid
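
# A short usage sketch for the function above; the random scatter and the cell counts
# are illustrative assumptions.
import numpy as np

xs = np.random.uniform(0, 10, 500)
ys = np.random.uniform(0, 10, 500)
zs = np.sin(xs) + np.cos(ys)
x_grid, y_grid, value_grid = scatter_to_regulargrid(xcoords=xs, ycoords=ys,
                                                    ncellx=50, ncelly=40, values=zs,
                                                    method='nearest')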
Example #52
0
class kNNBatchDataset(Dataset):
    def __init__(self, *args, **kwargs):
        super(kNNBatchDataset, self).__init__(*args, **kwargs)

        self.tree = None
        self.buildKD()
        self.center_idx = 0

    def buildKD(self):
        logging.info(" -- Building kD-Tree with %d points..." % len(self))
        self.tree = KDTree(self._xyz[:, :2], leafsize=100)  # build only on x/y
        logging.info(" --- kD-Tree built.")

    def getBatches(self, batch_size=1):
        centers = []
        for i in range(batch_size):
            if self.currIdx >= self.num_batches:
                break
            centers.append([
                self.xmin + self.spacing / 2 +
                (self.currIdx // self.num_rows) * self.spacing,
                self.ymin + self.spacing / 2 +
                (self.currIdx % self.num_rows) * self.spacing
            ])
            self.currIdx += 1
            print(centers)
        if centers:
            _, idx = self.tree.query(centers, k=self.k)
            return self.points_and_features[idx, :], self.labels[idx]
        else:
            return None, None

    def getBatches_Point(self, batch_size=1, num_point=1024, num_grid=32):

        batch_points = []
        batch_labels = []
        batch_labels = torch.zeros([batch_size, 1],
                                   dtype=torch.float)  # batch size * 1
        batch_voxels = []
        batch_points = torch.zeros([batch_size, 3, num_point],
                                   dtype=torch.float)
        for i in range(batch_size):
            points = []
            voxels = []
            tmp_index = self.center_idx
            if (self.shuffle == True):
                tmp_index = self.index_for_train[self.center_idx]

            _, idx = self.tree.query(self.points_and_features[tmp_index, :2],
                                     k=num_point)
            label = self.labels[tmp_index]
            # print(label)
            batch_labels[i] = torch.from_numpy(
                np.array(label).astype(np.float32))
            point_knn = np.full((num_point, 3), 1)
            point_knn[:, :] = self.points_and_features[idx, :3]
            batch_points[i, :, :] = torch.from_numpy(
                point_knn.astype(np.float32).T)
            self.center_idx += 1

            # for j in range(len(num_point)):
            #     point_knn = np.full((num_point[j], 3), 1)
            #     point_knn[:, :] = self.points_and_features[idx[:num_point[j]], :3]
            #     # point_knn = torch.from_numpy(np.array(point_knn).astype(np.float32))
            #     # print(point_knn.shape)
            #     batch_voxels[j, i, :, :, :, :] = torch.from_numpy(np.array(voxelization(point_knn,
            #                                         vox_size=num_grid)).astype(np.float32))  # resolution * batchsize *  ch * grid * grid * grid
            # points.append(point_knn)
            # print(point_knn)
            # print(point_knn.shape)
            # print(batch_voxels.shape)

            # batch_points.append(points) # batchsize * resolution * num.of points * xyz
            # batch_points = torch.from_numpy(np.array(batch_points).astype(np.float32)) # batchsize * resolution * num.of points * xyz

        # for i in range(batch_size):

        # batch_voxels.append(voxels)
        # batch_voxels =np.array(batch_voxels)# resolution * batchsize  grid * grid * grid

        # batch_labels = torch.from_numpy(np.array(batch_labels).astype(np.float32))  # batch size * 1

        return batch_points, batch_labels

    # append for inference    KUDO
    def getBatchsWithIdx(self, batch_size=1):
        centers = []
        for i in range(batch_size):
            if self.currIdx >= self.num_batches:
                break
            centers.append([
                self.xmin + self.spacing / 2 +
                (self.currIdx // self.num_rows) * self.spacing,
                self.ymin + self.spacing / 2 +
                (self.currIdx % self.num_rows) * self.spacing
            ])
            self.currIdx += 1
            # print(centers)
        if centers:
            _, idx = self.tree.query(centers, k=self.k)
            return self.points_and_features[
                idx, :], self.labels[idx], np.array(idx)
        else:
            return None, None, None

    def getBatchByIdx(self, batch_idx):
        centers = [[
            self.xmin + self.spacing / 2 +
            (batch_idx // self.num_rows) * self.spacing, self.ymin +
            self.spacing / 2 + (batch_idx % self.num_rows) * self.spacing
        ]]
        _, idx = self.tree.query(centers, k=self.k)
        return self.points_and_features[idx, :], self.labels[idx]
Example #53
0
class Beacons:
    def __init__(self, grid, beacon_grid=default_beacon_grid):
        self.grid = grid
        self.beacon_grid = beacon_grid

        # self.beacons = [Beacon(location, count) for count, location in enumerate(self._uniform_deployment())]
        # self.beacons = {count: Beacon(location, count) for count, location in enumerate(self._uniform_deployment())}
        # self.beacons = {beac_tag: Beacon(location, beac_tag) for beac_tag, location in self._uniform_deployment()}
        self.beacons = dict()

        self.masks = []
        self.map_closest_beacon = []
        self.n = len(self.beacons)

    # def _uniform_deployment(self):
    #     # return np.array(
    #     #     list(itertools.product(np.linspace(0, self.grid.domain[0], self.beacon_grid[0] + 2)[1:-1],
    #     #                            np.linspace(0, self.grid.domain[1], self.beacon_grid[1] + 2)[1:-1])))
    #     # return np.array([default_nest_location])
    #     return zip([0,1], [default_nest_location, default_food_location])
    #     # return np.array(
    #     #     list(itertools.product(np.linspace(default_nest_location[0]-2, default_nest_location[0]+2,
    #     #                                        self.beacon_grid[0] + 2)[1:-1],
    #     #                            np.linspace(default_nest_location[1]-2, default_nest_location[1]+2,
    #     #                                        self.beacon_grid[1] + 2)[1:-1]))

    def update_amplitude(self):
        for beac_tag in self.beacons:
            self.beacons[beac_tag].amplitude()

    def update_variance(self):
        for beac_tag in self.beacons:
            self.beacons[beac_tag].variance()

    def update_beacon_configuration(self,
                                    position_changed=True,
                                    weights_changed=True,
                                    just_initialized=False):
        if position_changed:
            self.update_masks()
        if weights_changed:
            self.update_neighbours_weights()
        # TODO check why just_initialized condition is needed
        if weights_changed and not just_initialized:
            self.update_amplitude()
            self.update_variance()

    def move_step(self, W):
        self.update_m_c_beacons(W)
        self.update_locations()
        self.update_beacon_configuration()

    # def remove_step(self):
    #     self.remove_beacons()
    #     self.update_masks()
    #     self.update_neighbours_beacons()

    # def switch_ant_beac_step(self,ants):
    #     ants.update_weights(self)
    #     # for ant in ants.ants:
    #     #     if sum(ant.w) < 0.01:
    #     #         self.beacons['aap'] = Beacon(ant.nt[1], 200)
    #     #         ants.ants.remove(ant)
    #     test = 1

    # def remove_beacons(self):
    #     weight_dict = self.check_weights(to_show = 'W',thres=threshold)
    #     ant_dict = self.check_ants()
    #     old_beacons = self.beacons.copy()
    #     for beac_tag in old_beacons:
    #         if beac_tag not in weight_dict and beac_tag not in ant_dict:
    #             del self.beacons[beac_tag]

    def update_masks(self):
        self.tree = KDTree(
            [self.beacons[beac_tag].pt[1] for beac_tag in self.beacons])
        self.map_closest_beacon = np.array([[
            self.tree.query([self.grid.X[cy][cx], self.grid.Y[cy][cx]])[1]
            for cx in range(len(self.grid.x))
        ] for cy in range(len(self.grid.y))])
        self.masks = {
            beac_tag: (self.map_closest_beacon == count) * 1
            for count, beac_tag in enumerate(self.beacons)
        }

        for beac_tag in self.masks:
            extended_mask = self.extend_mask(self.masks[beac_tag])
            self.beacons[beac_tag].neigh = [
                tag for tag in self.masks if True in (
                    extended_mask + self.masks[tag] >= 2) and tag != beac_tag
            ]

    def update_neighbours_weights(self):
        for beac_tag in self.masks:
            self.beacons[beac_tag].neigh_weigh = [[
                self.beacons[tag].w[0] for tag in self.beacons[beac_tag].neigh
            ], [
                self.beacons[tag].w[1] for tag in self.beacons[beac_tag].neigh
            ]]

    def evaporate_weights(self, rho=default_rho):
        for beac_tag in self.beacons:
            self.beacons[beac_tag].w *= (1 - rho)

    def initialize_weights(self):
        for beac_tag in self.beacons:
            # replace n (nr beacons) by nr of foragers
            # beacon.w = np.array([ampFactor * 1 / self.n, ampFactor * 1 / self.n])
            # beacon.w = np.array([0., 0.])
            self.beacons[beac_tag].w = np.array([0., 0.])
            # self.beacons[beac_tag].w = np.array([offset, offset])

    def update_m_c_beacons(self, W):
        # for count, beacon in enumerate(self.beacons):
        #     beacon.mt[0] = beacon.mt[1]
        #     beacon.ct[0][0] = beacon.ct[0][1]
        #     beacon.ct[1][0] = beacon.ct[1][1]
        #     beacon.mt[1] = simps(simps(W * self.masks[count], self.grid.x), self.grid.y)
        #     beacon.ct[0][1] = simps(simps(W * self.grid.X * self.masks[count], self.grid.x), self.grid.y) / beacon.mt[1]
        #     beacon.ct[1][1] = simps(simps(W * self.grid.Y * self.masks[count], self.grid.x), self.grid.y) / beacon.mt[1]

        for beac_tag in self.beacons:
            self.beacons[beac_tag].mt[0] = self.beacons[beac_tag].mt[1]
            self.beacons[beac_tag].ct[0][0] = self.beacons[beac_tag].ct[0][1]
            self.beacons[beac_tag].ct[1][0] = self.beacons[beac_tag].ct[1][1]
            self.beacons[beac_tag].mt[1] = simps(
                simps(W * self.masks[beac_tag], self.grid.x), self.grid.y)
            self.beacons[beac_tag].ct[0][1] = simps(
                simps(W * self.grid.X * self.masks[beac_tag], self.grid.x),
                self.grid.y) / self.beacons[beac_tag].mt[1]
            self.beacons[beac_tag].ct[1][1] = simps(
                simps(W * self.grid.Y * self.masks[beac_tag], self.grid.x),
                self.grid.y) / self.beacons[beac_tag].mt[1]

    def update_locations(self):
        # for beacon in self.beacons:
        #     delta_ct = [(beacon.ct[0][1] - beacon.ct[0][0]), (beacon.ct[1][1] - beacon.ct[1][0])]
        #     delta_x = (delta_ct[0] / dt) - kappa * (beacon.pt[1][0] - beacon.ct[0][1]) + delta_ct[0] * \
        #               self.neigh_control_term(beacon)[0]
        #     delta_y = (delta_ct[1] / dt) - kappa * (beacon.pt[1][1] - beacon.ct[1][1]) + delta_ct[1] * \
        #               self.neigh_control_term(beacon)[1]
        #
        #     beacon.pt[0] = beacon.pt[1]
        #     beacon.pt[1] = [beacon.pt[0][0] + bound(delta_x * dt),
        #                     beacon.pt[0][1] + bound(delta_y * dt)]

        for beac_tag in self.beacons:
            if beac_tag not in [0, 1]:
                delta_ct = [(self.beacons[beac_tag].ct[0][1] -
                             self.beacons[beac_tag].ct[0][0]),
                            (self.beacons[beac_tag].ct[1][1] -
                             self.beacons[beac_tag].ct[1][0])]
                delta_x = (delta_ct[0] / dt) - kappa * (self.beacons[beac_tag].pt[1][0] - self.beacons[beac_tag].ct[0][1]) + \
                          delta_ct[0] * self.neigh_control_term(self.beacons[beac_tag])[0]
                delta_y = (delta_ct[1] / dt) - kappa * (self.beacons[beac_tag].pt[1][1] - self.beacons[beac_tag].ct[1][1]) + \
                          delta_ct[1] * self.neigh_control_term(self.beacons[beac_tag])[1]

                self.beacons[beac_tag].pt[0] = self.beacons[beac_tag].pt[1]
                self.beacons[beac_tag].pt[1] = [
                    self.beacons[beac_tag].pt[0][0] + bound(delta_x * dt),
                    self.beacons[beac_tag].pt[0][1] + bound(delta_y * dt)
                ]

    def neigh_control_term(self, beacon):
        move_i = [0, 0]
        for count_j in beacon.neigh:
            j = self.beacons[count_j]
            move_j = [j.pt[0][1] - j.pt[0][0], j.pt[1][1] - j.pt[1][0]]
            if move_j[0] != 0:
                move_i[0] += (1 / move_j[0]) * (
                    (j.ct[0][1] - j.ct[0][0]) / dt - kappa *
                    (j.pt[0][1] - j.ct[0][1]))
            if move_j[1] != 0:
                move_i[1] += (1 / move_j[1]) * (
                    (j.ct[1][1] - j.ct[1][0]) / dt - kappa *
                    (j.pt[1][1] - j.ct[1][1]))
        return move_i

    def fnc_ants_at_beacons(self, ants):
        # for tag, beacon in enumerate(self.beacons):
        #     beacon.fnc_ants_at_beacon(ants,tag)
        for beac_tag in self.beacons:
            self.beacons[beac_tag].fnc_ants_at_beacon(ants)

    @staticmethod
    def extend_mask(mask):
        return ((shift(mask, [1, 1], cval=0) + shift(mask, [1, -1], cval=0) +
                 shift(mask, [-1, 1], cval=0) + shift(mask, [-1, -1], cval=0) +
                 shift(mask, [0, -1], cval=0) + shift(mask, [0, 1], cval=0) +
                 shift(mask, [1, 0], cval=0) + shift(mask, [-1, 0], cval=0)) >=
                1) * 1

    def check_weights(self, to_check='W1', thres=0.):
        if to_check == 'W1':
            return {
                beac_tag: self.beacons[beac_tag].w[0]
                for beac_tag in self.beacons
                if self.beacons[beac_tag].w[0] > thres
            }
        elif to_check == 'W2':
            return {
                beac_tag: self.beacons[beac_tag].w[1]
                for beac_tag in self.beacons
                if self.beacons[beac_tag].w[1] > thres
            }
        elif to_check == 'W':
            return {
                beac_tag:
                self.beacons[beac_tag].w[0] + self.beacons[beac_tag].w[1]
                for beac_tag in self.beacons
                if self.beacons[beac_tag].w[0] > thres
                or self.beacons[beac_tag].w[1] > thres
            }

    def check_ants(self, thres=0):
        return {
            beac_tag: self.beacons[beac_tag].ants_at_beacon
            for beac_tag in self.beacons
            if self.beacons[beac_tag].ants_at_beacon > thres
        }
Example #54
0
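# The benchmark below uses m (dimension), n (number of data points) and r (number of
# query points) without defining them in this fragment; plausible values (an
# assumption) would be, for example:
#
#     import time
#     import numpy as np
#     from scipy.spatial import KDTree, cKDTree
#     m, n, r = 3, 10000, 1000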
data = np.concatenate(
    (np.random.randn(n // 2, m), np.random.randn(n - n // 2, m) + np.ones(m)))
queries = np.concatenate(
    (np.random.randn(r // 2, m), np.random.randn(r - r // 2, m) + np.ones(m)))

print "dimension %d, %d points" % (m, n)

t = time.time()
T1 = KDTree(data)
print "KDTree constructed:\t%g" % (time.time() - t)
t = time.time()
T2 = cKDTree(data)
print "cKDTree constructed:\t%g" % (time.time() - t)

t = time.time()
w = T1.query(queries)
print "KDTree %d lookups:\t%g" % (r, time.time() - t)
del w

t = time.time()
w = T2.query(queries)
print "cKDTree %d lookups:\t%g" % (r, time.time() - t)
del w

T3 = cKDTree(data, leafsize=n)
t = time.time()
w = T3.query(queries)
print "flat cKDTree %d lookups:\t%g" % (r, time.time() - t)
del w

t = time.time()
        pix = im.load()
        pix_freqs = Counter(
            [pix[x, y] for x in range(im.size[0]) for y in range(im.size[1])])
        pix_freqs_sorted = sorted(pix_freqs.items(), key=lambda x: x[1])
        pix_freqs_sorted.reverse()
        # print(pix)
        outImg = Image.new('RGB', im.size, color='white')
        outFile = open(
            filepath_bytes + filename + "_" + str(new_w) + "_" + str(new_h) +
            file_text_ext, 'w')
        i = 0
        for y in range(im.size[1]):
            for x in range(im.size[0]):
                pixel = im.getpixel((x, y))
                #print(pixel)
                if (pixel[3] < 200):
                    outImg.putpixel((x, y), palette_rgb[0])
                    outFile.write("%x\n" % (0))
                    #print(i)
                else:
                    index = pixel_tree.query(pixel[:3])[1]
                    outImg.putpixel((x, y), palette_rgb[index])
                    outFile.write("%x\n" % (index))
                i += 1
        outFile.close()
        outImg.save(filepath_converted + filename + "_" + str(new_w) + "_" +
                    str(new_h) + file_picture_ext)
        print('Done: ' + filename + ', ' + str(iteration))
    new_w = orig_w - width_inc
    new_h = orig_h - height_inc
Example #56
0
class TLDetector(object):
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.camera_image = None
        self.waypoints_2d = None
        self.waypoint_tree = None
        self.lights = []

        self.state2light = {0: 'Red', 1: 'Yellow', 2: 'Green', 4: 'Unknown'}

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)

        self.pose_lock = threading.RLock()
        self.base_wp_lock = threading.RLock()
        self.lights_lock = threading.RLock()
        self.image_lock = threading.RLock()
        self.bbox_lock = threading.RLock()

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        self.stop_line_positions = self.config['stop_line_positions']
        rospy.loginfo("Length of self.stop_line_positions {0}".format(
            len(self.stop_line_positions)))

        self.stoplines_2d = [[stopline[0], stopline[1]]
                             for stopline in self.stop_line_positions]
        self.stopline_tree = KDTree(self.stoplines_2d)
        rospy.loginfo("Length of self.stoplines_2d {0}: {1}".format(
            len(self.stoplines_2d), self.stoplines_2d))

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier()
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0
        self.prev_detected = TrafficLight.UNKNOWN

        # darknet_ros message
        sub5 = rospy.Subscriber('/darknet_ros/bounding_boxes', BoundingBoxes,
                                self.detected_bb_cb)

        # Get simulator_mode parameter (1== ON, 0==OFF)
        self.simulator_mode = rospy.get_param("/simulator_mode")

        if int(self.simulator_mode) == 0:
            self.cropped_tl_bb_pub = rospy.Publisher('/cropped_bb',
                                                     Image,
                                                     queue_size=1)

        self.TL_BB_list = []

        # atexit.register(self._ros_atexit)

        #        self.loop()
        #
        #    def loop(self):
        #        rate = rospy.Rate(10)  # 10hz
        #        while not rospy.is_shutdown():
        #            if None not in [self.pose, self.waypoints, self.camera_image, self.waypoints_2d, self.waypoint_tree]:
        #                self.update_waypoints()
        #            rate.sleep()

        rospy.spin()

    def detected_bb_cb(self, msg):
        simulator_bb_size_threshold = 24  #px
        site_bb_size_threshold = 40  #px
        # min probability of detection
        simulator_bb_probability = 0.75
        site_bb_probability = 0.25

        if int(self.simulator_mode) == 1:
            prob_thresh = simulator_bb_probability
            size_thresh = simulator_bb_size_threshold
        else:
            prob_thresh = site_bb_probability
            size_thresh = site_bb_size_threshold

        self.bbox_lock.acquire()
        try:
            self.TL_BB_list = []
            for bb in msg.bounding_boxes:
                if str(bb.Class
                       ) == 'traffic light' and bb.probability >= prob_thresh:
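                    # Keep the detection only if the bounding-box diagonal is at
                    # least `size_thresh` pixels, i.e. the light appears close enough.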
                    if math.sqrt((bb.xmin - bb.xmax)**2 +
                                 (bb.ymin - bb.ymax)**2) >= size_thresh:
                        self.TL_BB_list.append(bb)
            if None not in [
                    self.pose, self.waypoints, self.camera_image,
                    self.waypoints_2d, self.waypoint_tree
            ] and len(self.TL_BB_list) > 0:
                self.update_waypoints()
        finally:
            self.bbox_lock.release()

    def _ros_atexit(self):
        rospy.loginfo("Shutdown signal received")

    def update_waypoints(self):
        '''
        Publish upcoming red lights at camera frequency.
        Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        of times till we start using it. Otherwise the previous stable state is
        used.
        '''
        self.pose_lock.acquire()
        try:
            pose = self.pose
            vehicle_idx = self.get_closest_waypoint(pose.pose.position.x,
                                                    pose.pose.position.y)
        finally:
            self.pose_lock.release()

        self.base_wp_lock.acquire()
        try:
            waypoints = self.waypoints
        finally:
            self.base_wp_lock.release()

        self.lights_lock.acquire()
        try:
            lights = self.lights
        finally:
            self.lights_lock.release()

        self.image_lock.acquire()
        try:
            camera_image = self.camera_image
        finally:
            self.image_lock.release()

        self.bbox_lock.acquire()
        try:
            TL_BB_list = self.TL_BB_list
        finally:
            self.bbox_lock.release()

        light_wp, state = self.process_traffic_lights(pose, lights, TL_BB_list,
                                                      camera_image)

        if self.state == TrafficLight.UNKNOWN or self.state != state:
            self.state = state
            self.state_count = 0
        elif self.state == state:
            self.state_count += 1

        if self.state_count >= STATE_COUNT_THRESHOLD:
            self.state_count = 0
            self.last_state = self.state
            self.state = state
            behind = self.is_behind(waypoints, light_wp, vehicle_idx)
            return_wp = light_wp if (
                state == TrafficLight.RED
                or state == TrafficLight.UNKNOWN) and not behind else -1
            self.last_wp = return_wp
            rospy.loginfo(
                "{0} light at wp {1}, vehicle_wp: {2}, return_wp: {3} (behind: {4})"
                .format(self.state2light[state], light_wp, vehicle_idx,
                        return_wp, behind))
            self.upcoming_red_light_pub.publish(Int32(return_wp))
        else:
            self.upcoming_red_light_pub.publish(Int32(self.last_wp))

    def pose_cb(self, msg):
        self.pose_lock.acquire()
        try:
            self.pose = msg
        finally:
            self.pose_lock.release()

    def waypoints_cb(self, lane):
        self.base_wp_lock.acquire()
        try:
            self.waypoints = lane.waypoints
            if self.waypoints_2d is None:
                self.waypoints_2d = [[
                    waypoint.pose.pose.position.x,
                    waypoint.pose.pose.position.y
                ] for waypoint in self.waypoints]
                self.waypoint_tree = KDTree(self.waypoints_2d)

        finally:
            self.base_wp_lock.release()

    def traffic_cb(self, msg):
        self.lights_lock.acquire()
        try:
            self.lights = msg.lights
        finally:
            self.lights_lock.release()

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
            of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera

        """
        self.image_lock.acquire()
        try:
            self.camera_image = msg
            self.has_image = True
        finally:
            self.image_lock.release()

    def is_behind(self, waypoints, object_wp_idx, vehicle_wp_idx):
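        # On a looped waypoint list, compare the forward and backward index
        # distances (modulo the number of waypoints) to decide whether the
        # object's waypoint lies behind the vehicle's waypoint.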
        wps_len = len(waypoints)

        if object_wp_idx == vehicle_wp_idx:
            return False

        if (object_wp_idx > vehicle_wp_idx):
            if (vehicle_wp_idx + wps_len) - object_wp_idx < (object_wp_idx -
                                                             vehicle_wp_idx):
                return True
            else:
                return False
        else:
            if (object_wp_idx + wps_len) - vehicle_wp_idx < (vehicle_wp_idx -
                                                             object_wp_idx):
                return False
            else:
                return True

    def get_closest_waypoint(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            x (float), y (float): position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        wps_len = len(self.waypoints_2d)
        closest_idx = self.waypoint_tree.query([x, y], 1)[1]

        closest_coord = self.waypoints_2d[closest_idx]
        prev_coord = self.waypoints_2d[(wps_len + closest_idx - 1) % wps_len]

        cl_vect = np.array(closest_coord)
        prev_vect = np.array(prev_coord)
        pose_vect = np.array([x, y])

        val = np.dot(cl_vect - prev_vect, pose_vect - cl_vect)
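        # A negative projection of (query - closest) onto the local path direction
        # means the closest waypoint lies ahead of the query point; fall back to
        # the previous waypoint in that case.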
        if (val < 0.0):
            closest_idx = (closest_idx + wps_len - 1) % wps_len

        return closest_idx

    def get_closest_stopline(self, x, y):
        """Identifies the closest path waypoint to the given position
            https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
        Args:
            pose (Pose): position to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints

        """
        closest_idx = self.stopline_tree.query([x, y], 1)[1]
        #rospy.loginfo("get_closest_stopline x {0} y {1}".format(x, y))
        #rospy.loginfo("Closest stop line {0}: x {1} y {2}".format(closest_idx, x, y))
        #rospy.loginfo("Length of self.stoplines_2d {0}: {1}".format(len(self.stoplines_2d), self.stoplines_2d))
        return closest_idx

    def get_light_state(self, camera_image, light, tl_bb_list):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        #if(not self.has_image):
        #    self.prev_light_loc = None
        #    return False
        state = TrafficLight.UNKNOWN
        if len(tl_bb_list) > 0:
            cv_image = self.bridge.imgmsg_to_cv2(camera_image, "bgr8")

            #if int(self.simulator_mode) == 0:
            #Get classification
            state = self.light_classifier.get_classification(
                cv_image, tl_bb_list, self.simulator_mode)
        return state

    def process_traffic_lights(self, pose, lights, tl_bb_list, camera_image):
        """Finds closest visible traffic light, if one exists, and determines its
            location and color

        Returns:
            int: index of waypoint closest to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)

        """
        light = None

        # List of positions that correspond to the line to stop in front of for a given intersection
        # stop_line_positions = self.config['stop_line_positions']
        light_idx = self.get_closest_stopline(pose.pose.position.x,
                                              pose.pose.position.y)
        # rospy.loginfo("Pose x {0} y {1}".format(pose.pose.position.x, pose.pose.position.y))
        # rospy.loginfo("light x {0} y {1}".format(self.stop_line_positions[light_idx][0], self.stop_line_positions[light_idx][1]))
        light_wp = self.get_closest_waypoint(
            self.stop_line_positions[light_idx][0],
            self.stop_line_positions[light_idx][1])
        # rospy.loginfo("light wp {0}".format(light_wp))
        light = self.lights[light_idx]

        #TODO find the closest visible traffic light (if one exists)

        if light is not None:
            state = self.get_light_state(camera_image, light, tl_bb_list)
            return light_wp, state
        return -1, TrafficLight.UNKNOWN
Example #57
0
def process(input_image, f, params, model_params, tf_sess, flist, kalmangroup):

    oriImg = input_image  # B,G,R order
    if f % video_process == 0:
        subset_all, all_peaks_all, t1, t2, t3 = predict(
            oriImg, model_params, tf_sess)
    else:
        subset_all, all_peaks_all, t1, t2, t3 = predict(
            oriImg,
            model_params,
            tf_sess,
            lenimg=len(flist) if len(flist) != 0 else 1,
            flist=flist)

    canvas = input_image  # B,G,R order

    t4 = time.time()
    flistnew = flist.copy()
    tree = []
    suspectpoint = []
    if f != 0 and flist != []:
        points = [[x[0], x[1]] for x in flist]
        tree = KDTree(points)

    for k in range(len(subset_all)):
        subset = subset_all[k]
        all_peaks = all_peaks_all[k]
        newloc_all = []
        if f % video_process == 0:
            locx = 0
            locy = 0
        else:
            locx = flist[k][0] - PAD
            locy = flist[k][1] - PAD

        for n in range(len(subset)):
            loc = []
            newloc = []
            lost = 1
            for i in [1, 0, 2]:
                idx = int(subset[n][i])
                if int(subset[n][i]) != -1:
                    location = list(
                        map(int,
                            all_peaks[i][int(idx - all_peaks[i][0][3])][0:2]))
                    location[0] = location[0] + locx
                    location[1] = location[1] + locy
                    newloc.append(location[0])
                    newloc.append(location[1])
                    location = tuple(location)
                    loc.append(location)
                else:
                    lost = i
            newloc.append(lost)
            newloc_all.append(newloc)

        for newloc in newloc_all:
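            # Match this detection to the nearest existing track via the KDTree;
            # detections farther than 20 px from any track are held as 'suspect'
            # and resolved after the confident matches below.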
            if f != 0 and tree != []:
                dis, index = tree.query([newloc[0], newloc[1]])
                if dis > 20:
                    suspectpoint.append([newloc, dis, index])
                else:
                    No = index
                    if detected[No] == 1:
                        flistnew[No] = Fuse(newloc, flistnew[No])
                        continue
                    else:
                        detected[No] = 1
                        flistnew[No] = newloc
            else:
                detected.append(1)
                flistnew.append(newloc)
    if suspectpoint != []:
        for i in range(len(suspectpoint)):
            if detected[suspectpoint[i][2]] == 1:
                detected.append(1)
                flistnew.append(suspectpoint[i][0])
            else:
                if suspectpoint[i][1] <= 80:
                    No = suspectpoint[i][2]
                    detected[No] = 1
                    flistnew[No] = suspectpoint[i][0]
    if Kalman:
        for i in range(len(flistnew)):
            if i >= len(flist):
                newKalman = CreateKalman()
                newloc1 = [0, 0, 0, 0, 0, 0, 1]
                n = 0
                while (n <= 20):
                    newloc1 = Update(flistnew[i],
                                     newKalman,
                                     input_image.shape[1],
                                     input_image.shape[0],
                                     first=True)
                    n = n + 1
                kalmangroup.append(newKalman)
                flistnew[i] = newloc1
            else:
                flistnew[i] = Update(flistnew[i], kalmangroup[i],
                                     input_image.shape[1],
                                     input_image.shape[0])
    # Drop tracks whose detection counter has fallen to -20 or below; iterate in
    # reverse so deletions do not shift the indices still to be visited.
    for i in range(len(detected) - 1, -1, -1):
        if detected[i] <= -20:
            del flistnew[i]
            del detected[i]
            if Kalman:
                del kalmangroup[i]
    for i in range(len(detected)):
        detected[i] = detected[i] - 1
    t5 = time.time()
    for i in range(len(flistnew)):
        newloc = flistnew[i]
        if detected[i] <= 0:
            lost = newloc[-1]
            if lost == 0:
                loc = [(newloc[0], newloc[1]), (newloc[2], newloc[3])]
                cv2.circle(canvas, loc[0], 4, colors[1], thickness=-1)
                cv2.circle(canvas, loc[1], 4, colors[2], thickness=-1)
                canvas = cv2.line(canvas, loc[0], loc[1], colors[1], 2)
            elif lost == 2:
                loc = [(newloc[0], newloc[1]), (newloc[2], newloc[3])]
                cv2.circle(canvas, loc[0], 4, colors[0], thickness=-1)
                cv2.circle(canvas, loc[1], 4, colors[1], thickness=-1)
                canvas = cv2.line(canvas, loc[0], loc[1], colors[0], 2)
            else:
                loc = [(newloc[0], newloc[1]), (newloc[2], newloc[3]),
                       (newloc[4], newloc[5])]
                cv2.circle(canvas, loc[0], 4, colors[1], thickness=-1)
                cv2.circle(canvas, loc[1], 4, colors[0], thickness=-1)
                cv2.circle(canvas, loc[2], 4, colors[2], thickness=-1)
                canvas = cv2.line(canvas, loc[1], loc[0], colors[0], 2)
                canvas = cv2.line(canvas, loc[0], loc[2], colors[1], 2)
            cv2.putText(canvas, str(i), loc[0], font, 0.8, (255, 255, 255), 2)
    flist = flistnew
    t6 = time.time()

    return canvas, t1, t2, t3, t4, t5, t6, flist, kalmangroup
Example #58
0
class RtreeKDTree(object):
    """ wrap scipy KDTree spatial index to look as much like Rtree class
    as possible
    """
    def __init__(self, stream, interleaved=False):
        """ stream: an iterable, returning tuples of the form (id,[xmin,xmax,ymin,ymax],object)
        for now, requires that xmin==xmax, and ymin==ymax

        For now, only interleaved=False is supported.
        """
        if interleaved:
            raise Exception(
                "No support for interleaved index.  You must use xxyy ordering"
            )

        it = iter(stream)

        self.points = []
        self.data = []
        for fid, xxyy, obj in it:
            if xxyy[0] != xxyy[1] or xxyy[2] != xxyy[3]:
                raise Exception(
                    "No support in kdtree for finite sized objects")
            self.points.append([xxyy[0], xxyy[2]])
            self.data.append((fid, obj))

        self.refresh_tree()

    def refresh_tree(self):
        self.kdt = KDTree(self.points)

    def nearest(self, rect, count):
        xy = [rect[0], rect[2]]
        dists, results = self.kdt.query(xy, k=count)
        if count == 1:
            dists = [dists]
            results = [results]

        return [self.data[r][0] for r in results]

    def intersects(self, xxyy):
        """ This should be made compatible with the regular RTree call...
        """
        raise Exception("Intersects is not implemented in scipy.KDTree")
        return []

    def insert(self, feat_id, rect=None):
        if rect is None:
            raise Exception(
                "Not sure what inserting an empty rectangle is supposed to do..."
            )
        if rect[0] != rect[1] or rect[2] != rect[3]:
            raise Exception("No support in kdtree for finite sized objects")
        self.points.append([rect[0], rect[2]])
        self.data.append([feat_id, None])

        self.refresh_tree()

    def feat_id_to_index(self, feat_id):
        for i in range(len(self.data)):
            if self.data[i][0] == feat_id:
                return i
        raise Exception("feature id not found")

    def delete(self, feat_id, rect):
        index = self.feat_id_to_index(feat_id)
        del self.data[index]
        del self.points[index]
        self.refresh_tree()
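
A minimal usage sketch for the wrapper above (the point stream and query rectangle are hypothetical values; as the constructor requires, every box is degenerate, i.e. xmin == xmax and ymin == ymax):

from scipy.spatial import KDTree  # the class above expects KDTree to be in scope

# Hypothetical point stream: (id, [xmin, xmax, ymin, ymax], object).
stream = [(0, [0.0, 0.0, 0.0, 0.0], None),
          (1, [1.0, 1.0, 1.0, 1.0], None),
          (2, [0.0, 0.0, 2.0, 2.0], None)]
index = RtreeKDTree(stream, interleaved=False)

# nearest() takes an xxyy rectangle (also degenerate) plus a neighbour count and
# returns the feature ids of the closest stored points.
print(index.nearest([0.2, 0.2, 0.1, 0.1], count=2))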
Example #59
0
class SIR_Grid:
    """
    A grid is a set of levels which in turn contain a set of nodes. 
    """
    def __init__(self, log_minimum, n, k):
        #def __init__(self, n):
        """
        """
        self.SIR_Levels = []
        self.log_minimum = log_minimum
        self.k = k
        self.n = n
        self.r0 = []
        self.serial_interval = []
        self.policies = []
        self.best_policy_function = []
        self.best_policy_indexes = []

    def fill(self, r0, serial_interval, policies, fatality_rate,
             healthcare_capacity, overcapacity_fatality_rate, time_to_vaccine):
        shares = make_shares(self.n)
        infected_levels = make_infected_levels(self.log_minimum, log(1 / r0),
                                               self.k)
        #shares = make_shares(self.n)
        last_level = []
        count_policies = [0]
        self.r0 = r0
        self.serial_interval = serial_interval
        herd_immunity_thresholds = [[max(1 - 1 / r0, 0)], [0]]
        available_policies = [SIR_Policy('none', r0, 0)]
        data = []
        self.best_policy_indexes = []
        for policy in policies:
            available_policies.append(
                SIR_Policy(policy[0], policy[1], policy[2]))
            herd_immunity_thresholds[0].append(max(1 - 1 / policy[1], 0))
            herd_immunity_thresholds[1].append(policy[2] * time_to_vaccine)
            count_policies.append(0)
        self.policies = available_policies
        for i, share in enumerate(reversed(shares)):
            self.SIR_Levels.append(SIR_Levels(share))
            current_level = self.SIR_Levels[i]
            minimum_infected = infected_levels[0]
            for j, infected in enumerate(infected_levels):
                if infected <= share:
                    immune = share - infected
                    current_level.nodes.append(SIR_Node(immune, infected))
                    current_node = current_level.nodes[j]
                    current_node.fill(minimum_infected, last_level,
                                      available_policies, serial_interval,
                                      herd_immunity_thresholds, fatality_rate,
                                      healthcare_capacity,
                                      overcapacity_fatality_rate,
                                      time_to_vaccine)
                    count_policies[
                        current_node.best_action_index] = count_policies[
                            current_node.best_action_index] + 1
                    data.append([immune, infected])
                    self.best_policy_indexes.append(
                        current_node.best_action_index)
            if i + 1 < self.n:
                current_level.make_cost()
                last_level = current_level
        self.best_policy_function = KDTree(data)
        print(count_policies)

    def find_best_policy(self, immune, infected):
        """
        This function uses the KDTree object populated in the 'fill' method to approximate the cost minimizing
        policy at the point (immune, infected). That policy will be based on the nearest node.
        """
        distance, index = self.best_policy_function.query([immune, infected])
        return (self.best_policy_indexes[index])

    def plot(self, policies, colors):
        """
        this function creates a plot of the optimal decision at each node in the state space using a color coded
        arrow. The arrow indicates the start and end points of the epidemic under the action.
        """
        ax = plt.axes(xlim=(0, 0.5), ylim=(0, 0.025))
        ax.set_xlabel('share of population immune')
        ax.set_ylabel('share of population infected')
        ax.text(0.05, 0.0255, "none", color='g')
        ax.text(0.15, 0.0255, "sustainable", color='b')
        ax.text(0.25, 0.0255, "unsustainable", color='r')
        for SIR_Level in self.SIR_Levels:
            for node in SIR_Level.nodes:
                action = node.best_action_index
                start_arrow_x = node.actions[action].start_point.immune
                start_arrow_y = node.actions[action].start_point.infected
                length_arrow_x = node.actions[
                    action].end_point.immune - start_arrow_x
                length_arrow_y = node.actions[
                    action].end_point.infected - start_arrow_y
                ax.arrow(start_arrow_x,
                         start_arrow_y,
                         length_arrow_x,
                         length_arrow_y,
                         width=0.0005,
                         head_width=0.001,
                         color=colors[action])
        plt.show()

    def plot_with_projection(self, policies, colors, s0, st, it, date_range):
        ax = plt.axes(xlim=(0, 0.5), ylim=(0, 0.025))
        ax.set_xlabel('share of population immune')
        ax.set_ylabel('share of population infected')
        ax.text(0.05, 0.0255, "none", color='g')
        ax.text(0.15, 0.0255, "sustainable", color='b')
        ax.text(0.25, 0.0255, "unsustainable", color='r')
        for SIR_Level in self.SIR_Levels:
            for node in SIR_Level.nodes:
                action = node.best_action_index
                start_arrow_x = node.actions[action].start_point.immune
                start_arrow_y = node.actions[action].start_point.infected
                length_arrow_x = node.actions[
                    action].end_point.immune - start_arrow_x
                length_arrow_y = node.actions[
                    action].end_point.infected - start_arrow_y
                ax.arrow(start_arrow_x,
                         start_arrow_y,
                         length_arrow_x,
                         length_arrow_y,
                         width=0.0005,
                         head_width=0.001,
                         color=colors[action])
        projection = self.project_epidemic(1, st / s0, it / s0, date_range)
        ax.plot(projection['immune'], projection['infected'], lw=5, color='k')
        plt.show()

    def project_epidemic(self, s0, st, it, date_range):
        """
        This function projects the path of the epidemic through the state space
        """
        projected = pd.DataFrame(
            columns=['date', 'policy', 'immune', 'infected', 'new infections'])
        infected = it / s0
        immune = 1 - st / s0
        for date in date_range:
            best_policy_index = self.find_best_policy(immune, infected)
            policy_r0 = self.policies[best_policy_index].r0
            policy = self.policies[best_policy_index].name
            immune_after, infected_after = SIR_step(immune, infected,
                                                    policy_r0,
                                                    self.serial_interval)
            new_infections = infected_after - infected + immune_after - immune
            immune = immune_after
            infected = infected_after
            projected = projected.append(
                {
                    'date': date,
                    'policy': policy,
                    'immune': immune * s0,
                    'infected': infected * s0,
                    'new infections': new_infections * s0
                },
                ignore_index=True)
        return (projected)
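
The nearest-node policy lookup in find_best_policy is just a KDTree query over (immune, infected) grid coordinates; a stripped-down sketch of that idea, with made-up node data:

import numpy as np
from scipy.spatial import KDTree

# Hypothetical grid nodes (immune, infected) and the best policy index at each node.
nodes = np.array([[0.0, 0.010], [0.1, 0.005], [0.2, 0.002]])
best_policy_indexes = [2, 1, 0]
policy_lookup = KDTree(nodes)

# An off-grid state is assigned the policy of its nearest grid node.
_, idx = policy_lookup.query([0.12, 0.004])
print(best_policy_indexes[idx])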
Example #60
0
class Mesh:
    def __init__(self, filename, kdTree=False):
        '''Load a mesh that has already been generated and written to a file.
        If filename is None, an empty mesh is created instead.'''
        if filename is None:
            self.vs = []
            self.ws = []
            self.cells = []
            self.neighbours = {}
        else:

            self.filename = filename
            f = open(filename)
            lines = f.readlines()
            if len(lines) == 0:
                raise ValueError('Empty mesh file: ' + filename)

            self.dt = float(lines[1])
            # preamble is two lines
            self.n_char = len(lines) - 2
            self.vs = []
            self.ws = []
            self.cells = []

            # Only one inversion may occur in a meshfile
            self.inversion = False
            blocks = self.__split_blocks__(lines)

            # it is now assumed that self.cells is a list of lists of quadrilaterals.
            # this is not enforced yet

            self.cells = []
            # We create a place holder for strip 0. This cell has area 0; this can be used to test whether
            # real stationary cells have been added or not
            self.cells.append([Cell([0.0], [0.0])])

            for block in blocks:
                vs, ws = self.__build_arrays__(block)
                self.vs.append(vs)
                self.ws.append(ws)
                self.__build_grid__(vs, ws)

            if (kdTree == True): self.__build_tree__()
            self.neighbours = {}
            self.__build_neighbours__()

            self.are_labels_drawn = True

    def __split_blocks__(self, lines):

        blocks = []
        newblock = []
        for line in lines[2:]:

            if 'end' in line:
                return blocks

            if 'closed' in line or 'inversion' in line:
                if len(newblock) % 2 != 0:
                    raise ValueError
                blocks.append(list(newblock))
                newblock = []

                if 'inversion' in line:
                    if self.inversion == True:
                        raise ValueError
                    self.inversion = True
                    # demarcate where the inversion happens, so that later the number
                    # of strips before the inversion can be calculated
                    bound = len(blocks)
            else:
                newblock.append(line)
        blocks.append(newblock)

        # self bound is the upper boundary of the regular mesh. From this strip number onwards,
        # strips are made from 'inversions'.
        self.bound = 1
        for i, block in enumerate(blocks):
            if self.inversion == True and i == bound: break
            nr_strips = len(block) // 2 - 1
            self.bound += nr_strips

        return blocks

    def __build_arrays__(self, block):
        '''Builds the arrays of characteristics; the v points and the w points
        of each characteristic get their own list.'''
        vs = []
        ws = []

        for line in block[0::2]:
            items = line.split()
            data = [float(el) for el in items]
            vs.append(data)
        for line in block[1::2]:
            items = line.split()
            data = [float(el) for el in items]
            ws.append(data)
        return vs, ws

    def __build_tree__(self):
        ''' Build a KDTree out of the characteristic arrays. The grid points are stored
        in self.data. The same point may occur multiple times.'''
        points = []

        tandem = zip(self.vs, self.ws)
        for block in tandem:
            blocks = zip(block[0], block[1])
            for el in blocks:
                coords = zip(el[0], el[1])
                for coord in coords:
                    point = [coord[0], coord[1]]
                    points.append(point)

        self.data = np.array(points)
        self.tree = KDTree(self.data)

    def __build_grid__(self, vs, ws):
        ''' Builds a list of lists of cells.'''

        for i, v in enumerate(vs):
            if i < len(vs) - 1:
                m = min(len(vs[i]), len(vs[i + 1]))
                cell = []
                for j in range(m - 1):
                    cellv = [
                        vs[i][j], vs[i][j + 1], vs[i + 1][j + 1], vs[i + 1][j]
                    ]
                    cellw = [
                        ws[i][j], ws[i][j + 1], ws[i + 1][j + 1], ws[i + 1][j]
                    ]
                    try:
                        quad = Quadrilateral(cellv, cellw)
                    except ValueError:
                        print('An error, probably a degeneracy in strip: ', i, ' cell: ', j)
                        continue
                    cell.append(quad)
                self.cells.append(cell)

    def __build_neighbours__(self):
        '''Builds a dictionary of lists of cells, keyed by a tuple representing a point. The list  contains all cells that
        a meshpoint is part of. Only the quadrilaterals should be included in this neighbours list.'''

        for i, cellist in enumerate(self.cells):
            for j, cell in enumerate(cellist):
                # this test ensures that only quadrilaterals are included in the neighbour list,
                # because self.cells[0][0] is initialized with a Cell instance that has no area.
                # After initialization self.cells[0][0] can have an area. i, j must run over the
                # entire cell list as they are to become coordinates.
                if cell.area > 0:
                    for p in range(4):
                        point = (cell.points[p][0], cell.points[p][1])
                        if point in self.neighbours:
                            self.neighbours[point].append([i, j])
                        else:
                            self.neighbours[point] = []
                            self.neighbours[point].append([i, j])
        return

    def __add_label__(self, cell, i, j, labelsize):
        point = cell.centroid
        t = ROOT.TText(point[0], point[1], str(i) + ',' + str(j))
        t.SetTextAlign(22)
        t.SetTextSize(labelsize)
        return t

    def insert_negative(self,
                        perimeter,
                        N=10000,
                        fiducial_distance=0.0,
                        exclusion_list=[]):
        '''Replace the place holder for a stationary point by a Negative instance. If more than one stationary point is present,
        extend the list.'''
        cell = Negative(self.cells, perimeter, N, fiducial_distance,
                        exclusion_list)

        if self.cells[0][0].area > 0:
            self.cells[0].append(cell)
        else:
            self.cells[0][0] = cell

    def insert_stationary(self, quadrilateral):
        '''Quadrilateral needs to be a cell of type Quadrilateral.'''

        if self.cells[0][0].area > 0:
            self.cells[0].append(quadrilateral)
        else:
            self.cells[0][0] = quadrilateral

    def deltat(self):
        '''Time step used in building the grid.'''
        return self.dt

    def draw(self, plotlist, labelsize):
        ''' Draw all cells as list of TLine elements.'''
        #  ignore reversal bin
        for i, celllist in enumerate(self.cells):
            for j, cell in enumerate(celllist):
                cell.draw(plotlist)
                #i+1 because the reversal bin is not included, but enumerate makes i start at 0
                #this only affects the text label
                if self.are_labels_drawn == True:
                    t = self.__add_label__(cell, i, j, labelsize)
                    plotlist.append(t)

        #make sure stationary bins comes on top
        for cell in self.cells[0]:
            cell.draw(plotlist)

    def dimensions(self):
        '''Returns minv, maxv, minw, maxw'''
        vs = [
            point[0] for celllist in self.cells[0:] for cell in celllist
            for point in cell.points
        ]
        ws = [
            point[1] for celllist in self.cells[0:] for cell in celllist
            for point in cell.points
        ]
        return np.array([[min(vs), max(vs)], [min(ws), max(ws)]])

    def bbox(self, i, j):
        ''' Give the bounding box of a given cell '''
        return self.cells[i][j].bbox()

    def query(self, point):
        ''' Give the result of a KDTree query'''
        ret = self.tree.query(point)
        return ret

    def bin_from_point(self, point):
        '''Give the bin the point belongs to. Return None if there is none.'''

        # first check the stationary bins
        for i, cell in enumerate(self.cells[0]):
            if self.isinbin(0, i, point):
                return [0, i]

        # else walk the tree

        close = self.tree.query(point, MAX_NEIGHBOURS)
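        # MAX_NEIGHBOURS is assumed to be a module-level constant. Each candidate
        # grid point maps (via self.neighbours) to the cells it belongs to, and
        # those cells are then tested for containment.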
        nearests = self.data[close[1]]
        for nearest in nearests:
            ns = self.neighbours[(nearest[0], nearest[1])]
            for n in ns:
                if self.isinbin(n[0], n[1], point):
                    return n

        return None

    def isinbin(self, i, j, point):
        ''' True if point(v,w) is in bin i,j. False otherwise.'''
        return self.cells[i][j].isPointInside(point)

    def findvs(self, v):
        ''' Give a list of all bins that contain potential V.'''
        l = []
        for i, cells in enumerate(self.cells):
            for j, cell in enumerate(cells):
                box = cell.bbox()

                if v >= box[0][0] and v < box[1][0]:
                    l.append([i, j])
        return l

    def checkSimple(self):
        '''Checks whether Quadrilaterals are Simple'''
        chksum = 0
        for i, cells in enumerate(self.cells):
            for j, cell in enumerate(cells):
                chkCell = self.cells[i][j]
                if chkCell.__class__.__name__ == 'Quadrilateral':
                    if chkCell.isSimple() == False:
                        chksum += 1
                        print(i, j)
        return chksum

    def checkSelfIntersection(self):
        '''Checks for concave and self-intersecting Quadrilaterals'''
        chkConcave = 0
        chkSelfIntersect = 0
        x_coords = []
        y_coords = []
        for i, cells in enumerate(self.cells):
            for j, cell in enumerate(cells):
                chkCell = self.cells[i][j]
                if chkCell.__class__.__name__ == 'Quadrilateral':
                    if chkCell.isSimple() == False:
                        if chkCell.isSelfIntersecting() == False:
                            chkConcave += 1
                        else:
                            chkSelfIntersect += 1
                            print(i, j)
                            x_coords = np.append(x_coords, chkCell.centroid[0])
                            y_coords = np.append(y_coords, chkCell.centroid[1])
        plt.scatter(x_coords, y_coords)
        if chkSelfIntersect > 0:
            plt.show()
        return chkConcave, chkSelfIntersect

    def checkSmallBins(self):
        '''Checks for Quadrilaterals that are too small'''
        chkSum = 0
        x_coords = []
        y_coords = []
        for i, cells in enumerate(self.cells):
            for j, cell in enumerate(cells):
                chkCell = self.cells[i][j]
                if chkCell.__class__.__name__ == 'Quadrilateral':
                    if chkCell.isTooSmall() == True:
                        chkSum += 1
                        print(i, j)
                        x_coords = np.append(x_coords, chkCell.centroid[0])
                        y_coords = np.append(y_coords, chkCell.centroid[1])
        plt.scatter(x_coords, y_coords)
        if chkSum > 0:
            plt.show()
        return chkSum

####under construction

    def mergeSmallBins(self, thresh=2e-3):
        new_mesh = Mesh(None)
        new_mesh.dt = self.dt
        new_mesh.filename = self.filename
        for i, cells in enumerate(self.cells):
            if i == 0:
                new_mesh.cells.append(self.cells[0])
            else:
                for j, cell in enumerate(cells):
                    chkCell = self.cells[i][j]
                    if j == 0:
                        new_mesh.cells.append([chkCell])
                    elif not chkCell.isTooSmall(threshold=thresh):
                        new_mesh.cells[i].append(chkCell)
                    elif chkCell.isTooSmall(threshold=thresh):
                        new_mesh.cells[i].append(
                            mergeQuads(chkCell, self.cells[i][-1]))
                        break
        return new_mesh

    def removeBadBins(self):

        new_mesh = Mesh(None)
        new_mesh.dt = self.dt  # Added (MdK): 6/04/2017

        for i, cells in enumerate(self.cells):

            if i == 0:
                new_mesh.cells.append(self.cells[0])
            else:
                for j, cell in enumerate(cells):

                    chkCell = self.cells[i][j]
                    if j == 0 and chkCell.isSelfIntersecting():
                        new_mesh.cells.append([Cell([0.], [0.])])
                    elif j == 0 and not chkCell.isSelfIntersecting():
                        new_mesh.cells.append([chkCell])
                    elif not chkCell.isSelfIntersecting():
                        new_mesh.cells[i].append(chkCell)
                    elif chkCell.isSelfIntersecting():
                        break

        return new_mesh

    def removeSmallBins(self, thresh=1e-5):

        new_mesh = Mesh(None)
        new_mesh.dt = self.dt
        new_mesh.filename = self.filename
        for i, cells in enumerate(self.cells):
            if i == 0:
                new_mesh.cells.append(self.cells[0])
            else:
                for j, cell in enumerate(cells):
                    chkCell = self.cells[i][j]
                    if j == 0 and chkCell.isTooSmall(threshold=thresh):
                        new_mesh.cells.append([Cell([0.], [0.])])
                    elif j == 0 and not chkCell.isTooSmall(threshold=thresh):
                        new_mesh.cells.append([chkCell])
                    elif not chkCell.isTooSmall(threshold=thresh):
                        new_mesh.cells[i].append(chkCell)
                    elif chkCell.isTooSmall(threshold=thresh):
                        break
        return new_mesh

    def removeWithRenewal(self, threshold=1e-8):
        new_mesh = Mesh(None)
        revname = self.filename.split('.')[0] + '.rev'
        #new_mesh.filename = fn
        f = open(revname, 'w')
        f.write('<Mapping Type=\"Reversal\">\n')
        flagged_cells = []

        #	end_strips = []
        #        limit_coords = []

        for i, cells in enumerate(self.cells):
            if i == 0 or i == 1:
                new_mesh.cells.append(self.cells[i])
            else:
                for j, cell in enumerate(cells):
                    chkCell = self.cells[i][j]

                    if j == 0 and (chkCell.isSelfIntersecting()
                                   or chkCell.isTooSmall(threshold)):
                        new_mesh.cells.append([Cell([0.], [0.])])
                        print(i, j)
                    elif j == 0 and not (chkCell.isSelfIntersecting()
                                         or chkCell.isTooSmall(threshold)):
                        new_mesh.cells.append([chkCell])
                    elif not (chkCell.isSelfIntersecting()
                              or chkCell.isTooSmall(threshold)):
                        new_mesh.cells[i].append(chkCell)

                        if j == len(self.cells[i]) - 1:
                            #ind = np.argmin(distance.cdist([self.cells[1][k].centroid for k in range(len(self.cells[1]))],[chkCell.centroid],'euclidean'))
                            ind = np.argmin(
                                distance.cdist(
                                    [C.centroid for C in self.cells[1]],
                                    [chkCell.centroid], 'euclidean'))
                            f.write(repr(i) + ',0\t1,' + repr(ind) + '\t1.0\n')

#                            end_strips.append([i, j])
#			    limit_coords.append(ind)
                    elif (chkCell.isSelfIntersecting()
                          or chkCell.isTooSmall(threshold)):
                        if j != 0:
                            flagged_cells.append([i, j])
                        else:
                            print(i, j)
                        break


#	f.write('</Mapping>')

#	for i,xy in enumerate(end_strips):
#	    C = self.cells[xy[0]][xy[1]].centroid
#	    D = self.cells[1][limit_coords[i]].centroid
#	    plt.scatter(C[0],C[1])
#	    plt.scatter(D[0],D[1], color = 'r')

        for coords in flagged_cells:
            ind = np.argmin(
                distance.cdist(
                    [x.centroid for y in (new_mesh.cells) for x in y],
                    [self.cells[coords[0]][coords[1]].centroid], 'euclidean'))
            i, j = new_mesh.cellIndex(ind)
            f.write(
                repr(coords[0]) + ',0\t' + repr(i) + ',' + repr(j) + '\t1.0\n')
        f.write('</Mapping>')

        #	plt.show()
        return new_mesh

    def cellIndex(self, index):
        test_index = 0
        for i, cells in enumerate(self.cells):
            for j, cell in enumerate(cells):
                if test_index == index:
                    return i, j
                else:
                    test_index += 1

    def ToXML(self, fn):
        with open(fn, 'w') as f:
            f.write('<Mesh>\n')
            f.write('<TimeStep>')
            f.write(str(self.dt))
            f.write('</TimeStep>\n')

            for i, cells in enumerate(self.cells):
                f.write('<Strip>')
                for j, cell in enumerate(cells):
                    for p in cell.points:
                        f.write("{:.12f}".format(p[0]))
                        f.write(' ')
                        f.write("{:.12f}".format(p[1]))
                        f.write(' ')
                f.write('</Strip>\n')
            f.write('</Mesh>\n')

    def ToStat(self, fn):
        with open(fn, 'w') as f:
            f.write('<Stationary>\n')
            for i, cells in enumerate(self.cells):
                for j, cell in enumerate(cells):
                    if len(cell.points) == 4:
                        f.write('<Quadrilateral><vline>')
                        for p in cell.points:
                            f.write("{:.12f}".format(p[0]))
                            f.write(' ')
                        f.write('</vline><wline>')
                        for q in cell.points:
                            f.write("{:.12f}".format(q[1]))
                            f.write(' ')
                        f.write('</wline></Quadrilateral>\n')
            f.write('</Stationary>')

    def FromXML(self, fn, fromString=False):
        '''Constructs a mesh from either a file or a string (if fromString == True).'''
        if not fromString:
            self.filename = fn
            tree = ET.parse(fn)
            root = tree.getroot()
        else:
            self.filename = ""
            root = ET.fromstring(fn)

        for ts in root.iter('TimeStep'):
            self.dt = float(ts.text)

        for strip in root.iter('Strip'):

            l = []

            # An empty strip should be allowed, for example as a place holder for stationary cells
            if strip.text is not None:

                coords = [float(x) for x in strip.text.split()]
                if len(coords) % 8 != 0:
                    raise ValueError
                n_chunck = len(coords) // 8

                for i in range(0, n_chunck):
                    vs = []
                    ws = []
                    vs.append(coords[8 * i])
                    vs.append(coords[8 * i + 2])
                    vs.append(coords[8 * i + 4])
                    vs.append(coords[8 * i + 6])
                    ws.append(coords[8 * i + 1])
                    ws.append(coords[8 * i + 3])
                    ws.append(coords[8 * i + 5])
                    ws.append(coords[8 * i + 7])
                    quad = Quadrilateral(vs, ws)
                    l.append(quad)

            self.cells.append(l)
        self.__build_neighbours__()