Code Example #1
    def __init__(self, force_recompute = False):
        """Initialize the local data files.
        
        The first time it will:
        - Create a directory named "datadir" in this file's current directory
        - Download the Olson database and place it in ./datadir
        - Create an Rtree on the shapes in the database and persist it in ./datadir
        - Create an additional bookmarking dict() and persist (via pickle) it in ./datadir 

        All the other times it will:
        - Load the RTree and the additional bookmarking dict() in memory
        
        Keyword arguments:
        force_recompute -- if True, deletes and recomputes the local data
        """

        data_dir =  "%s/datadir" % os.path.dirname(os.path.realpath(__file__))
        if not os.path.exists(data_dir):
            os.mkdir(data_dir)
            
        data_files = ['rtree.dat', 'rtree.idx', 'rtree.p']

        # recompute if explicitly forced, or if at least one data file is missing
        if force_recompute or not all(os.path.isfile("%s/%s" % (data_dir, x)) for x in data_files):

            tz_fname = "%s/%s" % (data_dir, 'tz_world.zip')
            print >> sys.stderr, "Downloading the TZ shapefile (Olson database)..."
            urllib.urlretrieve ('http://efele.net/maps/tz/world/tz_world.zip', tz_fname)
            print >> sys.stderr, "Done."

            for x in data_files:
                if  os.path.isfile("%s/%s" % (data_dir,x)): 
                    os.remove("%s/%s" % (data_dir,x))

            self.idx = index.Rtree('%s/rtree' % data_dir)
            with fiona.drivers():
                print >> sys.stderr, "Building the spatial index on the shapefile..."
                with fiona.open('/world/tz_world.shp',
                                vfs='zip://%s' % tz_fname) as collection:
                    self.polyd = {}
                    i = 0
                    for polygon in collection:
                        p = shape(polygon['geometry'])
                        self.idx.add(i, p.bounds)
                        self.polyd[i] = {'shape' : p, 'tzid': polygon['properties']['TZID']}
                        i += 1
                with open('%s/rtree.p' % data_dir, 'wb') as f:
                    pickle.dump(self.polyd, f)

                print >> sys.stderr, "Done."

        else:
            print >> sys.stderr, "Loading Rtree and Pickle File"
            self.idx = index.Rtree('%s/rtree' % data_dir)
            with open('%s/rtree.p' % data_dir, 'rb') as f:
                self.polyd = pickle.load(f)
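
A note on how the two structures above work together: the Rtree query only narrows the search to polygons whose bounding box contains the point, and the pickled dict supplies the shapely geometry needed for the exact containment test. A minimal lookup sketch (the helper name, its signature, and the Point import are assumptions added here, not part of the original class):

from shapely.geometry import Point

def lookup_tzid(idx, polyd, longitude, latitude):
    """Return the TZID of the polygon containing the point, or None (sketch)."""
    point = Point(longitude, latitude)
    # a point query is just a degenerate box: min == max in both dimensions
    for i in idx.intersection((longitude, latitude, longitude, latitude)):
        candidate = polyd[i]
        if candidate['shape'].contains(point):
            return candidate['tzid']
    return None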
Code Example #2
def getGraphRtree(graph, generator='edges', filename=None, interleaved=True):
    p = index.Property()
    p.overwrite = True

    if filename is None:
        if generator == 'edges':
            return index.Rtree(edgesGenerator(graph, interleaved),
                               properties=p,
                               interleaved=interleaved)
        elif generator == 'nodes':
            return index.Rtree(nodesGenerator(graph, interleaved),
                               properties=p,
                               interleaved=interleaved)
    else:
        # interleaved must be passed by keyword here; a bare second positional
        # argument is not interpreted as the interleaved flag
        return index.Rtree(filename, interleaved=interleaved)
Code Example #3
def test_rtree_constructor_stream_input():
    p = index.Property()
    sindex = index.Rtree(boxes15_stream(), properties=p)

    bounds = (0, 0, 60, 60)
    hits = list(sindex.intersection(bounds))
    assert sorted(hits) == [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]
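
boxes15_stream() is a fixture from the rtree test suite and is not reproduced here; the only contract the constructor relies on is an iterable of (id, coordinates, obj) tuples. A stand-in generator with made-up boxes (an illustration under that assumption, not the real fixture):

import random

def random_box_stream(n=100, seed=15):
    """Yield (id, (minx, miny, maxx, maxy), obj) tuples for stream loading."""
    rng = random.Random(seed)
    for i in range(n):
        minx, miny = rng.uniform(0, 50), rng.uniform(0, 50)
        yield (i, (minx, miny, minx + rng.uniform(0, 10), miny + rng.uniform(0, 10)), None)

(The hit ids asserted in the test above are, of course, specific to the real fixture's data.)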
Code Example #4
File: confusionmatrix.py    Project: madi/DeadTrees
def Pointinpolygon(shppoint):
    '''
    Tell whether each point falls inside a polygon and give the count (OK or KO)
    '''

    cm = []
    driver = ogr.GetDriverByName("ESRI Shapefile")
    dataSource = driver.Open(shppoint, 0)
    pointsLayer = dataSource.GetLayer()
    print pointsLayer.GetFeatureCount()

    file_idx = index.Rtree('RTREE')
    print file_idx

    for points in pointsLayer:
        p = points.GetGeometryRef()
        #print p
        a = list(file_idx.intersection((p.GetX(), p.GetY())))
        if not a:
            cm.append(points.GetFID())

    driver = None
    dataSource = None
    pointsLayer = None

    print "Number of points inside polygons: " + str(len(cm))
    print "List of FIDs of the above points: " + str(cm)
Code Example #5
def crear_insertar():
    p = index.Property()
    p.dimension = 128
    idx = index.Rtree('rtree', properties=p)

    with open('names.txt') as f:
        aux = f.readlines()

    nombres = []
    for i in aux:
        i = i.rstrip()
        nombres.append(i)

        #print(i)

    feature_vectors = []
    np.savetxt('nombres.txt', nombres, fmt='%s')

    for i in nombres:
        image = fr.load_image_file('uploads/' + i)
        # no face detected, or the image could not be read properly
        if not fr.face_encodings(image):
            continue

        encoding = fr.face_encodings(image)[0]
        feature_vectors.append(np.concatenate([encoding, encoding]))
        #print(encoding)

    cont = 0
    for i in feature_vectors:
        idx.insert(cont, i)
        cont += 1
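
The np.concatenate([encoding, encoding]) call above is what makes a 128-dimensional vector storable: with the default interleaved=True ordering, insert expects all per-dimension minimums followed by all per-dimension maximums, so repeating the vector yields a zero-volume box, i.e. a point. A small illustration of that layout (stand-in data, not a real face encoding):

import numpy as np

vec = np.linspace(-1.0, 1.0, 128)        # stand-in for one face encoding
point_box = np.concatenate([vec, vec])   # 128 minimums followed by 128 maximums
assert point_box.shape == (256,)         # the shape idx.insert(i, point_box) receives

The range query in Code Example #16 widens the same layout, using encoding - rango as the minimums and encoding + rango as the maximums.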
Code Example #6
    def __bulk_load(self, data, lim=1600):
        """
        expect columns ('LONG', 'LAT', 'TIMESTAMP') in data
        :param lim: defines number of lines to read from data
        :return: 3D Rtree
        """
        start_time = time.time()

        if lim:
            data = data[:lim]
        # Function required to bulk load
        def generator_function():
            for i, (x, y, tm) in enumerate(data[['LONG', 'LAT',
                                                 'TIMESTAMP']].values):
                tm = int(tm)
                i += 100
                # print(100 + i,x,y,tm)
                hash = sha256(binstr(''.join(
                    (str(x), str(y), str(tm))))).hexdigest()
                self.items[i] = {'coord': (x, y, tm), 'hash': hash}
                yield (i, (x, x, y, y, tm, tm), hash)

        p = self.__prepare_property()
        idx3d = index.Rtree(generator_function(),
                            properties=p,
                            interleaved=False)
        print('Time elapsed (s)', time.time() - start_time)

        return idx3d
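
Because the index above is built with interleaved=False, query coordinates follow the (xmin, xmax, ymin, ymax, tmin, tmax) ordering rather than the interleaved default. A space-time window query against the returned index might look like the following sketch (coordinate and timestamp values are illustrative only):

# idx3d is the index returned by __bulk_load above (interleaved=False ordering)
window = (-74.05, -73.90,             # xmin, xmax (longitude)
          40.60, 40.90,               # ymin, ymax (latitude)
          1600000000, 1600086400)     # tmin, tmax (unix timestamps)
hit_ids = list(idx3d.intersection(window))
# each returned id maps back into self.items, the coord/hash dict filled in above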
Code Example #7
File: finnairComponent.py    Project: Mantower/TRIP
    def __init__(self):
        global host
        host = 'https://offer-junction.ecom.finnair.com'
        global departure_code
        departure_code = 'HEL'
        global adults
        adults = 1

        # build R-tree
        global airports
        airports = index.Rtree()

        # import csv
        with open('../model/finnair_airport_geo.csv') as csvfile:
            reader = csv.reader(csvfile)
            for row in reader:
                if reader.line_num == 1:
                    continue
                airports.insert(
                    reader.line_num, (float(row[6]), float(row[5])), {
                        "id": row[0],
                        "code": row[1],
                        "name": row[2],
                        "city": row[3],
                        "country": row[4],
                        "latitude": float(row[5]),
                        "longitude": float(row[6]),
                        "timezone": row[7]
                    })
Code Example #8
File: lookup_tests.py    Project: meyersj/geotweet
def init_polygon_1_index():
    location = SpatialLookup()
    idx = index.Rtree()
    polygon = location._build_obj(POLYGON_1)
    location.data_store[1] = polygon
    idx.insert(1, polygon['geometry'].bounds)
    location.idx = idx
    return location
Code Example #9
 def handle_noargs(self, **options):
     with fiona.drivers():
         with fiona.open(settings.ZIPCODES_SHP, 'r') as zipcodes_data:
             idx = index.Rtree(settings.RTREE_INDEX_FILE)
             for feature in zipcodes_data:
                 geometry = shape(feature['geometry'])
                 idx.insert(int(feature['id']), geometry.bounds)
     print 'Successfully created an Rtree index file at %s' % settings.RTREE_INDEX_FILE_NAME
Code Example #10
def storeNew(filename, model, info=None):
    """
    This creates a pickle file containing the list of earthquake sources
    representing an earthquake source model.

    :parameter filename:
        The name of the file where the model will be stored
    :parameter model:
        A list of OpenQuake hazardlib source instances
    """
    # Preparing output filenames
    dname = os.path.dirname(filename)
    slist = re.split(r'\.', os.path.basename(filename))
    # SIDx
    p = index.Property()
    p.dimension = 3
    sidx = index.Rtree(os.path.join(dname, slist[0]), properties=p)
    #
    cpnt = 0
    l_other = []
    l_points = []
    for src in model:
        if isinstance(src,
                      (AreaSource, SimpleFaultSource, ComplexFaultSource,
                       CharacteristicFaultSource, NonParametricSeismicSource)):
            l_other.append(src)
        else:

            if len(src.hypocenter_distribution.data) == 1:
                srcs = [src]
            else:
                srcs = _split_point_source(src)

            x, y = getcoo(srcs[0].location.longitude,
                          srcs[0].location.latitude)

            for src in srcs:
                l_points.append(src)
                # calculate distances
                z = src.hypocenter_distribution.data[0][1]
                sidx.insert(cpnt, (x, y, z, x, y, z))
                cpnt += 1

    # All the other sources
    fou = open(filename, 'wb')
    pickle.dump(l_other, fou)
    fou.close()
    # Load info
    if info is None:
        info = _get_model_info(model)
    # Points
    fou = open(os.path.join(dname, slist[0]) + '_points.pkl', 'wb')
    pickle.dump(l_points, fou)
    fou.close()
    # Info
    fou = open(os.path.join(dname, slist[0]) + '_info.pkl', 'wb')
    pickle.dump(info, fou)
    fou.close()
Code Example #11
File: optics.py    Project: helianglen/optics-cluster
 def __init__(self, epsilon=sys.float_info.max, minPts=5):
     self.rtree = index.Rtree()
     self.processedIds = set()
     self.nPoints = None
     self.nodes = None
     self.epsilon = epsilon
     #self.epsilon2 = epsilon**2 if epsilon < sys.float_info.max else sys.float_info.max
     self.minPts = minPts
     #self.clusterOrder = Optics.ClusterOrder()
     self.clusterOrder = []
     return
Code Example #12
def create_node_rtree(node_vector, file_name):

    p = index.Property()
    p.dimension = 3
    p.overwrite = True
    node_index_3d = index.Rtree('node_index_3d_' + file_name, properties=p)

    for node_id in node_vector.keys():
        node_index_3d.insert(
            int(node_id), (node_vector[node_id][0], node_vector[node_id][1],
                           node_vector[node_id][2], node_vector[node_id][0],
                           node_vector[node_id][1], node_vector[node_id][2]))
    print "node rtree created"
Code Example #13
    def test_index_properties(self):
        """Setting index properties returns expected values"""
        idx = index.Rtree()
        p = index.Property()

        p.leaf_capacity = 100
        p.fill_factor = 0.5
        p.index_capacity = 10
        p.near_minimum_overlap_factor = 7
        p.buffering_capacity = 10
        p.variant = 0
        p.dimension = 3
        p.storage = 0
        p.pagesize = 4096
        p.index_pool_capacity = 1500
        p.point_pool_capacity = 1600
        p.region_pool_capacity = 1700
        p.tight_mbr = True
        p.overwrite = True
        p.writethrough  = True
        p.tpr_horizon  = 20.0
        p.reinsert_factor  = 0.3
        p.idx_extension = 'index'
        p.dat_extension = 'data'

        idx = index.Index(properties = p)

        props = idx.properties
        self.assertEqual(props.leaf_capacity, 100)
        self.assertEqual(props.fill_factor, 0.5)
        self.assertEqual(props.index_capacity, 10)
        self.assertEqual(props.near_minimum_overlap_factor, 7)
        self.assertEqual(props.buffering_capacity, 10)
        self.assertEqual(props.variant, 0)
        self.assertEqual(props.dimension, 3)
        self.assertEqual(props.storage, 0)
        self.assertEqual(props.pagesize, 4096)
        self.assertEqual(props.index_pool_capacity, 1500)
        self.assertEqual(props.point_pool_capacity, 1600)
        self.assertEqual(props.region_pool_capacity, 1700)
        self.assertEqual(props.tight_mbr, True)
        self.assertEqual(props.overwrite, True)
        self.assertEqual(props.writethrough, True)
        self.assertEqual(props.tpr_horizon, 20.0)
        self.assertEqual(props.reinsert_factor, 0.3)
        self.assertEqual(props.idx_extension, 'index')
        self.assertEqual(props.dat_extension, 'data')
Code Example #14
def create_edge_rtree(edge_list, file_name, node_vector):

    p = index.Property()
    p.dimension = 3
    p.overwrite = True
    edge_index_3d = index.Rtree('edge_index_3d_' + file_name, properties=p)

    for edge_id in edge_list:
        [end0, end1] = edge_id.split(',')
        mbr_min = np.minimum(node_vector[end0], node_vector[end1])
        mbr_max = np.maximum(node_vector[end0], node_vector[end1])

        edge_index_3d.insert(edge_list[edge_id][0],
                             (mbr_min[0], mbr_min[1], mbr_min[2], mbr_max[0],
                              mbr_max[1], mbr_max[2]))

    print "edge tree created"
Code Example #15
File: confusionmatrix.py    Project: madi/DeadTrees
def Rtree(shppoly):
    '''
    Construct the R-tree index over the envelopes of the segmentation polygons
    '''
    file_idx = index.Rtree('RTREE')  #RTREE is the name
    driver = ogr.GetDriverByName("ESRI Shapefile")
    dataSource = driver.Open(shppoly, 0)
    polylayer = dataSource.GetLayer()
    print polylayer.GetFeatureCount()

    for polys in polylayer:
        env = polys.GetGeometryRef().GetEnvelope()
        file_idx.insert(polys.GetFID(), (env[0], env[2], env[1], env[3]))
    print "done"
    driver = None
    dataSource = None
    polylayer = None
Code Example #16
def range_q(nombre, rango):
    rango = rango / 100
    p = index.Property()
    p.dimension = 128
    idx = index.Rtree('rtree', properties=p)
    names = np.loadtxt('nombres.txt', dtype=str)
    image = fr.load_image_file(nombre)
    encoding = fr.face_encodings(image)[0]
    vec = np.concatenate([encoding - rango, encoding + rango])

    ret = list(idx.intersection(vec))

    ret1 = {}
    cont = 1
    for i in ret:
        ret1[cont] = names[i]
        cont += 1
        # print(names[i])
    return ret1
Code Example #17
File: utils.py    Project: zbwade/street-addresses
def rtree_for_way_edges(ways, nodes, o_dir):
        """ build an R-tree for all edges of the given ways """
        rtree_idx = index.Rtree(o_dir + '/' + 'rtree')
        iid = 0
        for wi, w in enumerate(ways):
            dist = 0
            nds = w[1]
            name = w[2]
            for i in range(len(nds) - 1):
                n1, n2 = nodes[nds[i]], nodes[nds[i + 1]]

                if n1 != n2:
                    rtree_idx.insert(
                        iid,
                        bbox([n1, n2]),
                        ((n1[0], n1[1]), (n2[0], n2[1]), dist, name)
                    )
                    iid += 1
                    dist += haversine((n1[0], n1[1]), (n2[0], n2[1]))
        return rtree_idx
Code Example #18
def createProjectIndex(keys_coords, radius=10):

    proj_index = index.Rtree()

    idx = 0
    for key_coord in keys_coords:

        key = key_coord[0]
        xcoord, ycoord = key_coord[1]

        #find bounding box around lot
        left, right = xcoord - radius, xcoord + radius
        bottom, top = ycoord - radius, ycoord + radius

        #insert key in the rtree index
        #print("inserting idx=" + str(idx) + " left= " + str(left) + " bottom = " + str(bottom) +" right= " + str(right) + "top= " + str(top))
        proj_index.insert(idx, (left, bottom, right, top),
                          obj=(key, (xcoord, ycoord)))

        idx += 1

    return proj_index
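
Since every entry above is inserted with obj=(key, (xcoord, ycoord)), the stored payload can be recovered straight from a query by passing objects=True. A minimal usage sketch (keys_coords is whatever iterable the function above was built for, and the query box coordinates are illustrative):

proj_index = createProjectIndex(keys_coords, radius=10)
query_box = (980000, 190000, 981000, 191000)    # (left, bottom, right, top), illustrative
for item in proj_index.intersection(query_box, objects=True):
    key, (xcoord, ycoord) = item.object         # the payload stored via obj= above
    print(key, xcoord, ycoord)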
Code Example #19
def knn_r(nombre, cant):
    p = index.Property()
    p.dimension = 128
    idx = index.Rtree('rtree', properties=p)
    names = np.loadtxt('nombres.txt', dtype=str)
    image = fr.load_image_file(nombre)
    encoding = fr.face_encodings(image)[0]
    vec = np.concatenate([encoding, encoding])

    ret = list(idx.nearest(vec, cant))

    pasto = []

    ret1 = {}
    cont = 1
    for i in ret:
        ret1[cont] = names[i]
        cont += 1
        # print(names[i])

    for key, value in ret1.items():
        pasto.append({'id': key, 'name': value})
    return pasto
Code Example #20
File: model.py    Project: ftbernales/oq-mbtk
def load(filename, what='all'):
    """
    This loads a pickle file containing the list of earthquake sources
    representing an earthquake source model.

    :parameter filename:
        The name of the file where the model is stored
    :parameter what:
        Can be 'other', 'all', 'points'
    :returns:
        A list of source instances
    """
    other = None
    points = None
    # Filename
    dname = os.path.dirname(filename)
    slist = re.split('\\.', os.path.basename(filename))
    # SIDx
    p = index.Property()
    p.dimension = 3
    sidx = index.Rtree(os.path.join(dname, slist[0]), properties=p)
    # Other
    if re.search('other', what) or re.search('all', what):
        fou = open(filename, 'rb')
        other = pickle.load(fou)
        fou.close()
    # Point
    if re.search('points', what) or re.search('all', what):
        fou = open(os.path.join(dname, slist[0]) + '_points.pkl', 'rb')
        points = pickle.load(fou)
        fou.close()
    # Info
    fou = open(os.path.join(dname, slist[0]) + '_info.pkl', 'rb')
    info = pickle.load(fou)
    fou.close()
    return other, points, info, sidx
Code Example #21
def create_edge_rtree_graph(gr, file_name, node_vector, edge_list):

    p = index.Property()
    p.dimension = 3
    p.overwrite = True
    edge_index_3d = index.Rtree('edge_index_3d_' + file_name, properties=p)

    for edge in gr.edges():
        e = Edge(int(edge[0]), int(edge[1]), -1, -1, -1, "-1", -1, -1)
        at0 = gr.node_attributes(edge[0])
        at1 = gr.node_attributes(edge[1])
        #e_attrs = gr.edge_attributes(edge)

        if at0[0][1] == "y" and at1[0][1] == "y":
            mbr_min = np.minimum(node_vector[str(edge[0])],
                                 node_vector[str(edge[1])])
            mbr_max = np.maximum(node_vector[str(edge[0])],
                                 node_vector[str(edge[1])])
            edge_index_3d.insert(int(edge_list[e.edge_label_str][0]),
                                 (mbr_min[0], mbr_min[1], mbr_min[2],
                                  mbr_max[0], mbr_max[1], mbr_max[2]),
                                 int(edge_list[e.edge_label_str][0]))

    print "edge tree created"
Code Example #22
 def test_result_offset(self):
     idx = index.Rtree()
     idx.set_result_offset(3)
     self.assertEqual(idx.result_offset, 3)
Code Example #23
File: country.py    Project: sauravcsvt/geocoding
    def __init__(self):
        shape_file = os.path.join(os.path.dirname(__file__),
                                  "TM_WORLD_BORDERS-0.3.shp")
        self.data = list(collection(shape_file))
        self.tree_idx = index.Rtree()
        self.jsondata = {}
        for s in self.data:
            s['shape'] = shape(s['geometry'])
            ccode = s['properties']['ISO2']
            self.jsondata[ccode] = s['properties']
            del (s['geometry'])
            self.tree_idx.insert(int(s['id']), s['shape'].bounds, None)

        self.set_version(shape_file)
        self.regionMap = {
            2: {
                'continent': 'Africa',
                'subregions': {
                    14: 'Eastern Africa',
                    17: 'Middle Africa',
                    18: 'Southern Africa',
                    11: 'Western Africa',
                    15: 'Northern Africa'
                }
            },
            19: {
                'continent': 'Americas',
                'subregions': {
                    13: 'Central America',
                    29: 'Caribbean',
                    5: 'South America',
                    21: 'Northern America'
                },
            },
            142: {
                'continent': 'Asia',
                'subregions': {
                    143: 'Central Asia',
                    30: 'Eastern Asia',
                    34: 'Southern Asia',
                    35: 'South-Eastern Asia',
                    145: 'Western Asia'
                }
            },
            150: {
                'continent': 'Europe',
                'subregions': {
                    151: 'Eastern Europe',
                    154: 'Northern Europe',
                    39: 'Southern Europe',
                    155: 'Western Europe'
                }
            },
            9: {
                'continent': 'Oceania',
                'subregions': {
                    53: 'Australia and New Zealand',
                    54: 'Melanesia',
                    57: 'Micronesia',
                    61: 'Polynesia'
                }
            }
        }
Code Example #24
def clip_panos_oceancity_with_panoId():
    try:
        shape_file = r'X:\My Drive\Research\StreetGraph\data\oceancity\vectors\ocean_parcel_panoid.shp'
        saved_path = r'X:\My Drive\Research\StreetGraph\data\oceancity\images3'

        pano_path = r'X:\My Drive\Research\StreetGraph\data\oceancity\panos'

        setup_logging(yaml_path,
                      logName=shape_file.replace(".shp", "_info.log"))

        inEPSG = 'EPSG:3424'
        outEPSG = 'EPSG:4326'

        fov_h_degree = 90  # degree
        fov_h_radian = radians(fov_h_degree)

        h_w_ratio = 1

        rtree_path = shape_file.replace(".shp", '_rtree.idx')
        r_tree = None

        if os.path.exists(rtree_path):
            r_tree = index.Rtree(rtree_path.replace(".idx", ''))
            logger.info("Loading the Rtree: %s", rtree_path)
        else:
            logger.info("Creating the Rtree: %s", rtree_path)
            Shoot_objects.create_rtree(shape_file,
                                       inEPSG=inEPSG,
                                       outEPSG=outEPSG)

            logger.info("Loading the Rtree: %s", rtree_path)
            r_tree = index.Rtree(rtree_path.replace(".idx", ''))

        logging.basicConfig(stream=sys.stderr, level=logging.INFO)
        buildings = fiona.open(shape_file)

        transformer = Transformer.from_crs(inEPSG, outEPSG, always_xy=True)

        # generate shapely Polygons

        start = 10000
        for idx in tqdm(range(start, len(buildings))):
            try:
                building = buildings[idx]

                geometry = building['geometry']['coordinates']
                ID = str(building['properties']['ID'])

                logger.info("Processing polygon # ID: %s", str(ID))

                if len(geometry) > 1:
                    logger.info('Polygon # %s has multiple (%d) parts.', idx,
                                len(geometry))
                    geometry = geometry[:1]
                geometry = np.array(geometry).squeeze(0)

                xs, ys = transformer.transform(geometry[:, 0], geometry[0:, 1])

                polygon = Polygon(zip(xs, ys))

                x, y = polygon.centroid.xy  # x is an array, the number is x[0]
                x = x[0]
                y = y[0]

                logger.info("polygon.centroid: %f, %f", x, y)

                # panoId, lon, lat = gpano.getPanoIDfrmLonlat(x, y)

                panoId = building['properties']['panoIds']
                lon = building['properties']['lon']
                lat = building['properties']['lat']

                if panoId == 0:
                    logger.info("Cannot find a street view image at : %s, %s ",
                                x, y)
                    continue

                viewpoint = np.array((lon, lat))
                # triangle = getShooting_triangle(viewpoint, polygon)

                min_rotated_rectangle = polygon.minimum_rotated_rectangle
                # points_list = min_rotated_rectangle.exterior.coords

                # triangle, heading = Shoot_objects.getShooting_triangle(viewpoint, min_rotated_rectangle)
                heading = gpano.getDegreeOfTwoLonlat(lat, lon, y, x)
                heading = round(heading, 2)
                GSV_url = gpano.getGSV_url_frm_lonlat(lon, lat, heading)
                logger.info("GSV url: %s", GSV_url)

                # find intersects in the r-tree
                # bound = triangle.bounds
                # intersects = r_tree.intersection(bound)
                # intersects = list(intersects)

                # isIntersected = False
                # for inter in intersects:
                #     if inter == idx:
                #         continue
                #     building = buildings[inter]['geometry']
                #     building = Shoot_objects.fionaPolygon2shaple(building)
                #     building = Shoot_objects.shapelyReproject(transformer, building)
                #     isIntersected = triangle.intersects(building)
                #     if isIntersected:
                #         logger.info("Occluded by other houses.")
                #         break
                #
                # if isIntersected:
                #     logger.info("Occluded by other houses.")
                #     continue

                json_file = os.path.join(pano_path, panoId + ".json")
                jdata = json.load(open(json_file, 'r'))
                pano_yaw = jdata["Projection"]['pano_yaw_deg']
                pano_yaw = float(pano_yaw)
                phi = float(heading) - pano_yaw

                # phi is in degrees here (it is converted to radians below),
                # so wrap the heading difference into [-180, 180] degrees
                if phi > 180:
                    phi = phi - 360
                if phi < -180:
                    phi = phi + 360
                phi = round(phi, 2)

                car_heading = pano_yaw

                _, fov = gpano.get_fov4edge(
                    (lon, lat),
                    car_heading,
                    polygon,
                    saved_path=saved_path,
                    file_name=str(ID) + "_" + panoId + "_" + str(heading) +
                    '_shooting.png')

                fov_h_degree = fov  # degree
                fov_h_radian = radians(fov_h_degree)

                # open panorama image
                pano_file = os.path.join(pano_path, panoId + ".jpg")
                img = cv2.imread(pano_file)
                h_img, w_img, channel = img.shape
                w = int(fov_h_degree / 360 * w_img)
                h = int(w * h_w_ratio)
                fov_v_radian = atan((h * tan((fov_h_radian / 2)) / w)) * 2
                print("panorama shape:", img.shape)

                theta0 = 0
                pano_pitch = 0

                rimg = gpano.clip_pano(theta0, radians(phi), fov_h_radian,
                                       fov_v_radian, w, img, pano_pitch)
                basename = f"{ID}_{panoId}_{str(heading)}.jpg"
                new_name = os.path.join(saved_path, basename)
                cv2.imwrite(new_name, rimg)
                # logger.info("Google Street View: %s", gpano.getGSV_url_frm_lonlat(lon, lat, ))

                logger.info("Clipped: %s", new_name)

            except Exception as e:
                logger.error("Error in building polygons: %s", e)
                continue

    except Exception as e:
        logger.error("clip_panos_oceancity_with_panoId: %s", e)
Code Example #25
    def sam_to_baitmap(self, sam_file, out_bam, rtree_dat, rtree_idx,
                       rtree_prefix, chr_handler):  # pylint: disable=no-self-use
        """
        This function takes the sam file (the output of bwa)
        and the Rtree files, and outputs a baitmap file.

        Parameters
        ----------
        sam_file : str
            path to output file from bwa_for_probes
            complete path to .rmap file
        """

        args = ["samtools", "view", "-h", "-o", sam_file, out_bam]

        logger.info("samtools args: " + ' '.join(args))

        try:
            with open(sam_file, "w") as f_out:
                process = subprocess.Popen(' '.join(args),
                                           shell=True,
                                           stdout=f_out,
                                           stderr=f_out)
            process.wait()

        except (IOError, OSError) as msg:
            logger.fatal("I/O error({0}): {1}\n{2}".format(
                msg.errno, msg.strerror, args))
            return False

        chr_dict = {}
        with open(chr_handler, "r") as chr_file:
            for line in chr_file:
                line_hdl = line.rstrip().split("\t")
                chr_dict[line_hdl[1]] = int(line_hdl[0])

        copy(rtree_idx, rtree_prefix + ".idx")
        copy(rtree_dat, rtree_prefix + ".dat")

        idx = index.Rtree(rtree_prefix)

        baitmap = []

        features = []

        with open(sam_file, "r") as file_in:
            for line in file_in:
                if line[0] != "@":
                    line = line.rstrip().split("\t")

                    try:
                        crm = chr_dict[line[2]]
                    except KeyError:  # chromosome name not present in chr_dict
                        continue

                    features.append(line[0])

                    srt_pos = int(line[3])
                    end_pos = srt_pos + int(len(line[9]))

                    id_object = idx.intersection((srt_pos, crm, end_pos, crm),
                                                 objects=True)

                    hits = [[i.id, i.bbox] for i in id_object]

                    if len(hits) > 1:
                        logger.warning("probe maps to two RE fragments, " +
                                       " ".join(line) + " start pos " +
                                       str(srt_pos) + " end pos " +
                                       str(end_pos))

                    elif not hits:
                        logger.warning("Sequence does not " +
                                       "match with any RE fragment, " +
                                       " ".join(line))
                        continue

                    else:
                        fragment_coord = [
                            int(hits[0][1][1]),
                            int(hits[0][1][0]),
                            int(hits[0][1][2]), hits[0][0]
                        ]

                        if fragment_coord not in baitmap:
                            baitmap.append(fragment_coord)

        os.remove(rtree_prefix + ".dat")
        os.remove(rtree_prefix + ".idx")

        return baitmap, features
Code Example #26
File: Shoot_objects.py    Project: gladcolor/StreetView
def shoot_HamptonRoads_building():
    try:
        shape_file = r'K:\Dataset\HamptonRoads\Hampton_Roads_Elevation_Certificates__NAVD_88_-shp/within_25m_SpatialJoin.shp'
        saved_path = r'K:\Dataset\HamptonRoads\Google_thumbnails_house_only_cleaned'

        setup_logging(yaml_path,
                      logName=shape_file.replace(".shp", "_info.log"))

        inEPSG = 'EPSG:3688'
        outEPSG = 'EPSG:4326'

        rtree_path = shape_file.replace(".shp", '_rtree.idx')
        r_tree = None

        w = 1024
        h = 768

        if os.path.exists(rtree_path):
            r_tree = index.Rtree(rtree_path.replace(".idx", ''))
            logger.info("Loading the Rtree: %s", rtree_path)
        else:
            logger.info("Creating the Rtree: %s", rtree_path)
            create_rtree(shape_file, inEPSG=inEPSG, outEPSG=outEPSG)

            logger.info("Loading the Rtree: %s", rtree_path)
            r_tree = index.Rtree(rtree_path.replace(".idx", ''))

        # test = r_tree.intersection((-95.608977, 29.736570, -95.408977, 29.936570))

        logging.basicConfig(stream=sys.stderr, level=logging.INFO)
        buildings = fiona.open(shape_file)

        transformer = Transformer.from_crs(inEPSG, outEPSG, always_xy=True)

        # generate shapely Polygons
        shoot_ply = shoot_polygons([])
        start = 0
        for idx in tqdm(range(start, len(buildings))):
            try:
                building = buildings[idx]
                logger.info("Processing polygon #: %d", idx)
                geometry = building['geometry']['coordinates']
                ID = str(building['properties']['ID'])
                if len(geometry) > 1:
                    logger.info('Polygon # %s has multiple (%d) parts.', idx,
                                len(geometry))
                    geometry = geometry[:1]
                geometry = np.array(geometry).squeeze(0)

                xs, ys = transformer.transform(geometry[:, 0], geometry[0:, 1])

                polygon = Polygon(zip(xs, ys))

                x, y = polygon.centroid.xy  # x is an array, the number is x[0]
                x = x[0]
                y = y[0]

                logger.info("polygon.centroid: %f, %f", x, y)

                panoId, lon, lat = gpano.getPanoIDfrmLonlat(x, y)

                if panoId == 0:
                    logger.info("Cannot find a street view image at : %s, %s ",
                                x, y)
                    continue

                viewpoint = np.array((lon, lat))
                # triangle = getShooting_triangle(viewpoint, polygon)

                min_rotated_rectangle = polygon.minimum_rotated_rectangle
                # points_list = min_rotated_rectangle.exterior.coords

                triangle, heading = getShooting_triangle(
                    viewpoint, min_rotated_rectangle)
                GSV_url = gpano.getGSV_url_frm_lonlat(lon, lat, heading)
                logger.info("GSV url: %s", GSV_url)

                # find intersects in the r-tree
                bound = triangle.bounds
                intersects = r_tree.intersection(bound)
                intersects = list(intersects)

                isIntersected = False
                for inter in intersects:
                    if inter == idx:
                        continue
                    building = buildings[inter]['geometry']
                    building = fionaPolygon2shaple(building)
                    building = shapelyReproject(transformer, building)
                    isIntersected = triangle.intersects(building)
                    if isIntersected:
                        logger.info("Occluded by other houses.")
                        break

                if isIntersected:
                    # logger.info("Occluded by other houses.")
                    continue

                ret = gpano.shootLonlat(lon,
                                        lat,
                                        polygon=min_rotated_rectangle,
                                        saved_path=saved_path,
                                        prefix=ID,
                                        width=w,
                                        height=h,
                                        fov=90)
                # logger.info("Google Street View: %s", gpano.getGSV_url_frm_lonlat(lon, lat, ))

                # logger.info("intersects: %s", intersects)

            except Exception as e:
                logger.error("Error in building polygons: %s", e)
                continue

    except Exception as e:
        logger.error("shoot_HamptonRoads_building: %s", e)
Code Example #27
File: views.py    Project: andrestone/osm-export-tool
from tasks.task_runners import ExportTaskRunner

from .permissions import IsHDXAdmin, IsOwnerOrReadOnly, IsMemberOfGroup
from .renderers import HOTExportApiRenderer

from hdx_exports.hdx_export_set import sync_region
from rtree import index

# Get an instance of a logger
LOG = logging.getLogger(__name__)

# controls how api responses are rendered
renderer_classes = (JSONRenderer, HOTExportApiRenderer)

DIR = os.path.dirname(os.path.abspath(__file__))
idx = index.Rtree(os.path.join(DIR, 'reverse_geocode'))


def bbox_to_geom(s):
    try:
        return GEOSGeometry(Polygon.from_bbox(s.split(',')), srid=4326)
    except Exception:
        raise ValidationError({'bbox': 'Query bounding box is malformed.'})


class JobViewSet(viewsets.ModelViewSet):
    """
    ##Export API Endpoint.

    Main endpoint for export creation and management. Provides endpoints
    for creating, listing and deleting export jobs.
Code Example #28
File: Shoot_objects.py    Project: gladcolor/StreetView
def shoot_Baltimore_buildings():
    try:
        shape_file = r'K:\OneDrive_NJIT\OneDrive - NJIT\Research\vacant_house\data\vacant_building.shp'
        saved_path = r'K:\OneDrive_NJIT\OneDrive - NJIT\Research\vacant_house\vacant_images'

        setup_logging(yaml_path,
                      logName=shape_file.replace(".shp", "info.log"))
        w = 768
        h = 1024
        # logger.info(os.path.basename(file))

        logger = logging.getLogger('console_only')

        rtree_path = shape_file.replace(".shp", '_rtree.idx')
        r_tree = None

        buildings = fiona.open(shape_file)

        inEPSG = 'EPSG:4326'
        outEPSG = 'EPSG:4326'

        if os.path.exists(rtree_path):
            r_tree = index.Rtree(rtree_path.replace(".idx", ''))
            logger.info("Loading the Rtree: %s", rtree_path)
        else:
            logger.info("Creating the Rtree: %s", rtree_path)
            create_rtree(shape_file, inEPSG=inEPSG, outEPSG=outEPSG)

            logger.info("Loading the Rtree: %s", rtree_path)
            r_tree = index.Rtree(rtree_path.replace(".idx", ''))

        # logging.basicConfig(stream=sys.stderr, level=logging.INFO)

        transformer = Transformer.from_crs(inEPSG, outEPSG, always_xy=True)

        # generate shapely Polygons
        shoot_ply = shoot_polygons([])
        start = 0
        for idx in tqdm(range(start, len(buildings))):
            try:
                building = buildings[idx]
                # logger.info("\n\n")
                print('\n')
                logger.info("Processing polygon #: %d", idx)
                geometry = building['geometry']['coordinates']

                row_id = building['properties']['row_id_1']
                row_id = int(row_id)
                row_id = str(row_id)

                if len(geometry) > 1:
                    logger.info('Polygon # %s has multiple (%d) parts.', idx,
                                len(geometry))
                    geometry = geometry[:1]
                geometry = np.array(geometry).squeeze(0)

                # coords = polygon.exterior.coords.xy  # coords:

                xs, ys = transformer.transform(geometry[:, 0], geometry[0:, 1])

                polygon = Polygon(zip(xs, ys))

                x, y = polygon.centroid.xy  # x is an array, the number is x[0]
                x = x[0]
                y = y[0]

                logger.info("polygon.centroid: %f, %f", x, y)

                panoId, lon, lat = gpano.getPanoIDfrmLonlat(x, y)

                if panoId == 0:
                    logger.info("Cannot find a street view image at : %s, %s ",
                                x, y)
                    continue

                viewpoint = np.array((lon, lat))
                # triangle = getShooting_triangle(viewpoint, polygon)

                min_rotated_rectangle = polygon.minimum_rotated_rectangle
                # points_list = min_rotated_rectangle.exterior.coords

                triangle, heading = getShooting_triangle(
                    viewpoint, min_rotated_rectangle)

                triangle = triangle.buffer(
                    -0.000007)  # about 1 m, needs to be improved. !!!!!!!

                GSV_url = gpano.getGSV_url_frm_lonlat(lon, lat, heading)
                logger.info("GSV url: %s", GSV_url)

                # find intersects in the r-tree
                bound = triangle.bounds
                intersects = r_tree.intersection(bound)
                intersects = list(intersects)

                isIntersected = False
                for inter in intersects:
                    if inter == idx:
                        continue
                    building = buildings[inter]['geometry']
                    building = fionaPolygon2shaple(building)
                    building = shapelyReproject(transformer, building)
                    isIntersected = triangle.intersects(building)
                    if isIntersected:
                        logger.info("Occluded by other houses.")
                        break

                if isIntersected:
                    # logger.info("Occluded by other houses.")
                    continue

                ret = gpano.shootLonlat(x,
                                        y,
                                        polygon=min_rotated_rectangle,
                                        saved_path=saved_path,
                                        prefix=row_id,
                                        width=w,
                                        height=h,
                                        fov=90)
                # logger.info("Google Street View: %s", gpano.getGSV_url_frm_lonlat(lon, lat, ))

                # logger.info("intersects: %s", intersects)

            except Exception as e:
                logger.error("Error in building polygons: %s",
                             e,
                             exc_info=True)
                continue

        # for idx, polygon in enumerate(shoot_ply.polygons):
        #     try:
        #         x, y = polygon.centroid
        #     except Exception as e:
        #         logger.error("Error in enumerate polygons: %s", e)
        #         continue

    except Exception as e:
        logger.error("shoot_Baltimore_buildings: %s", e)
Code Example #29
File: rmap_tool.py    Project: Zhu-Liu/pyCHiC
    def from_frag_to_rmap(self, enzyme_name, genome_fa, rtree, rtree_dat,
                          rtree_idx, RMAP, chr_handler):
        """
        This function takes the fragment output from digestion and
        converts them into rmap files.

        It also saves the restriction-enzyme site positions and IDs into a file
        using the Rtree python module. This file will be used by makeBatmap.py
        to generate the .batmap file using spatial indexing.

        Parameters
        ----------
        enzyme_name: str
            described in map_re_sites2
        genome_fa: str
            full path to the genome in FASTA format
        frags : dict
            dict containing chromosomes as keys and
            restriction-enzyme sites as values
        out_dir_rmap: str
            path to the output directory
        out_prefix_rmap: str
            name of the output file.
        """
        #include creation folders
        frags, chromo_dict = self.map_re_sites2(enzyme_name, genome_fa)

        logger.info("converting restriction-enzyme fragments into the rmap file")

        try:
            idx = index.Rtree(rtree)
        except AttributeError:
            logger.info("index failed =(")

        with open(RMAP, "w") as out:
            counter_id = 0
            for crm in frags:
                counter = 0
                for re_site in frags[crm]:
                    counter_id += 1
                    counter += 1
                    if counter == 1:
                        out.write(
                            "{}\t{}\t{}\t{}\n".format(chromo_dict[crm], 1,
                                                      re_site, counter_id), )
                        idx.insert(counter_id, (1, crm, re_site, crm))
                    else:
                        out.write(
                            "{}\t{}\t{}\t{}\n".format(
                                chromo_dict[crm],
                                prev_re_site + 1,  # pylint: disable=used-before-assignment
                                re_site,
                                counter_id), )
                        idx.insert(counter_id,
                                   (prev_re_site + 1, crm, re_site, crm))

                    prev_re_site = re_site

        idx.close()

        with open(chr_handler, "w") as chr_file:
            for chr_fake, chr_real in chromo_dict.items():
                chr_file.write("{}\t{}\n".format(chr_fake, chr_real))

        try:
            move(rtree + ".dat", rtree_dat)
            move(rtree + ".idx", rtree_idx)
            return True

        except IOError:
            logger.fatal("makeRmap_Tool.py failed to generate .rmap file")
            return False
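
Each fragment above is inserted as a 2-D box whose x-range is the genomic interval and whose y-range is the numeric chromosome id; that encoding is what lets sam_to_baitmap in Code Example #25 recover fragments with a plain intersection((start, chrom, end, chrom)) query. A stripped-down illustration of the same idea (made-up coordinates):

from rtree import index

idx = index.Rtree()
# fragment id 7 on chromosome 1 spanning positions 1..16007
# (interleaved order: xmin, ymin, xmax, ymax -> start, chrom, end, chrom)
idx.insert(7, (1, 1, 16007, 1))
# a read aligned at positions 5000-5100 on chromosome 1 falls inside that fragment
assert list(idx.intersection((5000, 1, 5100, 1))) == [7]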
Code Example #30
 def test_result_limit(self):
     idx = index.Rtree()
     idx.set_result_limit(44)
     self.assertEqual(idx.result_limit, 44)