Example #1
def multi_feature_to_polygon(geojson):
    """ Convert multi features to be multipolygon
    as single geometry.

    :param geojson: Geojson that
    :type geojson: dict

    :return: Nice geojson with multipolygon
    :rtype:dict
    """
    cascaded_geojson = None
    if len(geojson['features']) > 0:
        polygons = []
        for feature in geojson['features']:
            polygons.append(
                shapely_geometry.Polygon(
                    feature['geometry']['coordinates'][0]))
        cascaded_polygons = cascaded_union(polygons)
        cascaded_geojson = mapping(cascaded_polygons)

        coords_length = len(json.dumps(cascaded_geojson['coordinates']))

        if coords_length > 1000:
            # Simplify the polygons
            simplified_polygons = simplify_polygon(cascaded_polygons, 0.001)
            cascaded_geojson = mapping(simplified_polygons)

    geojson['features'] = [{
        "type": "Feature",
        "properties": {},
        "geometry": cascaded_geojson
    }]
    return geojson
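A minimal usage sketch for the function above. The imports, the two-square FeatureCollection, and a Shapely version that still provides cascaded_union are assumptions for illustration; simplify_polygon is a project helper that is only hit when the serialized coordinates exceed 1000 characters, so it is not exercised here.

# Hypothetical driver for multi_feature_to_polygon (imports assumed from the
# snippet's context; simplify_polygon is not triggered for this small input).
import json
import shapely.geometry as shapely_geometry
from shapely.geometry import mapping
from shapely.ops import cascaded_union

two_squares = {
    "type": "FeatureCollection",
    "features": [
        {"type": "Feature", "properties": {},
         "geometry": {"type": "Polygon",
                      "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]]}},
        {"type": "Feature", "properties": {},
         "geometry": {"type": "Polygon",
                      "coordinates": [[[1, 0], [2, 0], [2, 1], [1, 1], [1, 0]]]}},
    ],
}
merged = multi_feature_to_polygon(two_squares)
print(merged["features"][0]["geometry"]["type"])  # the two squares dissolve into one Polygon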
Example #2
def to_geojson_burnt(burnt_polys, building_polys):
    """Convert shapes into geojson with new labelled building footprints. """

    results = ({
        'type': 'Feature',
        'properties': {
            'color': 'red'
        },
        'geometry': geo.mapping(r)
    } for r in burnt_polys)

    list_results = list(results)

    # append the building footprints to geojson
    results_buildings = ({
        'type': 'Feature',
        'properties': {
            'BuildingID': b[1][0],
            'color': b[1][1]
        },
        'geometry': geo.mapping(b[0])
    } for b in building_polys)

    list_results_buildings = list(results_buildings)

    collection = {
        'type': 'FeatureCollection',
        'features': list_results + list_results_buildings
    }

    return collection
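A sketch of inputs that match the indexing used above: burnt_polys is an iterable of shapely geometries, and building_polys yields (geometry, (BuildingID, color)) pairs. The geo alias and the sample boxes are assumptions.

# Hypothetical inputs for to_geojson_burnt; `geo` is shapely.geometry as in the snippet.
import json
import shapely.geometry as geo

burnt_polys = [geo.box(0, 0, 2, 2)]
building_polys = [
    (geo.box(0.2, 0.2, 0.8, 0.8), (101, 'green')),
    (geo.box(1.2, 1.2, 1.8, 1.8), (102, 'red')),
]
fc = to_geojson_burnt(burnt_polys, building_polys)
print(len(fc['features']))  # 3: one burnt area plus two buildings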
Example #3
def to_geojson_groundtruth(burnt_polys, data_labelled):
    """Convert shapes into geojson for the groundtruth."""

    results = ({
        'type': 'Feature',
        'properties': {
            'color': 'red'
        },
        'geometry': geo.mapping(r)
    } for r in burnt_polys)

    list_results = list(results)

    # append the building footprints to geojson
    results_buildings = ({
        'type': 'Feature',
        'properties': {
            'BuildingID': b['properties']['BuildingID'],
            'color': b['properties']['color'],
            'Burnt_Label': b['properties']['Burnt_Label']
        },
        'geometry': b['geometry']
    } for b in data_labelled['features'])

    list_results_buildings = list(results_buildings)

    collection = {
        'type': 'FeatureCollection',
        'features': list_results + list_results_buildings
    }

    return collection
Example #4
 def _write_coll_(self,f,coll):
     writer = f['csv_writer']
     file_fiona = f['fiona_object']
     rstore = self._ugid_gid_store
     is_aggregated = self.ops.aggregate
     
     for geom,row in coll.get_iter_dict(use_upper_keys=True):
         writer.writerow(row)
         if not is_aggregated:
             did,gid,ugid = row['DID'],row['GID'],row['UGID']
             try:
                 if gid in rstore[did][ugid]:
                     continue
                 else:
                     raise(KeyError)
             except KeyError:
                 if did not in rstore:
                     rstore[did] = {}
                 if ugid not in rstore[did]:
                     rstore[did][ugid] = []
                 if gid not in rstore[did][ugid]:
                     rstore[did][ugid].append(gid)
                 
                 ## for multivariate calculation outputs the dataset identifier
                 ## is None.
                 try:
                     converted_did = int(did)
                 except TypeError:
                     converted_did = None
                 feature = {'properties':{'GID':int(gid),'UGID':int(ugid),'DID':converted_did},
                            'geometry':mapping(geom)}
                 file_fiona.write(feature)
Example #5
    def write_RCM3(self):
        rd = self.oblique_mercator
        ds = nc.Dataset(rd.uri)
        path = os.path.join(tempfile.mkdtemp(prefix='RCM3'), 'RCM3.shp')
        crs = fiona.crs.from_epsg(4326)
        driver = 'ESRI Shapefile'
        schema = {'geometry': 'Point', 'properties': {}}
        #        path = os.path.join(tempfile.mkdtemp(prefix='RCM3'),'RCM3.shp')
        #        polygon = Polygon(coordinates)
        #            feature = {'id':feature_idx,'properties':{},'geometry':mapping(polygon)}
        #            f.write(feature)

        #    with fiona.open(out_path,'w',driver=driver,crs=crs,schema=schema) as f:
        try:
            lats = ds.variables['lat'][:]
            lons = ds.variables['lon'][:] - 360
            n = lons.shape[0] * lons.shape[1]
            print n
            with fiona.open(path, 'w', driver=driver, crs=crs,
                            schema=schema) as f:
                for ctr, (ii, jj) in enumerate(iter_array(lats,
                                                          use_mask=False)):
                    if ctr % 100 == 0:
                        print ctr, n
                    point = Point(lons[ii, jj], lats[ii, jj])
                    feature = {'properties': {}, 'geometry': mapping(point)}
                    f.write(feature)
            import ipdb
            ipdb.set_trace()
        finally:
            ds.close()
Example #6
    def write_RCM3(self):
        rd = self.oblique_mercator
        ds = nc.Dataset(rd.uri)
        path = os.path.join(tempfile.mkdtemp(prefix='RCM3'),'RCM3.shp')
        crs = fiona.crs.from_epsg(4326)
        driver = 'ESRI Shapefile'
        schema = {'geometry':'Point',
                  'properties':{}}
#        path = os.path.join(tempfile.mkdtemp(prefix='RCM3'),'RCM3.shp')
#        polygon = Polygon(coordinates)
#            feature = {'id':feature_idx,'properties':{},'geometry':mapping(polygon)}
#            f.write(feature)
    
#    with fiona.open(out_path,'w',driver=driver,crs=crs,schema=schema) as f:
        try:
            lats = ds.variables['lat'][:]
            lons = ds.variables['lon'][:] - 360
            n = lons.shape[0]*lons.shape[1]
            print n
            with fiona.open(path,'w',driver=driver,crs=crs,schema=schema) as f:
                for ctr,(ii,jj) in enumerate(iter_array(lats,use_mask=False)):
                    if ctr % 100 == 0:
                        print ctr,n
                    point = Point(lons[ii,jj],lats[ii,jj])
                    feature = {'properties':{},'geometry':mapping(point)}
                    f.write(feature)
            import ipdb;ipdb.set_trace()
        finally:
            ds.close()
Example #7
def write_geom_dict(dct, path=None, filename=None, epsg=4326, crs=None):
    """
    :param dct:
    :type dct: dict

    >>> dct = {1: Point(1, 2), 2: Point(3, 4)}

    :param path:
    :type path: str
    :param filename:
    :type filename: str
    """

    filename = filename or 'out'
    path = path or os.path.join(mkdtemp(), '{0}.shp'.format(filename))

    crs = crs or from_epsg(epsg)
    driver = 'ESRI Shapefile'
    schema = {
        'properties': {
            'UGID': 'int'
        },
        'geometry': list(dct.values())[0].geom_type
    }
    with fiona.open(path, 'w', driver=driver, crs=crs,
                    schema=schema) as source:
        for k, v in dct.items():
            rec = {'properties': {'UGID': k}, 'geometry': mapping(v)}
            source.write(rec)
    return path
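A small driver along the lines of the doctest above; the imports mirror what the function itself needs and the filename is an arbitrary choice.

# Hypothetical call: two points written to a temporary shapefile with a UGID column.
import os
from tempfile import mkdtemp

import fiona
from fiona.crs import from_epsg
from shapely.geometry import Point, mapping

dct = {1: Point(1, 2), 2: Point(3, 4)}
shp_path = write_geom_dict(dct, filename='points')
print(os.path.exists(shp_path))  # True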
Example #8
 def make_record(self,dct):
     properties = dct.copy()
     geom = wkt.loads(properties.pop('wkt'))
     properties.update({'UGID':self._ugid})
     self._ugid += 1
     record = {'geometry':mapping(geom),
               'properties':properties}
     return(record)
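The expected input is a plain dict carrying a 'wkt' string plus any attribute columns; everything except 'wkt' ends up in the Fiona-style properties, with a running UGID added. A standalone sketch of that contract (the UGID value of 1 is an assumption about where the counter starts):

from shapely import wkt
from shapely.geometry import mapping

dct = {'wkt': 'POINT (1 2)', 'name': 'site-a'}   # hypothetical input row
properties = dct.copy()
geom = wkt.loads(properties.pop('wkt'))
properties.update({'UGID': 1})                   # the method's running counter
record = {'geometry': mapping(geom), 'properties': properties}
# record -> {'geometry': {'type': 'Point', 'coordinates': (1.0, 2.0)},
#            'properties': {'name': 'site-a', 'UGID': 1}}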
Example #9
 def save_polygons(self, out_filename):
     out = open(out_filename, 'w')
     for i in xrange(self.i):
         poly = self.polygons[i]
         feature = {
             'type': 'Feature',
             'geometry': mapping(poly.context),
         }
         out.write(json.dumps(feature) + u'\n')
Example #10
 def save_polygons(self, out_filename):
     out = open(out_filename, 'w')
     for props, poly in self.polygons:
         feature = {
             'type': 'Feature',
             'geometry': mapping(poly.context),
             'properties': props
         }
         out.write(json.dumps(feature) + u'\n')
Example #11
 def save_polygons(self, out_filename):
     out = open(out_filename, 'w')
     for props, poly in self.polygons:
         feature = {
             'type': 'Feature',
             'geometry': mapping(poly.context),
             'properties': props
         }
         out.write(json.dumps(feature) + u'\n')
Example #12
 def to_geojson(self):
     result = super().to_geojson()
     original_geometry = result['geometry']
     draw = self.geometry.buffer(self.width / 2,
                                 join_style=JOIN_STYLE.mitre,
                                 cap_style=CAP_STYLE.flat)
     result['geometry'] = format_geojson(mapping(draw))
     result['original_geometry'] = original_geometry
     return result
Example #13
def write_geom_dict(dct,path=None):
    crs = from_epsg(4326)
    driver = 'ESRI Shapefile'
    schema = {'properties':{'UGID':'int'},'geometry':dct.values()[0].geom_type}
    path = path or os.path.join(mkdtemp(),'out.shp')
    with fiona.open(path,'w',driver=driver,crs=crs,schema=schema) as source:
        for k,v in dct.iteritems():
            rec = {'properties':{'UGID':k},'geometry':mapping(v)}
            source.write(rec)
    return(path)
Example #14
 def save_polygons(self, out_filename):
     out = open(out_filename, 'w')
     features = []
     for props, poly in self.polygons:
         features.append({
             'type': 'Feature',
             'geometry': mapping(poly.context),
             'properties': props
         })
     json.dump({'type': 'FeatureCollection',
                'features': features},
               out)
Example #15
def get_exact_trimmed_geom(image, crs=3031, step=48):
    xs, ys = [], []
    ds = rasterio.open(image)
    # get source and target crs
    src_crs = ds.crs
    src_trans = ds.transform
    target_crs = from_epsg(crs)
    # read raster
    inband = ds.read(1).astype(np.uint8)
    nd = ds.nodata
    if nd is None:
        nd = 0
    else:
        inband[inband == nd] = 0
        nd = 0
    height = inband.shape[0]
    pixelst = []
    pixelsb = []
    pts = []
    # For every 'n' line, find first and last data pixel
    lines = list(range(0, height, step))
    try:
        lines_flatnonzero = [np.flatnonzero(inband[l, :] != nd) for l in lines]
    except AttributeError:
        print("Error reading image block.  Check image for corrupt data.")
    i = 0
    for nz in lines_flatnonzero:
        nzmin = nz[0] if nz.size > 0 else 0
        nzmax = nz[-1] if nz.size > 0 else 0
        if nz.size > 0:
            pixelst.append((nzmax + 1, i))
            pixelsb.append((nzmin, i))
        i += step
    pixelsb.reverse()
    pixels = pixelst + pixelsb
   
    # reproject pixels
    for px in pixels:
        x, y = src_trans * px
        xs.append(x)
        ys.append(y)
        pts.append((x, y))
    # write polygon (remove redundant vertices)
    geom = Polygon(pts)
    geom = geom.simplify(5)
    # transform crs to target crs
    src_pol = gpd.GeoDataFrame(crs=src_crs, data={'geometry': [geom]}, index=[0])
    target_pol = src_pol.to_crs(target_crs)
    # return geometry
    return mapping(target_pol.geometry)['features'][0]['geometry']
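A usage sketch, assuming 'scene.tif' is a placeholder path to a single-band raster and the imports the function relies on (rasterio, numpy as np, geopandas as gpd, fiona.crs.from_epsg, shapely Polygon/mapping) are in scope.

# Hypothetical call: trace the valid-data footprint of a raster, simplify it,
# and reproject the outline to EPSG:3031 (Antarctic Polar Stereographic).
footprint = get_exact_trimmed_geom('scene.tif', crs=3031, step=48)
print(footprint['type'])                  # 'Polygon'
print(len(footprint['coordinates'][0]))   # vertex count after simplify(5)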
Example #16
 def write_fiona(self,path,crs,driver='ESRI Shapefile'):
     schema = {'geometry':self._geom_type,
               'properties':{'UGID':'int'}}
     ref_prep = self._write_fiona_prep_geom_
     ref_uid = self.uid
     
     with fiona.open(path,'w',driver=driver,crs=crs,schema=schema) as f:
         for (ii,jj),geom in iter_array(self.value,return_value=True):
             geom = ref_prep(geom)
             uid = ref_uid[ii,jj]
             feature = {'properties':{'UGID':uid},'geometry':mapping(geom)}
             f.write(feature)
     
     return(path)
Example #17
 def test_keep_touches(self):
     points = self.geom_michigan_point_grid
     si = SpatialIndex()
     ids = points.keys()
     geoms = [points[i] for i in ids]
     si.add(ids,geoms)
     touch_geom = Point(*mapping(self.geom_michigan)['coordinates'][0][0][3])
     si.add(1000,touch_geom)
     points[1000] = touch_geom
     for keep_touches in [True,False]:
         intersects_ids = list(si.iter_intersects(self.geom_michigan,points,keep_touches=keep_touches))
         if keep_touches:
             self.assertIn(1000,intersects_ids)
         else:
             self.assertNotIn(1000,intersects_ids)
Example #18
    def _write_coll_(self, f, coll):
        writer = f['csv_writer']
        file_fiona = f['fiona_object']
        rstore = self._ugid_gid_store
        is_aggregated = self.ops.aggregate

        for geom, row in self.get_iter_from_spatial_collection(coll):
            writer.writerow(row)

        if not is_aggregated:
            for ugid, field_dict in coll.iteritems():
                for field in field_dict.itervalues():
                    did = field.uid
                    for _, _, geom, gid in field.spatial.get_geom_iter():
                        try:
                            if gid in rstore[did][ugid]:
                                continue
                            else:
                                raise KeyError
                        except KeyError:
                            if did not in rstore:
                                rstore[did] = {}
                            if ugid not in rstore[did]:
                                rstore[did][ugid] = []
                            if gid not in rstore[did][ugid]:
                                rstore[did][ugid].append(gid)

                            # for multivariate calculation outputs the dataset identifier is None.
                            try:
                                converted_did = int(did)
                            except TypeError:
                                converted_did = None

                            feature = {'properties': {constants.HEADERS.ID_GEOMETRY.upper(): int(gid),
                                                      self.geom_uid: int(ugid),
                                                      constants.HEADERS.ID_DATASET.upper(): converted_did},
                                       'geometry': mapping(geom)}
                            try:
                                file_fiona.write(feature)
                            except ValueError as e:
                                if feature['geometry']['type'] != file_fiona.meta['schema']['geometry']:
                                    msg = 'Spatial abstractions do not match. You may need to override "abstraction" and/or "s_abstraction"'
                                    msg = '{0}. Original error message from Fiona is "ValueError({1})".'.format(msg,
                                                                                                                e.message)
                                    raise ValueError(msg)
                                else:
                                    raise
Example #19
File: app.py Project: TUW-GEO/poets
def get_geojson(region):
    """
    Gets list of coordinates from polygon of region.

    Parameters
    ----------
    region : str
        Region to get coordinates from.

    Returns
    -------
    coordinates : list
    """

    shape = Shape(region, p.shapefile).polygon

    return jsonify(mapping(shape))
Example #20
def get_geojson(region):
    """
    Gets list of coordinates from polygon of region.

    Parameters
    ----------
    region : str
        Region to get coordinates from.

    Returns
    -------
    coordinates : list
    """

    shape = Shape(region, p.shapefile).polygon

    return jsonify(mapping(shape))
Example #21
 def to_shadow_geojson(self):
     shadow = self.geometry.parallel_offset(0.03,
                                            'right',
                                            join_style=JOIN_STYLE.mitre)
     shadow = shadow.buffer(0.019,
                            join_style=JOIN_STYLE.mitre,
                            cap_style=CAP_STYLE.flat)
     return OrderedDict((
         ('type', 'Feature'),
         ('properties',
          OrderedDict((
              ('type', 'shadow'),
              ('original_type', self.__class__.__name__.lower()),
              ('original_name', self.name),
              ('level', self.level.name),
          ))),
         ('geometry', format_geojson(mapping(shadow), round=False)),
     ))
Example #22
def write_tile_to_shape(tile_extents,shp_file, tile_size, node_type):
    min_x, min_y, max_x, max_y = tile_extents
    tile_ulp = (min_x, max_y)
    tile_dlp = (min_x, min_y)
    tile_drp = (max_x, min_y)
    tile_urp = (max_x, max_y)
    gridref = "E{0}N{1}".format(min_x / tile_size, max_y / tile_size,)
    shp_file.write({
                'geometry': mapping(Polygon([tile_ulp, tile_dlp, tile_drp, tile_urp])),
                'properties': {'EN_REF': gridref,
                               'TYPE' : node_type,
                               'MINX' : min_x,
                               'MINY' : min_y,
                               'MAXX' : max_x,
                               'MAXY' : max_y,
                               },
            })
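The shp_file argument has to be a Fiona collection opened with a schema that matches the properties written above. A sketch under assumed path and CRS values ('tiles.shp', EPSG:27700):

import fiona
from fiona.crs import from_epsg
from shapely.geometry import Polygon, mapping

schema = {
    'geometry': 'Polygon',
    'properties': {'EN_REF': 'str', 'TYPE': 'str',
                   'MINX': 'int', 'MINY': 'int',
                   'MAXX': 'int', 'MAXY': 'int'},
}
with fiona.open('tiles.shp', 'w', driver='ESRI Shapefile',
                crs=from_epsg(27700), schema=schema) as shp_file:
    write_tile_to_shape((0, 0, 1000, 1000), shp_file,
                        tile_size=1000, node_type='grid')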
Example #23
 def _iter_source_(self):
     with fiona.open(self.path, 'r') as source:
         for feature in source:
             # ensure the feature is valid
             # https://github.com/Toblerity/Fiona/blob/master/examples/with-shapely.py
             geom = shape(feature['geometry'])
             try:
                 if not geom.is_valid:
                     clean = geom.buffer(0.0)
                     geom = clean
                     feature['geometry'] = mapping(geom)
                     assert clean.is_valid
                     assert (clean.geom_type == 'Polygon')
             except (AssertionError, AttributeError) as e:
                 warn('{2}. Invalid geometry found with id={0} and properties: {1}'.format(feature['id'],
                                                                                           feature['properties'],
                                                                                           e))
             feature['shapely'] = geom
             yield feature
Example #24
 def test_keep_touches(self):
     points = self.geom_michigan_point_grid
     si = SpatialIndex()
     ids = list(points.keys())
     geoms = [points[i] for i in ids]
     si.add(ids, geoms)
     touch_geom = Point(
         *mapping(self.geom_michigan)['coordinates'][0][0][3])
     si.add(1000, touch_geom)
     points[1000] = touch_geom
     for keep_touches in [True, False]:
         intersects_ids = list(
             si.iter_intersects(self.geom_michigan,
                                points,
                                keep_touches=keep_touches))
         if keep_touches:
             self.assertIn(1000, intersects_ids)
         else:
             self.assertNotIn(1000, intersects_ids)
Example #25
def cascaded_union_geohash(geohash_list):
    """
    Deprecated. Kept for backward compatibility.
    Calculate the cascaded union
    :param geohash_list:
    :return: dict, a geojson geometry
    """
    logger.warning("The function cascaded_union_geohash is deprecated, "
                   "use geohash_2_multipolygon(geohash_list, union=True). "
                   "It is kept for backward compatibility.")

    geometry = geohash_2_multipolygon(geohash_list)

    geometry_shp = shape(geometry)
    polygon_union = cascaded_union(geometry_shp)

    new_geometry = geo.mapping(polygon_union)

    return new_geometry
Example #26
 def _iter_source_(self):
     with fiona.open(self.path, 'r') as source:
         for feature in source:
             # ensure the feature is valid
             # https://github.com/Toblerity/Fiona/blob/master/examples/with-shapely.py
             geom = shape(feature['geometry'])
             try:
                 if not geom.is_valid:
                     clean = geom.buffer(0.0)
                     geom = clean
                     feature['geometry'] = mapping(geom)
                     assert clean.is_valid
                     assert (clean.geom_type == 'Polygon')
             except (AssertionError, AttributeError) as e:
                 warn('{2}. Invalid geometry found with id={0} and properties: {1}'.format(feature['id'],
                                                                                           feature['properties'],
                                                                                           e))
             feature['shapely'] = geom
             yield feature
Example #27
 def _write_coll_(self,f,coll):
     writer = f['csv_writer']
     file_fiona = f['fiona_object']
     rstore = self._ugid_gid_store
     is_aggregated = self.ops.aggregate
     
     for geom,row in coll.get_iter_dict(use_upper_keys=True):
         writer.writerow(row)
         if not is_aggregated:
             did,gid,ugid = row['DID'],row['GID'],row['UGID']
             try:
                 if gid in rstore[did][ugid]:
                     continue
                 else:
                     raise(KeyError)
             except KeyError:
                 if did not in rstore:
                     rstore[did] = {}
                 if ugid not in rstore[did]:
                     rstore[did][ugid] = []
                 if gid not in rstore[did][ugid]:
                     rstore[did][ugid].append(gid)
                 
                 ## for multivariate calculation outputs the dataset identifier
                 ## is None.
                 try:
                     converted_did = int(did)
                 except TypeError:
                     converted_did = None
                     
                 feature = {'properties':{'GID':int(gid),'UGID':int(ugid),'DID':converted_did},
                            'geometry':mapping(geom)}
                 try:
                     file_fiona.write(feature)
                 except ValueError as e:
                     if feature['geometry']['type'] != file_fiona.meta['schema']['geometry']:
                         msg = 'Spatial abstractions do not match. You may need to override "abstraction" and/or "s_abstraction"'
                         msg = '{0}. Original error message from Fiona is "ValueError({1})".'.format(msg,e.message)
                         raise(ValueError(msg))
                     else:
                         raise
Example #28
def get_working_area(
        *,
        organization_id: int,
        auth=Depends(authorization("organizations:get_working_area")),
        db: Session = Depends(get_db),
) -> Any:
    """
    get working_area geojson from one organization
    """
    organization_in_db = organization.get(db, id=organization_id)

    if not organization_in_db:
        raise HTTPException(status_code=404, detail="Organization not found")

    if organization_in_db.working_area is None:
        raise HTTPException(status_code=404,
                            detail="Organization working area is empty")

    shape = to_shape(organization_in_db.working_area)

    return {"type": "Feature", "geometry": mapping(shape), "properties": {}}
Example #29
 def _iter_source_(self):
     with fiona.open(self.path, "r") as source:
         for feature in source:
             ## ensure the feature is valid
             ## https://github.com/Toblerity/Fiona/blob/master/examples/with-shapely.py
             try:
                 geom = shape(feature["geometry"])
                 if not geom.is_valid:
                     clean = geom.buffer(0.0)
                     geom = clean
                     feature["geometry"] = mapping(geom)
                     assert clean.is_valid
                     assert clean.geom_type == "Polygon"
             except (AssertionError, AttributeError) as e:
                 warn(
                     "{2}. Invalid geometry found with id={0} and properties: {1}".format(
                         feature["id"], feature["properties"], e
                     )
                 )
             feature["shapely"] = geom
             yield (feature)
Example #30
def geohash_2_multipolygon(geohash_list, union=False):
    """
    Convert a list of geohash code to a MultiPolygon geometry
    :param geohash_list:
    :param union: if True, then return a cascaded union of all the multipolygons
    :return: a geometry of multipolygon. dict
    {
        "type": "MultiPolygon",
        "coordinates": [...]
    }
    """

    if not isinstance(geohash_list, list):
        geohash_list = list(geohash_list)

    coordinates = []

    for hash_code in geohash_list:
        _box = geohash.bbox(hash_code)

        to_append = [[
            [_box["w"], _box["s"]],
            [_box["e"], _box["s"]],
            [_box["e"], _box["n"]],
            [_box["w"], _box["n"]],
            [_box["w"], _box["s"]],
        ]]
        coordinates += [to_append]

    geometry = {"type": "MultiPolygon", "coordinates": coordinates}

    if union:
        geometry_shp = shape(geometry)
        polygon_union = cascaded_union(geometry_shp)
        geometry = geo.mapping(polygon_union)

    return geometry
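A quick usage sketch; the geohash codes are arbitrary examples, and the geohash/shapely imports mirror what the function itself uses.

import geohash
import shapely.geometry as geo
from shapely.geometry import shape
from shapely.ops import cascaded_union

codes = ['u4pruyd', 'u4pruyf']                       # two example cells
raw = geohash_2_multipolygon(codes)                  # one polygon ring per cell
merged = geohash_2_multipolygon(codes, union=True)   # dissolved outline
print(raw['type'], merged['type'])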
Example #31
def write_geom_dict(dct, path=None, filename=None, epsg=4326, crs=None):
    """
    :param dct:
    :type dct: dict

    >>> dct = {1: Point(1, 2), 2: Point(3, 4)}

    :param path:
    :type path: str
    :param filename:
    :type filename: str
    """

    filename = filename or 'out'
    path = path or os.path.join(mkdtemp(), '{0}.shp'.format(filename))

    crs = crs or from_epsg(epsg)
    driver = 'ESRI Shapefile'
    schema = {'properties': {'UGID': 'int'}, 'geometry': dct.values()[0].geom_type}
    with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema) as source:
        for k, v in dct.iteritems():
            rec = {'properties': {'UGID': k}, 'geometry': mapping(v)}
            source.write(rec)
    return path
Example #32
    def write(self):
        ocgis_lh('starting write method',self._log,logging.DEBUG)
        
        unique_geometry_store = []

        # indicates if user geometries should be written to file
        write_ugeom = False
        
        try:
            build = True

            for coll in iter(self.colls):
                if build:

                    # write the user geometries if configured and there is one present on the incoming collection.
                    if self._add_ugeom and coll.geoms.values()[0] is not None:
                        write_ugeom = True

                    f = self._build_(coll)
                    if write_ugeom:
                        ugid_shp_name = self.prefix + '_ugid.shp'
                        ugid_csv_name = self.prefix + '_ugid.csv'
                        
                        if self._add_ugeom_nest:
                            fiona_path = os.path.join(self._get_or_create_shp_folder_(),ugid_shp_name)
                            csv_path = os.path.join(self._get_or_create_shp_folder_(),ugid_csv_name)
                        else:
                            fiona_path = os.path.join(self.outdir,ugid_shp_name)
                            csv_path = os.path.join(self.outdir,ugid_csv_name)

                        if coll.meta is None:
                            # convert the collection properties to fiona properties
                            from fiona_ import FionaConverter
                            fiona_properties = {}
                            for k, v in coll.properties.values()[0].iteritems():
                                fiona_properties[k] = FionaConverter.get_field_type(type(v))

                            fiona_schema = {'geometry':'MultiPolygon',
                                            'properties':fiona_properties}
                            fiona_meta = {'schema':fiona_schema,'driver':'ESRI Shapefile'}
                        else:
                            fiona_meta = coll.meta
                            
                        ## always use the CRS from the collection. shapefile metadata
                        ## will always be WGS84, but it may be overloaded in the
                        ## operations.
                        fiona_meta['crs'] = coll.crs.value
                        
                        ## always upper for the properties definition as this happens
                        ## for each record.
                        fiona_meta['schema']['properties'] = {k.upper():v for k,v in fiona_meta['schema']['properties'].iteritems()}
                        
                        ## selection geometries will always come out as MultiPolygon
                        ## regardless if they began as points. points are buffered
                        ## during the subsetting process.
                        fiona_meta['schema']['geometry'] = 'MultiPolygon'

                        fiona_object = fiona.open(fiona_path,'w',**fiona_meta)
                        csv_file = open(csv_path,'w')
                        
                        from ocgis.conv.csv_ import OcgDialect
                        csv_object = DictWriter(csv_file,fiona_meta['schema']['properties'].keys(),dialect=OcgDialect)
                        csv_object.writeheader()
                        
                    build = False
                self._write_coll_(f,coll)
                if write_ugeom:
                    ## write the overview geometries to disk
                    r_geom = coll.geoms.values()[0]
                    if isinstance(r_geom,Polygon):
                        r_geom = MultiPolygon([r_geom])
                    ## see if this geometry is in the unique geometry store
                    should_append = self._get_should_append_to_unique_geometry_store_(
                     unique_geometry_store,
                     r_geom,
                     coll.properties.values()[0]['UGID'])
                    if should_append:
                        unique_geometry_store.append({'geom':r_geom,
                                                      'ugid':coll.properties.values()[0]['UGID']})
                    
                        ## if it is unique write the geometry to the output files
                        to_write = {'geometry':mapping(r_geom),
                                    'properties':{k.upper():v for k,v in coll.properties.values()[0].iteritems()}}
                        fiona_object.write(to_write)
                        
                        ## write the geometry attributes to the corresponding shapefile
                        for row in coll.properties.itervalues():
                            csv_object.writerow({k.upper():v for k,v in row.iteritems()})
                    
        finally:
            
            ## errors are masked if the processing failed and file objects, etc.
            ## were not properly created. if there are UnboundLocalErrors pass
            ## them through to capture the error that lead to the objects not
            ## being created.
            
            try:
                try:
                    self._finalize_(f)
                except UnboundLocalError:
                    pass
            except Exception as e:
                ## this the exception we want to log
                ocgis_lh(exc=e,logger=self._log)
            finally:
                if write_ugeom:
                    try:
                        fiona_object.close()
                    except UnboundLocalError:
                        pass
                    try:
                        csv_file.close()
                    except UnboundLocalError:
                        pass
        
        ## the metadata and dataset descriptor files may only be written if
        ## OCGIS operations are present.
        if self.ops is not None and self.add_auxiliary_files == True:
            ## added OCGIS metadata output if requested.
            if self.add_meta:
                ocgis_lh('adding OCGIS metadata file','conv',logging.DEBUG)
                lines = MetaConverter(self.ops).write()
                out_path = os.path.join(self.outdir,self.prefix+'_'+MetaConverter._meta_filename)
                with open(out_path,'w') as f:
                    f.write(lines)
            
            ## add the dataset descriptor file if specified and OCGIS operations
            ## are present.
            if self._add_did_file:
                ocgis_lh('writing dataset description (DID) file','conv',logging.DEBUG)
                from ocgis.conv.csv_ import OcgDialect
                
                headers = ['DID','VARIABLE','ALIAS','URI','STANDARD_NAME','UNITS','LONG_NAME']
                out_path = os.path.join(self.outdir,self.prefix+'_did.csv')
                with open(out_path,'w') as f:
                    writer = csv.writer(f,dialect=OcgDialect)
                    writer.writerow(headers)
                    for rd in self.ops.dataset.itervalues():
                        for d in rd:
                            row = [rd.did,d['variable'],d['alias'],rd.uri]
                            ref_variable = rd.source_metadata['variables'][d['variable']]['attrs']
                            row.append(ref_variable.get('standard_name',None))
                            row.append(ref_variable.get('units',None))
                            row.append(ref_variable.get('long_name',None))
                            writer.writerow(row)
                
            ## add source metadata if requested
            if self._add_source_meta:
                ocgis_lh('writing source metadata file','conv',logging.DEBUG)
                out_path = os.path.join(self.outdir,self.prefix+'_source_metadata.txt')
                to_write = []
                for rd in self.ops.dataset.itervalues():
                    ip = Inspect(meta=rd.source_metadata, uri=rd.uri)
                    to_write += ip.get_report_no_variable()
                with open(out_path,'w') as f:
                    f.writelines('\n'.join(to_write))
        
        ## return the internal path unless overloaded by subclasses.
        ret = self._get_return_()
        
        return(ret)
Example #33
 def to_geojson(self):
     return OrderedDict((
         ('type', 'Feature'),
         ('properties', self.get_geojson_properties()),
         ('geometry', format_geojson(mapping(self.geometry), round=False)),
     ))
Example #34
 def tofile(self, form=None):
     result = super().tofile()
     result['geometry'] = format_geojson(mapping(self.geometry))
     return result
Example #35
def nearestPoint(intersectPolygons, xyLine, end):
    start = numpy.array(mapping(xyLine)['coordinates'][0])
    point = end
    end = numpy.array(end)
    #print(intersectPolygons)
    leastpoint = sys.maxsize
    for polygon in intersectPolygons:
        for i in range(len(mapping(polygon)['coordinates'][0])):
            current = numpy.array(mapping(polygon)['coordinates'][0][i])
            if ((mapping(xyLine)['coordinates'][0]
                 == mapping(polygon)['coordinates'][0][i]) &
                (mapping(polygon)['coordinates'][0][i] in visited)):
                #print("The same")
                continue
            else:
                if ((mapping(xyLine)['coordinates'][0])
                        in mapping(polygon)['coordinates'][0]):
                    if (LineString([(mapping(xyLine)['coordinates'][0]),
                                    (mapping(polygon)['coordinates'][0][i])
                                    ]).touches(polygon)):
                        #print(mapping(polygon)['coordinates'][0][i],False)
                        distance = numpy.linalg.norm(
                            start - current) + numpy.linalg.norm(current - end)
                        if distance < leastpoint:
                            leastpoint = distance
                            point = mapping(polygon)['coordinates'][0][i]
                    else:
                        #print(mapping(polygon)['coordinates'][0][i],True)
                        continue
                else:
                    intersectFlag = False
                    newLine = LineString([
                        (mapping(xyLine)['coordinates'][0]),
                        (mapping(polygon)['coordinates'][0][i])
                    ])
                    for p in polygons:
                        if newLine.intersects(p) & (~(newLine.touches(p))):
                            intersectFlag = True
                    if intersectFlag == False:
                        distance = numpy.linalg.norm(
                            start - current) + numpy.linalg.norm(current - end)
                        if distance < leastpoint:
                            leastpoint = distance
                            point = mapping(polygon)['coordinates'][0][i]
    return point
Example #36
 def polygon_geojson(self, poly, properties):
     return {
         'type': 'Feature',
         'geometry': mapping(poly),
     }
Example #37
 def _write_coll_(self,f,coll):
     fiona_object = f['fiona_object']
     for geom,properties in coll.get_iter_dict(use_upper_keys=True,conversion_map=f['fiona_conversion']):
         to_write = {'geometry':mapping(geom),'properties':properties}
         fiona_object.write(to_write)
Example #38
import geojson
import requests
from fastkml import kml
from shapely.geometry import mapping

req = requests.get('http://ercot.com/content/cdr/contours/rtmSppPoints.kml')

k = kml.KML()
k.from_string(req.text)

osm_ids = []
features = []

try:
    with open('ercot_rtm.map', 'r') as f:
        for line in f:
            osm_ids.extend(line.split('\t'))
except:
    pass

for p in list(list(k.features())[0].features())[1].features():
    if not p.name in osm_ids:
        features.append(
            geojson.Feature(geometry=mapping(p.geometry),
                            properties={
                                'name': p.name,
                                'description': p.description
                            }))

features = geojson.FeatureCollection(features)
with open('ercot_rtm.geojson', 'w') as f:
    geojson.dump(features, f)
Example #39
            geom = shape(source["geometry"])  # type: ignore

            # Check if geometry is a valid (e.g. no intersection etc.)
            if not geom.is_valid:  # type: ignore
                try:
                    reason = explain_validity(geom)  # type: ignore
                    messages.append(
                        Message(
                            level=MessageLevel.ERROR,
                            message=f"{filename} invalid geometry: {reason}",
                        ))
                    valid_geom = make_valid(geom)  # type: ignore
                    valid_geom = eliutils.orient_geometry_rfc7946(
                        valid_geom)  # type: ignore
                    valid_geom_json = json.dumps(
                        mapping(valid_geom),
                        sort_keys=False,
                        ensure_ascii=False)  # type: ignore
                    messages.append(
                        Message(
                            level=MessageLevel.ERROR,
                            message=
                            f"{filename} please consider using corrected geometry: {valid_geom_json}",
                        ))
                    geom = valid_geom  # type: ignore
                except Exception as e:
                    logger.warning("Geometry check failed: {e}")

            # Check ring orientation to correspond with GeoJSON rfc7946:
            # A linear ring MUST follow the right-hand rule with respect to the
            # area it bounds, i.e., exterior rings are counterclockwise, and
Example #40
    def __init__(self,
                 *args,
                 space_id=None,
                 request=None,
                 geometry_editable=False,
                 is_json=False,
                 **kwargs):
        self.request = request
        super().__init__(*args, **kwargs)
        creating = not self.instance.pk

        if hasattr(self.instance, 'author_id'):
            if self.instance.author_id is None:
                self.instance.author = request.user

        if 'geometry' in self.fields:
            if not geometry_editable:
                # can't see this geometry in editor
                self.fields.pop('geometry')
            else:
                # hide geometry widget
                self.fields['geometry'].widget = HiddenInput()
                if not creating:
                    self.initial['geometry'] = json.dumps(
                        mapping(self.instance.geometry), separators=(',', ':'))

        if self._meta.model.__name__ == 'Source' and self.request.user.is_superuser:
            Source = self.request.changeset.wrap_model('Source')

            sources = {
                s['name']: s
                for s in
                Source.objects.all().values('name', 'access_restriction_id',
                                            'left', 'bottom', 'right', 'top')
            }
            used_names = set(sources.keys())
            all_names = set(os.listdir(settings.SOURCES_ROOT))
            if not creating:
                used_names.remove(self.instance.name)
                all_names.add(self.instance.name)
            self.fields['name'].widget = Select(choices=tuple(
                (s, s) for s in sorted(all_names - used_names)))

            if creating:
                for s in sources.values():
                    s['access_restriction'] = s['access_restriction_id']
                    del s['access_restriction_id']
                self.fields['copy_from'] = ChoiceField(choices=tuple(
                    (('', '---------'), )) + tuple(
                        (json.dumps(sources[name],
                                    separators=(',', ':'),
                                    cls=DjangoJSONEncoder), name)
                        for name in sorted(used_names)),
                                                       required=False)

            self.fields['fixed_x'] = DecimalField(label='fixed x',
                                                  required=False,
                                                  max_digits=7,
                                                  decimal_places=3,
                                                  initial=0)
            self.fields['fixed_y'] = DecimalField(label='fixed y',
                                                  required=False,
                                                  max_digits=7,
                                                  decimal_places=3,
                                                  initial=0)
            self.fields['scale_x'] = DecimalField(label='scale x (m/px)',
                                                  required=False,
                                                  max_digits=7,
                                                  decimal_places=3,
                                                  initial=1)
            self.fields['scale_y'] = DecimalField(label='scale y (m/px)',
                                                  required=False,
                                                  max_digits=7,
                                                  decimal_places=3,
                                                  initial=1)
            self.fields['lock_aspect'] = BooleanField(
                label='lock aspect ratio', required=False, initial=True)
            self.fields['lock_scale'] = BooleanField(
                label='lock scale (for moving)', required=False, initial=True)

            self.fields.move_to_end('lock_scale', last=False)
            self.fields.move_to_end('lock_aspect', last=False)
            self.fields.move_to_end('scale_y', last=False)
            self.fields.move_to_end('scale_x', last=False)
            self.fields.move_to_end('fixed_y', last=False)
            self.fields.move_to_end('fixed_x', last=False)
            self.fields.move_to_end('access_restriction', last=False)
            if creating:
                self.fields.move_to_end('copy_from', last=False)
            self.fields.move_to_end('name', last=False)

        if self._meta.model.__name__ == 'AccessRestriction':
            AccessRestrictionGroup = self.request.changeset.wrap_model(
                'AccessRestrictionGroup')

            self.fields['groups'].label_from_instance = lambda obj: obj.title
            self.fields[
                'groups'].queryset = AccessRestrictionGroup.qs_for_request(
                    self.request)

        elif 'groups' in self.fields:
            LocationGroupCategory = self.request.changeset.wrap_model(
                'LocationGroupCategory')

            kwargs = {
                'allow_' + self._meta.model._meta.default_related_name: True
            }
            categories = LocationGroupCategory.objects.filter(
                **kwargs).prefetch_related('groups')
            if self.instance.pk:
                instance_groups = tuple(
                    self.instance.groups.values_list('pk', flat=True))
            else:
                instance_groups = ()

            self.fields.pop('groups')

            for category in categories:
                choices = tuple((str(group.pk), group.title)
                                for group in sorted(category.groups.all(),
                                                    key=self.sort_group))
                category_groups = set(group.pk
                                      for group in category.groups.all())
                initial = tuple(
                    str(pk) for pk in instance_groups if pk in category_groups)
                if category.single:
                    name = 'group_' + category.name
                    initial = initial[0] if initial else ''
                    choices = (('', '---'), ) + choices
                    field = ChoiceField(label=category.title,
                                        required=False,
                                        initial=initial,
                                        choices=choices,
                                        help_text=category.help_text)
                else:
                    name = 'groups_' + category.name
                    field = MultipleChoiceField(label=category.title_plural,
                                                required=False,
                                                initial=initial,
                                                choices=choices,
                                                help_text=category.help_text)
                self.fields[name] = field

        if 'category' in self.fields:
            self.fields['category'].label_from_instance = lambda obj: obj.title

        if 'access_restriction' in self.fields:
            AccessRestriction = self.request.changeset.wrap_model(
                'AccessRestriction')

            self.fields[
                'access_restriction'].label_from_instance = lambda obj: obj.title
            self.fields[
                'access_restriction'].queryset = AccessRestriction.qs_for_request(
                    self.request)

        if 'base_mapdata_accessible' in self.fields:
            if not request.user.is_superuser:
                self.fields['base_mapdata_accessible'].disabled = True

        if space_id and 'target_space' in self.fields:
            Space = self.request.changeset.wrap_model('Space')

            GraphNode = self.request.changeset.wrap_model('GraphNode')
            GraphEdge = self.request.changeset.wrap_model('GraphEdge')

            cache_key = 'editor:neighbor_spaces:%s:%s%d' % (
                self.request.changeset.raw_cache_key_by_changes,
                AccessPermission.cache_key_for_request(
                    request, with_update=False), space_id)
            other_spaces = cache.get(cache_key, None)
            if other_spaces is None:
                AccessPermission.cache_key_for_request(
                    request, with_update=False) + ':' + str(request.user.pk
                                                            or 0)
                space_nodes = set(
                    GraphNode.objects.filter(space_id=space_id).values_list(
                        'pk', flat=True))
                space_edges = GraphEdge.objects.filter(
                    Q(from_node_id__in=space_nodes)
                    | Q(to_node_id__in=space_nodes)).values_list(
                        'from_node_id', 'to_node_id')
                other_nodes = set(chain(*space_edges)) - space_nodes
                other_spaces = set(
                    GraphNode.objects.filter(pk__in=other_nodes).values_list(
                        'space_id', flat=True))
                other_spaces.discard(space_id)
                cache.set(cache_key, other_spaces, 900)

            for space_field in ('origin_space', 'target_space'):
                other_space_id = getattr(self.instance, space_field + '_id',
                                         None)
                if other_space_id:
                    other_spaces.add(other_space_id)

            space_qs = Space.qs_for_request(
                self.request).filter(pk__in=other_spaces)

            for space_field in ('origin_space', 'target_space'):
                if space_field in self.fields:
                    self.fields[
                        space_field].label_from_instance = lambda obj: obj.title
                    self.fields[space_field].queryset = space_qs

        self.redirect_slugs = None
        self.add_redirect_slugs = None
        self.remove_redirect_slugs = None
        if 'slug' in self.fields:
            self.redirect_slugs = sorted(
                self.instance.redirects.values_list('slug', flat=True))
            self.fields['redirect_slugs'] = CharField(
                label=_('Redirecting Slugs (comma seperated)'),
                required=False,
                initial=','.join(self.redirect_slugs))
            self.fields.move_to_end('redirect_slugs', last=False)
            self.fields.move_to_end('slug', last=False)

        if 'from_node' in self.fields:
            self.fields['from_node'].widget = HiddenInput()

        if 'to_node' in self.fields:
            self.fields['to_node'].widget = HiddenInput()

        if 'data' in self.fields and 'data' in self.initial:
            self.initial['data'] = json.dumps(self.initial['data'])

        self.is_json = is_json
        self.missing_fields = tuple(
            (name, field) for name, field in self.fields.items()
            if name not in self.data and not field.required)
Example #41
    def decode_geohash(self, multipolygon=False, union=True, overwrite=False):
        """
        Decode a geohash list and return a GeoJSON format dict
        :param multipolygon: by default, decode_geohash will create a GeoJSON polygon for each geohash code, by
                             setting multipolygon to True, only one multipolygon that contains all geohash codes
                             will be created.
        :param union: set to True to calculate the cascaded union of all the polygons
        :param overwrite:
        :return: a GeoJSON format dict
        """
        if self.__geojson and not overwrite:
            raise ExistedValueError(
                'The GeoJsonHasher object has existing geojson. Set overwrite '
                'to True to overwrite it.')

        if not self.__geohash_codes:
            raise ValueError('GeoJsonHasher has no GeoHash codes.')

        if multipolygon:
            coordinates = [
                self._polygon_coordinates(geohash.bbox(i))
                for i in self.__geohash_codes
            ]
            logger.debug('Calculating coordinates.')

            __geometry = {"type": "MultiPolygon", "coordinates": coordinates}

            if union:
                geometry_shp = shape(__geometry)
                polygon_union = cascaded_union(geometry_shp)
                __geometry = geo.mapping(polygon_union)
                logger.debug('Calculate cascaded union.')

            __feature = {
                "type": "Feature",
                "properties": {
                    "geohash": self.__geohash_codes
                },
                "geometry": __geometry
            }

            __features = [__feature]

        else:
            __features = [{
                "type": "Feature",
                "properties": {
                    "geohash": [i]
                },
                "geometry": {
                    "type": "Polygon",
                    "coordinates": self._polygon_coordinates(geohash.bbox(i))
                }
            } for i in self.__geohash_codes]
            logger.debug('multipolygon: False, creating features.')

        __feature_collection = {
            "type": "FeatureCollection",
            "features": __features
        }
        self.__geojson = __feature_collection
        return self.__geojson
Example #42
    def __init__(self, *args, space_id=None, request=None, **kwargs):
        self.request = request
        super().__init__(*args, **kwargs)
        creating = not self.instance.pk

        if hasattr(self.instance, 'author_id'):
            if self.instance.author_id is None:
                self.instance.author = request.user

        if 'level' in self.fields:
            # hide level widget
            self.fields['level'].widget = HiddenInput()

        if 'space' in self.fields:
            # hide space widget
            self.fields['space'].widget = HiddenInput()

        if 'geometry' in self.fields:
            # hide geometry widget
            self.fields['geometry'].widget = HiddenInput()
            if not creating:
                self.initial['geometry'] = json.dumps(mapping(
                    self.instance.geometry),
                                                      separators=(',', ':'))

        if self._meta.model.__name__ == 'AccessRestriction':
            AccessRestrictionGroup = self.request.changeset.wrap_model(
                'AccessRestrictionGroup')

            self.fields['groups'].label_from_instance = lambda obj: obj.title
            self.fields[
                'groups'].queryset = AccessRestrictionGroup.qs_for_request(
                    self.request)

        elif 'groups' in self.fields:
            LocationGroupCategory = self.request.changeset.wrap_model(
                'LocationGroupCategory')

            kwargs = {
                'allow_' + self._meta.model._meta.default_related_name: True
            }
            categories = LocationGroupCategory.objects.filter(
                **kwargs).prefetch_related('groups').order_by('priority')
            if self.instance.pk:
                instance_groups = tuple(
                    self.instance.groups.values_list('pk', flat=True))
            else:
                instance_groups = ()

            self.fields.pop('groups')

            for category in categories:
                choices = tuple((str(group.pk), group.title)
                                for group in category.groups.all())
                category_groups = set(group.pk
                                      for group in category.groups.all())
                initial = tuple(
                    str(pk) for pk in instance_groups if pk in category_groups)
                if category.single:
                    name = 'group_' + category.name
                    initial = initial[0] if initial else ''
                    choices = (('', '---'), ) + choices
                    field = ChoiceField(label=category.title,
                                        required=False,
                                        initial=initial,
                                        choices=choices)
                else:
                    name = 'groups_' + category.name
                    field = MultipleChoiceField(label=category.title_plural,
                                                required=False,
                                                initial=initial,
                                                choices=choices)
                self.fields[name] = field

        if 'category' in self.fields:
            self.fields['category'].label_from_instance = lambda obj: obj.title

        if 'access_restriction' in self.fields:
            AccessRestriction = self.request.changeset.wrap_model(
                'AccessRestriction')

            self.fields['access_restriction'].label_from_instance = lambda obj: obj.title
            self.fields['access_restriction'].queryset = AccessRestriction.qs_for_request(self.request)

        if space_id and 'target_space' in self.fields:
            Space = self.request.changeset.wrap_model('Space')

            GraphNode = self.request.changeset.wrap_model('GraphNode')
            GraphEdge = self.request.changeset.wrap_model('GraphEdge')

            cache_key = 'editor:neighbor_spaces:%s:%s%d' % (
                self.request.changeset.raw_cache_key_by_changes,
                AccessPermission.cache_key_for_request(request, with_update=False),
                space_id)
            other_spaces = cache.get(cache_key, None)
            if other_spaces is None:
                # note: this expression builds a per-user cache key suffix but its
                # result is discarded, so it has no effect as written
                AccessPermission.cache_key_for_request(
                    request, with_update=False) + ':' + str(request.user.pk or 0)
                space_nodes = set(
                    GraphNode.objects.filter(space_id=space_id).values_list(
                        'pk', flat=True))
                space_edges = GraphEdge.objects.filter(
                    Q(from_node_id__in=space_nodes)
                    | Q(to_node_id__in=space_nodes)).values_list(
                        'from_node_id', 'to_node_id')
                other_nodes = set(chain(*space_edges)) - space_nodes
                other_spaces = set(
                    GraphNode.objects.filter(pk__in=other_nodes).values_list(
                        'space_id', flat=True))
                other_spaces.discard(space_id)
                cache.set(cache_key, other_spaces, 900)

            for space_field in ('origin_space', 'target_space'):
                other_space_id = getattr(self.instance, space_field + '_id',
                                         None)
                if other_space_id:
                    other_spaces.add(other_space_id)

            space_qs = Space.qs_for_request(
                self.request).filter(pk__in=other_spaces)

            for space_field in ('origin_space', 'target_space'):
                if space_field in self.fields:
                    self.fields[space_field].label_from_instance = lambda obj: obj.title
                    self.fields[space_field].queryset = space_qs

        self.redirect_slugs = None
        self.add_redirect_slugs = None
        self.remove_redirect_slugs = None
        if 'slug' in self.fields:
            self.redirect_slugs = sorted(
                self.instance.redirects.values_list('slug', flat=True))
            self.fields['redirect_slugs'] = CharField(
                label=_('Redirecting Slugs (comma separated)'),
                required=False,
                initial=','.join(self.redirect_slugs))
            self.fields.move_to_end('redirect_slugs', last=False)
            self.fields.move_to_end('slug', last=False)

        if 'from_node' in self.fields:
            self.fields['from_node'].widget = HiddenInput()

        if 'to_node' in self.fields:
            self.fields['to_node'].widget = HiddenInput()

        if 'data' in self.fields and 'data' in self.initial:
            self.initial['data'] = json.dumps(self.initial['data'])
Example #43
0
    def __init__(self, *args, space_id=None, request=None, geometry_editable=False, is_json=False, **kwargs):
        self.request = request
        super().__init__(*args, **kwargs)
        creating = not self.instance.pk

        if hasattr(self.instance, 'author_id'):
            if self.instance.author_id is None:
                self.instance.author = request.user

        if 'geometry' in self.fields:
            if not geometry_editable:
                # can't see this geometry in editor
                self.fields.pop('geometry')
            else:
                # hide geometry widget
                self.fields['geometry'].widget = HiddenInput()
                if not creating:
                    self.initial['geometry'] = json.dumps(mapping(self.instance.geometry), separators=(',', ':'))

        if self._meta.model.__name__ == 'Source' and self.request.user.is_superuser:
            Source = self.request.changeset.wrap_model('Source')

            sources = {s['name']: s for s in Source.objects.all().values('name', 'access_restriction_id',
                                                                         'left', 'bottom', 'right', 'top')}
            used_names = set(sources.keys())
            all_names = set(os.listdir(settings.SOURCES_ROOT))
            if not creating:
                used_names.remove(self.instance.name)
                all_names.add(self.instance.name)
            self.fields['name'].widget = Select(choices=tuple((s, s) for s in sorted(all_names-used_names)))

            if creating:
                for s in sources.values():
                    s['access_restriction'] = s['access_restriction_id']
                    del s['access_restriction_id']
                self.fields['copy_from'] = ChoiceField(
                    choices=tuple((('', '---------'), ))+tuple(
                        (json.dumps(sources[name], separators=(',', ':'), cls=DjangoJSONEncoder), name)
                        for name in sorted(used_names)
                    ),
                    required=False
                )

            self.fields['fixed_x'] = DecimalField(label='fixed x', required=False,
                                                  max_digits=7, decimal_places=3, initial=0)
            self.fields['fixed_y'] = DecimalField(label='fixed y', required=False,
                                                  max_digits=7, decimal_places=3, initial=0)
            self.fields['scale_x'] = DecimalField(label='scale x (m/px)', required=False,
                                                  max_digits=7, decimal_places=3, initial=1)
            self.fields['scale_y'] = DecimalField(label='scale y (m/px)', required=False,
                                                  max_digits=7, decimal_places=3, initial=1)
            self.fields['lock_aspect'] = BooleanField(label='lock aspect ratio', required=False, initial=True)
            self.fields['lock_scale'] = BooleanField(label='lock scale (for moving)', required=False, initial=True)

            self.fields.move_to_end('lock_scale', last=False)
            self.fields.move_to_end('lock_aspect', last=False)
            self.fields.move_to_end('scale_y', last=False)
            self.fields.move_to_end('scale_x', last=False)
            self.fields.move_to_end('fixed_y', last=False)
            self.fields.move_to_end('fixed_x', last=False)
            self.fields.move_to_end('access_restriction', last=False)
            if creating:
                self.fields.move_to_end('copy_from', last=False)
            self.fields.move_to_end('name', last=False)

        if self._meta.model.__name__ == 'AccessRestriction':
            AccessRestrictionGroup = self.request.changeset.wrap_model('AccessRestrictionGroup')

            self.fields['groups'].label_from_instance = lambda obj: obj.title
            self.fields['groups'].queryset = AccessRestrictionGroup.qs_for_request(self.request)

        elif 'groups' in self.fields:
            LocationGroupCategory = self.request.changeset.wrap_model('LocationGroupCategory')

            kwargs = {'allow_'+self._meta.model._meta.default_related_name: True}
            categories = LocationGroupCategory.objects.filter(**kwargs).prefetch_related('groups')
            if self.instance.pk:
                instance_groups = tuple(self.instance.groups.values_list('pk', flat=True))
            else:
                instance_groups = ()

            self.fields.pop('groups')

            for category in categories:
                choices = tuple((str(group.pk), group.title)
                                for group in sorted(category.groups.all(), key=self.sort_group))
                category_groups = set(group.pk for group in category.groups.all())
                initial = tuple(str(pk) for pk in instance_groups if pk in category_groups)
                if category.single:
                    name = 'group_'+category.name
                    initial = initial[0] if initial else ''
                    choices = (('', '---'), )+choices
                    field = ChoiceField(label=category.title, required=False, initial=initial, choices=choices,
                                        help_text=category.help_text)
                else:
                    name = 'groups_'+category.name
                    field = MultipleChoiceField(label=category.title_plural, required=False,
                                                initial=initial, choices=choices,
                                                help_text=category.help_text)
                self.fields[name] = field

        if 'category' in self.fields:
            self.fields['category'].label_from_instance = lambda obj: obj.title

        if 'access_restriction' in self.fields:
            AccessRestriction = self.request.changeset.wrap_model('AccessRestriction')

            self.fields['access_restriction'].label_from_instance = lambda obj: obj.title
            self.fields['access_restriction'].queryset = AccessRestriction.qs_for_request(self.request)

        if 'base_mapdata_accessible' in self.fields:
            if not request.user.is_superuser:
                self.fields['base_mapdata_accessible'].disabled = True

        if space_id and 'target_space' in self.fields:
            Space = self.request.changeset.wrap_model('Space')

            GraphNode = self.request.changeset.wrap_model('GraphNode')
            GraphEdge = self.request.changeset.wrap_model('GraphEdge')

            cache_key = 'editor:neighbor_spaces:%s:%s%d' % (
                self.request.changeset.raw_cache_key_by_changes,
                AccessPermission.cache_key_for_request(request, with_update=False),
                space_id
            )
            other_spaces = cache.get(cache_key, None)
            if other_spaces is None:
                # note: this expression builds a per-user cache key suffix but its result
                # is discarded, so it has no effect as written
                AccessPermission.cache_key_for_request(request, with_update=False) + ':' + str(request.user.pk or 0)
                space_nodes = set(GraphNode.objects.filter(space_id=space_id).values_list('pk', flat=True))
                space_edges = GraphEdge.objects.filter(
                    Q(from_node_id__in=space_nodes) | Q(to_node_id__in=space_nodes)
                ).values_list('from_node_id', 'to_node_id')
                other_nodes = set(chain(*space_edges)) - space_nodes
                other_spaces = set(GraphNode.objects.filter(pk__in=other_nodes).values_list('space_id', flat=True))
                other_spaces.discard(space_id)
                cache.set(cache_key, other_spaces, 900)

            for space_field in ('origin_space', 'target_space'):
                other_space_id = getattr(self.instance, space_field+'_id', None)
                if other_space_id:
                    other_spaces.add(other_space_id)

            space_qs = Space.qs_for_request(self.request).filter(pk__in=other_spaces)

            for space_field in ('origin_space', 'target_space'):
                if space_field in self.fields:
                    self.fields[space_field].label_from_instance = lambda obj: obj.title
                    self.fields[space_field].queryset = space_qs

        self.redirect_slugs = None
        self.add_redirect_slugs = None
        self.remove_redirect_slugs = None
        if 'slug' in self.fields:
            self.redirect_slugs = sorted(self.instance.redirects.values_list('slug', flat=True))
            self.fields['redirect_slugs'] = CharField(label=_('Redirecting Slugs (comma separated)'), required=False,
                                                      initial=','.join(self.redirect_slugs))
            self.fields.move_to_end('redirect_slugs', last=False)
            self.fields.move_to_end('slug', last=False)

        if 'from_node' in self.fields:
            self.fields['from_node'].widget = HiddenInput()

        if 'to_node' in self.fields:
            self.fields['to_node'].widget = HiddenInput()

        if 'data' in self.fields and 'data' in self.initial:
            self.initial['data'] = json.dumps(self.initial['data'])

        self.is_json = is_json
        self.missing_fields = tuple((name, field) for name, field in self.fields.items()
                                    if name not in self.data and not field.required)
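A hedged sketch of how a form with the signature above might be constructed in an editor view. `SpaceGeometryForm`, `edit_view`, and the instance lookup are hypothetical names; only the keyword arguments (`request`, `space_id`, `geometry_editable`, `is_json`) come from the __init__ shown above, and the save step is simplified.

def edit_view(request, space_id, pk=None):
    # hypothetical view: fetch the instance being edited (None when creating)
    instance = None
    form = SpaceGeometryForm(request.POST or None, instance=instance,
                             request=request, space_id=space_id,
                             geometry_editable=True, is_json=False)
    if request.method == 'POST' and form.is_valid():
        form.save()  # simplified; real code may route this through a changeset
    return form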
Example #44
0
    def __init__(self, *args, request=None, **kwargs):
        self.request = request
        super().__init__(*args, **kwargs)
        creating = not self.instance.pk

        # disable name on non-direct editing
        if not creating and not settings.DIRECT_EDITING:
            self.fields['name'].disabled = True

        if creating:
            self.fields['name'].initial = hex(int(time.time()*1000000))[2:]

        # restrict package choices and field_name
        if not creating:
            if not settings.DIRECT_EDITING:
                self.fields['package'].widget = HiddenInput()
                self.fields['package'].disabled = True
            self.initial['package'] = self.instance.package.name
        elif not settings.DIRECT_EDITING:
            unlocked_packages = get_unlocked_packages(request)
            if len(unlocked_packages) == 1:
                self.fields['package'].widget = HiddenInput()
                self.fields['package'].initial = next(iter(unlocked_packages))
                self.fields['package'].disabled = True
            else:
                self.fields['package'] = ModelChoiceField(
                    queryset=Package.objects.filter(name__in=unlocked_packages),
                )
        self.fields['package'].to_field_name = 'name'

        if 'level' in self.fields:
            # hide level widget and set field_name
            self.fields['level'].widget = HiddenInput()
            self.fields['level'].to_field_name = 'name'
            if not creating:
                self.initial['level'] = self.instance.level.name

        if 'crop_to_level' in self.fields:
            # set field_name
            self.fields['crop_to_level'].to_field_name = 'name'
            if not creating and self.instance.crop_to_level is not None:
                self.initial['crop_to_level'] = self.instance.crop_to_level.name

        if 'levels' in self.fields:
            # set field_name
            self.fields['levels'].to_field_name = 'name'

        if 'groups' in self.fields:
            # set field_name
            self.fields['groups'].to_field_name = 'name'

        if 'geometry' in self.fields:
            # hide geometry widget
            self.fields['geometry'].widget = HiddenInput()
            if not creating:
                self.initial['geometry'] = json.dumps(mapping(self.instance.geometry), separators=(',', ':'))

        # parse titles
        self.titles = None
        if hasattr(self.instance, 'titles'):
            titles = OrderedDict((lang_code, '') for lang_code, language in settings.LANGUAGES)
            if self.instance is not None and self.instance.pk:
                titles.update(self.instance.titles)

            language_titles = dict(settings.LANGUAGES)
            for language in titles.keys():
                new_title = self.data.get('title_' + language)
                if new_title is not None:
                    titles[language] = new_title
                self.fields['title_' + language] = CharField(label=language_titles.get(language, language),
                                                             required=False,
                                                             initial=titles[language].strip(), max_length=50)
            self.titles = titles
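A brief hedged sketch of reading the per-language title fields created above back into a dict after validation; `form` stands for an instance of this form class, and the collection step itself is an assumption, not shown in the original.

# Hypothetical post-validation step: gather the dynamic title_<lang> fields
# back into a titles dict, dropping languages that were left blank.
titles = {
    lang_code: form.cleaned_data.get('title_' + lang_code, '').strip()
    for lang_code, language in settings.LANGUAGES
}
titles = {lang: title for lang, title in titles.items() if title}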
Example #45
0
def create_county_district_features(county_features, district_features,
                                    valid_county_districts,
                                    county_district_dem_votes):
    county_polys = [
        shape(county['geometry']) for county in county_features['features']
    ]

    district_polys = [
        shape(district['geometry'])
        for district in district_features['features']
    ]

    county_district_features = []

    district_dem_votes = build_district_dem_votes(county_district_dem_votes)
    county_dem_votes = build_county_dem_votes(county_district_dem_votes)
    single_district_counties = build_single_district_counties(
        county_district_dem_votes)

    # Not a huge number of comparisons, naive method is fine
    for i, county_poly in enumerate(county_polys):
        for j, district_poly in enumerate(district_polys):
            district = district_features['features'][j]['properties'][
                'CD115FP']
            county = county_features['features'][i]['properties']['NAME']
            if (county, district) not in valid_county_districts:
                continue

            if district_poly.contains(county_poly):
                geom = county_poly
            elif (district_poly.intersects(county_poly)
                  and not district_poly.touches(county_poly)):
                geom = district_poly.intersection(county_poly)
                if geom.type not in ('Polygon', 'MultiPolygon'):
                    buffered = geom.buffer(0.0)
                    if not buffered.is_valid:
                        continue
                    geom = buffered
                    if geom.type not in ('Polygon', 'MultiPolygon'):
                        continue
            else:
                continue

            props = {'district': district, 'county': county}

            county_district_signatures = valid_county_districts[(county,
                                                                 district)]

            props.update(county_district_signatures)

            valid_signatures = int(
                county_district_signatures['Valid Signatures'])

            dem_votes = county_district_dem_votes.get((county, district), {})
            if not dem_votes and county in single_district_counties:
                dem_votes = county_district_dem_votes.get(
                    (county, single_district_counties[county]), {})

            if dem_votes:
                dem_votes_approx = int(dem_votes['Dem Votes 2016'])
                expected_county_share_of_signatures = float(
                    dem_votes['County Share of Dem Votes in District'])
                expected_county_share_method = '2016 Election Precinct Results'
            else:
                num_dem_votes_in_county = county_dem_votes[county]
                num_dem_votes_in_district = district_dem_votes[district]
                valid_in_county = int(county_district_signatures[
                    'Total Valid Signatures in County'])
                valid_in_county_district = valid_signatures
                if valid_in_county > 0:
                    dem_votes_approx = num_dem_votes_in_county * (
                        valid_in_county_district / float(valid_in_county))
                else:
                    dem_votes_approx = 0
                expected_county_share_of_signatures = dem_votes_approx / num_dem_votes_in_district
                expected_county_share_method = 'Share of Signatures in County Split'

            props['Dem Votes 2016'] = dem_votes_approx
            props['Expected County Share of Dems in District'] = expected_county_share_of_signatures
            props['Expected County Share Method'] = expected_county_share_method

            total_valid_in_district = int(county_district_signatures[
                'Total Valid Signatures in District'])
            total_needed = int(
                county_district_signatures['Total Needed in District'])
            expected = int(expected_county_share_of_signatures * total_needed)
            if total_needed < total_valid_in_district:
                expected = min(expected, valid_signatures)
            props['Expected Signatures'] = int(expected)
            props['Expected Signatures Remaining'] = (
                max(expected - valid_signatures, 0)
                if total_needed > total_valid_in_district else 0)

            try:
                geom_mapping = mapping(geom)
            except Exception:
                continue

            county_district_features.append({
                'type': 'Feature',
                'geometry': geom_mapping,
                'properties': props
            })

    return county_district_features
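A short usage sketch, assuming the four input objects have already been loaded elsewhere: the function above returns a list of Feature dicts, so the natural next step is to wrap them in a FeatureCollection and write them out. Variable and file names here are assumptions, not part of the example.

import json

features = create_county_district_features(county_features, district_features,
                                            valid_county_districts,
                                            county_district_dem_votes)
collection = {'type': 'FeatureCollection', 'features': features}
with open('county_districts.geojson', 'w') as f:
    json.dump(collection, f)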