def traverse(id):
    """Serialize the 'docs' feature with the given id, attaching related features.

    Closure: relies on enclosing-scope names `request`, `srs`, `DOCUMENTS`,
    `traversal` and `_serialize` — TODO confirm against the enclosing view.

    For every (key, name) pair in DOCUMENTS, related features are looked up by
    `doc_id` in the layer with that keyname; when key == 'docs' and `traversal`
    is truthy the function recurses into related documents.

    Returns the serialized dict, or None when no feature matches *id*
    (previously this raised UnboundLocalError).

    NOTE: the parameter name `id` shadows the builtin but is kept for
    interface compatibility.
    """
    resource = VectorLayer.filter_by(keyname='docs').one()
    request.resource_permission(PERM_READ, resource)
    query = resource.feature_query()
    query.filter_by(id=id)
    query.limit(1)
    query.srs(srs)
    query.geom()
    # Fix: initialize so a missing feature yields None instead of
    # UnboundLocalError on the final return.
    result = None
    for doc in query():
        result = _serialize(doc)
        result['related'] = {}
        for key, name in DOCUMENTS:
            result['related'][key] = []
            resource = VectorLayer.filter_by(keyname=key).one()
            request.resource_permission(PERM_READ, resource)
            query = resource.feature_query()
            query.filter_by(doc_id=doc.id)
            query.srs(srs)
            query.geom()
            for reldoc in query():
                if key == 'docs' and traversal:
                    result['related'][key].append(traverse(reldoc.id))
                else:
                    result['related'][key].append(_serialize(reldoc))
    return result
def getschema(request):
    """Return a rendered map image (schema) of a forest-violation site.

    (Original Russian docstring: "Возвращаем схему участка лесонарушения".)

    Renders the 'cadastre' and 'docs' layers over the extent of the requested
    document feature and returns the composited picture as a PNG response.
    Python 2 code: `map()` returns a list, `filter(...)[0]` indexes a list,
    `StringIO` buffers bytes.
    """
    # Requested image size, e.g. "800,600" -> [800, 600].
    p_size = map(int, request.GET.get('size').split(','))
    # NOTE(review): request.GET.get('size') may be None -> AttributeError on
    # .split(); presumably callers always pass ?size= — verify.
    fid = request.matchdict.get('fid')
    if fid is None:
        return None
    resource = VectorLayer.filter_by(keyname='docs').one()
    request.resource_permission(PERM_READ, resource)
    query = resource.feature_query()
    query.filter_by(id=fid)
    query.limit(1)
    query.geom()
    query.box()
    # At most one feature due to limit(1); its bounding box defines the extent.
    for doc in query():
        extent = fix_aspect_ratio(doc.box.bounds, p_size)
        extent = scale_extent(extent, 3)
    # NOTE(review): if the fid matches no feature, `extent` stays unbound and
    # render_extent below raises NameError — confirm upstream guarantees.
    # cadastre — forest division; docs — forest-violation territories.
    resstyles = []
    for key in ('cadastre', 'docs'):
        resource = VectorLayer.filter_by(keyname=key).one()
        request.resource_permission(PERM_READ, resource)
        # First renderable style child of the layer.
        resstyle = filter(lambda r: IRenderableStyle.providedBy(r), resource.children)[0]
        resstyles.append(resstyle)
    img = None
    for style in resstyles:
        request.resource_permission(PERM_READ, style)
        # Filter the documents layer so only the requested feature is drawn.
        if style.feature_layer.keyname == 'docs':
            cond = {'id': fid}
        else:
            cond = None
        req = style.render_request(style.srs, cond)
        rimg = req.render_extent(extent, p_size)
        # Composite successive layers bottom-up.
        img = rimg if img is None else Image.alpha_composite(img, rimg)
    buf = StringIO()
    img.save(buf, 'png')
    buf.seek(0)
    return Response(body_file=buf, content_type=b'image/png')
def get_districts_from_resource(as_dict=False, sort=False):
    """Read district records from the DISTRICT_KEYNAME vector layer.

    Returns a list of dicts with 'name', 'id' and 'parent_id' keys, an
    {id: name} mapping when as_dict is True, or [] when the layer is
    missing or lacks the required fields. With sort=True the list is
    ordered by name before any dict conversion.
    """
    layer = VectorLayer.filter_by(keyname=DISTRICT_KEYNAME).first()
    if not layer:
        return []
    # All required attribute fields must be present on the layer.
    present = {field.keyname for field in layer.fields}
    required = (DISTRICT_ID_FIELD, DISTRICT_NAME_FIELD, DISTRICT_PARENT_ID_FIELD)
    if any(name not in present for name in required):
        return []
    items = [
        {
            'name': feat.fields[DISTRICT_NAME_FIELD],
            'id': feat.fields[DISTRICT_ID_FIELD],
            'parent_id': feat.fields[DISTRICT_PARENT_ID_FIELD],
        }
        for feat in layer.feature_query()()
    ]
    if sort:
        items.sort(key=lambda item: item['name'])
    if as_dict:
        return {item['id']: item['name'] for item in items}
    return items
def get_regions_from_resource(as_dict=False, sort=False):
    """Read region records from the REGIONS_KEYNAME dictionary layer.

    Returns a list of {'name', 'id'} dicts, an {id: name} mapping when
    as_dict is True, or [] when the layer is missing or lacks the
    required fields. With sort=True the list is ordered by name.
    """
    layer = VectorLayer.filter_by(keyname=REGIONS_KEYNAME).first()
    if not layer:
        return []
    # Verify the expected attribute fields exist before querying.
    present = {field.keyname for field in layer.fields}
    if REGIONS_ID_FIELD not in present or REGIONS_NAME_FIELD not in present:
        return []
    items = [
        {'name': feat.fields[REGIONS_NAME_FIELD],
         'id': feat.fields[REGIONS_ID_FIELD]}
        for feat in layer.feature_query()()
    ]
    if sort:
        items.sort(key=lambda item: item['name'])
    if as_dict:
        return {item['id']: item['name'] for item in items}
    return items
def test_geom_field(ngw_resource_group):
    """A source field clashing with the geometry column must be rejected
    unless fix_errors=SAFE renames it (to 'geom_1')."""
    res = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='test-geom-fld',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=uuid4().hex,
    ).persist()
    # Keep the dataset object alive while the layer is in use.
    ds = ogr.Open(os.path.join(DATA_PATH, 'geom-fld.geojson'))
    layer = ds.GetLayer(0)
    # Without error fixing the conflicting field name is a hard error.
    with pytest.raises(ValidationError):
        res.setup_from_ogr(layer)
    # SAFE fixing renames the field instead of failing.
    res.setup_from_ogr(layer, fix_errors=ERROR_FIX.SAFE)
    res.load_from_ogr(layer)
    DBSession.flush()
    loaded = res.feature_query()().one()
    assert loaded.id == 1
    assert list(loaded.fields.keys()) == ['geom_1']
def getdoc_diff(request):
    """Return the change history of documents together with related data.

    (Original Russian docstring: "Возвращаем историю изменений документов и
    связанные данные".)

    Added and changed entries are expanded via _getdoc; deleted entries are
    passed through exactly as produced by _diff.
    """
    docs = VectorLayer.filter_by(keyname='docs').one()
    diff = _diff(request, docs.id)
    result = {
        state: [_getdoc(request, feat['id']) for feat in diff[state]]
        for state in ('added', 'changed')
    }
    result['deleted'] = diff['deleted']
    return Response(
        json.dumps(result), content_type=b'application/json')
def test_from_ogr(txn, data):
    """Smoke test: a vector layer can be set up and loaded from an OGR file."""
    # Keep the datasource referenced so the layer handle stays valid.
    dsource = ogr.Open(os.path.join(DATA_PATH, data))
    ogrlayer = dsource.GetLayer(0)
    res = VectorLayer(
        parent_id=0,
        display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=unicode(uuid4().hex),
    )
    res.persist()
    # Identity recode callback — source data is already in the right encoding.
    res.setup_from_ogr(ogrlayer, lambda x: x)
    res.load_from_ogr(ogrlayer, lambda x: x)
    DBSession.flush()
def resource(ngw_txn, ngw_resource_group):
    """Fixture: a VectorLayer imported from the zipped GeoJSON point layer."""
    zip_member = os.path.join(DATA_PATH, 'geojson-point.zip/layer.geojson')
    # /vsizip/ lets OGR read directly out of the zip archive.
    dsource = ogr.Open('/vsizip/' + zip_member)
    ogrlayer = dsource.GetLayer(0)
    res = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )
    res.persist()
    res.setup_from_ogr(ogrlayer, lambda x: x)
    res.load_from_ogr(ogrlayer, lambda x: x)
    DBSession.flush()
    return res
def create_feature_layer(ogrlayer, parent_id, **kwargs):
    """Yield a persisted VectorLayer initialized and loaded from *ogrlayer*.

    Runs inside a transaction; extra keyword arguments are accepted for
    interface compatibility but unused.
    """
    with transaction.manager:
        res = VectorLayer(
            parent_id=parent_id,
            display_name='Feature layer (vector)',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            tbl_uuid=uuid4().hex,
        ).persist()
        res.setup_from_ogr(ogrlayer)
        res.load_from_ogr(ogrlayer)
        DBSession.flush()
        yield res
def service(ngw_resource_group):
    """Fixture: a WFS service over a freshly imported vector layer.

    Yields the service id, then removes both resources on teardown.
    """
    with transaction.manager:
        vector = VectorLayer(
            parent_id=ngw_resource_group,
            display_name='test_vector_layer',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            tbl_uuid=six.text_type(uuid4().hex),
        ).persist()
        source = type_geojson_dataset()
        ogrlayer = source.GetLayer(0)
        vector.setup_from_ogr(ogrlayer, lambda x: x)
        vector.load_from_ogr(ogrlayer, lambda x: x)
        DBSession.flush()
        wfs = WFSService(
            parent_id=ngw_resource_group,
            display_name='test_wfsserver_service',
            owner_user=User.by_keyname('administrator'),
        ).persist()
        wfs.layers.append(WFSLayer(
            resource=vector,
            keyname='test',
            display_name='test',
            maxfeatures=1000,
        ))
        DBSession.flush()
        # Detach so the objects can be used after the transaction closes.
        DBSession.expunge(vector)
        DBSession.expunge(wfs)
    yield wfs.id
    with transaction.manager:
        DBSession.delete(VectorLayer.filter_by(id=vector.id).one())
        DBSession.delete(WFSService.filter_by(id=wfs.id).one())
def test_from_fields(txn):
    """A point layer can be created from an explicit field definition list."""
    res = VectorLayer(
        parent_id=0,
        display_name='from_fields',
        owner_user=User.by_keyname('administrator'),
        geometry_type='POINT',
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=unicode(uuid4().hex),
    )
    # One field per supported datatype, keyed by the type's lowercase name.
    field_types = (
        ('integer', FIELD_TYPE.INTEGER),
        ('bigint', FIELD_TYPE.BIGINT),
        ('real', FIELD_TYPE.REAL),
        ('string', FIELD_TYPE.STRING),
        ('date', FIELD_TYPE.DATE),
        ('time', FIELD_TYPE.TIME),
        ('datetime', FIELD_TYPE.DATETIME),
    )
    res.setup_from_fields(
        [dict(keyname=key, datatype=dtype) for key, dtype in field_types])
    res.persist()
    DBSession.flush()
def load_shape_dicts(cls, force=False):
    """Load (or force-reload) dictionary shape layers from bundled zip archives.

    Creates/updates three dictionary VectorLayers — federal districts,
    regions and districts — under the DICTIONARY_GROUP_KEYNAME resource
    group, importing each from an ESRI Shapefile and setting Russian
    display names for the attribute fields.

    :param force: recreate an existing dictionary layer instead of skipping it.
    :raises Exception: when the dictionaries group resource is missing.
    :raises VE: on any OGR validation failure (unreadable file, wrong driver,
        layer count != 1, missing SRS, feature without geometry).

    NOTE(review): Python 2 code (print statements, dict.iteritems) — keep
    consistent with the rest of this module.
    """
    print 'Loading shapes...'
    # keyname -> (zip file, layer display name, {field keyname: display name}).
    shape_dicts = {
        FEDERAL_KEYNAME: ('federal_districts.zip', 'Федеральные округа', {
            FEDERAL_ID_FIELD: 'Идентификатор',
            FEDERAL_NAME_FIELD: 'Наименование',
            FEDERAL_SHORT_NAME_FIELD: 'Короткое название',
        }),
        REGIONS_KEYNAME: ('regions.zip', 'Регионы РФ', {
            REGIONS_ID_FIELD: 'Идентификатор',
            REGIONS_NAME_FIELD: 'Наименование',
        }),
        DISTRICT_KEYNAME: ('districts.zip', 'Районы', {
            DISTRICT_ID_FIELD: 'Идентификатор',
            DISTRICT_NAME_FIELD: 'Наименование',
            DISTRICT_PARENT_ID_FIELD: 'Ид. родительского региона',
            DISTRICT_SHORT_NAME_FIELD: 'Короткое название'
        }),
    }
    # get principals
    adminusr = User.filter_by(keyname='administrator').one()
    admingrp = Group.filter_by(keyname='administrators').one()
    # NOTE(review): 'everyone' is fetched but never used below — confirm
    # before removing.
    everyone = User.filter_by(keyname='everyone').one()
    # get root resource
    try:
        root_res = ResourceGroup.filter_by(keyname=DICTIONARY_GROUP_KEYNAME).one()
    except NoResultFound:
        raise Exception('Need dictionaries group resource!')
    # create shapes
    for (dict_keyname, (dict_file, dict_display_name, dict_fields)) in shape_dicts.iteritems():
        try:
            vec_res = VectorLayer.filter_by(keyname=dict_keyname).one()
            print ' Dictionary "%s" already exists' % dict_keyname
            if force:
                print ' Force recreate "%s"' % dict_keyname
                # try to drop old table
                try:
                    VectorLayerUpdater.drop_vector_layer_table(vec_res.tbl_uuid)
                except:
                    # NOTE(review): bare except silently ignores drop
                    # failures — presumably deliberate best-effort; confirm.
                    pass
            else:
                continue
        except NoResultFound:
            # Layer does not exist yet — create it with admin ownership.
            vec_res = VectorLayer(owner_user=adminusr,
                                  display_name=dict_display_name,
                                  keyname=dict_keyname,
                                  parent=root_res)
            vec_res.acl.append(ACLRule(
                principal=admingrp,
                action='allow'))
        vec_res.srs = SRS.filter_by(id=3857).one()
        datafile = path.join(BASE_PATH, dict_file)
        encoding = 'utf-8'
        iszip = zipfile.is_zipfile(datafile)
        try:
            # open ogr ds (extract to a temp dir when zipped)
            if iszip:
                ogrfn = tempfile.mkdtemp()
                zipfile.ZipFile(datafile, 'r').extractall(path=ogrfn)
            else:
                ogrfn = datafile
            with _set_encoding(encoding) as sdecode:
                ogrds = ogr.Open(ogrfn)
                recode = sdecode
            if ogrds is None:
                raise VE("Библиотеке OGR не удалось открыть файл")
            drivername = ogrds.GetDriver().GetName()
            if drivername not in ('ESRI Shapefile', ):
                raise VE("Неподдерживаемый драйвер OGR: %s" % drivername)
            # check datasource: exactly one layer expected
            if ogrds.GetLayerCount() < 1:
                raise VE("Набор данных не содержит слоёв.")
            if ogrds.GetLayerCount() > 1:
                raise VE("Набор данных содержит более одного слоя.")
            # open ogrlayer
            ogrlayer = ogrds.GetLayer(0)
            if ogrlayer is None:
                raise VE("Не удалось открыть слой.")
            # check layer: SRS must be set and every feature needs geometry
            if ogrlayer.GetSpatialRef() is None:
                raise VE("Не указана система координат слоя.")
            feat = ogrlayer.GetNextFeature()
            while feat:
                geom = feat.GetGeometryRef()
                if geom is None:
                    raise VE("Объект %d не содержит геометрии." % feat.GetFID())
                feat = ogrlayer.GetNextFeature()
            ogrlayer.ResetReading()
            # Fresh backing table for the (re)imported data.
            vec_res.tbl_uuid = uuid.uuid4().hex
            with DBSession.no_autoflush:
                vec_res.setup_from_ogr(ogrlayer, recode)
                vec_res.load_from_ogr(ogrlayer, recode)
        finally:
            # Clean up the temporary extraction directory.
            if iszip:
                shutil.rmtree(ogrfn)
        # display names for fields
        for field_keyname, field_dispname in dict_fields.iteritems():
            VectorLayerUpdater.change_field_display_name(vec_res, field_keyname, field_dispname)
        vec_res.persist()
def vector_layer_id(ngw_resource_group):
    """Fixture: a vector layer with two polygons (west / east); yields its id."""
    # Two triangles/quads in EPSG:3857 on opposite sides of the map.
    geojson = {
        "type": "FeatureCollection",
        "name": "polygon_extent",
        "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::3857"}},
        "features": [
            {
                "type": "Feature",
                "properties": {"name": "west"},
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [[
                        [5542180, 8799167],
                        [6191082, 7551279],
                        [4668659, 7126998],
                        [5542180, 8799167],
                    ]],
                },
            },
            {
                "type": "Feature",
                "properties": {"name": "east"},
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [[
                        [15100999, 10396463],
                        [16498633, 10546209],
                        [16673337, 9223449],
                        [15175872, 8948913],
                        [15100999, 10396463],
                    ]],
                },
            },
        ],
    }
    with transaction.manager:
        res = VectorLayer(
            parent_id=ngw_resource_group,
            display_name='vector_layer',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            tbl_uuid=six.text_type(uuid4().hex),
        ).persist()
        dsource = ogr.Open(json.dumps(geojson))
        ogrlayer = dsource.GetLayer(0)
        res.setup_from_ogr(ogrlayer, lambda x: x)
        res.load_from_ogr(ogrlayer, lambda x: x)
        DBSession.flush()
        DBSession.expunge(res)
    yield res.id
    with transaction.manager:
        DBSession.delete(VectorLayer.filter_by(id=res.id).one())
def test_error_limit(ngw_resource_group):
    """Loading must abort with ValidationError once error_limit geometry
    errors accumulate, and succeed with skip_errors=True, importing only
    the valid features."""
    res = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='error-limit',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=uuid4().hex,
    )
    res.persist()
    # In-memory layer: error_limit features without geometry, then a few valid ones.
    ds = ogr.GetDriverByName('Memory').CreateDataSource('')
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)
    layer = ds.CreateLayer('layer_with_errors', srs=srs, geom_type=ogr.wkbPoint)
    defn = layer.GetLayerDefn()
    some = 3
    for index in range(error_limit + some):
        feature = ogr.Feature(defn)
        geometry = None if index < error_limit else ogr.CreateGeometryFromWkt('POINT (0 0)')
        feature.SetGeometry(geometry)
        layer.CreateFeature(feature)
    res.setup_from_ogr(layer)
    opts = dict(fix_errors=ERROR_FIX.NONE, skip_other_geometry_types=False)
    # Without skipping, the accumulated errors must raise with details.
    with pytest.raises(ValidationError) as excinfo:
        res.load_from_ogr(layer, **opts, skip_errors=False)
    assert excinfo.value.detail is not None
    # With skipping, only the valid features make it into the table.
    res.load_from_ogr(layer, **opts, skip_errors=True)
    DBSession.flush()
    assert res.feature_query()().total_count == some
def service(ngw_resource_group):
    """Fixture: a WFS service exposing two layers ('type' and 'pointz').

    Yields the service id; teardown deletes all three resources.
    """
    with transaction.manager:
        vl_type = VectorLayer(
            parent_id=ngw_resource_group,
            display_name='type',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            tbl_uuid=six.text_type(uuid4().hex),
        ).persist()
        source = type_geojson_dataset('type.geojson')
        ogrlayer = source.GetLayer(0)
        vl_type.setup_from_ogr(ogrlayer, lambda x: x)
        vl_type.load_from_ogr(ogrlayer, lambda x: x)
        DBSession.flush()
        # NOTE: GDAL doesn't support time fields in GML / WFS. It completely
        # breaks XSD schema parsing. Delete the time field to pass tests.
        DBSession.delete(vl_type.field_by_keyname('time'))
        vl_pointz = VectorLayer(
            parent_id=ngw_resource_group,
            display_name='pointz',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            tbl_uuid=six.text_type(uuid4().hex),
        ).persist()
        source = type_geojson_dataset('pointz.geojson')
        ogrlayer = source.GetLayer(0)
        vl_pointz.setup_from_ogr(ogrlayer, lambda x: x)
        vl_pointz.load_from_ogr(ogrlayer, lambda x: x)
        DBSession.flush()
        wfs = WFSService(
            parent_id=ngw_resource_group,
            display_name='test_wfsserver_service',
            owner_user=User.by_keyname('administrator'),
        ).persist()
        wfs.layers.extend((
            WFSLayer(resource=vl_type, keyname='type',
                     display_name='type', maxfeatures=1000),
            WFSLayer(resource=vl_pointz, keyname='pointz',
                     display_name='pointz', maxfeatures=1000),
        ))
        DBSession.flush()
        # Detach so ids remain usable after the transaction ends.
        DBSession.expunge(vl_type)
        DBSession.expunge(vl_pointz)
        DBSession.expunge(wfs)
    yield wfs.id
    with transaction.manager:
        DBSession.delete(VectorLayer.filter_by(id=vl_type.id).one())
        DBSession.delete(VectorLayer.filter_by(id=vl_pointz.id).one())
        DBSession.delete(WFSService.filter_by(id=wfs.id).one())
def service_id(ngw_resource_group):
    """Fixture: a WFS service over a two-point GeoJSON layer; yields its id."""
    # Two point features with heterogeneous properties ('name' vs 'price').
    geojson = {
        'type': 'FeatureCollection',
        'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:EPSG::3857'}},
        'features': [
            {
                'type': 'Feature',
                'properties': {'name': 'feature1'},
                'geometry': {'type': 'Point', 'coordinates': [0, 0]},
            },
            {
                'type': 'Feature',
                'properties': {'price': -1},
                'geometry': {'type': 'Point', 'coordinates': [10, 10]},
            },
        ],
    }
    with transaction.manager:
        vector = VectorLayer(
            parent_id=ngw_resource_group,
            display_name='test_vector_layer',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            tbl_uuid=six.text_type(uuid4().hex),
        ).persist()
        dsource = ogr.Open(json.dumps(geojson))
        ogrlayer = dsource.GetLayer(0)
        vector.setup_from_ogr(ogrlayer, lambda x: x)
        vector.load_from_ogr(ogrlayer, lambda x: x)
        DBSession.flush()
        wfs = WFSService(
            parent_id=ngw_resource_group,
            display_name='test_wfsserver_service',
            owner_user=User.by_keyname('administrator'),
        ).persist()
        wfs.layers.append(WFSLayer(
            resource=vector,
            keyname='test',
            display_name='test',
            maxfeatures=1000,
        ))
        DBSession.flush()
        DBSession.expunge(vector)
        DBSession.expunge(wfs)
    yield wfs.id
    with transaction.manager:
        DBSession.delete(VectorLayer.filter_by(id=vector.id).one())
        DBSession.delete(WFSService.filter_by(id=wfs.id).one())
def test_id_field(data, ngw_resource_group):
    """A source 'id' attribute conflicting with the FID must be rejected,
    both with and without FID_SOURCE.FIELD, unless SAFE fixes rename it
    (to 'id_1')."""
    res = VectorLayer(
        parent_id=ngw_resource_group,
        display_name=f'test-{data}',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=uuid4().hex,
    ).persist()
    # Keep the dataset referenced while the layer handle is in use.
    ds = ogr.Open(os.path.join(DATA_PATH, f'{data}.geojson'))
    layer = ds.GetLayer(0)
    # Plain setup fails on the conflicting field.
    with pytest.raises(ValidationError):
        res.setup_from_ogr(layer)
    # So does using the field as FID source without error fixing.
    fid_params = dict(fid_source=FID_SOURCE.FIELD, fid_field=['id'])
    with pytest.raises(ValidationError):
        res.setup_from_ogr(layer, fid_params=fid_params)
    # SAFE fixing renames the offending field instead of failing.
    res.setup_from_ogr(layer, fix_errors=ERROR_FIX.SAFE, fid_params=fid_params)
    res.load_from_ogr(layer)
    DBSession.flush()
    loaded = res.feature_query()().one()
    assert loaded.id == 1
    assert list(loaded.fields.keys()) == ['id_1']