def test_from_ogr(data, ngw_resource_group, ngw_txn):
    """Import a zipped OGR dataset and verify the loaded feature's fields."""
    source_path = os.path.join(DATA_PATH, data)
    ogr_ds = ogr.Open('/vsizip/' + source_path)
    ogr_layer = ogr_ds.GetLayer(0)

    layer = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )
    layer.persist()

    layer.setup_from_ogr(ogr_layer, lambda x: x)
    layer.load_from_ogr(ogr_layer, lambda x: x)
    DBSession.flush()

    features = list(layer.feature_query()())
    assert len(features) == 1

    feature = features[0]
    assert feature.id == 1

    fields = feature.fields
    assert fields['int'] == -1
    # TODO: Date, time and datetime tests fails on shapefile
    # assert fields['date'] == date(2001, 1, 1)
    # assert fields['time'] == time(23, 59, 59)
    # assert fields['datetime'] == datetime(2001, 1, 1, 23, 59, 0)
    assert fields['string'] == "Foo bar"
    assert fields['unicode'] == 'Значимость этих проблем настолько очевидна, что реализация намеченных плановых заданий требуют определения и уточнения.'  # NOQA: E501
def test_fid(fid_source, fid_field, id_expect, ngw_resource_group, ngw_txn):
    """Verify that feature IDs honor the configured FID source and field."""
    geojson = Path(__file__).parent / 'data' / 'type.geojson'
    ds = ogr.Open(str(geojson))
    assert ds is not None, gdal.GetLastErrorMsg()
    ogr_layer = ds.GetLayer(0)
    assert ogr_layer is not None, gdal.GetLastErrorMsg()

    layer = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='test_fid',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=uuid4().hex,
    )
    layer.persist()

    layer.setup_from_ogr(ogr_layer, fid_params=dict(
        fid_source=fid_source, fid_field=fid_field))
    layer.load_from_ogr(ogr_layer)
    DBSession.flush()

    # Exactly one feature must carry the expected ID.
    query = layer.feature_query()
    query.filter_by(id=id_expect)
    assert query().total_count == 1
def test_from_fields(ngw_resource_group, ngw_txn):
    """Create an empty POINT layer from explicit field definitions."""
    layer = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='from_fields',
        owner_user=User.by_keyname('administrator'),
        geometry_type='POINT',
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )

    field_defs = []
    for keyname, datatype in (
        ('integer', FIELD_TYPE.INTEGER),
        ('bigint', FIELD_TYPE.BIGINT),
        ('real', FIELD_TYPE.REAL),
        ('string', FIELD_TYPE.STRING),
        ('date', FIELD_TYPE.DATE),
        ('time', FIELD_TYPE.TIME),
        ('datetime', FIELD_TYPE.DATETIME),
    ):
        field = dict(keyname=keyname, datatype=datatype)
        if keyname == 'string':
            # The string field doubles as the feature label field.
            field['label_field'] = True
        field_defs.append(field)

    layer.setup_from_fields(field_defs)
    layer.persist()

    assert layer.feature_label_field.keyname == 'string'
    DBSession.flush()
def test_type_geojson(ngw_resource_group, ngw_txn):
    """Import type.geojson and compare stored field values against the OGR source."""
    src = Path(__file__).parent / 'data' / 'type.geojson'
    ds = ogr.Open(str(src))
    assert ds is not None, gdal.GetLastErrorMsg()
    ogr_layer = ds.GetLayer(0)
    assert ogr_layer is not None, gdal.GetLastErrorMsg()

    layer = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )
    layer.persist()

    layer.setup_from_ogr(ogr_layer, lambda x: x)
    layer.load_from_ogr(ogr_layer, lambda x: x)
    ogr_layer.ResetReading()
    DBSession.flush()

    def ogr_value(feature, name, kind):
        # Read field *name* from an OGR feature via the GetFieldAs<kind>
        # accessor; None for SQL NULL. NOTE(review): kind=None is only safe
        # for fields that are always NULL in the fixture data.
        index = feature.GetFieldIndex(name)
        if feature.IsFieldNull(index):
            return None
        value = getattr(feature, 'GetFieldAs' + kind)(index)
        if kind in ('Date', 'Time', 'DateTime'):
            # OGR returns float components; normalize to ints.
            value = [int(part) for part in value]
        elif kind == 'String' and six.PY2:
            value = value.decode('utf-8')
        return value

    # Walk stored features and the source layer in lockstep.
    for stored, reference in zip(layer.feature_query()(), ogr_layer):
        fields = stored.fields
        assert fields['null'] == ogr_value(reference, 'null', None)
        assert fields['int'] == ogr_value(reference, 'int', 'Integer')
        assert fields['real'] == ogr_value(reference, 'real', 'Double')
        assert fields['date'] == date(
            *ogr_value(reference, 'date', 'DateTime')[0:3])
        assert fields['time'] == time(
            *ogr_value(reference, 'time', 'DateTime')[3:6])
        assert fields['datetime'] == datetime(
            *ogr_value(reference, 'datetime', 'DateTime')[0:6])
        assert fields['string'] == ogr_value(reference, 'string', 'String')
        assert fields['unicode'] == ogr_value(reference, 'unicode', 'String')
def test_from_ogr(txn, data):
    """Load an OGR dataset into a new vector layer and flush the session.

    Fix: the Python 2 ``unicode`` builtin does not exist on Python 3 and
    raises ``NameError`` there; use ``six.text_type`` instead, which is what
    the sibling tests in this file already do.
    """
    src = os.path.join(DATA_PATH, data)
    dsource = ogr.Open(src)
    layer = dsource.GetLayer(0)
    res = VectorLayer(
        parent_id=0,
        display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )
    res.persist()
    res.setup_from_ogr(layer, lambda x: x)
    res.load_from_ogr(layer, lambda x: x)
    DBSession.flush()
def resource(ngw_txn, ngw_resource_group):
    """Fixture: a vector layer loaded from the zipped GeoJSON point dataset."""
    source = os.path.join(DATA_PATH, 'geojson-point.zip/layer.geojson')
    ds = ogr.Open('/vsizip/' + source)
    ogr_layer = ds.GetLayer(0)

    layer = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )
    layer.persist()

    layer.setup_from_ogr(ogr_layer, lambda x: x)
    layer.load_from_ogr(ogr_layer, lambda x: x)
    DBSession.flush()
    return layer
def test_error_limit(ngw_resource_group):
    """Loading aborts after ``error_limit`` bad features unless errors are skipped."""
    layer_res = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='error-limit',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=uuid4().hex,
    )
    layer_res.persist()

    # Build an in-memory layer with error_limit broken (geometry-less)
    # features followed by `extra` valid point features.
    mem_ds = ogr.GetDriverByName('Memory').CreateDataSource('')
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromEPSG(4326)
    mem_layer = mem_ds.CreateLayer(
        'layer_with_errors', srs=spatial_ref, geom_type=ogr.wkbPoint)
    layer_defn = mem_layer.GetLayerDefn()

    extra = 3
    for index in range(error_limit + extra):
        feature = ogr.Feature(layer_defn)
        geometry = None if index < error_limit \
            else ogr.CreateGeometryFromWkt('POINT (0 0)')
        feature.SetGeometry(geometry)
        mem_layer.CreateFeature(feature)

    layer_res.setup_from_ogr(mem_layer)
    load_opts = dict(fix_errors=ERROR_FIX.NONE, skip_other_geometry_types=False)

    # Without skip_errors the load must fail with a detailed error.
    with pytest.raises(ValidationError) as excinfo:
        layer_res.load_from_ogr(mem_layer, **load_opts, skip_errors=False)
    assert excinfo.value.detail is not None

    # With skip_errors only the valid features survive.
    layer_res.load_from_ogr(mem_layer, **load_opts, skip_errors=True)
    DBSession.flush()
    assert layer_res.feature_query()().total_count == extra
def test_from_fields(txn):
    """Create an empty POINT layer from explicit field definitions.

    Fix: the Python 2 ``unicode`` builtin does not exist on Python 3 and
    raises ``NameError`` there; use ``six.text_type`` instead, which is what
    the sibling tests in this file already do.
    """
    res = VectorLayer(
        parent_id=0,
        display_name='from_fields',
        owner_user=User.by_keyname('administrator'),
        geometry_type='POINT',
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )
    res.setup_from_fields([
        dict(keyname='integer', datatype=FIELD_TYPE.INTEGER),
        dict(keyname='bigint', datatype=FIELD_TYPE.BIGINT),
        dict(keyname='real', datatype=FIELD_TYPE.REAL),
        dict(keyname='string', datatype=FIELD_TYPE.STRING),
        dict(keyname='date', datatype=FIELD_TYPE.DATE),
        dict(keyname='time', datatype=FIELD_TYPE.TIME),
        dict(keyname='datetime', datatype=FIELD_TYPE.DATETIME),
    ])
    res.persist()
    DBSession.flush()
def load_shape_dicts(cls, force=False):
    """Create (or force-recreate) shapefile-backed dictionary vector layers.

    Python 2 code (``print`` statements, ``iteritems``). For each entry in
    ``shape_dicts`` it finds or creates a ``VectorLayer`` under the
    dictionaries resource group, loads the shapefile via OGR and sets field
    display names.

    :param force: drop and reload a dictionary layer even if it exists.
    :raises Exception: when the dictionaries resource group is missing.
    :raises VE: on any OGR validation failure (unreadable file, wrong
        driver, layer count != 1, missing SRS, feature without geometry).

    NOTE(review): the source was collapsed to one line; statement nesting
    below is reconstructed and should be confirmed against history.
    """
    print 'Loading shapes...'
    # keyname -> (zip file name, display name, {field keyname: display name})
    shape_dicts = {
        FEDERAL_KEYNAME: ('federal_districts.zip', 'Федеральные округа', {
            FEDERAL_ID_FIELD: 'Идентификатор',
            FEDERAL_NAME_FIELD: 'Наименование',
            FEDERAL_SHORT_NAME_FIELD: 'Короткое название',
        }),
        REGIONS_KEYNAME: ('regions.zip', 'Регионы РФ', {
            REGIONS_ID_FIELD: 'Идентификатор',
            REGIONS_NAME_FIELD: 'Наименование',
        }),
        DISTRICT_KEYNAME: ('districts.zip', 'Районы', {
            DISTRICT_ID_FIELD: 'Идентификатор',
            DISTRICT_NAME_FIELD: 'Наименование',
            DISTRICT_PARENT_ID_FIELD: 'Ид. родительского региона',
            DISTRICT_SHORT_NAME_FIELD: 'Короткое название'
        }),
    }

    # get principals (NOTE(review): ``everyone`` is fetched but never used)
    adminusr = User.filter_by(keyname='administrator').one()
    admingrp = Group.filter_by(keyname='administrators').one()
    everyone = User.filter_by(keyname='everyone').one()

    # get root resource group that holds all dictionary layers
    try:
        root_res = ResourceGroup.filter_by(keyname=DICTIONARY_GROUP_KEYNAME).one()
    except NoResultFound:
        raise Exception('Need dictionaries group resource!')

    # create shapes
    for (dict_keyname, (dict_file, dict_display_name, dict_fields)) in shape_dicts.iteritems():
        try:
            vec_res = VectorLayer.filter_by(keyname=dict_keyname).one()
            print ' Dictionary "%s" already exists' % dict_keyname
            if force:
                print ' Force recreate "%s"' % dict_keyname
                # try to drop old table; NOTE(review): the bare ``except``
                # swallows all errors — deliberate best-effort cleanup.
                try:
                    VectorLayerUpdater.drop_vector_layer_table(vec_res.tbl_uuid)
                except:
                    pass
            else:
                # Existing layer and no force flag: nothing to do.
                continue
        except NoResultFound:
            # Layer does not exist yet: create it and grant admins access.
            vec_res = VectorLayer(owner_user=adminusr,
                                  display_name=dict_display_name,
                                  keyname=dict_keyname,
                                  parent=root_res)
            vec_res.acl.append(ACLRule(
                principal=admingrp,
                action='allow'))

        vec_res.srs = SRS.filter_by(id=3857).one()
        datafile = path.join(BASE_PATH, dict_file)
        encoding = 'utf-8'
        iszip = zipfile.is_zipfile(datafile)
        try:
            # open ogr ds: zip archives are extracted to a temp dir first
            if iszip:
                ogrfn = tempfile.mkdtemp()
                zipfile.ZipFile(datafile, 'r').extractall(path=ogrfn)
            else:
                ogrfn = datafile
            with _set_encoding(encoding) as sdecode:
                ogrds = ogr.Open(ogrfn)
                # recode converts attribute strings from the source encoding
                recode = sdecode
            if ogrds is None:
                raise VE("Библиотеке OGR не удалось открыть файл")
            drivername = ogrds.GetDriver().GetName()
            if drivername not in ('ESRI Shapefile', ):
                raise VE("Неподдерживаемый драйвер OGR: %s" % drivername)
            # check datasource: exactly one layer is expected
            if ogrds.GetLayerCount() < 1:
                raise VE("Набор данных не содержит слоёв.")
            if ogrds.GetLayerCount() > 1:
                raise VE("Набор данных содержит более одного слоя.")
            # open ogrlayer
            ogrlayer = ogrds.GetLayer(0)
            if ogrlayer is None:
                raise VE("Не удалось открыть слой.")
            # check layer: SRS must be defined and every feature needs geometry
            if ogrlayer.GetSpatialRef() is None:
                raise VE("Не указана система координат слоя.")
            feat = ogrlayer.GetNextFeature()
            while feat:
                geom = feat.GetGeometryRef()
                if geom is None:
                    raise VE("Объект %d не содержит геометрии." % feat.GetFID())
                feat = ogrlayer.GetNextFeature()
            ogrlayer.ResetReading()
            vec_res.tbl_uuid = uuid.uuid4().hex
            # Defer flushes while the table and data are being created.
            with DBSession.no_autoflush:
                vec_res.setup_from_ogr(ogrlayer, recode)
                vec_res.load_from_ogr(ogrlayer, recode)
        finally:
            # Always remove the temporary extraction directory.
            if iszip:
                shutil.rmtree(ogrfn)
        # display names for fields
        for field_keyname, field_dispname in dict_fields.iteritems():
            VectorLayerUpdater.change_field_display_name(vec_res, field_keyname, field_dispname)
        vec_res.persist()