def test_fid(fid_source, fid_field, id_expect, ngw_resource_group, ngw_txn):
    src = Path(__file__).parent / 'data' / 'type.geojson'
    dataset = ogr.Open(str(src))
    assert dataset is not None, gdal.GetLastErrorMsg()
    layer = dataset.GetLayer(0)
    assert layer is not None, gdal.GetLastErrorMsg()

    res = VectorLayer(
        parent_id=ngw_resource_group, display_name='test_fid',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=uuid4().hex)
    res.persist()

    res.setup_from_ogr(layer, fid_params=dict(
        fid_source=fid_source, fid_field=fid_field))
    res.load_from_ogr(layer)

    DBSession.flush()

    query = res.feature_query()
    query.filter_by(id=id_expect)
    assert query().total_count == 1
def receive_events(request):
    json_body = request.json_body
    parsed_events_set = EventsSetSchema().loads(json_body)
    if len(parsed_events_set.errors.keys()) > 0:
        raise HTTPBadRequest()

    last_update_ts = int(parsed_events_set.data['stop'])

    events = []
    if 'data' in parsed_events_set.data:
        events = parsed_events_set.data['data']

    events_to_insert = []
    for event in events:
        existed = DBSession.query(
            exists().where(Event.event_id == event.event_id)).scalar()
        exists_in_inserted_list = any(
            ev.event_id == event.event_id for ev in events_to_insert)
        if not existed and not exists_in_inserted_list:
            events_to_insert.append(event)

    with transaction.manager:
        Meta.filter_by(key=LAST_UPDATE_KEY).update({'value': last_update_ts})
        for event in events_to_insert:
            DBSession.add(event)

    return response_ok(dict(
        count=len(events),
        start=parsed_events_set.data['start'],
        stop=parsed_events_set.data['stop'],
        lastUpdateTs=last_update_ts,
    ))
def test_storage(ngw_env, ngw_webtest_app, ngw_auth_administrator):
    reserve_storage = ngw_env.core.reserve_storage

    with freeze_time() as dt, transaction.manager:
        assert 'storage_reservations' not in DBSession().info

        reserve_storage('test_comp_1', TestKOD1, value_data_volume=100)
        reserve_storage('test_comp_1', TestKOD2, value_data_volume=20)
        reserve_storage('test_comp_2', TestKOD1, value_data_volume=400)
        reserve_storage('test_comp_2', TestKOD2, value_data_volume=80)

        assert 'storage_reservations' in DBSession().info
        assert len(DBSession().info['storage_reservations']) == 4

    # After the transaction commits, the pending reservations have been
    # written out and removed from the session info.
    assert len(DBSession().info['storage_reservations']) == 0

    cur = ngw_env.core.query_storage()
    assert cur[''] == dict(estimated=None, updated=dt(), data_volume=600)
    assert cur[TestKOD1.identity] == dict(estimated=None, updated=dt(), data_volume=500)

    res = ngw_webtest_app.get('/api/component/pyramid/storage', status=200)
    assert res.json['']['updated'] == dt().isoformat()
    assert res.json['']['data_volume'] == 600
    assert res.json[TestKOD1.identity]['data_volume'] == 500
    assert res.json[TestKOD2.identity]['data_volume'] == 100
def test_lookup(svg_lib, ngw_env, ngw_webtest_app):
    svg_marker_library = ngw_env.svg_marker_library
    file_storage = ngw_env.file_storage

    def lookup_marker(name):
        return svg_marker_library.lookup(name, library=svg_lib)

    def filename(fileobj):
        return file_storage.filename(fileobj, makedirs=False)

    marker1 = svg_lib.find_svg_marker('marker1')
    marker2 = svg_lib.find_svg_marker('marker2')

    assert lookup_marker('marker1') == filename(marker1.fileobj)
    assert lookup_marker('marker2') == filename(marker2.fileobj)
    assert lookup_marker('marker3') == path.join(FOLDER1, 'marker3.svg')

    svg_marker_library.cache.clear()
    with transaction.manager:
        DBSession.delete(marker1)

    assert lookup_marker('marker1') == path.join(FOLDER1, 'marker1.svg')
    assert lookup_marker('marker2') == filename(marker2.fileobj)

    svg_marker_library.cache.clear()
    with svg_marker_library.options.override({'path': [FOLDER1]}):
        assert lookup_marker('marker1') == path.join(FOLDER1, 'marker1.svg')
def test_from_fields(ngw_resource_group, ngw_txn):
    res = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='from_fields',
        owner_user=User.by_keyname('administrator'),
        geometry_type='POINT',
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )

    res.setup_from_fields([
        dict(keyname='integer', datatype=FIELD_TYPE.INTEGER),
        dict(keyname='bigint', datatype=FIELD_TYPE.BIGINT),
        dict(keyname='real', datatype=FIELD_TYPE.REAL),
        dict(keyname='string', datatype=FIELD_TYPE.STRING, label_field=True),
        dict(keyname='date', datatype=FIELD_TYPE.DATE),
        dict(keyname='time', datatype=FIELD_TYPE.TIME),
        dict(keyname='datetime', datatype=FIELD_TYPE.DATETIME),
    ])

    res.persist()

    assert res.feature_label_field.keyname == 'string'

    DBSession.flush()
def test_id_field(data, ngw_resource_group):
    res = VectorLayer(
        parent_id=ngw_resource_group, display_name=f'test-{data}',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=uuid4().hex,
    ).persist()

    src = os.path.join(DATA_PATH, f'{data}.geojson')
    ds = ogr.Open(src)
    layer = ds.GetLayer(0)

    with pytest.raises(ValidationError):
        res.setup_from_ogr(layer)

    fid_params = dict(fid_source=FID_SOURCE.FIELD, fid_field=['id'])
    with pytest.raises(ValidationError):
        res.setup_from_ogr(layer, fid_params=fid_params)

    res.setup_from_ogr(layer, fix_errors=ERROR_FIX.SAFE, fid_params=fid_params)
    res.load_from_ogr(layer)

    DBSession.flush()

    query = res.feature_query()
    feature = query().one()
    assert feature.id == 1
    assert list(feature.fields.keys()) == ['id_1']
def test_from_ogr(data, ngw_resource_group, ngw_txn):
    src = os.path.join(DATA_PATH, data)
    dsource = ogr.Open('/vsizip/' + src)
    layer = dsource.GetLayer(0)

    res = VectorLayer(
        parent_id=ngw_resource_group,
        display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    )
    res.persist()

    res.setup_from_ogr(layer, lambda x: x)
    res.load_from_ogr(layer, lambda x: x)

    DBSession.flush()

    features = list(res.feature_query()())
    assert len(features) == 1

    feature = features[0]
    assert feature.id == 1

    fields = feature.fields
    assert fields['int'] == -1
    # TODO: Date, time and datetime tests fail on shapefile
    # assert fields['date'] == date(2001, 1, 1)
    # assert fields['time'] == time(23, 59, 59)
    # assert fields['datetime'] == datetime(2001, 1, 1, 23, 59, 0)
    assert fields['string'] == "Foo bar"
    assert fields['unicode'] == 'Значимость этих проблем настолько очевидна, что реализация намеченных плановых заданий требуют определения и уточнения.'  # NOQA: E501
def test_size_limit(size_limit, width, height, band_count, datatype, ok,
                    ngw_env, ngw_resource_group):
    res = RasterLayer(
        parent_id=ngw_resource_group,
        display_name='test-raster-limit',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
    ).persist()

    driver = gdal.GetDriverByName('GTiff')
    proj = res.srs.to_osr()
    proj_wkt = proj.ExportToWkt()

    with ngw_env.raster_layer.options.override(dict(size_limit=size_limit)):
        with NamedTemporaryFile('w') as f:
            ds = driver.Create(f.name, width, height, band_count, datatype)
            ds.SetProjection(proj_wkt)
            ds.FlushCache()
            ds = None
            f.flush()

            if ok:
                res.load_file(f.name, ngw_env)
            else:
                with pytest.raises(ValidationError):
                    res.load_file(f.name, ngw_env)

    DBSession.expunge(res)
def test_create(filename, options, checks, ngw_resource_group, ngw_txn):
    obj = VectorLayer(
        parent_id=ngw_resource_group, display_name='vector_layer',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    ).persist()

    src = str(path / filename)
    ds = ogr.Open(src)
    layer = ds.GetLayer(0)

    geom_cast_params = dict(
        geometry_type=options.get('geometry_type'),
        is_multi=options.get('is_multi'),
        has_z=options.get('has_z'))

    def setup_and_load():
        setup_kwargs = dict()
        load_kwargs = dict()

        if 'skip_other_geometry_types' in options:
            setup_kwargs['skip_other_geometry_types'] = options['skip_other_geometry_types']
            load_kwargs['skip_other_geometry_types'] = options['skip_other_geometry_types']
        if 'fix_errors' in options:
            load_kwargs['fix_errors'] = options['fix_errors']
        if 'skip_errors' in options:
            load_kwargs['skip_errors'] = options['skip_errors']

        obj.setup_from_ogr(layer, geom_cast_params=geom_cast_params, **setup_kwargs)
        obj.load_from_ogr(layer, **load_kwargs)

    if 'exception' in checks:
        with pytest.raises(checks['exception']):
            setup_and_load()
        DBSession.expunge(obj)
    else:
        setup_and_load()
        DBSession.flush()

        if 'geometry_type' in checks:
            exp_geometry_type = checks['geometry_type']
            assert obj.geometry_type == exp_geometry_type, \
                "Expected geometry type was {} but actually got {}".format(
                    exp_geometry_type, obj.geometry_type)

        if 'feature_count' in checks:
            exp_feature_count = checks['feature_count']
            query = obj.feature_query()
            feature_count = query().total_count
            assert feature_count == exp_feature_count, \
                "Expected feature count was {} but got {}".format(
                    exp_feature_count, feature_count)
def test_same_display_name(ngw_txn, ngw_resource_group):
    margs = dict(
        parent_id=ngw_resource_group,
        display_name='display name',
        owner_user=User.by_keyname('administrator'))

    with pytest.raises(IntegrityError, match='"resource_parent_id_display_name_key"'):
        ResourceGroup(**margs).persist()
        ResourceGroup(**margs).persist()
        DBSession.flush()
def ngw_resource_group(ngw_env):
    with transaction.manager:
        res = ResourceGroup(
            parent_id=0,
            owner_user=User.by_keyname('administrator'),
            display_name='Test resource group ({})'.format(
                datetime.now().isoformat()),
        ).persist()

    yield res.id

    with transaction.manager:
        DBSession.delete(ResourceGroup.filter_by(id=res.id).one())
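# A minimal usage sketch (hypothetical test, not from the source): pytest
# injects the resource group id yielded by the ngw_resource_group fixture
# above, and the fixture's teardown removes the group afterwards. The
# assertion relies only on the display_name format set in the fixture.
def test_uses_resource_group(ngw_resource_group, ngw_txn):
    group = ResourceGroup.filter_by(id=ngw_resource_group).one()
    assert group.display_name.startswith('Test resource group')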
def webmap_with_items(ngw_resource_group, fixt_layers_styles):
    with transaction.manager:
        webmap = WebMap(
            parent_id=ngw_resource_group, display_name=__name__,
            owner_user=User.by_keyname('administrator'),
            root_item=WebMapItem(item_type='root'))
        webmap.from_dict(make_webmap_items(fixt_layers_styles))
        webmap.persist()

    yield webmap, fixt_layers_styles

    with transaction.manager:
        DBSession.delete(WebMap.filter_by(id=webmap.id).one())
def get_last_update(request):
    try:
        last_update = Meta.filter_by(key=LAST_UPDATE_KEY).one()
    except NoResultFound:
        from datetime import datetime, timedelta
        import calendar

        prevent_date = datetime.utcnow() - timedelta(days=1)
        prevent_date_utc_ts = calendar.timegm(prevent_date.utctimetuple())

        with transaction.manager:
            last_update = Meta(key=LAST_UPDATE_KEY, value=prevent_date_utc_ts)
            DBSession.add(last_update)

        last_update = Meta.filter_by(key=LAST_UPDATE_KEY).one()

    return int(last_update.value)
def frtc(ngw_resource_group, ngw_txn):
    vector_layer = VectorLayer(
        parent_id=ngw_resource_group, display_name='from_fields',
        owner_user=User.by_keyname('administrator'),
        geometry_type='POINT',
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex),
    ).persist()
    vector_layer.setup_from_fields([])

    result = ResourceTileCache(resource=vector_layer).persist()

    DBSession.flush()
    result.initialize()

    return result
def frtc(ngw_resource_group):
    with transaction.manager:
        layer = RasterLayer(
            parent_id=ngw_resource_group, display_name='test-render-layer',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            xsize=100, ysize=100,
            dtype='Byte', band_count=3,
        ).persist()

        style = RasterStyle(
            parent=layer, display_name='test-render-style',
            owner_user=User.by_keyname('administrator'),
        ).persist()

        result = ResourceTileCache(resource=style).persist()
        result.async_writing = True

        DBSession.flush()
        result.initialize()

    yield result

    with transaction.manager:
        DBSession.delete(ResourceTileCache.filter_by(
            resource_id=result.resource_id).one())
        DBSession.delete(RasterStyle.filter_by(id=style.id).one())
        DBSession.delete(RasterLayer.filter_by(id=layer.id).one())
def test_storage_estimate_all(ngw_env, ngw_resource_group, ngw_webtest_app,
                              ngw_auth_administrator):
    with transaction.manager:
        res = vector_layer('test-vector-layer', ngw_resource_group)
        DBSession.flush()
        DBSession.expunge(res)

    feature = dict(geom='POINT (0 0)')
    ngw_webtest_app.post_json('/api/resource/%d/feature/' % res.id, feature)

    content = 'some-content'
    resp = ngw_webtest_app.put('/api/component/file_upload/', content)
    file_upload = resp.json
    ngw_webtest_app.post_json(
        '/api/resource/%d/feature/%d/attachment/' % (res.id, 1),
        dict(file_upload=file_upload))

    ngw_webtest_app.post('/api/component/pyramid/estimate_storage', status=200)
    sleep(0.05)  # Give the estimation thread a chance to start and acquire the lock

    with transaction.manager:  # Wait for the estimation to finish
        DBSession.execute(SQL_LOCK)

    cur = ngw_env.core.query_storage(dict(resource_id=lambda col: col == res.id))
    assert FeatureAttachmentData.identity in cur
    assert cur[FeatureAttachmentData.identity]['data_volume'] == len(content)

    with transaction.manager:
        DBSession.delete(res)
def get_project_by_resource(cls, resource):
    if '_project_cache' not in cls.__dict__:
        db_session = DBSession()
        projects = db_session.query(Project).all()
        cls._project_cache = {
            project.root_resource_id: project
            for project in projects
            if project.root_resource_id is not None}

    res = resource
    while res:
        if res.id in cls._project_cache:
            return cls._project_cache[res.id]
        res = res.parent

    return None
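# A minimal usage sketch, assuming get_project_by_resource is a classmethod on
# Project (the owning class is not shown above, so this call site is an
# assumption): starting from an arbitrary resource, it walks up the parent
# chain until an ancestor is cached as some project's root resource.
project = Project.get_project_by_resource(some_resource)  # some_resource is hypothetical
if project is not None:
    print(project.root_resource_id)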
def test_type_geojson(ngw_resource_group, ngw_txn):
    src = Path(__file__).parent / 'data' / 'type.geojson'
    dataset = ogr.Open(str(src))
    assert dataset is not None, gdal.GetLastErrorMsg()
    layer = dataset.GetLayer(0)
    assert layer is not None, gdal.GetLastErrorMsg()

    res = VectorLayer(
        parent_id=ngw_resource_group, display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex))
    res.persist()

    res.setup_from_ogr(layer, lambda x: x)
    res.load_from_ogr(layer, lambda x: x)
    layer.ResetReading()

    DBSession.flush()

    def field_as(f, n, t):
        fidx = f.GetFieldIndex(n)
        if f.IsFieldNull(fidx):
            return None
        attr = getattr(f, 'GetFieldAs' + t)
        result = attr(fidx)
        if t in ('Date', 'Time', 'DateTime'):
            result = [int(v) for v in result]
        if t == 'String' and six.PY2:
            result = result.decode('utf-8')
        return result

    for feat, ref in zip(res.feature_query()(), layer):
        fields = feat.fields
        assert fields['null'] == field_as(ref, 'null', None)
        assert fields['int'] == field_as(ref, 'int', 'Integer')
        assert fields['real'] == field_as(ref, 'real', 'Double')
        assert fields['date'] == date(*field_as(ref, 'date', 'DateTime')[0:3])
        assert fields['time'] == time(*field_as(ref, 'time', 'DateTime')[3:6])
        assert fields['datetime'] == datetime(*field_as(ref, 'datetime', 'DateTime')[0:6])
        assert fields['string'] == field_as(ref, 'string', 'String')
        assert fields['unicode'] == field_as(ref, 'unicode', 'String')
def user_with_webmap(ngw_env, ngw_resource_group):
    with transaction.manager:
        user = User(
            keyname=TEST_USER_KEYNAME,
            display_name='Test User',
        ).persist()

        test_root_resource = ResourceGroup.filter_by(id=0).one()
        append_acl(test_root_resource, 'allow', user,
                   'resource', 'read', ResourceGroup.identity)

        test_resource_group = ResourceGroup.filter_by(id=ngw_resource_group).one()
        append_acl(test_resource_group, 'allow', user,
                   'resource', 'read', ResourceGroup.identity)

        webmap = WebMap(
            parent_id=ngw_resource_group, display_name=__name__,
            owner_user=User.by_keyname(TEST_USER_KEYNAME),
            root_item=WebMapItem(item_type='root')).persist()
        append_acl(webmap, 'allow', user, 'resource', 'read', WebMap.identity)
        append_acl(webmap, 'allow', user, 'webmap', 'annotation_read', WebMap.identity)
        append_acl(webmap, 'allow', user, 'webmap', 'annotation_write', WebMap.identity)
        webmap.persist()

        user_admin_id = User.by_keyname('administrator').id
        make_annotation(webmap, public=True, user_id=user_admin_id)
        make_annotation(webmap, public=False, user_id=user_admin_id)
        make_annotation(webmap, public=True, user_id=user.id)
        make_annotation(webmap, public=False, user_id=user.id)

        DBSession.flush()

    yield user, webmap

    with transaction.manager:
        user = User.filter_by(keyname=TEST_USER_KEYNAME).one()
        webmap = WebMap.filter_by(owner_user_id=user.id).one()

        DBSession.query(ResourceACLRule).filter(
            ResourceACLRule.principal_id == user.id).delete()
        DBSession.query(User).filter(
            User.keyname == TEST_USER_KEYNAME).delete()
        DBSession.query(WebMapAnnotation).filter(
            WebMapAnnotation.webmap_id == webmap.id).delete()
        DBSession.delete(WebMap.filter_by(owner_user_id=user.id).one())
def fixt_layers_styles(ngw_env, ngw_resource_group):
    layers_styles_ = []

    with transaction.manager:
        for i in range(count_layers_created):
            layer_id, style_id = make_layer_style(ngw_resource_group, i)
            draw_order_position = count_layers_created - i
            layers_styles_.append((layer_id, style_id, draw_order_position))

    yield layers_styles_

    with transaction.manager:
        for layer_id, style_id, draw_order_position in layers_styles_:
            DBSession.delete(RasterStyle.filter_by(id=style_id).one())
            DBSession.delete(RasterLayer.filter_by(id=layer_id).one())
def connection_id(ngw_resource_group, wfs_service_path):
    with transaction.manager:
        admin = User.by_keyname('administrator')
        obj = WFSConnection(
            parent_id=ngw_resource_group, display_name='wfs_connection',
            owner_user=admin,
            path=wfs_service_path,
            username='******', password='******',
            version='2.0.2',
        ).persist()

    yield obj.id

    with transaction.manager:
        DBSession.delete(WFSConnection.filter_by(id=obj.id).one())
def legend(request):
    if 'styles' not in request.GET:
        raise HTTPBadRequest("Parameter 'styles' not found.")

    try:
        styles = list(map(int, request.GET.getall('styles')))
    except ValueError:
        raise HTTPBadRequest("Invalid 'styles' parameter. Only numbers.")

    result = []
    legend_list = DBSession.query(Resource).filter(Resource.parent_id.in_(styles))
    for legend in legend_list:
        legend_description = env.file_storage.filename(legend.description_fileobj)
        with open(legend_description, mode='r') as f:
            description = loads(f.read(), encoding='utf-8')
        if not isinstance(description, list):
            description = list(description)

        element = dict(
            id=legend.id,
            legend_id=legend.id,
            name=legend.display_name or legend.keyname,
            children=description,
        )
        result.append(element)

    return Response(dumps(result), content_type='application/json', charset='utf-8')
def create_feature_layer(ogrlayer, parent_id, **kwargs):
    with transaction.manager:
        layer = VectorLayer(
            parent_id=parent_id,
            display_name='Feature layer (vector)',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            tbl_uuid=uuid4().hex,
        ).persist()

        layer.setup_from_ogr(ogrlayer)
        layer.load_from_ogr(ogrlayer)

        DBSession.flush()

    yield layer
def get_count_item_for_last_days(trailcam_id, days=7):
    date_now = datetime.now()
    date_7_days_ago = date_now - timedelta(days=days)

    time_group = DBSession.query(
        extract('months', TrailcamItem.date_original).label('m'),
        extract('days', TrailcamItem.date_original).label('d'),
        sa.func.count(TrailcamItem.id)) \
        .filter(TrailcamItem.date_original <= date_now) \
        .filter(TrailcamItem.date_original >= date_7_days_ago) \
        .filter(TrailcamItem.trailcam_id == trailcam_id) \
        .group_by('d') \
        .group_by('m') \
        .order_by(sa.desc('m')) \
        .order_by(sa.desc('d')) \
        .all()

    count_by_days = dict(
        ('{0}-{1}'.format(m, d), count) for m, d, count in time_group)

    count_for_last_days = []
    for i in range(days):
        date_i_days_ago = date_now - timedelta(days=i)
        current_key = '{d.month}-{d.day}'.format(d=date_i_days_ago)
        if current_key in count_by_days:
            count_for_last_days.append(
                (date_i_days_ago.isoformat(), count_by_days[current_key]))
        else:
            count_for_last_days.append((date_i_days_ago.isoformat(), 0))

    return count_for_last_days
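# A small illustration of the value returned by get_count_item_for_last_days
# above (the id and dates are hypothetical): one (ISO timestamp, count) pair
# per day, newest first, with zero filled in for days without trailcam items.
counts = get_count_item_for_last_days(trailcam_id=1, days=3)
# e.g. [('2021-05-03T12:00:00', 4), ('2021-05-02T12:00:00', 0), ('2021-05-01T12:00:00', 2)]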
def test_postgis_transform(ngw_txn, x, y, src, dst):
    # Transform the point from src to dst and back again; the round trip
    # should reproduce the original coordinates within tolerance.
    px, py = DBSession.connection().execute(db.text(
        'SELECT ST_X(pt), ST_Y(pt) '
        'FROM ST_Transform(ST_Transform('
        '   ST_SetSRID(ST_MakePoint(:x, :y), :src), :dst), :src) AS pt'
    ), x=x, y=y, src=src, dst=dst).fetchone()

    assert abs(px - x) < 1e-6
    assert abs(py - y) < 1e-6
def test_size_limit_reproj(source, size_expect, ngw_env, ngw_resource_group):
    res = RasterLayer(
        parent_id=ngw_resource_group,
        display_name='test-raster-limit-reproj',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
    ).persist()

    filename = os.path.join(os.path.split(__file__)[0], 'data', source)

    with ngw_env.raster_layer.options.override(dict(size_limit=size_expect - 100)):
        with pytest.raises(ValidationError):
            res.load_file(filename, ngw_env)

    with ngw_env.raster_layer.options.override(dict(size_limit=size_expect)):
        res.load_file(filename, ngw_env)

    DBSession.expunge(res)
def disable_users():
    active_uids = []

    with transaction.manager:
        for user in User.filter(db.and_(
            User.keyname != 'administrator',
            db.not_(User.disabled),
            db.not_(User.system),
        )).all():
            user.disabled = True
            active_uids.append(user.id)
        DBSession.flush()

    yield

    with transaction.manager:
        for user in User.filter(User.id.in_(active_uids)).all():
            user.disabled = False
        DBSession.flush()
def create_feature_layer(ogrlayer, parent_id, ngw_httptest_app):
    if not env.options.get('component.wfsclient'):
        pytest.skip("wfsclient is not available")

    with create_vector_layer(ogrlayer, parent_id) as vlayer:
        with transaction.manager:
            res_common = dict(
                parent_id=parent_id,
                owner_user=User.by_keyname('administrator'))

            service = WFSService(
                **res_common, display_name='WFS service',
            ).persist()
            service_layer = WFS_Service_Layer(
                resource_id=vlayer.id, display_name='Layer', keyname='layer')
            service.layers.append(service_layer)

        with transaction.manager:
            wfs_path = '{}/api/resource/{}/wfs'.format(
                ngw_httptest_app.base_url, service.id)

            connection = WFSConnection(
                **res_common, display_name='WFS connection',
                path=wfs_path, version='2.0.2',
                username='******', password='******',
            ).persist()

            layer = WFSLayer(
                **res_common, display_name='Feature layer (WFS)',
                connection=connection,
                srs=SRS.filter_by(id=3857).one(),
                layer_name=service_layer.keyname,
                column_geom='geom',
                geometry_srid=vlayer.srs_id,
                geometry_type='POINT',
            ).persist()

            DBSession.flush()
            layer.setup()

        yield layer
def geocollection(request):
    try:
        date = str(request.GET.get('datetime', datetime.now()))

        if 'layers' in request.GET:
            qlayers = list(map(int, request.GET['layers'].split(',')))
            print(qlayers)
            layers = DBSession.query(VectorLayer) \
                .filter(VectorLayer.id.in_(qlayers)) \
                .all()
        else:
            layers = DBSession.query(VectorLayer).all()

        layers = list(filter(
            lambda layer: layer.has_permission(PD_READ, request.user), layers))
        print(layers)
    except Exception as e:
        raise HTTPBadRequest(str(e))

    features = GeoJsonFeatureList()

    # Build the collection of GeoJSON features that fall within the requested extent
    if 'bbox' in request.GET:
        try:
            bbox = list(map(float, request.GET['bbox'].split(',')))
            print(bbox)
            geometry = box(*bbox, srid=3857)

            for layer in layers:
                query = layer.feature_query()
                query.geom()
                query.intersects(geometry)
                for feature in query():
                    print(feature)
                    feature.fields['__layer__'] = feature.layer.id
                    features.append(feature)
        except Exception as e:
            print(e)
            raise HTTPBadRequest(str(e))
    else:
        raise HTTPBadRequest()

    print(len(features))

    for f in features:
        f._geom = geometry_transform(f.geom, f.layer.srs_id, 4326)

    result = geojson.dumps(features, cls=ComplexEncoder)
    return Response(result, content_type='application/json')
def test_resource_storage(ngw_env, ngw_resource_group, ngw_webtest_app,
                          ngw_auth_administrator):
    reserve_storage = ngw_env.core.reserve_storage

    with transaction.manager:
        res1 = vector_layer('test-resource-1', ngw_resource_group)
        res2 = vector_layer('test-resource-2', ngw_resource_group)

        reserve_storage('test_comp', TestKOD1, resource=res1, value_data_volume=100)
        reserve_storage('test_comp', TestKOD2, resource=res1, value_data_volume=10)
        reserve_storage('test_comp', TestKOD1, resource=res2, value_data_volume=200)

        DBSession.flush()
        DBSession.expunge(res1)
        DBSession.expunge(res2)

    resp = ngw_webtest_app.get('/api/resource/%d/volume' % res1.id, status=200)
    assert resp.json['volume'] == 110

    resp = ngw_webtest_app.get('/api/resource/%d/volume' % res2.id, status=200)
    assert resp.json['volume'] == 200

    cur = ngw_env.core.query_storage()
    assert cur['']['data_volume'] == 310
    assert cur[TestKOD1.identity]['data_volume'] == 300

    with transaction.manager:
        DBSession.delete(res1)

    cur = ngw_env.core.query_storage()
    assert cur['']['data_volume'] == 200

    with transaction.manager:
        DBSession.delete(res2)

    cur = ngw_env.core.query_storage()
    assert cur['']['data_volume'] == 0
def test_keep_delete(ngw_env, ngw_txn):
    fo_keep = FileObj(component='test').persist()
    fn_keep = ngw_env.file_storage.filename(fo_keep, makedirs=True)

    fo_delete = FileObj(component='test')
    fn_delete = ngw_env.file_storage.filename(fo_delete, makedirs=True)

    DBSession.flush()

    for fn in (fn_keep, fn_delete):
        with io.open(fn, 'w') as fd:
            fd.write(fn)

    ngw_env.file_storage.cleanup()

    assert os.path.isfile(fn_keep)
    assert not os.path.isfile(fn_delete)

    os.unlink(fn_keep)
def forward(ctx):
    connection = DBSession.connection()
    instance_id = ctx.env.core.options.get(
        'provision.instance_id', str(uuid.uuid4()))
    connection.execute(text("""
        INSERT INTO setting (component, name, value)
        VALUES ('core', 'instance_id', :instance_id)
    """), instance_id=json.dumps(instance_id))
def create_vector_layer(cls, parent_obj, json_layer_struct, layer_name):
    # Local import: used only here
    from nextgisweb.resource.serialize import CompositeSerializer

    vl = VectorLayer(parent=parent_obj, owner_user=parent_obj.owner_user)
    cs = CompositeSerializer(vl, parent_obj.owner_user, json_layer_struct)
    cs.deserialize()

    vl.tbl_uuid = uuid.uuid4().hex
    for fld in vl.fields:
        fld.fld_uuid = uuid.uuid4().hex

    vl.keyname = '%s_%s' % (layer_name, vl.tbl_uuid)
    vl.persist()

    # Temporary workaround for #266
    vl.srs_id = vl.srs.id

    ti = TableInfo.from_layer(vl)
    ti.setup_metadata(vl._tablename)
    ti.metadata.create_all(bind=DBSession.connection())

    return vl