def cmd_compile(args):

    def _compile_file(path, component, locale):
        with io.open(str(path), 'r') as po:
            catalog = read_po(po, locale=locale, domain=component)

        logger.info(
            "Compiling component [%s] locale [%s] (%d messages)...",
            component, locale, len(catalog))

        with io.open(str(path.with_suffix('.mo')), 'wb') as mo:
            write_mo(mo, catalog)

        with io.open(str(path.with_suffix('.jed')), 'w') as jed:
            write_jed(jed, catalog)

    components = list(load_components(args))

    # New-style layout: PO-files live next to the component module in its
    # own locale directory.
    modern_file_layout = list()
    for comp_id, comp_mod in components:
        locale_path = Path(import_module(comp_mod).__path__[0]) / 'locale'
        if not locale_path.is_dir() or len(list(locale_path.glob('*.po'))) == 0:
            continue
        modern_file_layout.append(comp_id)
        for po_path in locale_path.glob('*.po'):
            locale = po_path.with_suffix('').name
            _compile_file(po_path, comp_id, locale)

    # Old-style layout: PO-files under the package-level locale directory
    # ("<locale>/LC_MESSAGES/<component>.po").
    locpath = resource_filename(args.package, 'locale')
    pofiles = []
    for root, dirnames, filenames in os.walk(locpath):
        for filename in fnmatch.filter(filenames, '*.po'):
            pofiles.append(os.path.join(root, filename)[len(locpath) + 1:])

    comp_ids = [cid for cid, _ in components]
    for pofile in pofiles:
        locale = pofile.split(os.sep, 1)[0]
        component = os.path.split(pofile)[1][:-3]

        if component not in comp_ids:
            continue

        if component in modern_file_layout:
            logger.warning(
                "Component [%s] was already compiled from new style "
                "locale file layout! Skipping it.", component)
            continue

        _compile_file(Path(os.path.join(locpath, pofile)), component, locale)
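# Illustrative sketch, not part of cmd_compile: the Babel PO -> MO round-trip
# that _compile_file performs, reduced to a standalone helper. The paths and
# the 'example' domain are hypothetical; only the documented babel.messages
# API is assumed (the .jed output above uses a separate project writer).
def compile_po_to_mo(po_path, mo_path, locale, domain='example'):
    from babel.messages.mofile import write_mo
    from babel.messages.pofile import read_po

    # Parse the textual PO catalog, then serialize it into the binary MO
    # format consumed by gettext at runtime.
    with open(po_path, 'r') as po_fd:
        catalog = read_po(po_fd, locale=locale, domain=domain)
    with open(mo_path, 'wb') as mo_fd:
        write_mo(mo_fd, catalog)
    return catalog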
def type_geojson_dataset():
    import nextgisweb.vector_layer.test
    path = Path(nextgisweb.vector_layer.test.__file__).parent \
        / 'data' / 'type.geojson'
    result = ogr.Open(str(path))
    assert result is not None, gdal.GetLastErrorMsg()
    return result
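# Illustrative sketch, assuming type_geojson_dataset is registered as a pytest
# fixture: a hypothetical test that relies only on the standard OGR
# DataSource/Layer API exposed by the returned object.
def test_type_geojson_dataset_opens(type_geojson_dataset):
    layer = type_geojson_dataset.GetLayer(0)
    assert layer is not None
    # The sample dataset is expected to contain at least one feature.
    assert layer.GetFeatureCount() > 0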
def test_fid(fid_source, fid_field, id_expect, ngw_resource_group, ngw_txn):
    src = Path(__file__).parent / 'data' / 'type.geojson'
    dataset = ogr.Open(str(src))
    assert dataset is not None, gdal.GetLastErrorMsg()
    layer = dataset.GetLayer(0)
    assert layer is not None, gdal.GetLastErrorMsg()

    res = VectorLayer(
        parent_id=ngw_resource_group, display_name='test_fid',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex))
    res.persist()

    res.setup_from_ogr(layer, fid_params=dict(
        fid_source=fid_source, fid_field=fid_field))
    res.load_from_ogr(layer)

    DBSession.flush()

    query = res.feature_query()
    query.filter_by(id=id_expect)
    assert query().total_count == 1
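# Illustrative sketch (hypothetical values): test_fid above takes fid_source,
# fid_field and id_expect from pytest parametrization. A decorator of roughly
# this shape would be applied to test_fid itself, e.g. as @FID_PARAMS.
import pytest

FID_PARAMS = pytest.mark.parametrize('fid_source, fid_field, id_expect', [
    ('SEQUENCE', [], 1),        # hypothetical: sequential FIDs, first one is 1
    ('FIELD', ['int'], 0),      # hypothetical: FID taken from the 'int' field
])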
def registry():
    regpath = Path(__file__).parent / 'registry'
    reg = Registry()
    for c in ('foo', 'bar'):
        reg.scandir(c, regpath / c)
    yield reg
def cmd_extract(args):
    pkginfo = load_pkginfo(args)
    for cident, cdefn in pkginfo['components'].items():
        if args.component is not None and cident not in args.component:
            continue

        if isinstance(cdefn, six.string_types):
            cmod = cdefn
        else:
            cmod = cdefn['module']
        module = import_module(cmod)
        modpath = module.__path__[0]

        dist = get_distribution(args.package)
        meta = dict(message_from_string(dist.get_metadata('PKG-INFO')))
        catalog = Catalog(
            project=args.package,
            version=dist.version,
            copyright_holder=meta.get('Author'),
            msgid_bugs_address=meta.get('Author-email'),
            fuzzy=False, charset='utf-8')

        method_map, options_map = get_mappings()

        def log_callback(filename, method, options):
            if method != 'ignore':
                filepath = os.path.normpath(os.path.join(modpath, filename))
                logger.debug('Extracting messages from %s', filepath)

        extracted = extract_from_dir(
            modpath, method_map, options_map,
            callback=log_callback)

        for filename, lineno, message, comments, context in extracted:
            catalog.add(
                message, None, [(filename, lineno)],
                auto_comments=comments, context=context)

        logger.info(
            "%d messages extracted from component [%s]",
            len(catalog), cident)

        locale_path = Path(module.__path__[0]) / 'locale'
        if locale_path.is_dir():
            outfn = str(locale_path / '.pot')
        else:
            outfn = resource_filename(args.package, 'locale/%s.pot' % cident)

        with io.open(outfn, 'wb') as outfd:
            write_po(outfd, catalog, ignore_obsolete=True)
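# Illustrative sketch: the shape of the data get_mappings() is expected to
# return for extract_from_dir. The concrete patterns are assumptions; the
# structures (a list of (pattern, method) pairs plus a per-pattern options
# dict) follow Babel's documented extraction API. The 'mako' method would
# additionally require the Mako package's Babel extractor plugin.
def example_mappings():
    method_map = [
        ('**.py', 'python'),                # extract from Python sources
        ('**/template/**.mako', 'mako'),    # hypothetical template pattern
    ]
    options_map = {
        '**/template/**.mako': {'input_encoding': 'utf-8'},
    }
    return method_map, options_map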
def test_type_geojson(ngw_resource_group, ngw_txn):
    src = Path(__file__).parent / 'data' / 'type.geojson'
    dataset = ogr.Open(str(src))
    assert dataset is not None, gdal.GetLastErrorMsg()
    layer = dataset.GetLayer(0)
    assert layer is not None, gdal.GetLastErrorMsg()

    res = VectorLayer(
        parent_id=ngw_resource_group, display_name='from_ogr',
        owner_user=User.by_keyname('administrator'),
        srs=SRS.filter_by(id=3857).one(),
        tbl_uuid=six.text_type(uuid4().hex))
    res.persist()

    res.setup_from_ogr(layer, lambda x: x)
    res.load_from_ogr(layer, lambda x: x)

    layer.ResetReading()

    DBSession.flush()

    def field_as(f, n, t):
        fidx = f.GetFieldIndex(n)
        if f.IsFieldNull(fidx):
            return None
        attr = getattr(f, 'GetFieldAs' + t)
        result = attr(fidx)
        if t in ('Date', 'Time', 'DateTime'):
            # OGR may return the seconds component as a float, so normalize
            # every element to int before building date/time objects.
            result = [int(v) for v in result]
        if t == 'String' and six.PY2:
            result = result.decode('utf-8')
        return result

    for feat, ref in zip(res.feature_query()(), layer):
        fields = feat.fields
        assert fields['null'] == field_as(ref, 'null', None)
        assert fields['int'] == field_as(ref, 'int', 'Integer')
        assert fields['real'] == field_as(ref, 'real', 'Double')
        assert fields['date'] == date(*field_as(ref, 'date', 'DateTime')[0:3])
        assert fields['time'] == time(*field_as(ref, 'time', 'DateTime')[3:6])
        assert fields['datetime'] == datetime(
            *field_as(ref, 'datetime', 'DateTime')[0:6])
        assert fields['string'] == field_as(ref, 'string', 'String')
        assert fields['unicode'] == field_as(ref, 'unicode', 'String')
def cmd_init(args):
    root = resource_filename(args.package, 'locale')

    for component, compmod in load_components(args):
        mod = import_module(compmod)
        locale_path = Path(mod.__path__[0]) / 'locale'
        if locale_path.is_dir():
            pot_file = locale_path / '.pot'
            po_file = locale_path / ('%s.po' % args.locale)
        else:
            pot_file = Path(root) / ('%s.pot' % component)
            po_file = Path(root) / args.locale / 'LC_MESSAGES' \
                / ('%s.po' % component)

        if not pot_file.is_file():
            logger.error(
                "POT-file for component [%s] not found in [%s]",
                component, str(pot_file))
            continue

        if po_file.is_file() and not args.force:
            logger.error(
                "Component [%s] target file exists! Skipping. "
                "Use --force to overwrite.", component)
            continue

        with io.open(str(pot_file), 'r') as infd:
            catalog = read_po(infd, locale=args.locale)

        catalog.locale = Locale.parse(args.locale)
        catalog.revision_date = datetime.now(LOCALTZ)

        with io.open(str(po_file), 'wb') as outfd:
            write_po(outfd, catalog)
def cmd_update(args):

    def _update_file(po_path, pot_path, component, locale):
        logger.info(
            "Updating component [%s] locale [%s]...", component, locale)

        with io.open(str(po_path), 'r') as po_fd, \
                io.open(str(pot_path), 'r') as pot_fd:
            po = read_po(po_fd, locale=locale)
            pot = read_po(pot_fd)

        po.update(pot, no_fuzzy_matching=True)

        with io.open(str(po_path), 'wb') as fd:
            write_po(fd, po)

    components = list(load_components(args))

    # New-style layout: PO-files next to the component module.
    modern_file_layout = list()
    for comp_id, comp_mod in components:
        locale_path = Path(import_module(comp_mod).__path__[0]) / 'locale'
        if not locale_path.is_dir() or len(list(locale_path.glob('*.po'))) == 0:
            continue
        modern_file_layout.append(comp_id)

        pot_path = locale_path / '.pot'
        if not pot_path.is_file():
            logger.error(
                "POT-file for component [%s] not found in [%s]",
                comp_id, str(pot_path))
            continue

        for po_path in locale_path.glob('*.po'):
            locale = po_path.with_suffix('').name
            _update_file(po_path, pot_path, comp_id, locale)

    # Old-style layout under the package-level locale directory.
    root = resource_filename(args.package, 'locale')

    pofiles = []
    for dirname, dirnames, filenames in os.walk(root):
        for filename in fnmatch.filter(filenames, '*.po'):
            relative = os.path.relpath(os.path.join(dirname, filename), root)
            pofiles.append(relative)

    comp_ids = [cid for cid, _ in components]
    for pofile in pofiles:
        locale = pofile.split(os.sep)[0]
        component = os.path.split(pofile)[1].split('.', 1)[0]

        if component not in comp_ids:
            continue

        if component in modern_file_layout:
            logger.warning(
                "Component [%s] was already updated from new style "
                "locale file layout! Skipping it.", component)
            continue

        logger.info(
            "Updating component '%s' locale '%s'...", component, locale)

        with open(os.path.join(root, pofile), 'r') as fd:
            catalog = read_po(fd, locale=locale, charset='utf-8')

        potfile = os.path.join(root, '%s.pot' % component)
        if not os.path.isfile(potfile):
            logger.warning(
                "Template for %s:%s doesn't exist! Skipping.",
                locale, component)
            continue

        with codecs.open(potfile, 'r', 'utf-8') as fd:
            template = read_po(fd)

        catalog.update(template, no_fuzzy_matching=True)

        with open(os.path.join(root, pofile), 'wb') as fd:
            write_po(fd, catalog)
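# Illustrative sketch, not part of cmd_update: the PO/POT merge performed by
# _update_file above, reduced to the Babel calls involved. The file paths are
# placeholders.
def update_po_from_pot(po_path, pot_path, locale):
    from babel.messages.pofile import read_po, write_po

    with open(po_path, 'r') as po_fd:
        po = read_po(po_fd, locale=locale)
    with open(pot_path, 'r') as pot_fd:
        pot = read_po(pot_fd)

    # Merge new and changed messages from the template; disabling fuzzy
    # matching mirrors the flag passed by _update_file.
    po.update(pot, no_fuzzy_matching=True)

    with open(po_path, 'wb') as po_fd:
        write_po(po_fd, po)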
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals, print_function, absolute_import

import pytest

from nextgisweb.compat import Path
from nextgisweb.lib.migration.migration import MigrationKey
from nextgisweb.lib.migration.registry import (
    PythonModuleMigration, SQLScriptMigration, Registry)

data_mformat = Path(__file__).parent / 'mformat'
data_registry = Path(__file__).parent / 'registry'


def mk(revision):
    return MigrationKey('default', revision)


def test_python_module_migration():
    migs = list(PythonModuleMigration.scandir('default', data_mformat))
    assert len(migs) == 2

    m1, m2 = migs

    assert m1.key == mk('00000001')
    assert m1.parents == (mk('00000000'), )

    assert m2.key == mk('00000002')
    assert m2.parents == (mk('00000001'), )

    assert m1.has_forward and not m2.has_rewind
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function, unicode_literals

from uuid import uuid4

import six
import pytest
from osgeo import ogr

from nextgisweb.models import DBSession
from nextgisweb.auth import User
from nextgisweb.compat import Path
from nextgisweb.core.exception import ValidationError
from nextgisweb.vector_layer import VectorLayer
from nextgisweb.spatial_ref_sys import SRS

path = Path(__file__).parent / 'data' / 'errors'

# List of creation test cases: file name, creation options, and final checks.
CREATE_TEST_PARAMS = (
    (
        'geom-collection.geojson',
        dict(),
        dict(exception=ValidationError),
    ),
    (
        'geom-collection.geojson',
        dict(fix_errors='SAFE'),
        dict(feature_count=2),
    ),
    (
        'incomplete-linestring.geojson',