Example #1
        def dbOpen(name):
            # `self` is captured from the enclosing scope; `name` selects the sub-database.
            dbpathname = abspath(self.path) + '/' + name

            # create_if_missing tracks self.create; never error if the DB already exists.
            db = LevelDB(dbpathname, create_if_missing=bool(self.create),
                         error_if_exists=False)

            return db
Example #2
    def __init__(self,
                 index=None,
                 save_dir=None,
                 points=None,
                 points_path=None,
                 points_db=None,
                 points_db_path=None,
                 index_path=None,
                 include_only_properties=None,
                 precision=GEOHASH_PRECISION):
        if save_dir:
            self.save_dir = save_dir
        else:
            self.save_dir = None

        if include_only_properties and hasattr(include_only_properties,
                                               '__contains__'):
            self.include_only_properties = include_only_properties

        if not index_path:
            index_path = os.path.join(save_dir or '.', self.INDEX_FILENAME)

        self.index_path = index_path

        if not index:
            self.index = defaultdict(list)
        else:
            self.index = index

        if not points_path:
            points_path = os.path.join(save_dir or '.', self.POINTS_FILENAME)
        self.points_path = points_path

        if not points:
            self.points = array.array('d')
        else:
            self.points = points

        if not points_db_path:
            points_db_path = os.path.join(save_dir or '.', self.POINTS_DB_DIR)

        if not points_db:
            self.points_db = LevelDB(points_db_path)
        else:
            self.points_db = points_db

        self.precision = precision

        self.i = 0
Example #3
    def __init__(self, index=None, polygons=None, polygons_db=None, save_dir=None,
                 index_filename=None,
                 polygons_db_path=None,
                 include_only_properties=None):
        if save_dir:
            self.save_dir = save_dir
        else:
            self.save_dir = None

        if not index_filename:
            index_filename = self.INDEX_FILENAME

        self.index_path = os.path.join(save_dir or '.', index_filename)

        if not index:
            self.create_index(overwrite=True)
        else:
            self.index = index

        if include_only_properties and hasattr(include_only_properties, '__contains__'):
            self.include_only_properties = include_only_properties

        if not polygons and not self.persistent_polygons:
            self.polygons = {}
        elif polygons and not self.persistent_polygons:
            self.polygons = polygons
        elif self.persistent_polygons and self.cache_size > 0:
            self.polygons = LRU(self.cache_size)
            if polygons:
                for key, value in six.iteritems(polygons):
                    self.polygons[key] = value

            self.cache_hits = 0
            self.cache_misses = 0

            self.get_polygon = self.get_polygon_cached

        if not polygons_db_path:
            polygons_db_path = os.path.join(save_dir or '.', self.POLYGONS_DB_DIR)

        if not polygons_db:
            self.polygons_db = LevelDB(polygons_db_path)
        else:
            self.polygons_db = polygons_db

        self.setup()

        self.i = 0
Example #4
    @classmethod
    def load(cls, d):
        index = cls.load_index(d)
        points = cls.load_points(d)
        points_db = LevelDB(os.path.join(d, cls.POINTS_DB_DIR))
        point_index = cls(index=index, points=points, points_db=points_db)
        point_index.load_properties(os.path.join(d, cls.PROPS_FILENAME))
        return point_index
Example #5
def do_viewpeakcachedb(args, dbh):

    from leveldb import LevelDB
    from collections import defaultdict

    ldb = LevelDB(args.peakcachedb, create_if_missing=False)

    batches = defaultdict(int)

    for key in ldb.RangeIter(include_value=False):
        batch_code = bytes(key.split(b'|', 1)[0])
        batches[batch_code] += 1

    cout('Peakcache DB: %s' % args.peakcachedb)
    for (k, v) in batches.items():
        cout('\t%s\t%4d' % (k.decode(), v))
Example #6
    def __init__(self, db: Union[Path, str]):
        """
        :param db: the filename of the level database.
        """
        MocaClassCache.__init__(self)
        MocaNamedInstance.__init__(self)
        self._db: LevelDB = LevelDB(str(db))
Example #7
def summarize(conn: leveldb.LevelDB):
    counts = defaultdict(int)
    for k, v in conn.RangeIter():
        assert isinstance(k, bytearray)
        kind = chr(k[0])
        counts[kind] += 1

    code_to_name = {t.value: t.name for t in RowType}
Example #8
def dump_csv(conn: leveldb.LevelDB) -> None:
    """Dump the data from a given connection."""
    writer = csv.writer(sys.stdout)
    secret = get_obfuscate_key(conn)
    writer.writerow(['txid', 'vout', 'height', 'coinbase', 'amount'])
    for k, v in conn.RangeIter(b'C', b'D', include_value=True):
        txid, vout = decode_key(k)
        decrypt(v, secret)
        height, coinbase, amount = decode_val(v)
        writer.writerow([txid, vout, height, coinbase, amount])
Example #9
    def __init__(self, filename, db_dir):
        self.filename = filename

        self.node_ids = array.array('l')

        self.logger = logging.getLogger('osm.intersections')

        # Store these in a LevelDB
        ensure_dir(db_dir)
        ways_dir = os.path.join(db_dir, 'ways')
        ensure_dir(ways_dir)
        nodes_dir = os.path.join(db_dir, 'nodes')
        ensure_dir(nodes_dir)
        self.way_props = LevelDB(ways_dir)
        self.node_props = LevelDB(nodes_dir)

        # These form a graph and should always have the same length
        self.intersection_edges_nodes = array.array('l')
        self.intersection_edges_ways = array.array('l')
Example #10
def dump_chainstate_csv(conn: leveldb.LevelDB):
    secret = get_obfuscate_key(conn)
    writer = csv.writer(sys.stdout)
    writer.writerow(
        ['txid', 'vout', 'height', 'coinbase', 'amount', 'scriptsize'])
    for k, v in conn.RangeIter(b'C', b'D', include_value=True):
        txid, vout = decode_key(k)
        decrypt(v, secret)
        height, coinbase, amount, sz = decode_val(v)
        writer.writerow([txid, vout, height, coinbase, amount, sz])
Example #11
def summarize(conn: leveldb.LevelDB):
    counts = defaultdict(int)
    for k, v in conn.RangeIter():
        assert isinstance(k, bytearray)
        kind = chr(k[0])
        counts[kind] += 1

    code_to_name = {t.value: t.name for t in RowType}
    for k, v in sorted(
            counts.items(), key=operator.itemgetter(1), reverse=True):
        print('{:15s} {}'.format(code_to_name[k], v))
Example #12
    @classmethod
    def load(cls, d, index_name=None, polys_filename=DEFAULT_POLYS_FILENAME,
             properties_filename=DEFAULT_PROPS_FILENAME,
             polys_db_dir=POLYGONS_DB_DIR):
        index = cls.load_index(d, index_name=index_name or cls.INDEX_FILENAME)
        if not cls.persistent_polygons:
            polys = cls.load_polygons(os.path.join(d, polys_filename))
        else:
            polys = None
        polygons_db = LevelDB(os.path.join(d, polys_db_dir))
        polygon_index = cls(index=index, polygons=polys, polygons_db=polygons_db, save_dir=d)
        polygon_index.load_properties(os.path.join(d, properties_filename))
        polygon_index.load_polygon_properties(d)
        return polygon_index
Example #13
def open_db(logfile):
    db = None
    if os.path.exists(logfile + '_db'):
        try:
            db = LevelDB(logfile + '_db', create_if_missing=False)
        except LevelDBError:
            logging.error('LevelDBError')
            sys.exit(-1)
    return db
Example #14
def get_obfuscate_key(conn: leveldb.LevelDB) -> bytearray:
    """Load the obfuscation key from the database."""
    secret = conn.Get(OBFUSCATE_KEY)
    assert secret[0] == 8 and len(secret) == 9
    return secret[1:]
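
The decrypt helper referenced in Examples #8 and #10 is not shown on this page. A minimal sketch, assuming it XORs each value byte in place with the obfuscation key repeated to the value's length (the scheme Bitcoin Core uses to obfuscate chainstate values):

def decrypt(data: bytearray, secret: bytearray) -> None:
    """XOR `data` in place with `secret` repeated to the length of `data`."""
    for i in range(len(data)):
        data[i] ^= secret[i % len(secret)]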
Example #15
    def __init__(self, store: KeyValueStore, db: leveldb.LevelDB, sync: bool):
        super().__init__(store, sync=sync)
        self._touched_keys = set()
        self._snapshot = db.CreateSnapshot()
Example #16
import multiprocessing as multi
import os
import sys
from multiprocessing import Pool
from os import path

from send2trash import send2trash

from config import MyConfig
from common import calc_hu_moments, CalcDiff
from leveldb import LevelDB

PARALLEL_COUNT_MAX = 2
DIFF_THRESHOLD = 5.0e-7
DOUBLE_FMT = 'd'

if __name__ == "__main__":
    parallel = Pool(min(PARALLEL_COUNT_MAX, multi.cpu_count()))
    config = MyConfig()
    images_dir = sys.argv[1] if len(sys.argv) >= 2 else config.path.original_images_dir
    db = LevelDB(config.path.end_nearly_dirs_file, DOUBLE_FMT)
    for p, _, fs in os.walk(images_dir):
        if fs and DIFF_THRESHOLD < (db.get(p) or 1.0):
            print(p)
            (hu_moments, _) = calc_hu_moments([path.join(p, f) for f in fs],
                                              parallel)
            filtered_diff = parallel.map(CalcDiff(hu_moments, DIFF_THRESHOLD),
                                         enumerate(hu_moments))
            failed = [
                hu.fname for hu, b in zip(hu_moments, filtered_diff) if b
            ]
            for fname in failed:
                print("  Trash: %s" % fname)
                send2trash(fname)
            db.put(p, DIFF_THRESHOLD)
Example #17
def main():
  args = parser.parse_args()

  world_paths = get_world_paths()

  if args.world_name not in world_paths:
    print(f'Could not find world by the name of "{args.world_name}"')
    exit(1)

  world_path = world_paths[args.world_name]

  output_folder = world_path
  if args.behavior_pack:
    behavior_packs_path = os.path.join(world_path, 'behavior_packs')
    # Collect candidates first: indexing [0] directly would raise IndexError
    # when no behavior pack directory exists.
    behavior_packs = [file.path for file in os.scandir(behavior_packs_path) if file.is_dir()]
    if not behavior_packs:
      print('Could not find behavior pack!')
      exit(1)
    output_folder = behavior_packs[0]

  db_path = os.path.join(world_path, 'db')
  db = LevelDB(db_path)

  structures: StructureDict = {}

  structure_id = args.structure_id
  if structure_id != 'all':
    if ':' not in structure_id: structure_id = 'mystructure:' + structure_id

  for key, data in db.iterate():
    try:
      key_str = key.decode('ascii')
      if key_str.startswith('structuretemplate_'):
        str_id = key_str[len('structuretemplate_'):]

        structure = amulet_nbt.load(buffer=data, little_endian=True)
        structures[str_id] = structure

        if (str_id == structure_id or structure_id == 'all') and args.delete:
          # print(f'Deleted structure "{str_id}" from the leveldb database')
          db.delete(key)
    except Exception:
      pass

  db.close()

  filtered_structures = {}
  if structure_id != 'all':
    if structure_id not in structures:
      print(f'Could not find structure with the id of "{structure_id}"! Available ids: {", ".join(structures.keys())}')
      exit(0)

    filtered_structures = { key: value for key, value in structures.items() if key == structure_id }
  else:
    filtered_structures = structures

  if len(filtered_structures) == 0:
    print('No structures found!')
    exit(0)
  else:
    print(f'Preparing to save {", ".join(filtered_structures.keys())}')
  
  save_structures(output_folder, filtered_structures, args.force)
Example #18
class PolygonIndex(object):
    include_only_properties = None
    simplify_tolerance = 0.0001
    preserve_topology = True
    persistent_polygons = False
    cache_size = 0
    fix_invalid_polygons = False

    INDEX_FILENAME = None
    POLYGONS_DB_DIR = 'polygons'

    def __init__(self, index=None, polygons=None, polygons_db=None, save_dir=None,
                 index_filename=None,
                 polygons_db_path=None,
                 include_only_properties=None):
        if save_dir:
            self.save_dir = save_dir
        else:
            self.save_dir = None

        if not index_filename:
            index_filename = self.INDEX_FILENAME

        self.index_path = os.path.join(save_dir or '.', index_filename)

        if not index:
            self.create_index(overwrite=True)
        else:
            self.index = index

        if include_only_properties and hasattr(include_only_properties, '__contains__'):
            self.include_only_properties = include_only_properties

        if not polygons and not self.persistent_polygons:
            self.polygons = {}
        elif polygons and not self.persistent_polygons:
            self.polygons = polygons
        elif self.persistent_polygons and self.cache_size > 0:
            self.polygons = LRU(self.cache_size)
            if polygons:
                for key, value in six.iteritems(polygons):
                    self.polygons[key] = value

            self.cache_hits = 0
            self.cache_misses = 0

            self.get_polygon = self.get_polygon_cached

        if not polygons_db_path:
            polygons_db_path = os.path.join(save_dir or '.', self.POLYGONS_DB_DIR)

        if not polygons_db:
            self.polygons_db = LevelDB(polygons_db_path)
        else:
            self.polygons_db = polygons_db

        self.setup()

        self.i = 0

    def create_index(self, overwrite=False):
        raise NotImplementedError('Children must implement')

    def index_polygon(self, polygon):
        raise NotImplementedError('Children must implement')

    def setup(self):
        pass

    def clear_cache(self, garbage_collect=True):
        if self.persistent_polygons and self.cache_size > 0:
            self.polygons.clear()
            if garbage_collect:
                gc.collect()

    def simplify_polygon(self, poly, simplify_tolerance=None, preserve_topology=None):
        if simplify_tolerance is None:
            simplify_tolerance = self.simplify_tolerance
        if preserve_topology is None:
            preserve_topology = self.preserve_topology
        return poly.simplify(simplify_tolerance, preserve_topology=preserve_topology)

    def index_polygon_properties(self, properties):
        pass

    def polygon_geojson(self, poly, properties):
        return {
            'type': 'Feature',
            'geometry': mapping(poly),
        }

    def add_polygon(self, poly, properties, cache=False, include_only_properties=None):
        if include_only_properties is not None:
            properties = {k: v for k, v in properties.iteritems() if k in include_only_properties}

        if not self.persistent_polygons or cache:
            self.polygons[self.i] = prep(poly)

        if self.persistent_polygons:
            self.polygons_db.Put(self.polygon_key(self.i), json.dumps(self.polygon_geojson(poly, properties)))

        self.polygons_db.Put(self.properties_key(self.i), json.dumps(properties))
        self.index_polygon_properties(properties)
        self.i += 1

    @classmethod
    def create_from_shapefiles(cls, inputs, output_dir,
                               index_filename=None,
                               include_only_properties=None):
        index = cls(save_dir=output_dir, index_filename=index_filename or cls.INDEX_FILENAME)
        for input_file in inputs:
            if include_only_properties is not None:
                include_props = include_only_properties.get(input_file, cls.include_only_properties)
            else:
                include_props = cls.include_only_properties

            f = fiona.open(input_file)

            index.add_geojson_like_file(f)

        return index

    @classmethod
    def fix_polygon(cls, poly):
        '''
        Coerce to valid polygon
        '''
        if not poly.is_valid:
            poly = poly.buffer(0)
            if not poly.is_valid:
                return None
        return poly

    @classmethod
    def to_polygon(cls, coords, holes=None, test_point=None):
        '''
        Create shapely polygon from list of coordinate tuples if valid
        '''
        if not coords or len(coords) < 3:
            return None

        # Fix for polygons crossing the 180th meridian
        lons = [lon for lon, lat in coords]
        if (max(lons) - min(lons) > 180):
            coords = [(lon + 360.0 if lon < 0 else lon, lat) for lon, lat in coords]
            if holes:
                holes = [(lon + 360.0 if lon < 0 else lon, lat) for lon, lat in holes]

        poly = Polygon(coords, holes)
        try:
            if test_point is None:
                test_point = poly.representative_point()
            invalid = cls.fix_invalid_polygons and not poly.is_valid and not poly.contains(test_point)
        except Exception:
            invalid = True

        if invalid:
            try:
                poly_fix = cls.fix_polygon(poly)

                if poly_fix is not None and poly_fix.bounds and len(poly_fix.bounds) == 4 and poly_fix.is_valid and poly_fix.type == poly.type:
                    if test_point is None:
                        test_point = poly_fix.representative_point()

                    if poly_fix.contains(test_point):
                        poly = poly_fix
            except Exception:
                pass

        return poly

    def add_geojson_like_record(self, rec, include_only_properties=None):
        if not rec or not rec.get('geometry') or 'type' not in rec['geometry']:
            return
        poly_type = rec['geometry']['type']
        if poly_type == 'Polygon':
            coords = rec['geometry']['coordinates'][0]
            poly = self.to_polygon(coords)
            if poly is None or not poly.bounds or len(poly.bounds) != 4:
                return
            self.index_polygon(poly)
            self.add_polygon(poly, rec['properties'], include_only_properties=include_only_properties)
        elif poly_type == 'MultiPolygon':
            polys = []
            poly_coords = rec['geometry']['coordinates']
            for coords in poly_coords:
                poly = self.to_polygon(coords[0])
                if poly is None or not poly.bounds or len(poly.bounds) != 4:
                    continue
                polys.append(poly)
                self.index_polygon(poly)

            self.add_polygon(MultiPolygon(polys), rec['properties'], include_only_properties=include_only_properties)
        else:
            return

    def add_geojson_like_file(self, f, include_only_properties=None):
        '''
        Add either GeoJSON or a shapefile record to the index
        '''

        for rec in f:
            self.add_geojson_like_record(rec, include_only_properties=include_only_properties)

    @classmethod
    def create_from_geojson_files(cls, inputs, output_dir,
                                  index_filename=None,
                                  polys_filename=DEFAULT_POLYS_FILENAME,
                                  include_only_properties=None):
        index = cls(save_dir=output_dir, index_filename=index_filename or cls.INDEX_FILENAME)
        for input_file in inputs:
            if include_only_properties is not None:
                include_props = include_only_properties.get(input_file, cls.include_only_properties)
            else:
                include_props = cls.include_only_properties

            f = json.load(open(input_file))

            index.add_geojson_like_file(f['features'], include_only_properties=include_props)

        return index

    def compact_polygons_db(self):
        self.polygons_db.CompactRange('\x00', '\xff')

    def save(self):
        self.save_index()
        self.save_properties(os.path.join(self.save_dir, DEFAULT_PROPS_FILENAME))
        if not self.persistent_polygons:
            self.save_polygons(os.path.join(self.save_dir, DEFAULT_POLYS_FILENAME))
        self.compact_polygons_db()
        self.save_polygon_properties(self.save_dir)

    def load_properties(self, filename):
        properties = json.load(open(filename))
        self.i = int(properties.get('num_polygons', self.i))

    def save_properties(self, out_filename):
        out = open(out_filename, 'w')
        json.dump({'num_polygons': str(self.i)}, out)

    def save_polygons(self, out_filename):
        out = open(out_filename, 'w')
        for i in xrange(self.i):
            poly = self.polygons[i]
            feature = {
                'type': 'Feature',
                'geometry': mapping(poly.context),
            }
            out.write(json.dumps(feature) + u'\n')

    def save_index(self):
        raise NotImplementedError('Children must implement')

    def load_polygon_properties(self, d):
        pass

    def save_polygon_properties(self, d):
        pass

    @classmethod
    def polygon_from_geojson(cls, feature):
        poly_type = feature['geometry']['type']
        if poly_type == 'Polygon':
            coords = feature['geometry']['coordinates']
            poly = cls.to_polygon(coords[0], holes=coords[1:] or None)
            return poly
        elif poly_type == 'MultiPolygon':
            polys = []
            for coords in feature['geometry']['coordinates']:
                poly = cls.to_polygon(coords[0], holes=coords[1:] or None)
                polys.append(poly)

            return MultiPolygon(polys)

    @classmethod
    def load_polygons(cls, filename):
        f = open(filename)
        polygons = {}
        cls.i = 0
        for line in f:
            feature = json.loads(line.rstrip())
            polygons[cls.i] = prep(cls.polygon_from_geojson(feature))
            cls.i += 1
        return polygons

    @classmethod
    def load_index(cls, d, index_name=None):
        raise NotImplementedError('Children must implement')

    @classmethod
    def load(cls, d, index_name=None, polys_filename=DEFAULT_POLYS_FILENAME,
             properties_filename=DEFAULT_PROPS_FILENAME,
             polys_db_dir=POLYGONS_DB_DIR):
        index = cls.load_index(d, index_name=index_name or cls.INDEX_FILENAME)
        if not cls.persistent_polygons:
            polys = cls.load_polygons(os.path.join(d, polys_filename))
        else:
            polys = None
        polygons_db = LevelDB(os.path.join(d, polys_db_dir))
        polygon_index = cls(index=index, polygons=polys, polygons_db=polygons_db, save_dir=d)
        polygon_index.load_properties(os.path.join(d, properties_filename))
        polygon_index.load_polygon_properties(d)
        return polygon_index

    def get_candidate_polygons(self, lat, lon):
        raise NotImplementedError('Children must implement')

    def get_properties(self, i):
        return json.loads(self.polygons_db.Get(self.properties_key(i)))

    def get_polygon(self, i):
        return self.polygons[i]

    def get_polygon_cached(self, i):
        poly = self.polygons.get(i, None)
        if poly is None:
            data = json.loads(self.polygons_db.Get(self.polygon_key(i)))
            poly = prep(self.polygon_from_geojson(data))
            self.polygons[i] = poly
            self.cache_misses += 1
        else:
            self.cache_hits += 1
        return poly

    def __iter__(self):
        for i in xrange(self.i):
            yield self.get_properties(i), self.get_polygon(i)

    def __len__(self):
        return self.i

    def polygons_contain(self, candidates, point, return_all=False):
        containing = None
        if return_all:
            containing = []
        for i in candidates:
            poly = self.get_polygon(i)
            contains = poly.contains(point)
            if contains:
                properties = self.get_properties(i)
                if not return_all:
                    return properties
                else:
                    containing.append(properties)
        return containing

    def polygon_key(self, i):
        return 'poly:{}'.format(i)

    def properties_key(self, i):
        return 'props:{}'.format(i)

    def point_in_poly(self, lat, lon, return_all=False):
        candidates = self.get_candidate_polygons(lat, lon)
        point = Point(lon, lat)
        return self.polygons_contain(candidates, point, return_all=return_all)
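
PolygonIndex is abstract (create_index, index_polygon, save_index, load_index, and get_candidate_polygons all raise NotImplementedError), so it is only usable through a subclass. A hedged usage sketch; the subclass name RTreePolygonIndex and the file names here are hypothetical:

# Assumes RTreePolygonIndex is a concrete subclass implementing the abstract hooks.
index = RTreePolygonIndex.create_from_geojson_files(['countries.geojson'], 'countries_idx')
index.save()

loaded = RTreePolygonIndex.load('countries_idx')
properties = loaded.point_in_poly(48.8566, 2.3522)  # (lat, lon), per the API above
if properties is not None:
    print(properties.get('name'))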
Example #19
# -- Imports --------------------------------------------------------------------------

from typing import *
from leveldb import LevelDB, LevelDBError
from pathlib import Path
from pickle import dumps, loads
from .moca_utils import print_warning
from tinydb import TinyDB, JSONStorage, Query
from tinydb.middlewares import CachingMiddleware

# -------------------------------------------------------------------------- Imports --

# -- Variables --------------------------------------------------------------------------

try:
    __core_db = LevelDB(
        str(Path(__file__).parent.joinpath('storage').joinpath('core.db')))
except LevelDBError:
    __core_db = None

core_tiny_db: TinyDB = TinyDB(
    str(
        Path(__file__).parent.joinpath('storage').joinpath(
            'core-tiny-db.json')),
    CachingMiddleware(JSONStorage),
)
core_tiny_query: Query = Query()

# -------------------------------------------------------------------------- Variables --

# -- Core DB --------------------------------------------------------------------------
Example #20
def get_obfuscate_key(conn: leveldb.LevelDB) -> bytearray:
    secret = conn.Get(bytearray(b'\x0e\x00obfuscate_key'))
    assert secret[0] == 8 and len(secret) == 9
    return secret[1:]
Example #21
class PointIndex(object):
    include_only_properties = None
    persistent_index = False
    cache_size = 0

    POINTS_DB_DIR = 'points'

    GEOHASH_PRECISION = 7
    PROPS_FILENAME = 'properties.json'
    POINTS_FILENAME = 'points.json'
    INDEX_FILENAME = 'index.json'

    def __init__(self,
                 index=None,
                 save_dir=None,
                 points=None,
                 points_path=None,
                 points_db=None,
                 points_db_path=None,
                 index_path=None,
                 include_only_properties=None,
                 precision=GEOHASH_PRECISION):
        if save_dir:
            self.save_dir = save_dir
        else:
            self.save_dir = None

        if include_only_properties and hasattr(include_only_properties,
                                               '__contains__'):
            self.include_only_properties = include_only_properties

        if not index_path:
            index_path = os.path.join(save_dir or '.', self.INDEX_FILENAME)

        self.index_path = index_path

        if not index:
            self.index = defaultdict(list)
        else:
            self.index = index

        if not points_path:
            points_path = os.path.join(save_dir or '.', self.POINTS_FILENAME)
        self.points_path = points_path

        if not points:
            self.points = array.array('d')
        else:
            self.points = points

        if not points_db_path:
            points_db_path = os.path.join(save_dir or '.', self.POINTS_DB_DIR)

        if not points_db:
            self.points_db = LevelDB(points_db_path)
        else:
            self.points_db = points_db

        self.precision = precision

        self.i = 0

    def index_point(self, lat, lon):
        code = geohash.encode(lat, lon)[:self.precision]

        for key in [code] + geohash.neighbors(code):
            self.index[key].append(self.i)
        self.points.extend([lat, lon])

    def add_point(self,
                  lat,
                  lon,
                  properties,
                  cache=False,
                  include_only_properties=None):
        if include_only_properties is None and self.include_only_properties:
            include_only_properties = self.include_only_properties
        if include_only_properties is not None:
            properties = {
                k: v
                for k, v in properties.iteritems()
                if k in include_only_properties
            }

        self.index_point(lat, lon)
        self.points_db.Put(self.properties_key(self.i), json.dumps(properties))
        self.i += 1

    def load_properties(self, filename):
        properties = json.load(open(filename))
        self.i = int(properties.get('num_points', self.i))
        self.precision = int(properties.get('precision', self.precision))

    def save_properties(self, out_filename):
        out = open(out_filename, 'w')
        json.dump({
            'num_points': str(self.i),
            'precision': self.precision
        }, out)

    def save_index(self):
        if not self.index_path:
            self.index_path = os.path.join(self.save_dir or '.',
                                           self.INDEX_FILENAME)
        json.dump(self.index, open(self.index_path, 'w'))

    @classmethod
    def load_index(cls, d, index_name=None):
        return json.load(
            open(os.path.join(d, index_name or cls.INDEX_FILENAME)))

    def save_points(self):
        # array.array is not JSON-serializable directly; dump it as a plain
        # list, which load_points reads back into an array.array('d')
        json.dump(self.points.tolist(), open(self.points_path, 'w'))

    @classmethod
    def load_points(cls, d):
        return array.array(
            'd', json.load(open(os.path.join(d, cls.POINTS_FILENAME))))

    def properties_key(self, i):
        return 'props:{}'.format(i)

    def get_properties(self, i):
        return json.loads(self.points_db.Get(self.properties_key(i)))

    def compact_points_db(self):
        self.points_db.CompactRange('\x00', '\xff')

    def save(self):
        self.save_index()
        self.save_points()
        self.compact_points_db()
        self.save_properties(os.path.join(self.save_dir, self.PROPS_FILENAME))

    @classmethod
    def load(cls, d):
        index = cls.load_index(d)
        points = cls.load_points(d)
        points_db = LevelDB(os.path.join(d, cls.POINTS_DB_DIR))
        point_index = cls(index=index, points=points, points_db=points_db)
        point_index.load_properties(os.path.join(d, cls.PROPS_FILENAME))
        return point_index

    def __iter__(self):
        for i in xrange(self.i):
            lat, lon = self.points[i * 2], self.points[i * 2 + 1]
            yield self.get_properties(i), lat, lon

    def __len__(self):
        return self.i

    def get_candidate_points(self, latitude, longitude):
        code = geohash.encode(latitude, longitude)[:self.precision]
        candidates = OrderedDict()

        candidates.update([(k, None) for k in self.index.get(code, [])])

        for neighbor in geohash.neighbors(code):
            candidates.update([(k, None)
                               for k in self.index.get(neighbor, [])])

        return candidates.keys()

    def point_distances(self, latitude, longitude):
        candidates = self.get_candidate_points(latitude, longitude)

        return [(i, self.points[i * 2], self.points[i * 2 + 1],
                 haversine_distance(latitude, longitude, self.points[i * 2],
                                    self.points[i * 2 + 1]))
                for i in candidates]

    def all_nearby_points(self, latitude, longitude):
        distances = self.point_distances(latitude, longitude)
        if not distances:
            return []
        return sorted(distances, key=operator.itemgetter(-1))

    def points_with_properties(self, results):
        return [(self.get_properties(i), lat, lon, distance)
                for i, lat, lon, distance in results]

    def nearest_points(self, latitude, longitude):
        return self.points_with_properties(
            self.all_nearby_points(latitude, longitude))

    def nearest_n_points(self, latitude, longitude, n=2):
        return self.points_with_properties(
            self.all_nearby_points(latitude, longitude)[:n])

    def nearest_point(self, latitude, longitude):
        distances = self.all_nearby_points(latitude, longitude)
        if not distances:
            return None
        return self.points_with_properties(distances[:1])[0]
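
A short usage sketch for PointIndex, based only on the methods shown above; the coordinates and property values are illustrative:

index = PointIndex(save_dir='poi_idx')
index.add_point(40.7580, -73.9855, {'name': 'Times Square'})
index.add_point(40.7484, -73.9857, {'name': 'Empire State Building'})
index.save()

result = index.nearest_point(40.7549, -73.9840)
if result is not None:
    properties, lat, lon, distance = result
    print(properties['name'], distance)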
Example #22
import sys

try:
    # Debian: http://code.google.com/p/py-leveldb/
    from leveldb import LevelDB
    ldb_imported = True
except ImportError as e:
    try:
        # Fedora: https://github.com/wbolster/plyvel
        from plyvel import DB as LevelDB
        ldb_imported = True
    except ImportError as e:
        print('Warning: No leveldb/plyvel module was found')
        ldb_imported = False

ldb = None
if ldb_imported:
    try:
        ldb = LevelDB(Config.CACHE_DB, create_if_missing=True)
        # support plyvel 0.6
        if hasattr(ldb, 'put'):
            ldb_get = ldb.get
            ldb_put = ldb.put
        else:
            ldb_get = ldb.Get
            ldb_put = ldb.Put
    except Exception as e:
        ldb_imported = False
        print(e)
        sys.exit(1)


def empty_func(*args, **kwds):
    pass
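
With the ldb_get/ldb_put shim above, calling code stays agnostic to whether py-leveldb or plyvel was imported; a minimal sketch (key and value are illustrative):

if ldb_imported:
    ldb_put(b'cache:last', b'some-value')
    cached = ldb_get(b'cache:last')

Note that the two libraries still differ on misses: py-leveldb's Get raises KeyError for an absent key, while plyvel's get returns None, so a fully portable wrapper would need to normalize that as well.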
Example #23
class OSMIntersectionReader(object):
    def __init__(self, filename, db_dir):
        self.filename = filename

        self.node_ids = array.array('l')

        self.logger = logging.getLogger('osm.intersections')

        # Store these in a LevelDB
        ensure_dir(db_dir)
        ways_dir = os.path.join(db_dir, 'ways')
        ensure_dir(ways_dir)
        nodes_dir = os.path.join(db_dir, 'nodes')
        ensure_dir(nodes_dir)
        self.way_props = LevelDB(ways_dir)
        self.node_props = LevelDB(nodes_dir)

        # These form a graph and should always have the same length
        self.intersection_edges_nodes = array.array('l')
        self.intersection_edges_ways = array.array('l')

    def binary_search(self, a, x):
        '''Locate the leftmost value exactly equal to x'''
        i = bisect_left(a, x)
        if i != len(a) and a[i] == x:
            return i
        return None

    def intersections(self):
        '''
        Generator which yields tuples like:

        (node_id, lat, lon, {way_id: way_props})
        '''
        i = 0

        node_ids = array.array('l')
        node_counts = array.array('i')

        for element_id, props, deps in parse_osm(self.filename,
                                                 dependencies=True):
            props = {
                safe_decode(k): safe_decode(v)
                for k, v in six.iteritems(props)
            }
            if element_id.startswith('node'):
                node_id = long(element_id.split(':')[-1])
                node_ids.append(node_id)
                node_counts.append(0)
                self.node_props.Put(safe_encode(node_id), json.dumps(props))
            elif element_id.startswith('way'):
                # Don't care about the ordering of the nodes, and want uniques e.g. for circular roads
                deps = set(deps)

                # Get node indices by binary search
                for node_id in deps:
                    try:
                        node_index = self.binary_search(node_ids, node_id)
                    except ValueError:
                        continue
                    if node_index is None:
                        continue
                    node_counts[node_index] += 1

            if i % 1000 == 0 and i > 0:
                self.logger.info('doing {}s, at {}'.format(
                    element_id.split(':')[0], i))
            i += 1

        for i, count in enumerate(node_counts):
            if count > 1:
                self.node_ids.append(node_ids[i])

        del node_ids
        del node_counts

        i = 0

        for element_id, props, deps in parse_osm(self.filename,
                                                 dependencies=True):
            if element_id.startswith('node'):
                node_id = long(element_id.split(':')[-1])
                node_index = self.binary_search(self.node_ids, node_id)
            elif element_id.startswith('way'):
                props = {
                    safe_decode(k): safe_decode(v)
                    for k, v in six.iteritems(props)
                }
                way_id = long(element_id.split(':')[-1])
                props['id'] = way_id
                for node_id in deps:
                    node_index = self.binary_search(self.node_ids, node_id)
                    if node_index is not None:
                        self.intersection_edges_nodes.append(node_id)
                        self.intersection_edges_ways.append(way_id)
                        self.way_props.Put(safe_encode(way_id),
                                           json.dumps(props))

            if i % 1000 == 0 and i > 0:
                self.logger.info('second pass, doing {}s, at {}'.format(
                    element_id.split(':')[0], i))
            i += 1

        i = 0

        indices = numpy.argsort(self.intersection_edges_nodes)
        self.intersection_edges_nodes = numpy.fromiter(
            (self.intersection_edges_nodes[i] for i in indices),
            dtype=numpy.uint64)
        self.intersection_edges_ways = numpy.fromiter(
            (self.intersection_edges_ways[i] for i in indices),
            dtype=numpy.uint64)
        del indices

        idx = 0

        # Need to make a copy here otherwise will change dictionary size during iteration
        for node_id, g in groupby(self.intersection_edges_nodes):
            group_len = sum((1 for j in g))

            node_props = json.loads(self.node_props.Get(safe_encode(node_id)))

            way_indices = self.intersection_edges_ways[idx:idx + group_len]
            all_ways = [
                json.loads(self.way_props.Get(safe_encode(w)))
                for w in way_indices
            ]
            way_names = set()
            ways = []
            for way in all_ways:
                if way['name'] in way_names:
                    continue
                ways.append(way)
                way_names.add(way['name'])

            idx += group_len

            if i % 1000 == 0 and i > 0:
                self.logger.info('checking intersections, did {}'.format(i))
            i += 1

            if len(ways) > 1:
                node_index = self.binary_search(self.node_ids, node_id)
                yield self.node_ids[node_index], node_props, ways

    def create_intersections(self, outfile):
        out = open(outfile, 'w')
        for node_id, node_props, ways in self.intersections():
            d = {'id': safe_encode(node_id), 'node': node_props, 'ways': ways}
            out.write(json.dumps(d) + six.u('\n'))

    @classmethod
    def read_intersections(cls, infile):
        f = open(infile)
        for line in f:
            data = json.loads(line.rstrip())
            yield data['id'], data['node'], data['ways']
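
Driving the reader end to end, using only the constructor and methods shown above; the file paths are illustrative:

reader = OSMIntersectionReader('extract.osm', 'intersections_db')
reader.create_intersections('intersections.jsonl')

# Each line of the output file holds one intersection record
for node_id, node_props, ways in OSMIntersectionReader.read_intersections('intersections.jsonl'):
    print(node_id, len(ways))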
Example #24
    ## Check whether the LevelDB local database and histfile exist; create them if missing, re-use them otherwise.
    ## LevelDB local database

    levelDB_local_database = os.path.join(dot_gdclient_folder,
                                          ".gdclient_levelDB")
    docker_image_id = None
    ## Add history
    histfile = os.path.join(dot_gdclient_folder, ".gd_history")
    try:
        readline.read_history_file(histfile)
    except IOError:
        pass
    atexit.register(readline.write_history_file, histfile)
    del histfile

    db = LevelDB(levelDB_local_database)
    if db is None:
        print("cannot open db from " + levelDB_local_database)
        exit(1)

    print('enter "stop" to end session')
    completer_suggestions = {
        'geounit': {
            'start': dataset_dict,
            'delete': {}
        },
        'add_member': {},
        'track': {},
        'transfer': {},
        'package': {
            'provenance': {
Example #25
'''
@contact: QQ4113291000
@time: 2018/5/14, 4:30 PM
pip install py-leveldb

def initialize():
    db = leveldb.LevelDB("students")
    return db

def insert(db, sid, name):
    db.Put(str(sid), name)

def delete(db, sid):
    db.Delete(str(sid))

def update(db, sid, name):
    db.Put(str(sid), name)

def search(db, sid):
    name = db.Get(str(sid))
    return name

def display(db):
    for key, value in db.RangeIter():
        print(key, value)

Python 2.7 string operations require u''
'''

from leveldb import LevelDB

db = LevelDB('./data')
db.Put(str(u'nihao'), 'wobuxing')
print db.Get(str(u'nihao'))
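
Under Python 3 the same round trip would use bytes keys/values and print() as a function; a hedged equivalent of the snippet above, assuming a Python-3-compatible build of py-leveldb:

from leveldb import LevelDB

db = LevelDB('./data')
db.Put(b'nihao', b'wobuxing')
print(db.Get(b'nihao'))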