Example #1
0
 def draw_robot_error(self):
     """Draw error indicators for the current log step: a red line from the
     robot's actual position to its absolute target, and a green line from
     the target showing the target heading direction."""
     actual = align_with_origin(self.log_info[self.step].actual_pos.pos,
                                self.alliance)
     robot_px = self.inches_to_pixels(flip_y(actual))
     tx, ty, th = v3_align_with_origin(
         self.log_info[self.step].abs_target.v3, self.alliance)
     target_px = self.inches_to_pixels(flip_y((tx, ty)))
     heading_vec = self.inches_to_pixels(
         rotate_vector((0, 5), th + 180, True))
     heading_px = (target_px[0] + heading_vec[0],
                   target_px[1] + heading_vec[1])
     # Red: positional error; green: heading-target direction.
     pygame.draw.line(self.screen, (255, 0, 0), robot_px, target_px, 5)
     pygame.draw.line(self.screen, (0, 255, 0), target_px, heading_px, 5)
Example #2
0
    def grid(self, z, x, y, callback=None):
        """Return the UTFGrid JSON for tile (z, x, y).

        The row index is flipped when the file uses the 'tms' scheme.
        Grid key data is joined in from the grid_data table.  When
        *callback* is given the result is wrapped as a JSONP response.

        Raises ExtractionError when no grid exists for the tile.
        """
        if self.tilescheme == 'tms':
            y = flip_y(y, z)
        rows = self._query(
            '''SELECT grid FROM grids
                              WHERE zoom_level=? AND tile_column=? AND tile_row=?;''',
            (z, x, y))
        t = rows.fetchone()
        if not t:
            raise ExtractionError(
                _("Could not extract grid %s from %s") %
                ((z, x, y), self.filename))
        grid_json = json.loads(zlib.decompress(t[0]))

        # BUG FIX: this query previously referenced an undefined name
        # `tms_y`; in this version the (possibly flipped) row is in `y`.
        rows = self._query(
            '''SELECT key_name, key_json FROM grid_data
                              WHERE zoom_level=? AND tile_column=? AND tile_row=?;''',
            (z, x, y))
        # join up with the grid 'data' which is in pieces when stored in mbtiles file
        grid_json['data'] = {}
        grid_data = rows.fetchone()
        while grid_data:
            grid_json['data'][grid_data[0]] = json.loads(grid_data[1])
            grid_data = rows.fetchone()
        serialized = json.dumps(grid_json)
        if callback is not None:
            return '%s(%s);' % (callback, serialized)
        return serialized
Example #3
0
 def draw_robot(self):
     """Blit the robot sprite at its logged position, rotated to its heading."""
     x, y, angle = v3_align_with_origin(
         self.log_info[self.step].actual_pos.v3, self.alliance)
     rotated = pygame.transform.rotate(self.robot_surface, angle)
     rect = self.robot_surface.get_rect()
     rect.center = self.inches_to_pixels(flip_y((x, y)))
     self.screen.blit(rotated, rect)
Example #4
0
class Disk(Cache):
    """Tile cache that stores tiles as individual files on disk.

    Tiles live under ``<folder>/<sanitized basename>/<z>/<x>/<y><ext>``.
    """

    def __init__(self, basename, folder, **kwargs):
        super(Disk, self).__init__(**kwargs)
        self._basename = None
        self._basefolder = folder
        self.folder = folder
        # Assigning basename recomputes self.folder via the property setter.
        self.basename = basename

    @property
    def basename(self):
        # Tile-set name; assigning it re-derives the cache folder below.
        return self._basename

    @basename.setter
    def basename(self, basename):
        self._basename = basename
        # NOTE(review): inside a character class only the first '^' negates;
        # the later '^' are literal, so '^' characters are NOT stripped here.
        # Presumably the intent was [^a-zA-Z0-9_]+ -- confirm before changing,
        # since existing on-disk folder names depend on current behavior.
        subfolder = re.sub(r'[^a-z^A-Z^0-9^_]+', '',
                           basename.replace("/", "_").lower())
        self.folder = os.path.join(self._basefolder, subfolder)

    @Cache.scheme.setter
    def scheme(self, scheme):
        # 'wmts' is stored as an alias of 'xyz'; other values pass through.
        assert scheme in ('wmts', 'xyz', 'tms'), "Unknown scheme %s" % scheme
        self._scheme = 'xyz' if (scheme == 'wmts') else scheme

    def tile_file(self, (z, x, y)):
        """Return (directory, filename) for a tile.

        Python 2 tuple-parameter syntax.  Non-'xyz' schemes get the row
        index flipped before building the file name.
        """
        tile_dir = os.path.join("%s" % z, "%s" % x)
        if (self.scheme != 'xyz'):
            y = flip_y(y, z)
        tile_name = "%s%s" % (y, self.extension)
        return tile_dir, tile_name
Example #5
0
def convert_tile_to_bbox(tile_z, tile_x, tile_y, flip_tile_y):
    """Return [min_x, min_y, max_x, max_y] for a tile's bounding box.

    When *flip_tile_y* is true the row index is converted with flip_y()
    first.  Corners are taken half a tile either side of the center.
    """
    row = flip_y(tile_z, tile_y) if flip_tile_y else tile_y

    min_x, min_y = tile_to_coordinate(tile_x - 0.5, row + 0.5, tile_z)
    max_x, max_y = tile_to_coordinate(tile_x + 0.5, row - 0.5, tile_z)
    return [min_x, min_y, max_x, max_y]
Example #6
0
 def tile(self, z, x, y):
     """Return the raw tile blob for (z, x, y), flipping y via flip_y()."""
     logger.debug(_("Extract tile %s") % ((z, x, y),))
     row = flip_y(int(y), int(z))
     cursor = self._query('''SELECT tile_data FROM tiles
                           WHERE zoom_level=? AND tile_column=? AND tile_row=?;''', (z, x, row))
     record = cursor.fetchone()
     if not record:
         raise ExtractionError(_("Could not extract tile %s from %s") % ((z, x, y), self.filename))
     return record[0]
Example #7
0
 def tile(self, z, x, y):
     """Return the raw tile blob for (z, x, y).

     The row index is converted with flip_y() before querying, and
     ExtractionError is raised when the tile is absent.
     """
     logger.debug(_("Extract tile %s") % ((z, x, y),))
     # Flip the row index between schemes (row origin differs).
     tms_y = flip_y(int(y), int(z))
     rows = self._query('''SELECT tile_data FROM tiles
                           WHERE zoom_level=? AND tile_column=? AND tile_row=?;''', (z, x, tms_y))
     t = rows.fetchone()
     if not t:
         raise ExtractionError(_("Could not extract tile %s from %s") % ((z, x, y), self.filename))
     return t[0]
Example #8
0
class Cache(object):
    """Base class for tile caches.

    Holds the stored-tile file extension and the tiling scheme value.
    """

    def __init__(self, **kwargs):
        # File extension for stored tiles, including the dot.
        self.extension = kwargs.get('extension', '.png')
        # Underlying tiling-scheme value; defaults to 'tms'.
        self._scheme = 'tms'

    def tile_file(self, (z, x, y)):
        """Return (directory, filename) for a tile.

        Python 2 tuple-parameter syntax.  The row index is always
        flipped here, unconditionally.
        """
        tile_dir = os.path.join("%s" % z, "%s" % x)
        y = flip_y(y, z)
        tile_name = "%s%s" % (y, self.extension)
        return tile_dir, tile_name
Example #9
0
def tiles_for_bbox(left, bottom, right, top, tile_z, flip_tile_y):
    """Yield [z, x, y] for every tile at *tile_z* covering the bbox.

    The y range is normalized so iteration always runs low -> high; when
    *flip_tile_y* is true the yielded row index goes through flip_y().
    """
    x_lo, y_a = coordinate_to_tile(left, bottom, tile_z)
    x_hi, y_b = coordinate_to_tile(right, top, tile_z)
    y_lo, y_hi = min(y_a, y_b), max(y_a, y_b)

    for col in range(x_lo, x_hi + 1):
        for row in range(y_lo, y_hi + 1):
            out_row = flip_y(tile_z, row) if flip_tile_y else row
            yield [tile_z, col, out_row]
Example #10
0
 def display_zebra_motionworks(self):
     """Draw each robot from the Zebra MotionWorks data as a colored dot
     at the sample closest to the current stopwatch time."""
     data = self.zmw.data
     index = self.zmw.closest_time_index(self.stopwatch.get_time())
     for alliance, teams in data.items():
         dot_color = (255, 0, 0) if alliance == "red" else (0, 0, 255)
         for team, coords_list in teams.items():
             coords = coords_list[index]
             try:
                 # Coordinates presumably in feet (x12 -> inches) -- matches
                 # inches_to_pixels; TODO confirm against the data source.
                 center = self.inches_to_pixels(
                     flip_y([coord * 12 for coord in coords]))
                 pygame.draw.circle(self.screen, dot_color, center, 7)
             except TypeError:
                 # Missing/None coordinates at this timestep: skip the robot.
                 pass
Example #11
0
def mbtiles_tilelist(mbtiles_file, **kwargs):
    """Print the list of tiles in an mbtiles database to stdout.

    One "z/x/y" line per tile by default; with as_bboxes=True the tile's
    bounding box is printed instead.  flip_y=True converts the row index
    before output.  zoom pins both ends of the zoom range.
    """
    flip_tile_y = kwargs.get('flip_y', False)
    as_bboxes   = kwargs.get('as_bboxes', False)

    scale     = kwargs.get('tile_scale', None)
    zoom      = kwargs.get('zoom', -1)
    min_zoom  = kwargs.get('min_zoom', 0)
    max_zoom  = kwargs.get('max_zoom', 18)

    auto_commit     = kwargs.get('auto_commit', False)
    journal_mode    = kwargs.get('journal_mode', 'wal')
    synchronous_off = kwargs.get('synchronous_off', False)

    if zoom >= 0:
        min_zoom = max_zoom = zoom


    con = mbtiles_connect(mbtiles_file, auto_commit, journal_mode, synchronous_off, False, True)


    if min_zoom == max_zoom:
        zoom_level_string = "zoom level %d" % (min_zoom)
    else:
        zoom_level_string = "zoom levels %d -> %d" % (min_zoom, max_zoom)

    logger.info("Tile list for %s (%s)" % (prettify_connect_string(con.connect_string), zoom_level_string))


    for tile_z in range(min_zoom, max_zoom+1):
        logger.debug("Starting zoom level %d" % (tile_z))

        for t in con.columns_and_rows_for_zoom_level(tile_z, scale):
            tile_x, tile_y = int(t[0]), int(t[1])

            if as_bboxes:
                # BUG FIX: the computed bbox was previously discarded, so
                # --as-bboxes printed nothing at all.  Write it out.
                bbox = convert_tile_to_bbox(tile_z, tile_x, tile_y, flip_tile_y)
                sys.stdout.write("%f,%f,%f,%f\n" % tuple(bbox))
            else:
                if flip_tile_y:
                    tile_y = flip_y(tile_z, tile_y)
                sys.stdout.write("%d/%d/%d\n" % (tile_z, tile_x, tile_y))


    con.close()
Example #12
0
    def grid(self, z, x, y, callback=None):
        """Return the UTFGrid JSON (optionally JSONP-wrapped) for a tile.

        Raises ExtractionError when the grid is missing.
        """
        tms_y = flip_y(int(y), int(z))
        result = self._query('''SELECT grid FROM grids
                              WHERE zoom_level=? AND tile_column=? AND tile_row=?;''', (z, x, tms_y))
        record = result.fetchone()
        if not record:
            raise ExtractionError(_("Could not extract grid %s from %s") % ((z, x, y), self.filename))
        grid_json = json.loads(zlib.decompress(record[0]))

        # The grid's key data is stored in pieces; stitch it back together.
        grid_json['data'] = {}
        result = self._query('''SELECT key_name, key_json FROM grid_data
                              WHERE zoom_level=? AND tile_column=? AND tile_row=?;''', (z, x, tms_y))
        entry = result.fetchone()
        while entry:
            grid_json['data'][entry[0]] = json.loads(entry[1])
            entry = result.fetchone()
        serialized = json.dumps(grid_json)
        if callback is None:
            return serialized
        return '%s(%s);' % (callback, serialized)
Example #13
0
    def __init__(self, filename, tilesize=None, tilescheme='xyz'):
        super(MBTilesReader, self).__init__(tilesize)
        self.filename = filename
        self.basename = os.path.basename(self.filename)
        self._con = None
        self._cur = None
        self.tilescheme = tilescheme

        # read the scheme from the metadata
        metadata = self.metadata()
        if 'scheme' in metadata:
            self.tilescheme = metadata['scheme']
        if self.tilescheme != 'xyz' and self.tilescheme != 'tms':
            raise InvalidFormatError(_("unknown scheme: ") + scheme)

        # read tile size from file
        from StringIO import StringIO
        try:
            import Image
        except ImportError:
            from PIL import Image

        for z in self.zoomlevels():
            query = self._query(
                '''SELECT zoom_level, tile_column, tile_row FROM tiles
                                   WHERE zoom_level=? ;''', (z, ))
            try:
                t = query.fetchone()
                if self.tilescheme == 'tms':
                    t = t[0], t[1], flip_y(t[2], t[0])
                img = Image.open(StringIO(apply(self.tile, t)))

                if img.width != img.height:
                    raise InvalidFormatError(_("tile not square!") + t)
                self.tilesize = img.width
                return
            except IOError:
                print 'invalid tile at zoom level', z
        print 'no valid first tiles found in any zoom level!'
Example #14
0
    def __init__(self, filename, tilesize=None, tilescheme='xyz'):
        super(MBTilesReader, self).__init__(tilesize)
        self.filename = filename
        self.basename = os.path.basename(self.filename)
        self._con = None
        self._cur = None
        self.tilescheme = tilescheme

        # read the scheme from the metadata
        metadata = self.metadata()
        if 'scheme' in metadata:
            self.tilescheme = metadata['scheme']
        if self.tilescheme != 'xyz' and self.tilescheme != 'tms':
            raise InvalidFormatError(_("unknown scheme: ") + scheme)
        
        # read tile size from file
        from StringIO import StringIO
        try:
            import Image
        except ImportError:
            from PIL import Image

        for z in self.zoomlevels():
            query = self._query('''SELECT zoom_level, tile_column, tile_row FROM tiles
                                   WHERE zoom_level=? ;''', (z, ))
            try:
                t = query.fetchone()
                if self.tilescheme == 'tms':
                    t = t[0], t[1], flip_y(t[2], t[0])
                img = Image.open(StringIO(apply(self.tile, t)))
                
                if img.width != img.height:
                    raise InvalidFormatError(_("tile not square!") + t)
                self.tilesize = img.width
                return
            except IOError:
                print 'invalid tile at zoom level', z
        print 'no valid first tiles found in any zoom level!'
Example #15
0
def disk_to_mbtiles(directory_path, mbtiles_file, **kwargs):
    """Import a z/x/y tile directory tree into an mbtiles database.

    Walks <directory_path>/tiles/<z>/<x>/<y>.<ext>, optionally flipping
    the row index (flip_y=True) and piping each tile through
    command_list, then bulk-inserts rows in batches of 250.  Metadata is
    restored from metadata.json when present.
    """
    auto_commit     = kwargs.get('auto_commit', False)
    journal_mode    = kwargs.get('journal_mode', 'wal')
    synchronous_off = kwargs.get('synchronous_off', False)

    print_progress  = kwargs.get('progress', False)
    flip_tile_y     = kwargs.get('flip_y', False)

    scale    = kwargs.get('tile_scale', None)
    zoom     = kwargs.get('zoom', -1)
    min_zoom = kwargs.get('min_zoom', 0)
    max_zoom = kwargs.get('max_zoom', 18)
    tmp_dir  = kwargs.get('tmp_dir', None)

    if tmp_dir and not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)

    # --zoom pins both ends of the range.
    if zoom >= 0:
        min_zoom = max_zoom = zoom


    con = mbtiles_connect(mbtiles_file, auto_commit, journal_mode, synchronous_off, False)

    # BUG FIX: mbtiles_setup() was previously called twice; once suffices.
    con.mbtiles_setup()

    # if not con.is_compacted():
    #     con.close()
    #     logger.info("The mbtiles database must be compacted, exiting...")
    #     return


    if min_zoom == max_zoom:
        zoom_level_string = "zoom level %d" % (min_zoom)
    else:
        zoom_level_string = "zoom levels %d -> %d" % (min_zoom, max_zoom)

    logger.info("Importing path:'%s' --> %s (%s)" % (directory_path, prettify_connect_string(con.connect_string), zoom_level_string))


    image_format = 'png'
    try:

        metadata = json.load(open(os.path.join(directory_path, 'metadata.json'), 'r'))
        image_format = metadata.get('format', 'png')

        # Check that the old and new image formats are the same
        receiving_metadata = con.metadata()

        if receiving_metadata != None and len(receiving_metadata) > 0:
            original_format = receiving_metadata.get('format')

            if original_format != None and image_format != original_format:
                sys.stderr.write('The databases to merge must use the same image format (png or jpg)\n')
                sys.exit(1)

        for name, value in metadata.items():
            con.update_metadata(name, value)

        logger.info('metadata from metadata.json restored')

    except IOError:
        logger.warning('metadata.json not found')


    count = 0
    start_time = time.time()

    if print_progress:
        sys.stdout.write("0 tiles imported (0 tiles/sec)")
        sys.stdout.flush()


    known_tile_ids = set()

    tmp_images_list = []
    tmp_row_list = []
    tmp_tiles_list = []

    for r1, zs, ignore in os.walk(os.path.join(directory_path, "tiles")):
        for tile_z in zs:
            if int(tile_z) < min_zoom or int(tile_z) > max_zoom:
                continue

            for r2, xs, ignore in os.walk(os.path.join(r1, tile_z)):
                for tile_x in xs:
                    for r2, ignore, ys in os.walk(os.path.join(r1, tile_z, tile_x)):
                        for tile_y in ys:
                            tile_y, extension = tile_y.split('.')

                            f = open(os.path.join(r1, tile_z, tile_x, tile_y) + '.' + extension, 'rb')
                            tile_data = f.read()
                            f.close()

                            if flip_tile_y:
                                # BUG FIX: tile_z/tile_y are directory-name
                                # strings here; flip_y needs integers.
                                tile_y = str(flip_y(int(tile_z), int(tile_y)))

                            # Execute commands
                            if kwargs.get('command_list'):
                                tile_data = execute_commands_on_tile(kwargs['command_list'], image_format, tile_data, tmp_dir)

                            if con.is_compacted():
                                # De-duplicate identical tiles via an md5 id.
                                m = hashlib.md5()
                                m.update(tile_data)
                                tile_id = m.hexdigest()

                                if tile_id not in known_tile_ids:
                                    tmp_images_list.append( (tile_id, tile_data) )
                                    known_tile_ids.add(tile_id)

                                tmp_row_list.append( (tile_z, tile_x, tile_y, 1, tile_id, int(time.time())) )
                            else:
                                tmp_tiles_list.append( (tile_z, tile_x, tile_y, 1, tile_data, int(time.time())) )

                            count = count + 1
                            if (count % 100) == 0:
                                logger.debug("%d tiles imported (%.1f tiles/sec)" % (count, count / (time.time() - start_time)))
                                if print_progress:
                                    sys.stdout.write("\r%d tiles imported (%.1f tiles/sec)" % (count, count / (time.time() - start_time)))
                                    sys.stdout.flush()

                            # Flush batches to keep memory bounded.
                            if len(tmp_images_list) > 250:
                                con.insert_tiles_to_images(tmp_images_list)
                                tmp_images_list = []

                            if len(tmp_row_list) > 250:
                                con.insert_tiles_to_map(tmp_row_list)
                                tmp_row_list = []

                            if len(tmp_tiles_list) > 250:
                                con.insert_tiles(tmp_tiles_list)
                                tmp_tiles_list = []


    # Push the remaining rows to the database
    if len(tmp_images_list) > 0:
        con.insert_tiles_to_images(tmp_images_list)

    if len(tmp_row_list) > 0:
        con.insert_tiles_to_map(tmp_row_list)

    if len(tmp_tiles_list) > 0:
        con.insert_tiles(tmp_tiles_list)

    if print_progress:
        sys.stdout.write('\n')

    logger.info("%d tiles imported." % (count))
    if print_progress:
        sys.stdout.write("%d tiles imported.\n" % (count))
        sys.stdout.flush()


    con.optimize_database(kwargs.get('skip_analyze', False), kwargs.get('skip_vacuum', False))

    con.close()
Example #16
0
def merge_mbtiles(mbtiles_file1, mbtiles_file2, **kwargs):
    """Merge tiles from mbtiles_file2 into mbtiles_file1.

    Three paths: (1) compacted source with a command_list -- tiles are
    written to temp files and processed in a worker pool; (2) compacted
    source without commands -- rows are copied in batches of 250;
    (3) uncompacted source -- tiles are read, optionally processed, and
    batch-inserted.  flip_y converts row indices before insertion;
    delete_after_export removes merged tiles from the source afterwards.
    """
    scale         = kwargs.get('tile_scale', None)
    zoom          = kwargs.get('zoom', -1)
    min_zoom      = kwargs.get('min_zoom', 0)
    max_zoom      = kwargs.get('max_zoom', 18)

    tmp_dir         = kwargs.get('tmp_dir', None)
    auto_commit     = kwargs.get('auto_commit', False)
    journal_mode    = kwargs.get('journal_mode', 'wal')
    synchronous_off = kwargs.get('synchronous_off', False)

    min_timestamp = kwargs.get('min_timestamp', 0)
    max_timestamp = kwargs.get('max_timestamp', 0)

    delete_after_export   = kwargs.get('delete_after_export', False)
    print_progress        = kwargs.get('progress', False)
    delete_vanished_tiles = kwargs.get('delete_vanished_tiles', False)
    flip_tile_y           = kwargs.get('flip_y', False)
    debug                 = kwargs.get('debug', False)

    if tmp_dir and not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)

    # --zoom pins both ends of the range.
    if zoom >= 0:
        min_zoom = max_zoom = zoom


    check_before_merge = kwargs.get('check_before_merge', False)
    if check_before_merge and not check_mbtiles(mbtiles_file2, **kwargs):
        sys.stderr.write("The pre-merge check on %s failed\n" % (mbtiles_file2))
        sys.exit(1)


    con1 = mbtiles_connect(mbtiles_file1, auto_commit, journal_mode, synchronous_off, False, False)
    con2 = mbtiles_connect(mbtiles_file2, auto_commit, journal_mode, synchronous_off, False, True)

    con1.mbtiles_setup()

    # if not con1.is_compacted():
    #     sys.stderr.write('To merge two mbtiles databases, the receiver must already be compacted\n')
    #     con1.close()
    #     con2.close()
    #     sys.exit(1)

    if not con2.is_compacted() and (min_timestamp != 0 or max_timestamp != 0):
        con1.close()
        con2.close()
        sys.stderr.write('min-timestamp/max-timestamp can only be used with compacted databases.\n')
        sys.exit(1)


    if min_zoom == max_zoom:
        zoom_level_string = "zoom level %d" % (min_zoom)
    else:
        zoom_level_string = "zoom levels %d -> %d" % (min_zoom, max_zoom)

    logger.info("Merging %s --> %s (%s)" % (prettify_connect_string(con2.connect_string), prettify_connect_string(con1.connect_string), zoom_level_string))


    # Check that the old and new image formats are the same
    original_format = new_format = None
    try:
        original_format = con1.metadata().get('format')
    except Exception:
        # Best effort: missing metadata just leaves the format unknown.
        pass

    try:
        new_format = con2.metadata().get('format')
    except Exception:
        pass

    if new_format == None:
        logger.info("No image format found in the sending database, assuming 'png'")
        new_format = "png"

    if original_format != None and new_format != original_format:
        con1.close()
        con2.close()
        sys.stderr.write('The files to merge must use the same image format (png or jpg)\n')
        sys.exit(1)

    if original_format == None and new_format != None:
        con1.update_metadata("format", new_format)

    if new_format == None:
        new_format = original_format


    count = 0
    start_time = time.time()
    chunk = 1000

    total_tiles = 1

    if print_progress or debug:
        total_tiles = con2.tiles_count(min_zoom, max_zoom, min_timestamp, max_timestamp, scale)

        if total_tiles == 0:
            con1.close()
            con2.close()
            sys.stderr.write('No tiles to merge, exiting...\n')
            return

        logger.debug("%d tiles to merge" % (total_tiles))
        if print_progress:
            sys.stdout.write("%d tiles to merge\n" % (total_tiles))
            sys.stdout.write("0 tiles merged (0% @ 0 tiles/sec)")
            sys.stdout.flush()



    # merge and process (--merge --execute)
    # BUG FIX: kwargs['command_list'] raised KeyError when the option was
    # absent; every other access in this module uses kwargs.get().
    if con2.is_compacted() and kwargs.get('command_list'):
        default_pool_size = kwargs.get('poolsize', -1)
        if default_pool_size < 1:
            default_pool_size = None
            logger.debug("Using default pool size")
        else:
            logger.debug("Using pool size = %d" % (default_pool_size))

        pool = Pool(default_pool_size)
        multiprocessing.log_to_stderr(logger.level)

        tiles_to_process = []
        known_tile_ids = {}

        for t in con2.tiles_with_tile_id(min_zoom, max_zoom, min_timestamp, max_timestamp, scale):
            tile_z = t[0]
            tile_x = t[1]
            tile_y = t[2]
            tile_scale = t[3]
            tile_data = str(t[4])
            tile_id = t[5]

            if flip_tile_y:
                tile_y = flip_y(tile_z, tile_y)

            new_tile_id = known_tile_ids.get(tile_id)
            if new_tile_id is None:
                # BUG FIX: write the tile blob in binary mode; text mode
                # can corrupt image data on some platforms.
                tmp_file_fd, tmp_file_name = tempfile.mkstemp(suffix=".%s" % (new_format), prefix="tile_", dir=tmp_dir)
                tmp_file = os.fdopen(tmp_file_fd, "wb")
                tmp_file.write(tile_data)
                tmp_file.close()

                tiles_to_process.append({
                    'tile_id':tile_id,
                    'filename':tmp_file_name,
                    'format':new_format,
                    'size':len(tile_data),
                    'command_list':kwargs['command_list'],
                    'tile_x':tile_x,
                    'tile_y':tile_y,
                    'tile_z':tile_z,
                    'tile_scale':tile_scale
                })
            else:
                con1.insert_tile_to_map(tile_z, tile_x, tile_y, tile_scale, new_tile_id)

                count = count + 1
                if (count % 100) == 0:
                    logger.debug("%d tiles merged (%.1f%% @ %.1f tiles/sec)" % (count, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
                    if print_progress:
                        sys.stdout.write("\r%d tiles merged (%.1f%% @ %.1f tiles/sec)" % (count, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
                        sys.stdout.flush()


            if len(tiles_to_process) < chunk:
                continue

            count = process_tiles(pool, tiles_to_process, con1, count, total_tiles, start_time, print_progress, delete_vanished_tiles, known_tile_ids)

            tiles_to_process = []

        if len(tiles_to_process) > 0:
            count = process_tiles(pool, tiles_to_process, con1, count, total_tiles, start_time, print_progress, delete_vanished_tiles, known_tile_ids)


    # merge from a compacted database (--merge)
    elif con2.is_compacted():
        known_tile_ids = set()

        tmp_images_list = []
        tmp_row_list = []
        tmp_tiles_list = []

        for t in con2.tiles_with_tile_id(min_zoom, max_zoom, min_timestamp, max_timestamp, scale):
            tile_z = t[0]
            tile_x = t[1]
            tile_y = t[2]
            tile_scale = t[3]
            tile_data = str(t[4])
            tile_id = t[5]

            if flip_tile_y:
                tile_y = flip_y(tile_z, tile_y)

            if con1.is_compacted():
                if tile_id not in known_tile_ids:
                    tmp_images_list.append( (tile_id, tile_data) )
                    known_tile_ids.add(tile_id)

                tmp_row_list.append( (tile_z, tile_x, tile_y, tile_scale, tile_id, int(time.time())) )
            else:
                tmp_tiles_list.append( (tile_z, tile_x, tile_y, tile_scale, tile_data, int(time.time())) )

            count = count + 1
            if (count % 100) == 0:
                logger.debug("%d tiles merged (%.1f%% @ %.1f tiles/sec)" % (count, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
                if print_progress:
                    sys.stdout.write("\r%d tiles merged (%.1f%% @ %.1f tiles/sec)" % (count, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
                    sys.stdout.flush()

            # Flush batches to keep memory bounded.
            if len(tmp_images_list) > 250:
                con1.insert_tiles_to_images(tmp_images_list)
                tmp_images_list = []

            if len(tmp_row_list) > 250:
                con1.insert_tiles_to_map(tmp_row_list)
                tmp_row_list = []

            if len(tmp_tiles_list) > 250:
                con1.insert_tiles(tmp_tiles_list)
                tmp_tiles_list = []

        # Push the remaining rows to the database
        if len(tmp_images_list) > 0:
            con1.insert_tiles_to_images(tmp_images_list)

        if len(tmp_row_list) > 0:
            con1.insert_tiles_to_map(tmp_row_list)

        if len(tmp_tiles_list) > 0:
            con1.insert_tiles(tmp_tiles_list)


    # merge an uncompacted database (--merge)
    else:
        known_tile_ids = set()

        tmp_images_list = []
        tmp_row_list = []
        tmp_tiles_list = []

        for t in con2.tiles(min_zoom, max_zoom, min_timestamp, max_timestamp, scale):
            tile_z = t[0]
            tile_x = t[1]
            tile_y = t[2]
            tile_scale = t[3]
            tile_data = str(t[4])

            if flip_tile_y:
                tile_y = flip_y(tile_z, tile_y)

            # Execute commands
            if kwargs.get('command_list'):
                tile_data = execute_commands_on_tile(kwargs['command_list'], new_format, tile_data, tmp_dir)

            if con1.is_compacted():
                # De-duplicate identical tiles via an md5 id.
                m = hashlib.md5()
                m.update(tile_data)
                tile_id = m.hexdigest()

                if tile_id not in known_tile_ids:
                    tmp_images_list.append( (tile_id, tile_data) )
                    known_tile_ids.add(tile_id)

                tmp_row_list.append( (tile_z, tile_x, tile_y, tile_scale, tile_id, int(time.time())) )
            else:
                tmp_tiles_list.append( (tile_z, tile_x, tile_y, tile_scale, tile_data, int(time.time())) )

            count = count + 1
            if (count % 100) == 0:
                logger.debug("%d tiles merged (%.1f%% @ %.1f tiles/sec)" % (count, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
                if print_progress:
                    sys.stdout.write("\r%d tiles merged (%.1f%% @ %.1f tiles/sec)" % (count, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
                    sys.stdout.flush()

            if len(tmp_images_list) > 250:
                con1.insert_tiles_to_images(tmp_images_list)
                tmp_images_list = []

            if len(tmp_row_list) > 250:
                con1.insert_tiles_to_map(tmp_row_list)
                tmp_row_list = []

            if len(tmp_tiles_list) > 250:
                con1.insert_tiles(tmp_tiles_list)
                tmp_tiles_list = []

        # Push the remaining rows to the database
        if len(tmp_images_list) > 0:
            con1.insert_tiles_to_images(tmp_images_list)

        if len(tmp_row_list) > 0:
            con1.insert_tiles_to_map(tmp_row_list)

        if len(tmp_tiles_list) > 0:
            con1.insert_tiles(tmp_tiles_list)

    if print_progress:
        sys.stdout.write('\n')

    logger.info("%d tiles merged (100.0%% @ %.1f tiles/sec)" % (count, count / (time.time() - start_time)))
    if print_progress:
        sys.stdout.write("%d tiles merged (100.0%% @ %.1f tiles/sec)\n" % (count, count / (time.time() - start_time)))
        sys.stdout.flush()


    if delete_after_export:
        logger.debug("WARNING: Removing merged tiles from %s" % (mbtiles_file2))

        con2.delete_tiles(min_zoom, max_zoom, min_timestamp, max_timestamp, scale)
        con2.optimize_database(kwargs.get('skip_analyze', False), kwargs.get('skip_vacuum', False))


    con1.close()
    con2.close()
Example #17
0
def fill_mbtiles(mbtiles_file, image_filename, **kwargs):
    """Fill a region of an mbtiles database with one fixed image.

    The image is stored once (keyed by its md5) and map rows pointing at
    it are inserted for every tile in the given --bbox or --tile-bbox at
    each zoom level in range.  Existing tiles are not overwritten.
    """
    zoom        = kwargs.get('zoom', -1)
    min_zoom    = kwargs.get('min_zoom', 0)
    max_zoom    = kwargs.get('max_zoom', 18)
    flip_tile_y = kwargs.get('flip_y', False)
    bbox        = kwargs.get('bbox', None)
    tile_bbox   = kwargs.get('tile_bbox', None)

    auto_commit     = kwargs.get('auto_commit', False)
    journal_mode    = kwargs.get('journal_mode', 'wal')
    synchronous_off = kwargs.get('synchronous_off', False)

    print_progress  = kwargs.get('progress', False)

    if zoom >= 0:
        min_zoom = max_zoom = zoom
    elif min_zoom == max_zoom:
        zoom = min_zoom

    if tile_bbox != None and zoom < 0:
        logger.info("--tile-bbox can only be used with --zoom, exiting...")
        return

    if tile_bbox == None and bbox == None:
        logger.info("Either --tile-bbox or --bbox must be given, exiting...")
        return


    con = mbtiles_connect(mbtiles_file, auto_commit, journal_mode, synchronous_off, False)

    con.mbtiles_setup()

    if not con.is_compacted():
        con.close()
        logger.info("The mbtiles database must be compacted, exiting...")
        return


    if min_zoom == max_zoom:
        zoom_level_string = "zoom level %d" % (min_zoom)
    else:
        zoom_level_string = "zoom levels %d -> %d" % (min_zoom, max_zoom)

    logger.info("Filling %s (%s)" % (prettify_connect_string(con.connect_string), zoom_level_string))


    # Insert an image
    # BUG FIX: open the image in binary mode; text mode can corrupt the
    # data (and hence the md5 tile id) on some platforms.
    tmp_file = open(image_filename, "rb")
    tile_data = tmp_file.read()
    tmp_file.close()

    m = hashlib.md5()
    m.update(tile_data)
    tile_id = m.hexdigest()

    con.insert_tile_to_images(tile_id, tile_data)


    count = 0
    start_time = time.time()


    for tile_z in range(min_zoom, max_zoom+1):
        min_x = min_y = max_x = max_y = 0

        if tile_bbox != None:
            match = re.match(r'(\d+),(\d+),(\d+),(\d+)', tile_bbox, re.I)
            if match:
                min_x, min_y, max_x, max_y = int(match.group(1)), int(match.group(2)), int(match.group(3)), int(match.group(4))
        elif bbox != None:
            match = re.match(r'([-0-9\.]+),([-0-9\.]+),([-0-9\.]+),([-0-9\.]+)', bbox, re.I)
            if match:
                left, bottom, right, top = float(match.group(1)), float(match.group(2)), float(match.group(3)), float(match.group(4))
                min_x, min_y = coordinate_to_tile(left, bottom, tile_z)
                max_x, max_y = coordinate_to_tile(right, top, tile_z)

        # Normalize the y range so iteration runs low -> high.
        if min_y > max_y:
            min_y, max_y = max_y, min_y

        for tile_x in range(min_x, max_x+1):
            for tile_y in range(min_y, max_y+1):
                if flip_tile_y:
                    tile_y = flip_y(tile_z, tile_y)

                # z, x, y
                # NOTE(review): other callers pass a scale before the tile
                # id -- confirm this overload's signature against
                # insert_tile_to_map's definition.
                con.insert_tile_to_map(tile_z, tile_x, tile_y, tile_id, False) # Don't overwrite existing tiles

                count = count + 1
                if (count % 100) == 0:
                    logger.debug("%d tiles inserted (%.1f tiles/sec)" %
                        (count, count / (time.time() - start_time)))
                    if print_progress:
                        sys.stdout.write("\r%d tiles inserted (%.1f tiles/sec)" %
                            (count, count / (time.time() - start_time)))
                        sys.stdout.flush()


    if print_progress:
        sys.stdout.write('\n')

    logger.info("%d tiles inserted (100.0%% @ %.1f tiles/sec)" %
        (count, count / (time.time() - start_time)))
    if print_progress:
        sys.stdout.write("%d tiles inserted (100.0%% @ %.1f tiles/sec)\n" %
            (count, count / (time.time() - start_time)))
        sys.stdout.flush()


    con.optimize_database(kwargs.get('skip_analyze', False), kwargs.get('skip_vacuum', False))
    con.close()
Example #18
0
def disk_to_mbtiles(directory_path, mbtiles_file, **kwargs):
    """Import a disk tile tree (tiles/<z>/<x>/<y>.<ext>) into an mbtiles database.

    Keyword arguments:
        no_overwrite -- skip tiles that already exist in the database (default False)
        auto_commit  -- open the sqlite connection in auto-commit mode (default False)
        zoom         -- import only this zoom level (overrides min_zoom/max_zoom)
        min_zoom     -- lowest zoom level to import (default 0)
        max_zoom     -- highest zoom level to import (default 255)
        flip_y       -- convert the y coordinate between TMS and XYZ schemes
        command_list -- shell commands to run on each tile image before insertion
    """
    logger.info("Importing from disk to database: %s --> %s" % (directory_path, mbtiles_file))


    import_into_existing_mbtiles = os.path.isfile(mbtiles_file)
    existing_mbtiles_is_compacted = True

    no_overwrite = kwargs.get('no_overwrite', False)
    auto_commit  = kwargs.get('auto_commit', False)
    zoom     = kwargs.get('zoom', -1)
    min_zoom = kwargs.get('min_zoom', 0)
    max_zoom = kwargs.get('max_zoom', 255)

    if zoom >= 0:
        min_zoom = max_zoom = zoom


    con = mbtiles_connect(mbtiles_file, auto_commit)
    cur = con.cursor()
    optimize_connection(cur, False)


    if import_into_existing_mbtiles:
        # An 'images' table means the existing database uses the compacted
        # (deduplicated map/images) schema rather than a plain 'tiles' table.
        existing_mbtiles_is_compacted = (con.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='images'").fetchone()[0] > 0)
    else:
        mbtiles_setup(cur)


    image_format = 'png'
    try:
        metadata = json.load(open(os.path.join(directory_path, 'metadata.json'), 'r'))
        image_format = metadata.get('format', 'png')

        # Check that the old and new image formats are the same
        if import_into_existing_mbtiles:
            original_format = None

            try:
                original_format = cur.execute("SELECT value FROM metadata WHERE name='format'").fetchone()[0]
            except Exception:
                # Missing metadata table/row: treat the format as unknown.
                pass

            if original_format != None and image_format != original_format:
                sys.stderr.write('The files to merge must use the same image format (png or jpg)\n')
                sys.exit(1)

        if not import_into_existing_mbtiles:
            for name, value in metadata.items():
                cur.execute('INSERT OR IGNORE INTO metadata (name, value) VALUES (?, ?)',
                        (name, value))
            con.commit()
            logger.info('metadata from metadata.json restored')

    except IOError:
        logger.warning('metadata.json not found')


    # Map of z -> y -> set(x) (all strings) for tiles already in the database,
    # used to honor no_overwrite without issuing a query per tile.
    existing_tiles = {}

    if no_overwrite:
        tiles = cur.execute("""SELECT zoom_level, tile_column, tile_row FROM tiles WHERE zoom_level>=? AND zoom_level<=?""",
            (min_zoom, max_zoom))

        t = tiles.fetchone()
        while t:
            z = str(t[0])
            x = str(t[1])
            y = str(t[2])

            zoom = existing_tiles.get(z, None)
            if not zoom:
                zoom = {}
                existing_tiles[z] = zoom

            row = zoom.get(y, None)
            if not row:
                row = set()
                zoom[y] = row

            row.add(x)
            t = tiles.fetchone()


    count = 0
    start_time = time.time()


    for r1, zs, ignore in os.walk(os.path.join(directory_path, "tiles")):
        for z in zs:
            if int(z) < min_zoom or int(z) > max_zoom:
                continue

            for r2, xs, ignore in os.walk(os.path.join(r1, z)):
                for x in xs:
                    for r3, ignore, ys in os.walk(os.path.join(r1, z, x)):
                        for filename in ys:
                            disk_y, extension = filename.split('.')

                            # The database coordinate may differ from the on-disk
                            # name when converting between TMS and XYZ schemes.
                            # (assumes flip_y(z, y) takes integer coordinates, as
                            # in the integer call sites elsewhere in this file)
                            y = disk_y
                            if kwargs.get('flip_y', False) == True:
                                y = str(flip_y(int(z), int(disk_y)))

                            # Compare against the *database* y coordinate; the
                            # original code compared the unflipped disk name.
                            if no_overwrite:
                                if x in existing_tiles.get(z, {}).get(y, set()):
                                    logger.debug("Ignoring tile (%s, %s, %s)" % (z, x, y))
                                    continue

                            # BUG FIX: read the tile by its on-disk name. The
                            # original opened the flipped name, which does not
                            # exist on disk when flip_y is enabled.
                            f = open(os.path.join(r1, z, x, disk_y) + '.' + extension, 'rb')
                            tile_data = f.read()
                            f.close()

                            # Execute commands
                            if kwargs.get('command_list'):
                                tile_data = execute_commands_on_tile(kwargs['command_list'], image_format, tile_data)

                            if existing_mbtiles_is_compacted:
                                # Deduplicate identical images by content hash.
                                m = hashlib.md5()
                                m.update(tile_data)
                                tile_id = m.hexdigest()

                                cur.execute("""INSERT OR IGNORE INTO images (tile_id, tile_data) VALUES (?, ?)""",
                                    (tile_id, sqlite3.Binary(tile_data)))

                                cur.execute("""REPLACE INTO map (zoom_level, tile_column, tile_row, tile_id) VALUES (?, ?, ?, ?)""",
                                    (z, x, y, tile_id))
                            else:
                                cur.execute("""REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES (?, ?, ?, ?)""",
                                    (z, x, y, sqlite3.Binary(tile_data)))


                            count = count + 1
                            if (count % 100) == 0:
                                logger.debug("%s tiles imported (%d tiles/sec)" %
                                    (count, count / (time.time() - start_time)))


    logger.info("%d tiles imported." % (count))

    con.commit()
    con.close()
Example #19
0
def mbtiles_to_disk(mbtiles_file, directory_path, **kwargs):
    """Export tiles from an mbtiles database to a tiles/<z>/<x>/<y>.<format> tree.

    Also dumps the database metadata to <directory_path>/metadata.json.

    Keyword arguments:
        delete_after_export -- remove the exported tiles from the database afterwards
        auto_commit, journal_mode, synchronous_off -- sqlite connection tuning
        zoom / min_zoom / max_zoom -- zoom range to export (zoom overrides the range)
        tmp_dir        -- scratch directory for command_list processing
        progress       -- also write progress to stdout
        flip_y         -- convert the y coordinate between TMS and XYZ schemes
        min_timestamp / max_timestamp -- restrict export by tile timestamp
                                         (compacted databases only)
        command_list   -- shell commands to run on each tile before writing
        skip_analyze / skip_vacuum -- optimization flags used with delete_after_export
    """
    delete_after_export = kwargs.get('delete_after_export', False)

    auto_commit     = kwargs.get('auto_commit', False)
    journal_mode    = kwargs.get('journal_mode', 'wal')
    synchronous_off = kwargs.get('synchronous_off', False)

    zoom     = kwargs.get('zoom', -1)
    min_zoom = kwargs.get('min_zoom', 0)
    max_zoom = kwargs.get('max_zoom', 18)
    tmp_dir  = kwargs.get('tmp_dir', None)

    print_progress = kwargs.get('progress', False)
    flip_tile_y    = kwargs.get('flip_y', False)
    min_timestamp  = kwargs.get('min_timestamp', 0)
    max_timestamp  = kwargs.get('max_timestamp', 0)

    if tmp_dir and not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)

    if zoom >= 0:
        min_zoom = max_zoom = zoom


    con = mbtiles_connect(mbtiles_file, auto_commit, journal_mode, synchronous_off, False, True)


    if min_zoom == max_zoom:
        zoom_level_string = "zoom level %d" % (min_zoom)
    else:
        zoom_level_string = "zoom levels %d -> %d" % (min_zoom, max_zoom)

    logger.info("Exporting %s --> path:'%s' (%s)" % (prettify_connect_string(con.connect_string), directory_path, zoom_level_string))


    if not os.path.isdir(directory_path):
        os.mkdir(directory_path)
    base_path = os.path.join(directory_path, "tiles")
    if not os.path.isdir(base_path):
        os.makedirs(base_path)


    metadata = con.metadata()
    json.dump(metadata, open(os.path.join(directory_path, 'metadata.json'), 'w'), indent=4)

    count = 0
    start_time = time.time()
    image_format = metadata.get('format', 'png')
    sending_mbtiles_is_compacted = con.is_compacted()

    # Timestamps are only tracked by the compacted (map/images) schema.
    if not sending_mbtiles_is_compacted and (min_timestamp != 0 or max_timestamp != 0):
        con.close()
        sys.stderr.write('min-timestamp/max-timestamp can only be used with compacted databases.\n')
        sys.exit(1)


    total_tiles = con.tiles_count(min_zoom, max_zoom, min_timestamp, max_timestamp)

    logger.debug("%d tiles to export" % (total_tiles))
    if print_progress:
        sys.stdout.write("%d tiles to export\n" % (total_tiles))
        sys.stdout.write("%d / %d tiles exported (0%% @ 0 tiles/sec)" % (count, total_tiles))
        sys.stdout.flush()


    for t in con.tiles(min_zoom, max_zoom, min_timestamp, max_timestamp):
        tile_z = t[0]
        tile_x = t[1]
        tile_y = t[2]
        tile_data = str(t[3])

        # Execute commands
        if kwargs.get('command_list'):
            tile_data = execute_commands_on_tile(kwargs['command_list'], image_format, tile_data, tmp_dir)

        if flip_tile_y:
            tile_y = flip_y(tile_z, tile_y)

        tile_dir = os.path.join(base_path, str(tile_z), str(tile_x))
        if not os.path.isdir(tile_dir):
            os.makedirs(tile_dir)

        # image_format already holds metadata.get('format', 'png'); reuse it
        # instead of performing the lookup again for every tile.
        tile_file = os.path.join(tile_dir, '%s.%s' % (tile_y, image_format))

        with open(tile_file, 'wb') as f:
            f.write(tile_data)

        count = count + 1
        if (count % 100) == 0:
            logger.debug("%d / %d tiles exported (%.1f%% @ %.1f tiles/sec)" %
                (count, total_tiles, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
            if print_progress:
                sys.stdout.write("\r%d / %d tiles exported (%.1f%% @ %.1f tiles/sec)" %
                    (count, total_tiles, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
                sys.stdout.flush()


    if print_progress:
        sys.stdout.write('\n')

    logger.info("%d / %d tiles exported (100.0%% @ %.1f tiles/sec)" % (count, total_tiles, count / (time.time() - start_time)))
    if print_progress:
        sys.stdout.write("%d / %d tiles exported (100.0%% @ %.1f tiles/sec)\n" % (count, total_tiles, count / (time.time() - start_time)))
        sys.stdout.flush()


    if delete_after_export:
        logger.debug("WARNING: Removing exported tiles from %s" % (mbtiles_file))

        con.delete_tiles(min_zoom, max_zoom, min_timestamp, max_timestamp)
        con.optimize_database(kwargs.get('skip_analyze', False), kwargs.get('skip_vacuum', False))


    con.close()
Example #20
0
def update_mbtiles(mbtiles_file1, mbtiles_file2, **kwargs):
    """Merge recently updated tiles from mbtiles_file2 into mbtiles_file1.

    Only tiles whose timestamps fall between mbtiles_file1's newest timestamp
    and now are transferred; both databases must use the compacted
    (map/images) schema.

    Keyword arguments:
        tile_scale -- tile scale filter (read but passed through per-tile rows)
        zoom / min_zoom / max_zoom -- zoom range to update (zoom is unused here;
                                      the range defaults to 0..18)
        auto_commit, journal_mode, synchronous_off -- sqlite connection tuning
        progress -- also write progress to stdout
        flip_y   -- convert the y coordinate between TMS and XYZ schemes
    """
    scale    = kwargs.get('tile_scale', None)
    zoom     = kwargs.get('zoom', -1)
    min_zoom = kwargs.get('min_zoom', 0)
    max_zoom = kwargs.get('max_zoom', 18)

    auto_commit     = kwargs.get('auto_commit', False)
    journal_mode    = kwargs.get('journal_mode', 'wal')
    synchronous_off = kwargs.get('synchronous_off', False)

    print_progress  = kwargs.get('progress', False)
    flip_tile_y     = kwargs.get('flip_y', False)


    con1 = mbtiles_connect(mbtiles_file1, auto_commit, journal_mode, synchronous_off, False, False)
    con2 = mbtiles_connect(mbtiles_file2, auto_commit, journal_mode, synchronous_off, False, True)

    con1.mbtiles_setup()

    # BUG FIX: is_compacted must be *called* -- the original tested the bound
    # method object itself, which is always truthy, so the check on con2
    # could never fail.
    if not con1.is_compacted() or not con2.is_compacted():
        con1.close()
        con2.close()
        sys.stderr.write('To update mbtiles databases, both databases must already be compacted\n')
        sys.exit(1)


    if min_zoom == max_zoom:
        zoom_level_string = "zoom level %d" % (min_zoom)
    else:
        zoom_level_string = "zoom levels %d -> %d" % (min_zoom, max_zoom)

    logger.info("Updating %s --> %s (%s)" % (prettify_connect_string(con2.connect_string), prettify_connect_string(con1.connect_string), zoom_level_string))


    # Check that the old and new image formats are the same
    original_format = new_format = None
    try:
        original_format = con1.metadata().get('format')
    except Exception:
        pass

    try:
        new_format = con2.metadata().get('format')
    except Exception:
        pass

    if new_format == None:
        logger.info("No image format found in the sending database, assuming 'png'")
        new_format = "png"

    if original_format != None and new_format != original_format:
        con1.close()
        con2.close()
        sys.stderr.write('The files to merge must use the same image format (png or jpg)\n')
        sys.exit(1)

    if original_format == None and new_format != None:
        con1.update_metadata("format", new_format)

    if new_format == None:
        new_format = original_format


    count = 0
    start_time = time.time()

    # Transfer only what changed since the receiving database was last touched.
    min_timestamp = con1.max_timestamp()
    if min_timestamp is None: min_timestamp = 0
    max_timestamp = int(time.time())

    total_tiles = con2.updates_count(min_zoom, max_zoom, min_timestamp, max_timestamp)

    if total_tiles == 0:
        con1.close()
        con2.close()
        sys.stderr.write('No tiles to update, exiting...\n')
        return

    logger.debug("%d tiles to update" % (total_tiles))
    if print_progress:
        sys.stdout.write("%d tiles to update\n" % (total_tiles))
        sys.stdout.write("0 tiles updated (0% @ 0 tiles/sec)")
        sys.stdout.flush()


    # Buffers for batched inserts; known_tile_ids avoids re-sending duplicate
    # image blobs that several map rows share.
    known_tile_ids = set()
    tmp_images_list = []
    tmp_row_list = []

    deleted_tiles_count = 0

    for t in con2.updates(min_zoom, max_zoom, min_timestamp, max_timestamp):
        tile_z = t[0]
        tile_x = t[1]
        tile_y = t[2]
        tile_scale = t[3]
        tile_data = str(t[4])
        tile_id = t[5]

        if flip_tile_y:
            tile_y = flip_y(tile_z, tile_y)

        # A NULL tile_id marks a tile deleted in the sending database.
        if tile_id is None:
            deleted_tiles_count += 1

        if tile_id and tile_id not in known_tile_ids:
            tmp_images_list.append( (tile_id, tile_data) )
            known_tile_ids.add(tile_id)

        tmp_row_list.append( (tile_z, tile_x, tile_y, tile_scale, tile_id, int(time.time())) )

        count = count + 1
        if (count % 100) == 0:
            logger.debug("%d tiles merged (%.1f%% @ %.1f tiles/sec)" % (count, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
            if print_progress:
                sys.stdout.write("\r%d tiles merged (%.1f%% @ %.1f tiles/sec)" % (count, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))
                sys.stdout.flush()

        if len(tmp_images_list) > 250:
            con1.insert_tiles_to_images(tmp_images_list)
            tmp_images_list = []

        if len(tmp_row_list) > 250:
            con1.insert_tiles_to_map(tmp_row_list)
            tmp_row_list = []

    # Push the remaining rows to the database
    if len(tmp_images_list) > 0:
        con1.insert_tiles_to_images(tmp_images_list)

    if len(tmp_row_list) > 0:
        con1.insert_tiles_to_map(tmp_row_list)


    if print_progress:
        sys.stdout.write('\n')

    logger.info("%d tiles merged (100.0%% @ %.1f tiles/sec)" % (count, count / (time.time() - start_time)))
    if print_progress:
        sys.stdout.write("%d tiles merged (100.0%% @ %.1f tiles/sec)\n" % (count, count / (time.time() - start_time)))
        sys.stdout.flush()


    if deleted_tiles_count > 0:
        logger.info("Deleting %d orphaned image(s)" % (deleted_tiles_count))
        if print_progress:
            sys.stdout.write("Deleting %d orphaned image(s)\n" % (deleted_tiles_count))
            sys.stdout.flush()
        con1.delete_orphaned_images()


    con1.close()
    con2.close()
Example #21
0
def test_mbtiles(mbtiles_file, **kwargs):
    """Run the configured test commands against every tile in the database.

    Each tile is written to a temporary image file and handed to a worker
    pool (process_tiles) in chunks of 1000.

    Keyword arguments:
        tile_scale -- tile scale filter passed to the tile iterator
        zoom / min_zoom / max_zoom -- zoom range to test (zoom overrides the range)
        tmp_dir    -- directory for the temporary tile files
        flip_y     -- convert the y coordinate between TMS and XYZ schemes
        min_timestamp / max_timestamp -- restrict by tile timestamp
        revert_test -- passed through to the per-tile test descriptor
        auto_commit, journal_mode, synchronous_off -- sqlite connection tuning
        poolsize   -- worker pool size (< 1 means the multiprocessing default)
        command_list -- commands to run for each tile
    """
    scale       = kwargs.get('tile_scale', None)
    zoom        = kwargs.get('zoom', -1)
    min_zoom    = kwargs.get('min_zoom', 0)
    max_zoom    = kwargs.get('max_zoom', 18)
    tmp_dir     = kwargs.get('tmp_dir', None)
    flip_tile_y = kwargs.get('flip_y', False)

    min_timestamp    = kwargs.get('min_timestamp', 0)
    max_timestamp    = kwargs.get('max_timestamp', 0)
    revert_test     = kwargs.get('revert_test', False)

    auto_commit     = kwargs.get('auto_commit', False)
    journal_mode    = kwargs.get('journal_mode', 'wal')
    synchronous_off = kwargs.get('synchronous_off', False)

    default_pool_size = kwargs.get('poolsize', -1)

    if zoom >= 0:
        min_zoom = max_zoom = zoom


    con = mbtiles_connect(mbtiles_file, auto_commit, journal_mode, synchronous_off, False, True)


    if min_zoom == max_zoom:
        zoom_level_string = "zoom level %d" % (min_zoom)
    else:
        zoom_level_string = "zoom levels %d -> %d" % (min_zoom, max_zoom)

    logger.info("Testing %s (%s)" % (prettify_connect_string(con.connect_string), zoom_level_string))


    image_format = 'png'

    metadata = con.metadata()
    # 'in' is equivalent to the deprecated dict.has_key and also valid on Python 3.
    if 'format' in metadata:
        image_format = metadata['format']


    if default_pool_size < 1:
        default_pool_size = None
        logger.debug("Using default pool size")
    else:
        logger.debug("Using pool size = %d" % (default_pool_size))

    pool = Pool(default_pool_size)
    multiprocessing.log_to_stderr(logger.level)


    chunk = 1000
    tiles_to_process = []

    for t in con.tiles(min_zoom, max_zoom, min_timestamp, max_timestamp, scale):
        tile_z = t[0]
        tile_x = t[1]
        tile_y = t[2]
        tile_scale = t[3]
        tile_data = str(t[4])

        if flip_tile_y:
            tile_y = flip_y(tile_z, tile_y)

        # BUG FIX: open the temp file in binary mode -- tile_data is image
        # bytes and text mode would corrupt it on platforms that translate
        # line endings.
        tmp_file_fd, tmp_file_name = tempfile.mkstemp(suffix=".%s" % (image_format), prefix="tile_", dir=tmp_dir)
        tmp_file = os.fdopen(tmp_file_fd, "wb")
        tmp_file.write(tile_data)
        tmp_file.close()

        tiles_to_process.append({
            'tile_x' : tile_x,
            'tile_y' : tile_y,
            'tile_z' : tile_z,
            'filename' : tmp_file_name,
            'format' : image_format,
            'revert_test' : revert_test,
            'command_list' : kwargs.get('command_list', [])
        })

        if len(tiles_to_process) < chunk:
            continue

        # Hand a full chunk to the worker pool, then start collecting the next one.
        process_tiles(pool, tiles_to_process)

        tiles_to_process = []


    if len(tiles_to_process) > 0:
        process_tiles(pool, tiles_to_process)


    pool.close()
    con.close()
Example #22
0
def mbtiles_to_disk(mbtiles_file, directory_path, **kwargs):
    """Export tiles from an mbtiles database to a tiles/<z>/<x>/<y>.<format> tree.

    Also dumps the database metadata to <directory_path>/metadata.json.

    Keyword arguments:
        delete_after_export -- remove the exported tiles from the database afterwards
        no_overwrite -- do not replace tile files that already exist on disk
        zoom / min_zoom / max_zoom -- zoom range to export (zoom overrides the range)
        flip_y       -- convert the y coordinate between TMS and XYZ schemes
        command_list -- shell commands to run on each tile before writing
        skip_analyze / skip_vacuum -- optimization flags used with delete_after_export
    """
    logger.info("Exporting database to disk: %s --> %s" % (mbtiles_file, directory_path))


    delete_after_export = kwargs.get('delete_after_export', False)
    no_overwrite        = kwargs.get('no_overwrite', False)

    zoom     = kwargs.get('zoom', -1)
    min_zoom = kwargs.get('min_zoom', 0)
    max_zoom = kwargs.get('max_zoom', 255)

    if zoom >= 0:
        min_zoom = max_zoom = zoom


    con = mbtiles_connect(mbtiles_file)
    cur = con.cursor()
    optimize_connection(cur)


    if not os.path.isdir(directory_path):
        os.mkdir(directory_path)
    base_path = os.path.join(directory_path, "tiles")
    if not os.path.isdir(base_path):
        os.makedirs(base_path)


    metadata = dict(con.execute('SELECT name, value FROM metadata').fetchall())
    json.dump(metadata, open(os.path.join(directory_path, 'metadata.json'), 'w'), indent=4)

    count = 0
    start_time = time.time()
    image_format = metadata.get('format', 'png')
    total_tiles = con.execute("""SELECT count(zoom_level) FROM tiles WHERE zoom_level>=? AND zoom_level<=?""",
        (min_zoom, max_zoom)).fetchone()[0]
    # An 'images' table means the database uses the compacted (map/images) schema.
    sending_mbtiles_is_compacted = (con.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='images'").fetchone()[0] > 0)


    tiles = cur.execute("""SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles WHERE zoom_level>=? AND zoom_level<=?""",
        (min_zoom, max_zoom))
    t = tiles.fetchone()
    while t:
        z = t[0]
        x = t[1]
        y = t[2]
        tile_data = t[3]

        # Execute commands
        if kwargs.get('command_list'):
            tile_data = execute_commands_on_tile(kwargs['command_list'], image_format, tile_data)

        if kwargs.get('flip_y', False) == True:
            y = flip_y(z, y)

        tile_dir = os.path.join(base_path, str(z), str(x))
        if not os.path.isdir(tile_dir):
            os.makedirs(tile_dir)

        # image_format already holds metadata.get('format', 'png'); reuse it
        # instead of performing the lookup again for every tile.
        tile_file = os.path.join(tile_dir, '%s.%s' % (y, image_format))

        if no_overwrite == False or not os.path.isfile(tile_file):
            with open(tile_file, 'wb') as f:
                f.write(tile_data)


        count = count + 1
        if (count % 100) == 0:
            logger.debug("%s / %s tiles exported (%.1f%%, %.1f tiles/sec)" %
                (count, total_tiles, (float(count) / float(total_tiles)) * 100.0, count / (time.time() - start_time)))

        t = tiles.fetchone()


    logger.info("%s / %s tiles exported (100.0%%, %.1f tiles/sec)" % (count, total_tiles, count / (time.time() - start_time)))


    if delete_after_export:
        logger.debug("WARNING: Removing exported tiles from %s" % (mbtiles_file))

        if sending_mbtiles_is_compacted:
            # Remove image blobs referenced by the exported zoom range, then
            # the map rows themselves.
            cur.execute("""DELETE FROM images WHERE tile_id IN (SELECT tile_id FROM map WHERE zoom_level>=? AND zoom_level<=?)""",
                (min_zoom, max_zoom))
            cur.execute("""DELETE FROM map WHERE zoom_level>=? AND zoom_level<=?""", (min_zoom, max_zoom))
        else:
            cur.execute("""DELETE FROM tiles WHERE zoom_level>=? AND zoom_level<=?""", (min_zoom, max_zoom))

        optimize_database(cur, kwargs.get('skip_analyze', False), kwargs.get('skip_vacuum', False))
        con.commit()


    con.close()