def debug_drawing(self, surface):
    """Overlay debug geometry on ``surface``: event rects, collision
    rects for walls and NPCs, and screen-center crosshair lines.

    :param surface: pygame Surface to draw onto (locked for the duration
        of the raw gfxdraw calls).
    """
    from pygame.gfxdraw import box
    surface.lock()

    # draw events (translucent green boxes in screen coordinates)
    for event in self.client.events:
        topleft = self.get_pos_from_tilepos((event.x, event.y))
        size = self.project((event.w, event.h))
        rect = topleft, size
        box(surface, rect, (0, 255, 0, 128))

    # We need to iterate over all collidable objects.  So, let's start
    # with the walls/collision boxes.
    box_iter = imap(self._collision_box_to_pgrect, self.collision_map)

    # Next, deal with solid NPCs.
    npc_iter = imap(self._npc_to_pgrect, self.npcs.values())

    # draw npc and wall collision tiles (translucent red)
    red = (255, 0, 0, 128)
    for item in itertools.chain(box_iter, npc_iter):
        box(surface, item, red)

    # draw center lines to verify camera is correct
    w, h = surface.get_size()
    cx, cy = w // 2, h // 2
    pygame.draw.line(surface, (255, 50, 50), (cx, 0), (cx, h))
    pygame.draw.line(surface, (255, 50, 50), (0, cy), (w, cy))

    surface.unlock()
def test_sliceable():
    """Exercise Sliceable: indexing, cache reset, iterator reuse, slicing,
    error cases, and len() delegation."""
    # Random access into an infinite mapped stream.
    it = Sliceable(lambda: imap(str, itertools.count()))
    assert it[500] == "500"
    assert it[505] == "505"

    # Clearing the cache must force a fresh underlying iterator.
    it1 = it.iterator
    it.cache.clear()
    assert it[0] == "0"
    assert it[2] == "2"
    assert it1 is not it.iterator

    # Slices with steps, including going backwards into cached territory.
    assert it[10:15:3] == [str(i) for i in range(10, 15, 3)]
    assert it[101:200:5] == [str(i) for i in range(101, 200, 5)]
    assert it[20:25:3] == [str(i) for i in range(20, 25, 3)]
    assert it[5:100] == [str(i) for i in range(5, 100)]

    # A slice fully inside the cache must not advance the iterator.
    it1 = it.iterator
    assert it[10:20] == [str(i) for i in range(10, 20)]
    assert it.iterator is it1

    # Negative and non-integer keys are rejected with KeyError.
    with pytest.raises(KeyError):
        it[-1]
    with pytest.raises(KeyError):
        it["raise"]

    # len() is delegated to the underlying sized iterable.
    it = Sliceable(lambda: six.moves.xrange(10))
    assert len([i for i in it]) == 10
    assert len(it) == 10
    it = Sliceable(lambda: imap(str, six.moves.xrange(100)))
    assert len(it) == 100

    # Open-ended slice on a finite stream.
    it = Sliceable(lambda: imap(str, six.moves.xrange(100)))
    assert it[10:] == [str(i) for i in range(10, 100)]
def debug_drawing(self, surface):
    """Overlay debug tiles on ``surface``: wall/NPC collision tiles, event
    rectangles, and the tile currently checked in front of player1."""
    # We need to iterate over all collidable objects. So, let's start
    # with the walls/collision boxes.
    box_iter = imap(self._collision_box_to_pgrect, self.collision_map)

    # Next, deal with solid NPCs.
    npc_iter = imap(self._npc_to_pgrect, self.npcs.values())

    # draw npc and wall collision tiles
    for item in itertools.chain(box_iter, npc_iter):
        surface.blit(self.collision_tile, (item[0], item[1]))

    # draw events (translucent cyan fill over the event's rect)
    for event in self.game.events:
        rect = self._collision_box_to_pgrect((event.x, event.y))
        surface.fill((0, 255, 255, 128), rect)

    # draw collision check boxes: the tile one step ahead of player1 in
    # the direction currently faced
    if self.player1.direction["up"]:
        surface.blit(self.collision_tile, (
            self.player1.position[0],
            self.player1.position[1] - self.tile_size[1]))
    elif self.player1.direction["down"]:
        surface.blit(self.collision_tile, (
            self.player1.position[0],
            self.player1.position[1] + self.tile_size[1]))
    elif self.player1.direction["left"]:
        surface.blit(self.collision_tile, (
            self.player1.position[0] - self.tile_size[0],
            self.player1.position[1]))
    elif self.player1.direction["right"]:
        surface.blit(self.collision_tile, (
            self.player1.position[0] + self.tile_size[0],
            self.player1.position[1]))
def azip(*iterables, **kwargs):
    """Zip `iterables`, first rolling `axis` (default -1) to the front of
    any ndarray argument and mapping `func` (default `unmask`) over its
    leading slices; non-array arguments pass through untouched."""
    from six.moves import map as imap, zip as izip
    func = kwargs.get('func', unmask)
    axis = kwargs.get('axis', -1)
    start = kwargs.get('start', 0)
    columns = []
    for item in iterables:
        if isinstance(item, np.ndarray):
            columns.append(imap(func, np.rollaxis(item, axis, start)))
        else:
            columns.append(item)
    return izip(*columns)
def extract_all_features():
    """Extract features for every training block and return (X, y, block_ids)
    as DataFrames/array ready for model fitting."""

    def get_group(block_id):
        # Resolve a block id to its group of rows via the module-level groupby.
        group = groupby.get_group(block_id)
        return group

    # 56 workers, each initialised with the shared netatmo lookup tables.
    pool = Pool(processes=56, initializer=init_process,
                initargs=(netatmo_groups, netatmo_anns))
    # Presumably a warm-up barrier so every worker finishes initialising
    # before feature extraction starts — TODO confirm.
    res = list(pool.imap(sleep_30_sec, xrange(56)))
    group_generator = imap(get_group, groups.keys()[:])
    feature_iterator = pool.imap(extract_features, group_generator)
    X, y, block_ids = [], [], []
    save_id = 0  # NOTE(review): unused below — leftover from checkpointing?
    for block_id, features in izip(groups.keys()[:], tqdm(feature_iterator)):
        group = groupby.get_group(block_id)
        X.append(features)
        # Target label; assumes all rows of a block share one 'rain' value.
        y.append(group.iloc[0]['rain'])
        # Extend the block key tuple with the block's hour offset.
        block_ids.append(block_id + (group.iloc[0]["hours_since"], ))
    X = pd.DataFrame(X)
    y = np.array(y)
    block_ids = pd.DataFrame(
        block_ids,
        columns=["city_code", "sq_x", "sq_y", "hour_hash", "hours_since"])
    return X, y, block_ids
def query_shards(self, query):
    """Applying shard[query] for each shard in `self.shards`, as a sequence.

    Parameters
    ----------
    query : {iterable of list of (int, number) , list of (int, number))}
        Document in BoW format or corpus of documents.

    Returns
    -------
    (None, list of ...)
        Result of search.

    """
    # Materialize the pairs: under Python 3, `zip` returns a lazy iterator
    # that has no len() and would be exhausted after a single pass.
    args = list(zip([query] * len(self.shards), self.shards))
    if PARALLEL_SHARDS and PARALLEL_SHARDS > 1:
        logger.debug("spawning %i query processes", PARALLEL_SHARDS)
        pool = multiprocessing.Pool(PARALLEL_SHARDS)
        # chunksize must be an int: true division yields a float under
        # Python 3 and makes Pool.imap raise, so use floor division.
        result = pool.imap(query_shard, args,
                           chunksize=1 + len(args) // PARALLEL_SHARDS)
    else:
        # serial processing, one shard after another
        pool = None
        result = imap(query_shard, args)
    return pool, result
def azip(*iterables, **kwargs):
    """Zip `iterables` after moving `axis` (default -1) of every ndarray
    argument to the front and applying `func` (default `unmask`) to each
    of its leading slices; other arguments are zipped as-is."""
    from six.moves import map as imap, zip as izip

    def _prepare(item):
        # Only ndarrays are rolled/mapped; everything else passes through.
        if not isinstance(item, np.ndarray):
            return item
        rolled = np.rollaxis(item, kwargs.get('axis', -1),
                             kwargs.get('start', 0))
        return imap(kwargs.get('func', unmask), rolled)

    return izip(*[_prepare(i) for i in iterables])
def get_treenode_data(self, cursor, params, extra_treenode_ids=None):
    """ Selects all treenodes of which links to other treenodes intersect
    with the request bounding box. Will optionally fetch additional
    treenodes.
    """
    # Midpoint and half-depth of the queried Z range; the SQL tests
    # intersection symmetrically around the center.
    params['halfzdiff'] = abs(params['z2'] - params['z1']) * 0.5
    params['halfz'] = params['z1'] + (params['z2'] - params['z1']) * 0.5
    # Coerce optional extra IDs to plain ints so they are safe to pass as
    # a query parameter (guards against injection via stringly IDs).
    params['sanitized_treenode_ids'] = list(
        imap(int, extra_treenode_ids or []))

    if self.prepared_statements:
        # Use a prepared statement to get the treenodes
        cursor.execute('''
            EXECUTE {}(%(project_id)s, %(left)s, %(top)s, %(z1)s,
                %(right)s, %(bottom)s, %(z2)s, %(halfz)s, %(halfzdiff)s,
                %(limit)s, %(sanitized_treenode_ids)s)
        '''.format(self.TREENODE_STATEMENT_NAME), params)
    else:
        cursor.execute(self.treenode_query_psycopg, params)

    treenodes = cursor.fetchall()
    # First result column is taken to be the treenode ID — presumably
    # matches the SELECT order of the query; verify against its definition.
    treenode_ids = [t[0] for t in treenodes]

    return treenode_ids, treenodes
def sign(self, method, bucket_name=None, url=None, headers=None, sub_resources=None):
    """Build an AWS-style authorization header value for the request.

    Assembles the canonical string-to-sign (method, content hashes, date,
    x-amz-* headers, resource path and sub-resources), signs it with
    HMAC-SHA1, and returns ``AWS <access_key>:<base64 signature>``.
    NOTE(review): layout looks like AWS Signature Version 2 — confirm.
    """
    # Never mutate the caller's headers.
    headers = HeaderDict() if headers is None else headers.copy()
    string_to_sign = []
    string_to_sign.append('{0!s}\n'.format(method))
    string_to_sign.append('{0!s}\n'.format(headers.get('content-md5', ('',))))
    string_to_sign.append('{0!s}\n'.format(headers.get('content-type', ('',))))
    # When x-amz-date is present the date slot is left empty.
    if 'x-amz-date' in headers:
        string_to_sign.append('\n')
    else:
        string_to_sign.append('{0!s}\n'.format(headers.get('date', ('',))))
    # Remaining headers, lower-cased, sorted, minus the ignore set.
    for key in sorted(set(imap(str.lower, headers.keys())) - self.SIGN_IGNORE_HEADERS):
        string_to_sign.append('{0!s}:{1!s}\n'.format(key, headers[key]))
    if bucket_name is not None:
        string_to_sign.append('/{0!s}'.format(bucket_name))
    if url is not None:
        string_to_sign.append(urlparse(url).path)
    if sub_resources:
        # Sub-resources are appended as a sorted query string; a None value
        # contributes just the bare key.
        query_params = []
        for key in sorted(sub_resources.keys()):
            value = sub_resources[key]
            if value is None:
                query_param = key
            else:
                query_param = '{0!s}={1!s}'.format(key, value)
            query_params.append(query_param)
        string_to_sign.append('?{0!s}'.format('&'.join(query_params)))
    signature = hmac.new(self.secret_access_key, ''.join(string_to_sign), hashlib.sha1)
    return 'AWS {0!s}:{1!s}'.format(self.access_key, b64encode(signature.digest()))
def get_all(self):
    """
    Generate all the tiles in the store with their data.

    :rtype: iterator
    """
    # Skip falsy entries, fetch data lazily for the rest.
    return (self.get_one(tile) for tile in self.list() if tile)
def each(self, func):
    """
    Call `func` on each element in the collection.

    Returns a new Collection.
    """
    mapper = _make_callable(func)
    return Collection(mapper(element) for element in self._items)
def extract_iolets_in_correct_order(next_block, unit):
    """Return (inlet, outlet, id) triples for `next_block`.

    The iolets must be swapped such that the iolets of the just eliminated
    unit are on the right hand side.
    """
    if get_unit_prefix(next_block[0].in_out[1]) == unit:
        return [(eq.in_out[0], eq.in_out[1], eq.id) for eq in next_block]
    return [(eq.in_out[1], eq.in_out[0], eq.id) for eq in next_block]
def parse_actions(self, actions):
    """Parse an <actions> element into an Actions container, including its
    <action> children and any <package> declarations."""
    os = actions.attrib.get("os", None)
    architecture = actions.get("architecture", None)
    action_els = actions.findall("action")
    assert action_els is not None
    parsed_actions = [self.parse_action(element) for element in action_els]
    action_packages = [self.parse_action_package(package)
                       for package in actions.findall("package")]
    return Actions(parsed_actions, os, architecture, action_packages)
def delete(self, tiles):
    """
    Delete ``tiles`` from the store.

    :param tiles: Input tilestream
    :type tiles: iterable

    :rtype: iterator
    """
    # Lazily delete each truthy tile from the stream.
    return (self.delete_one(tile) for tile in tiles if tile)
def get_bounding_pyramid(self):
    """
    Returns the bounding pyramid that encloses all tiles in the store.

    :rtype: :class:`BoundingPyramid`
    """
    coords = (tile.tilecoord for tile in self.list() if tile)
    return reduce(BoundingPyramid.add, coords, BoundingPyramid())
def sized_imap(func, iterable, strict=False):
    '''
    Return an iterable whose elements are the result of applying the callable `func`
    to each element of `iterable`. If `iterable` has a `len()`, then the iterable returned
    by this function will have the same `len()`. Otherwise calling `len()` on the
    returned iterable will raise `TypeError`.

    :param func: The function to apply to each element of `iterable`.
    :param iterable: An iterable whose objects will be mapped.
    :param bool strict: If `True` and `iterable` does not support `len()`, raise an exception
        immediately instead of returning an iterable that does not support `len()`.
    '''
    try:
        size = len(iterable)
    except TypeError:
        # No length available: either fail fast or degrade to a plain map.
        if strict:
            raise
        return imap(func, iterable)
    return SizedGenerator(lambda: imap(func, iterable), length=size)
def get_bounding_pyramid(self):
    """
    Returns the bounding pyramid that encloses all tiles in the store.

    :rtype: :class:`BoundingPyramid`
    """
    bounds = BoundingPyramid()
    for tile in self.list():
        if tile:
            bounds = BoundingPyramid.add(bounds, tile.tilecoord)
    return bounds
def put(self, tiles):
    """
    Store ``tiles`` in the store.

    :param tiles: Tilestream
    :type tiles: iterator

    :rtype: iterator
    """
    # Lazily store each truthy tile from the stream.
    return (self.put_one(tile) for tile in tiles if tile)
def get(self, tiles):
    """
    Add data to each of ``tiles``.

    :param tiles: Tilestream
    :type tiles: iterator

    :rtype: iterator
    """
    # Lazily fetch data for each truthy tile from the stream.
    return (self.get_one(tile) for tile in tiles if tile)
def _add_sources(req, sources, result, origin):
    """Record `req` and its source references in `result`.

    Each entry of `sources` looks like "NAME (CONSTRAINT)" or just "NAME";
    the name and the parenthesized constraint are split out lazily below.
    Presumably `sources` comes from pip-compile style "via" annotations —
    TODO confirm against the caller.
    """
    # Lazy parallel streams: the token before the space is the package (or
    # file) name, the parenthesized remainder (if any) is its constraint.
    pkg_names = imap(lambda x: x.split(" ")[0], sources)
    constraints = imap(
        lambda x: x.split(" ")[1].replace("(", "").replace(")", "")
        if "(" in x
        else None,
        sources,
    )
    version = req_compile.utils.parse_version(list(req.specifier)[0].version)
    # Reuse metadata already registered for this requirement, if any.
    metadata = None
    if req.name in result:
        metadata = result[req.name].metadata
    if metadata is None:
        metadata = req_compile.containers.DistInfo(req.name, version, [])

    metadata.version = version
    metadata.origin = origin
    result.add_dist(metadata, None, req)
    for name, constraint in zip(pkg_names, constraints):
        # File-like sources (requirements files, paths) don't become
        # reverse dependencies.
        if name and not (name.endswith(".txt") or name.endswith(".out") or "\\" in name or "/" in name):
            constraint_req = req_compile.utils.parse_requirement(name)
            result.add_dist(constraint_req.name, None, constraint_req)
            reverse_dep = result[name]
            if reverse_dep.metadata is None:
                # Placeholder metadata for a source we haven't resolved;
                # "0+missing" marks it as synthetic.
                inner_meta = req_compile.containers.DistInfo(
                    constraint_req.name,
                    req_compile.utils.parse_version("0+missing"),
                    [],
                )
                inner_meta.origin = ReferenceSourceRepository(inner_meta)
                reverse_dep.metadata = inner_meta
        else:
            reverse_dep = None

        reason = _create_metadata_req(req, metadata, name, constraint)
        if reverse_dep is not None:
            # The source now formally requires `req` via `reason`.
            reverse_dep.metadata.reqs.append(reason)
        result.add_dist(metadata.name, reverse_dep, reason)
def get_distance(lat1, lon1, lat2, lon2):
    """Return the great-circle distance in kilometres between two points
    given in decimal degrees, using the haversine formula."""
    # Work in radians from here on.
    lon1 = radians(lon1)
    lat1 = radians(lat1)
    lon2 = radians(lon2)
    lat2 = radians(lat2)

    delta_lon = lon2 - lon1
    delta_lat = lat2 - lat1
    # Haversine of the central angle between the two points.
    half_chord = (sin(delta_lat / 2) ** 2
                  + cos(lat1) * cos(lat2) * sin(delta_lon / 2) ** 2)
    central_angle = 2 * asin(sqrt(half_chord))
    # 6367 km: Earth radius used by the original implementation.
    return 6367 * central_angle
def debug_drawing(self, surface):
    """Overlay debug tiles on ``surface``: wall/NPC collision tiles, event
    rectangles, and the collision-check tile ahead of player1."""
    # We need to iterate over all collidable objects. So, let's start
    # with the walls/collision boxes.
    box_iter = imap(self._collision_box_to_pgrect, self.collision_map)

    # Next, deal with solid NPCs.
    npc_iter = imap(self._npc_to_pgrect, self.npcs.values())

    # draw npc and wall collision tiles
    for item in itertools.chain(box_iter, npc_iter):
        surface.blit(self.collision_tile, (item[0], item[1]))

    # draw events (translucent cyan fill)
    for event in self.game.events:
        rect = self._collision_box_to_pgrect((event.x, event.y))
        surface.fill((0, 255, 255, 128), rect)

    # draw collision check boxes: the tile one step ahead of player1 in
    # the direction currently faced
    if self.player1.direction["up"]:
        surface.blit(self.collision_tile,
                     (self.player1.position[0],
                      self.player1.position[1] - self.tile_size[1]))
    elif self.player1.direction["down"]:
        surface.blit(self.collision_tile,
                     (self.player1.position[0],
                      self.player1.position[1] + self.tile_size[1]))
    elif self.player1.direction["left"]:
        surface.blit(self.collision_tile,
                     (self.player1.position[0] - self.tile_size[0],
                      self.player1.position[1]))
    elif self.player1.direction["right"]:
        surface.blit(self.collision_tile,
                     (self.player1.position[0] + self.tile_size[0],
                      self.player1.position[1]))
def _retrieve_assets(self, sids, asset_tbl, asset_type): """ Internal function for loading assets from a table. This should be the only method of `AssetFinder` that writes Assets into self._asset_cache. Parameters --------- sids : iterable of int Asset ids to look up. asset_tbl : sqlalchemy.Table Table from which to query assets. asset_type : type Type of asset to be constructed. Returns ------- assets : dict[int -> Asset] Dict mapping requested sids to the retrieved assets. """ # Fastpath for empty request. if not sids: return {} cache = self._asset_cache hits = {} for assets in group_into_chunks(sids): # Load misses from the db. query = self._select_assets_by_sid(asset_tbl, assets) for row in imap(dict, query.execute().fetchall()): asset = asset_type(**_convert_asset_timestamp_fields(row)) sid = asset.sid hits[sid] = cache[sid] = asset # If we get here, it means something in our code thought that a # particular sid was an equity/future and called this function with a # concrete type, but we couldn't actually resolve the asset. This is # an error in our code, not a user-input error. misses = tuple(set(sids) - viewkeys(hits)) if misses: if asset_type == Equity: raise EquitiesNotFound(sids=misses) else: raise FutureContractsNotFound(sids=misses) return hits
def _search_all(self, term):
    """Tokenize `term`, resolve word and glob tokens to word ids, and
    return a lazy iterator of matching document ids (best first)."""
    tokens = [t.lower() for t in _tokenizer_regex.findall(term)]

    def is_glob(token):
        # Tokens containing shell-style wildcards are expanded separately.
        return '?' in token or '*' in token

    glob_tokens = [t for t in tokens if is_glob(t)]
    word_tokens = [t for t in tokens if not is_glob(t)]

    wids = set(self._lexicon.termToWordIds(word_tokens))
    for pattern in glob_tokens:
        wids.update(self._lexicon.globToWordIds(pattern))
    wids = self._remove_oov_wids(wids)
    # XXX
    # We should have OrderedDict-like lazy objects
    # and we should have weightedIntersection and weightedUnion
    # working lazily in a for of zerodbext.catalog
    # This is just a workaround for simpler queries
    # XXX
    return imap(lambda pair: pair[0],
                mass_weightedUnion(self._search_wids(wids)))
def _introspect(self):
    """Fetch the daemon's exported "module.method" names and attach one
    proxy object per module, each exposing its methods."""
    self.modules = []
    method_names = self.remote_call("daemon.get_method_list").get()

    grouped = defaultdict(dict)
    for full_name in method_names:
        module, method = full_name.split(".")
        grouped[module][method] = self._create_module_method(module, method)

    for module, methods in grouped.items():
        clsname = "DelugeModule{0}".format(module.capitalize())
        cls = type(clsname, (), methods)
        setattr(self, module, cls())
        self.modules.append(module)
def query_shards(self, query):
    """
    Return the result of applying shard[query] for each shard in self.shards,
    as a sequence.

    If PARALLEL_SHARDS is set, the shards are queried in parallel, using
    the multiprocessing module.
    """
    # Materialize the pairs: under Python 3, `zip` returns a lazy iterator
    # that has no len() and would be exhausted after a single pass.
    args = list(zip([query] * len(self.shards), self.shards))
    if PARALLEL_SHARDS and PARALLEL_SHARDS > 1:
        logger.debug("spawning %i query processes", PARALLEL_SHARDS)
        pool = multiprocessing.Pool(PARALLEL_SHARDS)
        # chunksize must be an int: true division yields a float under
        # Python 3 and makes Pool.imap raise, so use floor division.
        result = pool.imap(query_shard, args,
                           chunksize=1 + len(args) // PARALLEL_SHARDS)
    else:
        # serial processing, one shard after another
        pool = None
        result = imap(query_shard, args)
    return pool, result
def _introspect(self):
    """Build per-module proxy objects from the daemon's exported
    "module.method" name list and attach them to this client."""
    self.modules = []
    raw_names = self.remote_call("daemon.get_method_list").get()

    by_module = defaultdict(dict)
    for raw in raw_names:
        module, method = raw.split(".")
        by_module[module][method] = self._create_module_method(module, method)

    for module, methods in by_module.items():
        clsname = "DelugeModule{0}".format(module.capitalize())
        setattr(self, module, type(clsname, (), methods)())
        self.modules.append(module)
def _introspect(self):
    """Discover the daemon's exported "module.method" names and attach one
    proxy object per module to this client."""
    def splitter(value):
        # "module.method" -> [module, method]
        return value.split('.')

    self.modules = []
    methods = self.remote_call('daemon.get_method_list').get()
    # Method names arrive as bytes; decode to text before splitting.
    methods = (x.decode() for x in methods)
    methodmap = defaultdict(dict)

    for module, method in imap(splitter, methods):
        methodmap[module][method] = self._create_module_method(
            module, method)

    for module, methods in methodmap.items():
        clsname = 'DelugeModule{0}'.format(module.capitalize())
        # str() wrappers: type() and setattr() need native str, presumably
        # to stay safe under Python 2 where the decoded names are unicode.
        cls = type(str(clsname), (), methods)
        setattr(self, str(module), cls())
        self.modules.append(module)
def get_connector_data(self, cursor, params, missing_connector_ids=None):
    """Selects all connectors that are in or have links that intersect the
    bounding box, or that are in missing_connector_ids.
    """
    # Midpoint and half-depth of the queried Z range for symmetric
    # intersection tests in the SQL.
    params['halfz'] = params['z1'] + (params['z2'] - params['z1']) * 0.5
    params['halfzdiff'] = abs(params['z2'] - params['z1']) * 0.5
    # Coerce optional IDs to plain ints before embedding as a parameter.
    params['sanitized_connector_ids'] = list(imap(int, missing_connector_ids or []))
    if self.prepared_statements:
        # Use a prepared statement to get connectors
        cursor.execute('''
            EXECUTE {}(%(project_id)s, %(left)s, %(top)s, %(z1)s,
                %(right)s, %(bottom)s, %(z2)s, %(halfz)s, %(halfzdiff)s,
                %(limit)s, %(sanitized_connector_ids)s)
        '''.format(self.CONNECTOR_STATEMENT_NAME), params)
    else:
        cursor.execute(self.connector_query_psycopg, params)

    return list(cursor.fetchall())
def list_messages(request, project_id=None):
    """Return the current user's unread messages as JSON, newest first,
    plus a dummy entry carrying the open-notification count."""
    unread = Message.objects.filter(
        user=request.user,
        read=False)\
        .order_by('-time')

    def message_to_dict(message):
        return {
            'id': message.id,
            'title': message.title,
            'action': message.action,
            'text': message.text,
            'time': str(message.time)
        }

    messages = [message_to_dict(m) for m in unread]

    # Add a dummy message that includes the count of open notifications.
    # This is used to add the red badge to the notifications icon.
    crs = ChangeRequest.objects.filter(recipient=request.user,
                                       status=ChangeRequest.OPEN)
    messages.append({'id': -1, 'notification_count': len(crs)})

    return HttpResponse(json.dumps(makeJSON_legacy_list(messages)))
def get_treenode_data(self, cursor, params, extra_treenode_ids=None):
    """ Selects all treenodes of which links to other treenodes intersect
    with the request bounding box. Will optionally fetch additional
    treenodes.
    """
    # Half-depth and midpoint of the queried Z range.
    params['halfzdiff'] = abs(params['z2'] - params['z1']) * 0.5
    params['halfz'] = params['z1'] + (params['z2'] - params['z1']) * 0.5
    # Coerce optional extra IDs to plain ints before use as a parameter.
    params['sanitized_treenode_ids'] = list(imap(int, extra_treenode_ids or []))
    if self.prepared_statements:
        # Use a prepared statement to get the treenodes
        cursor.execute('''
            EXECUTE {}(%(project_id)s, %(left)s, %(top)s, %(z1)s,
                %(right)s, %(bottom)s, %(z2)s, %(halfz)s, %(halfzdiff)s,
                %(limit)s, %(sanitized_treenode_ids)s)
        '''.format(self.TREENODE_STATEMENT_NAME), params)
    else:
        cursor.execute(self.treenode_query_psycopg, params)

    treenodes = cursor.fetchall()
    # First column is taken to be the treenode ID — presumably matches the
    # SELECT order; verify against the query definition.
    treenode_ids = [t[0] for t in treenodes]

    return treenode_ids, treenodes
def list_messages(request, project_id=None):
    """Serialize the user's unread messages (newest first) to JSON and
    append a sentinel entry with the number of open change requests."""
    unread_messages = Message.objects.filter(
        user=request.user,
        read=False)\
        .order_by('-time')

    def message_to_dict(message):
        return {
            'id': message.id,
            'title': message.title,
            'action': message.action,
            'text': message.text,
            'time': str(message.time)
        }

    payload = [message_to_dict(msg) for msg in unread_messages]

    # Add a dummy message that includes the count of open notifications.
    # This is used to add the red badge to the notifications icon.
    open_requests = ChangeRequest.objects.filter(recipient=request.user,
                                                 status=ChangeRequest.OPEN)
    payload = payload + [{'id': -1, 'notification_count': len(open_requests)}]

    return HttpResponse(json.dumps(makeJSON_legacy_list(payload)))
def get_treenode_data(self, cursor, params, extra_treenode_ids):
    """ Selects all treenodes of which links to other treenodes intersect with
    the request bounding box.
    """
    # Half-depth and midpoint of the queried Z range.
    params['halfzdiff'] = abs(params['z2'] - params['z1']) * 0.5
    params['halfz'] = params['z1'] + (params['z2'] - params['z1']) * 0.5
    # Coerce extra IDs to plain ints before use as a query parameter.
    params['sanitized_treenode_ids'] = list(imap(int, extra_treenode_ids))
    if settings.PREPARED_STATEMENTS:
        # Use a prepared statement to get the treenodes
        cursor.execute('''
            EXECUTE get_treenodes_postgis_2d(%(project_id)s, %(left)s,
                %(top)s, %(z1)s, %(right)s, %(bottom)s, %(z2)s, %(halfz)s,
                %(halfzdiff)s, %(limit)s, %(sanitized_treenode_ids)s)
        ''', params)
    else:
        cursor.execute(self.treenode_query_psycopg, params)

    treenodes = cursor.fetchall()
    # First column is taken to be the treenode ID — presumably matches the
    # SELECT order; verify against the query definition.
    treenode_ids = [t[0] for t in treenodes]

    return treenode_ids, treenodes
def extract_all_features_test():
    """Extract features for every test block and return (X, block_ids);
    no labels are produced for the test split."""

    def get_group(block_id):
        # Resolve a block id to its group of rows via the test groupby.
        group = test_groupby.get_group(block_id)
        return group

    # 56 workers, each initialised with the shared test lookup tables.
    pool = Pool(processes=56, initializer=init_process,
                initargs=(test_netatmo_groups, test_netatmo_anns))
    group_generator = imap(get_group, test_groups.keys()[:])
    feature_iterator = pool.imap(extract_features, group_generator)
    X, block_ids = [], []
    for block_id, features in izip(test_groups.keys()[:], tqdm(feature_iterator)):
        group = test_groupby.get_group(block_id)
        X.append(features)
        block_ids.append(block_id)
    X = pd.DataFrame(X)
    block_ids = pd.DataFrame(
        block_ids, columns=["city_code", "sq_x", "sq_y", "hour_hash"])
    return X, block_ids
def query_shards(self, query):
    """Apply shard[query] to each shard in `self.shards`. Used internally.

    Parameters
    ----------
    query : {iterable of list of (int, number) , list of (int, number))}
        Document in BoW format or corpus of documents.

    Returns
    -------
    (None, list of individual shard query results)
        Query results.

    """
    # Materialize the pairs once: under Python 3 `zip` is a lazy iterator,
    # so the previous `len(list(args))` exhausted it and Pool.imap then
    # received an empty stream.
    args = list(zip([query] * len(self.shards), self.shards))
    if PARALLEL_SHARDS and PARALLEL_SHARDS > 1:
        logger.debug("spawning %i query processes", PARALLEL_SHARDS)
        pool = multiprocessing.Pool(PARALLEL_SHARDS)
        # chunksize must be an int, hence floor division.
        result = pool.imap(query_shard, args,
                           chunksize=1 + len(args) // PARALLEL_SHARDS)
    else:
        # serial processing, one shard after another
        pool = None
        result = imap(query_shard, args)
    return pool, result
def sign(self, method, bucket_name=None, url=None, headers=None, sub_resources=None):
    """Build an AWS-style authorization header value for the request.

    Assembles the canonical string-to-sign (method, content hashes, date,
    remaining headers, resource path and sub-resources), signs it with
    HMAC-SHA1, and returns ``AWS <access_key>:<base64 signature>``.
    NOTE(review): layout looks like AWS Signature Version 2 — confirm.
    """
    # Never mutate the caller's headers.
    headers = HeaderDict() if headers is None else headers.copy()
    string_to_sign = []
    string_to_sign.append('%s\n' % (method, ))
    string_to_sign.append('%s\n' % headers.get('content-md5', ('', )))
    string_to_sign.append('%s\n' % headers.get('content-type', ('', )))
    # When x-amz-date is present the date slot is left empty.
    if 'x-amz-date' in headers:
        string_to_sign.append('\n')
    else:
        string_to_sign.append('%s\n' % headers.get('date', ('', )))
    # Remaining headers, lower-cased, sorted, minus the ignore set.
    for key in sorted(
            set(imap(str.lower, headers.keys())) - self.SIGN_IGNORE_HEADERS):
        string_to_sign.append('%s:%s\n' % (key, headers[key]))
    if bucket_name is not None:
        string_to_sign.append('/%s' % (bucket_name, ))
    if url is not None:
        string_to_sign.append(urlparse(url).path)
    if sub_resources:
        # Sub-resources are appended as a sorted query string; a None
        # value contributes just the bare key.
        query_params = []
        for key in sorted(sub_resources.keys()):
            value = sub_resources[key]
            if value is None:
                query_param = key
            else:
                query_param = '%s=%s' % (key, value)
            query_params.append(query_param)
        string_to_sign.append('?%s' % ('&'.join(query_params), ))
    signature = hmac.new(self.secret_access_key, ''.join(string_to_sign), hashlib.sha1)
    return 'AWS %s:%s' % (self.access_key, b64encode(signature.digest()))
def __iter__(self):
    """Iterate over unpacked keys from the backing table."""
    rows = query(self.connection, self.ITER_SQL)
    return (self._unpackkey(row) for row in rows)
def itervalues(self):
    """Yield wrapped child elements whose key is not None."""
    wrap = partial(self.__class__._of(), parent=self)
    wrapped = imap(wrap, iter(self._element))
    return (item for item in wrapped
            if self.__class__._key(item) is not None)
def __iter__(self):
    """Iterate over all child elements, wrapped with this node as parent."""
    wrap = partial(self.__class__._of(), parent=self)
    return (wrap(element) for element in iter(self._element))
def f():
    """Run the query, record its length on self, and lazily map rows to objects."""
    self.length, rows = query_f()
    return (get_object(row) for row in rows)
def _wrap_multi(self, func):
    """Apply `func` to the wrapped value and return a Collection of Nodes
    over the results."""
    raw_values = func(self._value)
    return Collection(Node(value) for value in raw_values)
def iteritems(self):
    """Iterate over unpacked (key, value) pairs from the backing table."""
    rows = query(self.connection, self.ITERITEMS_SQL)
    return (self._unpackitem(row) for row in rows)
def __iter__(self):
    """Iterate over the first element of each stored entry."""
    return (entry[0] for entry in itervalues(self.items))
def itervalues(self):
    """Iterate over the second element of each stored entry."""
    return (entry[1] for entry in itervalues(self.items))
def itervalues(self):
    """Iterate over unpacked values from the backing table."""
    rows = query(self.connection, self.ITERVALUES_SQL)
    return (self._unpackvalue(row) for row in rows)
def list(self):
    """Yield a Tile for every key stored in the database."""
    return (Tile(TileCoord.from_string(key)) for key in iterkeys(self.db))