def test_margin(self):
    """Limits snapped to the discretization grid must be stable for
    frames that differ only slightly near the margins."""
    wide = get_discretized_limits(-90., 90., -180., 180., 10)
    slightly_narrower = get_discretized_limits(-89., 89., -179., 179., 10)
    assert wide == slightly_narrower
def xrjs_getClusters(self, lat_min, lat_max, lon_min, lon_max): """ """ def make_points(r_list, lan_dict, lon_dict, type_dict, rating_dict): points = [] for i, rid in enumerate(r_list): point = Point(i, float(lat_dict[rid]), float(lon_dict[rid])) point.type = type_dict[rid] point.rating = rating_dict[rid] points.append(point) return points def make_points_by_type(points): points_by_type = {} for point in points: if not point.type in points_by_type: points_by_type[point.type] = [] points_by_type[point.type].append(point) return points_by_type def make_clusters(points_by_type, lat_min, lat_max, lon_min, lon_max, grid_size): def average_rating(points): assert len(points) > 0 ratings = [point.rating for point in points] return sum(ratings) / float(len(ratings)) all_clusters = [] for type, points in points_by_type.iteritems(): centers, groups = kmeans(lat_min, lat_max, lon_min, lon_max, points, grid_size) clusters = [] for i, points in enumerate(groups): if len(points) > 0: clusters.append({ 'center': centers[i], 'points': points, 'type': type, 'averageRating': average_rating(points), }) all_clusters.extend(clusters) return all_clusters def point_data(cluster, r_list): def cluster_tooltip(cluster, r_list): cluster_rids = [ r_list[point.id] for point in cluster['points'] ] pins = [ getObjectFromCatalog(catalog, rid) for rid in cluster_rids ] return self.cluster_index(pins=pins) num_points = len(cluster['points']) if num_points == 1: point = cluster['points'][0] pin = getObjectFromCatalog(catalog, r_list[point.id]) icon_name = 'mk_single_rating_%s_%d' % (pin.type, pin.rating) return { 'id': pin.id, 'icon_name': icon_name, 'lat': pin.latitude, 'lon': pin.longitude, 'display_tooltip': False, 'tooltip': '', 'label': '' } else: rating = int(cluster['averageRating']) display_tooltip = (lat_max - lat_min) < 1. 
if display_tooltip: tooltip = cluster_tooltip(cluster, r_list) else: tooltip = '' icon_name = 'mk_rating_%s_%d_%d' % (cluster['type'], rating, num_points) return { 'id': '', 'icon_name': icon_name, 'lat': cluster['center'].lat, 'lon': cluster['center'].lon, 'display_tooltip': display_tooltip, 'tooltip': tooltip, 'label': 'cluster', 'num_points': num_points } tc_start = time() lat_min, lat_max = float(lat_min), float(lat_max) lon_min, lon_max = float(lon_min), float(lon_max) grid_size = 7 tlat_min, tlat_max, tlon_min, tlon_max = get_discretized_limits( lat_min, lat_max, lon_min, lon_max, grid_size) catalog = self.context.catalog tc_start_apply_idxs = time() type_dict = get_index_dict('type', catalog) lat_dict = get_index_dict('latitude', catalog, tlat_min, tlat_max) lon_dict = get_index_dict('longitude', catalog, tlon_min, tlon_max) rating_dict = get_index_dict('rating', catalog) tc_end_apply_idxs = time() print 'apply indexes', tc_end_apply_idxs - tc_start_apply_idxs map_filters = { 'latitude': { 'query': (tlat_min, tlat_max), 'range': 'min:max' }, 'longitude': { 'query': (tlon_min, tlon_max), 'range': 'min:max' } } rids = filter_rids(catalog, map_filters) if rids is None: r_list = [] else: r_list = list(set(rids)) points = make_points(r_list, lat_dict, lon_dict, type_dict, rating_dict) points_by_type = make_points_by_type(points) clusters = make_clusters(points_by_type, tlat_min, tlat_max, tlon_min, tlon_max, grid_size) points = [point_data(cluster, r_list) for cluster in clusters] tc_end = time() print 'clusters', tc_end - tc_start return json.dumps({'points': points})
def getClusters(catalog_tool, filters):
    """Search the catalog inside discretized tile limits and k-means-cluster
    the matching geo objects.

    Returns (centers, groups) where each group is a list of catalog rids.
    """
    # geopoints' and clusters' density on map / also depends on map frame size
    grid_size = 12

    # unpack map limits from the first filter; no filters should not happen
    if not filters:
        return [], []
    geo_filter = filters[0]
    lat_min = float(geo_filter['geo_latitude']['query'][0])
    lat_max = float(geo_filter['geo_latitude']['query'][1])
    lon_min = float(geo_filter['geo_longitude']['query'][0])
    lon_max = float(geo_filter['geo_longitude']['query'][1])

    # the objects are searched for in the tile limits
    # (to get the same clusters every time)
    tlat_min, tlat_max, tlon_min, tlon_max = clusters.get_discretized_limits(
        lat_min, lat_max, lon_min, lon_max, grid_size)

    catalog = catalog_tool._catalog
    # getting the inner indexes for lat and lon
    lat_index = catalog.getIndex('geo_latitude')._index
    lon_index = catalog.getIndex('geo_longitude')._index

    # apply the lat and lon indexes to get the rids; covering results just
    # outside the frame helps against cluster flickering near the margins
    rs = None
    lat_set, lat_dict = _apply_index_with_range_dict_results(
        lat_index, Decimal(str(tlat_min)), Decimal(str(tlat_max)))
    w, rs = weightedIntersection(rs, lat_set)
    lon_set, lon_dict = _apply_index_with_range_dict_results(
        lon_index, Decimal(str(tlon_min)), Decimal(str(tlon_max)))
    w, rs = weightedIntersection(rs, lon_set)

    # OR the filters together; within each filter, AND every catalog index
    rs_final = None
    for flt in filters:
        rs_f = rs
        # adjust geo limits in filters to be consistent with discretized
        # tile limits
        flt['geo_longitude']['query'] = (Decimal(str(tlon_min)),
                                         Decimal(str(tlon_max)))
        flt['geo_latitude']['query'] = (Decimal(str(tlat_min)),
                                        Decimal(str(tlat_max)))
        # this code is from the search function in the catalog
        # implementation in Zope
        for index_name in catalog.indexes.keys():
            index = catalog.getIndex(index_name)
            _apply_index = getattr(index, "_apply_index", None)
            if _apply_index is None:
                continue
            r = _apply_index(flt)
            if r is not None:
                r, u = r
                w, rs_f = weightedIntersection(rs_f, r)
        w, rs_final = weightedUnion(rs_f, rs_final)

    r_list = list(rs_final)
    # transform objects to points
    points = [clusters.Point(idx, float(lat_dict[rid]), float(lon_dict[rid]))
              for idx, rid in enumerate(r_list)]
    centers, groups = clusters.kmeans(tlat_min, tlat_max, tlon_min, tlon_max,
                                      points, grid_size)
    # transform group points back to rids
    for idx in range(len(groups)):
        groups[idx] = [r_list[p.id] for p in groups[idx]]
    return centers, groups
def getClusters(catalog_tool, filters):
    """Cluster the geo objects matching *filters* inside discretized tile
    limits.

    Returns (centers, groups); each group is a list of catalog rids.
    """
    # geopoints' and clusters' density on map / also depends on map frame size
    grid_size = 16

    # unpack map limits; an empty filter list should not happen
    if not filters:
        return [], []
    first = filters[0]
    lat_min, lat_max = (float(first['geo_latitude']['query'][0]),
                        float(first['geo_latitude']['query'][1]))
    lon_min, lon_max = (float(first['geo_longitude']['query'][0]),
                        float(first['geo_longitude']['query'][1]))

    # the objects are searched for in the tile limits
    # (to get the same clusters every time)
    tlat_min, tlat_max, tlon_min, tlon_max = clusters.get_discretized_limits(
        lat_min, lat_max, lon_min, lon_max, grid_size)

    catalog = catalog_tool._catalog
    # inner indexes for lat and lon
    lat_index = catalog.getIndex('geo_latitude')._index
    lon_index = catalog.getIndex('geo_longitude')._index

    # apply the lat and lon indexes to get the rids; the tile limits extend
    # slightly past the frame, which fixes cluster flickering near margins
    result_set = None
    lat_set, lat_dict = _apply_index_with_range_dict_results(
        lat_index, Decimal(str(tlat_min)), Decimal(str(tlat_max)))
    w, result_set = weightedIntersection(result_set, lat_set)
    lon_set, lon_dict = _apply_index_with_range_dict_results(
        lon_index, Decimal(str(tlon_min)), Decimal(str(tlon_max)))
    w, result_set = weightedIntersection(result_set, lon_set)

    # OR the filters and apply every catalog index within each one
    rs_final = None
    for query_filter in filters:
        rs_f = result_set
        # adjust geo limits in filters to be consistent with discretized
        # tile limits
        query_filter['geo_longitude']['query'] = (Decimal(str(tlon_min)),
                                                  Decimal(str(tlon_max)))
        query_filter['geo_latitude']['query'] = (Decimal(str(tlat_min)),
                                                 Decimal(str(tlat_max)))
        # this code is from the search function in the catalog
        # implementation in Zope
        for index_name in catalog.indexes.keys():
            index = catalog.getIndex(index_name)
            _apply_index = getattr(index, "_apply_index", None)
            if _apply_index is None:
                continue
            applied = _apply_index(query_filter)
            if applied is not None:
                applied, u = applied
                w, rs_f = weightedIntersection(rs_f, applied)
        w, rs_final = weightedUnion(rs_f, rs_final)

    r_list = list(rs_final)
    # transform objects to points
    points = []
    for idx, rid in enumerate(r_list):
        points.append(clusters.Point(idx,
                                     float(lat_dict[rid]),
                                     float(lon_dict[rid])))
    centers, groups = clusters.kmeans(tlat_min, tlat_max, tlon_min, tlon_max,
                                      points, grid_size)
    # transform group points back to rids
    for idx in range(len(groups)):
        groups[idx] = [r_list[p.id] for p in groups[idx]]
    return centers, groups
def xrjs_getClusters(self, lat_min, lat_max, lon_min, lon_max): """ """ def make_points(r_list, lan_dict, lon_dict, type_dict, rating_dict): points = [] for i, rid in enumerate(r_list): point = Point(i, float(lat_dict[rid]), float(lon_dict[rid])) point.type = type_dict[rid] point.rating = rating_dict[rid] points.append(point) return points def make_points_by_type(points): points_by_type = {} for point in points: if not point.type in points_by_type: points_by_type[point.type] = [] points_by_type[point.type].append(point) return points_by_type def make_clusters(points_by_type, lat_min, lat_max, lon_min, lon_max, grid_size): def average_rating(points): assert len(points) > 0 ratings = [point.rating for point in points] return sum(ratings) / float(len(ratings)) all_clusters = [] for type, points in points_by_type.iteritems(): centers, groups = kmeans(lat_min, lat_max, lon_min, lon_max, points, grid_size) clusters = [] for i, points in enumerate(groups): if len(points) > 0: clusters.append({ 'center': centers[i], 'points': points, 'type': type, 'averageRating': average_rating(points), }) all_clusters.extend(clusters) return all_clusters def point_data(cluster, r_list): def cluster_tooltip(cluster, r_list): cluster_rids = [r_list[point.id] for point in cluster['points']] pins = [getObjectFromCatalog(catalog, rid) for rid in cluster_rids] return self.cluster_index(pins=pins) num_points = len(cluster['points']) if num_points == 1: point = cluster['points'][0] pin = getObjectFromCatalog(catalog, r_list[point.id]) icon_name = 'mk_single_rating_%s_%d' % (pin.type, pin.rating) return {'id': pin.id, 'icon_name': icon_name, 'lat': pin.latitude, 'lon': pin.longitude, 'display_tooltip': False, 'tooltip': '', 'label': ''} else: rating = int(cluster['averageRating']) display_tooltip = (lat_max - lat_min) < 1. 
if display_tooltip: tooltip = cluster_tooltip(cluster, r_list) else: tooltip = '' icon_name = 'mk_rating_%s_%d_%d' % (cluster['type'], rating, num_points) return {'id': '', 'icon_name': icon_name, 'lat': cluster['center'].lat, 'lon': cluster['center'].lon, 'display_tooltip': display_tooltip, 'tooltip': tooltip, 'label': 'cluster', 'num_points': num_points} tc_start = time() lat_min, lat_max = float(lat_min), float(lat_max) lon_min, lon_max = float(lon_min), float(lon_max) grid_size = 7 tlat_min, tlat_max, tlon_min, tlon_max = get_discretized_limits( lat_min, lat_max, lon_min, lon_max, grid_size) catalog = self.context.catalog tc_start_apply_idxs = time() type_dict = get_index_dict('type', catalog) lat_dict = get_index_dict('latitude', catalog, tlat_min, tlat_max) lon_dict = get_index_dict('longitude', catalog, tlon_min, tlon_max) rating_dict = get_index_dict('rating', catalog) tc_end_apply_idxs = time() print 'apply indexes', tc_end_apply_idxs - tc_start_apply_idxs map_filters = {'latitude': {'query': (tlat_min, tlat_max), 'range': 'min:max'}, 'longitude': {'query': (tlon_min, tlon_max), 'range': 'min:max'}} rids = filter_rids(catalog, map_filters) if rids is None: r_list = [] else: r_list = list(set(rids)) points = make_points(r_list, lat_dict, lon_dict, type_dict, rating_dict) points_by_type = make_points_by_type(points) clusters = make_clusters(points_by_type, tlat_min, tlat_max, tlon_min, tlon_max, grid_size) points = [point_data(cluster, r_list) for cluster in clusters] tc_end = time() print 'clusters', tc_end - tc_start return json.dumps({'points': points})