def __get_cells(self, table, key):
    """Fetch every Hypertable cell stored under *key* in *table*."""
    spec = ht.ScanSpecBuilder()
    # Closed interval [key, key]: scan exactly the one matching row.
    spec.add_row_interval(key, True, key, True)
    # Newest version of each cell only; row_limit 0 means "no limit".
    spec.set_max_versions(1)
    spec.set_row_limit(0)
    return list(table.create_scanner(spec))
def _Dynamic_Get(self, get_request, get_response):
    """Handle a datastore Get RPC.

    Looks up each requested key in the Hypertable table named
    '<app_id>_<kind>' and decodes the stored 'props' cells into
    EntityProtos appended to get_response.

    NOTE(review): a later definition of _Dynamic_Get in this class
    replaces this one at class-creation time; only the last definition
    is live.
    """
    # Group the protobuf keys by kind so each kind's table is opened once.
    kind_keys_dict = {}
    for key_pb in get_request.key_list():
        kind = self._AppIdNamespaceKindForKey(key_pb)[1]
        kind_keys_dict.setdefault(kind, []).append(key_pb)
    for kind, key_pbs in kind_keys_dict.items():
        table_name = str('%s_%s' % (self.__app_id, kind))
        table = self.__client.open_table(table_name)
        for key_pb in key_pbs:
            key = datastore_types.Key._FromPb(key_pb)
            # Reuse the shared single-row scan helper instead of
            # rebuilding the ScanSpec inline.
            total_cells = self.__get_cells(table, str(key))
            entity_proto = entity_pb.EntityProto()
            entity_proto.mutable_key().CopyFrom(key_pb)
            # Each 'props' cell holds one property, encoded with the
            # sortable protobuf encoding; the qualifier is its name.
            for cell in total_cells:
                if cell.column_family == 'props':
                    prop_pb = entity_proto.add_property()
                    prop_pb.set_name(cell.column_qualifier.encode('utf-8'))
                    prop_pb.set_multiple(False)
                    log.debug('cell.value: %s' % repr(cell.value))
                    value_decoder = sortable_pb_encoder.Decoder(
                        array.array('B', str(cell.value)))
                    value_pb = prop_pb.mutable_value()
                    value_pb.Merge(value_decoder)
            group = get_response.add_entity()
            group.mutable_entity().CopyFrom(entity_proto)
def _Dynamic_Get(self, get_request, get_response):
    """Handle a datastore Get RPC.

    Rebuilds each requested entity from its pickled 'props' cells in the
    Hypertable table named '<app_id>_<kind>'.

    FIXME: keys arrive protobuf-encoded, so they are decoded up front.
    NOTE(review): reads self._app_id / self._client while sibling methods
    use the name-mangled self.__app_id / self.__client — confirm which
    attribute the class actually initializes.
    """
    keys = [
        datastore_types.Key._FromPb(key)
        for key in get_request.key_list()
    ]
    # Group keys by kind so each kind's table is opened only once.
    kind_keys_dict = {}
    for key in keys:
        kind_keys_dict.setdefault(key.kind(), []).append(key)
    for kind, kind_keys in kind_keys_dict.items():
        table_name = str('%s_%s' % (self._app_id, kind))
        table = self._client.open_table(table_name)
        for key in kind_keys:
            # Shared single-row scan helper: all cells for this row key.
            total_cells = self.__get_cells(table, str(key))
            # Merge cells sharing this row key into a single entity.
            entity = datastore.Entity(kind, _app=self._app_id,
                                      name=key.name(), id=key.id())
            for cell in total_cells:
                if cell.column_family == 'props':
                    # NOTE(review): pickle.loads on stored cell data is
                    # only safe while the store is trusted; never feed it
                    # externally supplied bytes.
                    entity[cell.column_qualifier] = pickle.loads(cell.value)
            group = get_response.add_entity()
            # FIXME: datastore.Get expects protobuf-encoded entities.
            group.mutable_entity().CopyFrom(entity.ToPb())
def _Dynamic_RunQuery(self, query, query_result):
    """Handle a datastore RunQuery RPC against Hypertable.

    Scans the table named after the query's kind, decodes each row's
    'entity:proto' cell back into a datastore Entity, then applies the
    query's filters, orderings, offset and limit in Python before
    populating query_result through a _Cursor.
    """
    kind = query.kind()
    keys_only = query.keys_only()
    filters = query.filter_list()
    orders = query.order_list()
    offset = query.offset()
    limit = query.limit()
    namespace = query.name_space()
    try:
        # One Hypertable namespace per (app_id, namespace); one table per kind.
        ns = self.__client.open_namespace('%s/%s' %
                                          (self.__app_id, namespace))
        table = ns.open_table(kind)
    except RuntimeError:
        # No table for this kind: the query legitimately has no results.
        # NOTE(review): returns without touching query_result — confirm
        # callers tolerate an unpopulated response.
        log.warning('No data for %s' % kind)
        return
    scan_spec_builder = ht.ScanSpecBuilder()
    scan_spec_builder.set_max_versions(1)
    if filters or orders:
        # Filtering/sorting happens below in Python, so every row is needed.
        scan_spec_builder.set_row_limit(0)
    else:
        # No post-processing: let Hypertable stop after offset + limit rows.
        scan_spec_builder.set_row_limit(offset + limit)
    # get the hypertable cells
    total_cells = [
        cell for cell in table.create_scanner(scan_spec_builder)
    ]
    # make a cell-key dictionary
    key_cell_dict = {}
    for cell in total_cells:
        if key_cell_dict.has_key(cell.row_key):
            key_cell_dict[cell.row_key].append(cell)
        else:
            key_cell_dict[cell.row_key] = [cell]
    pb_entities = []
    for key in key_cell_dict:
        # Row keys are encoded datastore keys.
        key_obj = datastore_types.Key(encoded=key)
        key_pb = key_obj._ToPb()
        for cell in key_cell_dict[key]:
            # Only the serialized EntityProto cell matters here.
            if cell.column_family == 'entity' and cell.column_qualifier == 'proto':
                entity_proto = entity_pb.EntityProto(str(cell.value))
                entity_proto.mutable_key().CopyFrom(key_pb)
                pb_entities.append(entity_proto)
    # NOTE(review): Python 2 map()/filter() return lists; the later
    # results.sort(...) call depends on that.
    results = map(lambda entity: datastore.Entity.FromPb(entity), pb_entities)
    query.set_app(self.__app_id)
    datastore_types.SetNamespace(query, namespace)
    encoded = datastore_types.EncodeAppIdNamespace(self.__app_id, namespace)
    # Map protobuf filter ops onto Python comparison operator strings,
    # later combined into expressions for eval().
    operators = {
        datastore_pb.Query_Filter.LESS_THAN: '<',
        datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
        datastore_pb.Query_Filter.GREATER_THAN: '>',
        datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
        datastore_pb.Query_Filter.EQUAL: '==',
    }

    def has_prop_indexed(entity, prop):
        """Returns True if prop is in the entity and is indexed."""
        if prop in datastore_types._SPECIAL_PROPERTIES:
            return True
        elif prop in entity.unindexed_properties():
            return False
        values = entity.get(prop, [])
        if not isinstance(values, (tuple, list)):
            values = [values]
        for value in values:
            # Indexed as long as at least one value is a non-raw type.
            if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
                return True
        return False

    for filt in filters:
        # IN filters are not supported by this stub.
        assert filt.op() != datastore_pb.Query_Filter.IN
        prop = filt.property(0).name().decode('utf-8')
        op = operators[filt.op()]
        filter_val_list = [
            datastore_types.FromPropertyPb(filter_prop)
            for filter_prop in filt.property_list()
        ]

        def passes_filter(entity):
            """Returns True if the entity passes the filter, False otherwise.

            The filter being evaluated is filt, the current filter that
            we're on in the list of filters in the query.
            """
            if not has_prop_indexed(entity, prop):
                return False
            try:
                entity_vals = datastore._GetPropertyValue(entity, prop)
            except KeyError:
                entity_vals = []
            if not isinstance(entity_vals, list):
                entity_vals = [entity_vals]
            for fixed_entity_val in entity_vals:
                for filter_val in filter_val_list:
                    fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
                        fixed_entity_val.__class__)
                    filter_type = self._PROPERTY_TYPE_TAGS.get(
                        filter_val.__class__)
                    if fixed_entity_type == filter_type:
                        # Same type: compare the values themselves.
                        comp = u'%r %s %r' % (fixed_entity_val, op,
                                              filter_val)
                    elif op != '==':
                        # Different types: order by type tag, mirroring the
                        # real datastore's cross-type ordering.
                        comp = '%r %s %r' % (fixed_entity_type, op,
                                             filter_type)
                    else:
                        continue
                    logging.log(logging.DEBUG - 1,
                                'Evaling filter expression "%s"', comp)
                    try:
                        # NOTE(review): eval() on repr()s of property
                        # values — only safe while values round-trip
                        # through repr; never run on untrusted data.
                        ret = eval(comp)
                        if ret and ret != NotImplementedError:
                            return True
                    except TypeError:
                        pass
            return False

        # Python 2 filter() is eager, so the current filt/prop/op bindings
        # are consumed before the next loop iteration rebinds them.
        results = filter(passes_filter, results)
    for order in orders:
        prop = order.property().decode('utf-8')
        # Orderings only apply to entities with the property indexed.
        results = [
            entity for entity in results if has_prop_indexed(entity, prop)
        ]

    def order_compare_entities(a, b):
        """ Return a negative, zero or positive number depending on whether
        entity a is considered smaller than, equal to, or larger than b,
        according to the query's orderings.
        """
        cmped = 0
        for o in orders:
            prop = o.property().decode('utf-8')
            reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
            a_val = datastore._GetPropertyValue(a, prop)
            if isinstance(a_val, list):
                # Multi-valued property: compare on its extreme element.
                a_val = sorted(a_val, order_compare_properties,
                               reverse=reverse)[0]
            b_val = datastore._GetPropertyValue(b, prop)
            if isinstance(b_val, list):
                b_val = sorted(b_val, order_compare_properties,
                               reverse=reverse)[0]
            cmped = order_compare_properties(a_val, b_val)
            if o.direction() is datastore_pb.Query_Order.DESCENDING:
                cmped = -cmped
            if cmped != 0:
                return cmped
        if cmped == 0:
            # Tie-break on key so the ordering is total and deterministic.
            return cmp(a.key(), b.key())

    def order_compare_properties(x, y):
        """Return a negative, zero or positive number depending on whether
        property value x is considered smaller than, equal to, or larger
        than property value y. If x and y are different types, they're
        compared based on the type ordering used in the real datastore,
        which is based on the tag numbers in the PropertyValue PB.
        """
        if isinstance(x, datetime.datetime):
            x = datastore_types.DatetimeToTimestamp(x)
        if isinstance(y, datetime.datetime):
            y = datastore_types.DatetimeToTimestamp(y)
        x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
        y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)
        if x_type == y_type:
            try:
                return cmp(x, y)
            except TypeError:
                # Incomparable same-type values are treated as equal.
                return 0
        else:
            return cmp(x_type, y_type)

    results.sort(order_compare_entities)
    cursor = _Cursor(query, results, order_compare_entities)
    self.__queries[cursor.cursor] = cursor
    # Batch size: explicit count beats limit beats the default.
    if query.has_count():
        count = query.count()
    elif query.has_limit():
        count = query.limit()
    else:
        count = _BATCH_SIZE
    cursor.PopulateQueryResult(query_result, count, query.offset(),
                               compile=query.compile())
    if query.compile():
        # Stash the original query so a compiled cursor can re-run it.
        compiled_query = query_result.mutable_compiled_query()
        compiled_query.set_keys_only(query.keys_only())
        compiled_query.mutable_primaryscan().set_index_name(query.Encode())