Example #1
    def _Dynamic_Get(self, get_request, get_response):
        client = self._GetRiakClient()
        for key in get_request.key_list():
            appid_namespace, kind = self._AppIdNamespaceKindForKey(key)
            namespace = appid_namespace.rsplit('!', 1)[1] if '!' in appid_namespace else ''

            # keep the raw key protobuf and build the high-level Key wrapper
            key_pb = key
            key = datastore_types.Key._FromPb(key)
            
            entity_bucket_name = '%s_%s_%s' % (self.__app_id, namespace, kind)
            entity_bucket = client.bucket(entity_bucket_name)
            # binary values are searched in this bucket
            binary_bucket_name = entity_bucket_name + ':BINARY'
            binary_bucket = client.bucket(binary_bucket_name)
            riak_entity = entity_bucket.get(key.id_or_name()).get_data()

            group = get_response.add_entity()
            if riak_entity is None:
                # no stored data for this key; leave the group without an
                # entity so the caller sees it as "not found"
                continue

            entity = datastore.Entity(kind=kind, parent=key.parent(), name=key.name(), id=key.id())

            for property_name in riak_entity:
                prop = getattr(db.class_for_kind(kind), property_name)
                # convert the JSON value stored in Riak back into a datastore value
                property_value = self.__create_value_for_riak_value(prop, riak_entity[property_name], binary_bucket)
                entity[property_name] = property_value
            
            pb = entity._ToPb()
            #if not key.name():
            #   pb.key().path().element_list()[-1].set_id(key.id())
            
            group.mutable_entity().CopyFrom(pb)
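
Both examples rely on the private helper __create_value_for_riak_value, which is not part of this listing. The sketch below is only an assumption about what it might do: blob values are presumed to live in the companion ':BINARY' bucket under the key stored in the JSON document, and datetimes are presumed to be stored as POSIX timestamps (the sorting code in Example #2 treats them as numbers).

    def __create_value_for_riak_value(self, prop, riak_value, binary_bucket):
        # Hypothetical sketch -- the real helper is not shown in this listing.
        if isinstance(prop, db.BlobProperty):
            # assumption: the JSON document only stores the key of the binary
            # object, and the payload itself lives in the ':BINARY' bucket
            binary = binary_bucket.get_binary(riak_value).get_data()
            return datastore_types.Blob(binary)
        if isinstance(prop, db.DateTimeProperty):
            # assumption: datetimes are persisted as POSIX timestamps
            return datetime.datetime.fromtimestamp(riak_value)
        # strings, numbers, booleans and lists of them round-trip through JSON
        return riak_value
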
Example #2
    def _Dynamic_RunQuery(self, query, query_result):
        client = self._GetRiakClient()
        kind = query.kind()
        keys_only = query.keys_only()
        filters = query.filter_list()
        orders = query.order_list()
        offset = query.offset()
        limit = query.limit()
        namespace = query.name_space()
        logging.debug('offset: %d limit: %d' % (offset, limit))
        entity_class = db.class_for_kind(kind)
        
        if filters or orders:
            row_limit = 0
        else:
            row_limit = offset + limit
        
        entity_bucket_name = '%s_%s_%s' % (self.__app_id, namespace, kind)
        entity_bucket = client.bucket(entity_bucket_name)
        binary_bucket_name = entity_bucket_name + ':BINARY'
        binary_bucket = client.bucket(binary_bucket_name)

        riak_query = client.add(entity_bucket_name)
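        # the phases added below run inside Riak: a JavaScript map phase
        # applies the filters, Riak.reduceSort orders the results and
        # Riak.reduceSlice applies offset/limit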
        
        operators = {datastore_pb.Query_Filter.LESS_THAN:             '<',
                     datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL:    '<=',
                     datastore_pb.Query_Filter.GREATER_THAN:          '>',
                     datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
                     datastore_pb.Query_Filter.EQUAL:                 '==',
                     }
        
        condition_list = []
        # helper function injected into the JavaScript map phase for
        # ListProperty filters; initialise it here so the map template below
        # also formats correctly when there are no filters
        loop_func = ''
        for filt in filters:
            assert filt.op() != datastore_pb.Query_Filter.IN
            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]
            filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                                    for filter_prop in filt.property_list()]
            filter_val = self.__get_filter_value_for_query(filter_val_list[0])
            
            if isinstance(getattr(entity_class, prop), db.ListProperty):
                # filters on a ListProperty have a different meaning in GAE
                if isinstance(filter_val, types.ListType):
                    loop_func = _JS_LIST_FILTER_MULTIARG_FUNC % (op, op)
                    condition = 'applyFilter(data.%s, %r, %r)' % (prop, filter_val[0], filter_val[1])
                else:
                    loop_func = _JS_LIST_FILTER_FUNC % op
                    condition = 'applyFilter(data.%s, %r)' % (prop, filter_val)
            else:
                # generate other filter conditions
                if isinstance(filter_val, types.ListType):
                    condition = 'data.%s[0] %s %r && data.%s[1] %s %r' % \
                            (prop, op, filter_val[0], prop, op, filter_val[1])
                else:
                    condition = 'data.%s %s %r' % (prop, op, filter_val)
            condition_list.append(condition)

        if not condition_list:
            filter_condition = 'true'
        else:
            filter_condition = ' && '.join(condition_list).strip()
        filter_condition = 'if (%s)' % filter_condition
        
        # add a map phase to filter out entities
        map_func = _JS_MAP_FUNCTION % (loop_func, filter_condition)
        logging.debug('map function: %s' % map_func)
        riak_query.map(map_func)
        
        for order in orders:
            prop = order.property().decode('utf-8')
            if order.direction() == datastore_pb.Query_Order.DESCENDING:
                reduce_func = 'function(a, b) { return b.%s - a.%s }' % (prop, prop)
            else:
                reduce_func = 'function(a, b) { return a.%s - b.%s }' % (prop, prop)
            logging.debug('reduce function: %s' % reduce_func)
            # add a reduce phase to sort the entities based on property direction
            riak_query.reduce('Riak.reduceSort', {'arg': reduce_func})

        if limit:
            # reduce phase for applying limit
            start = offset
            end = offset + limit
            logging.debug('reduce function: Riak.reduceSlice(start: %d, end: %d)' % (start, end))
            riak_query.reduce('Riak.reduceSlice', {'arg': [start, end]})

        results = []
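        # each map result is a [metadata, data] pair; the entity's encoded
        # datastore key travels in the Riak usermeta headers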
        for result in riak_query.run():
            metadata, riak_entity = result
            key = metadata['X-Riak-Meta']['X-Riak-Meta-Key']
            key = datastore_types.Key(encoded=key)
            entity = datastore.Entity(kind=kind, parent=key.parent(), name=key.name(), id=key.id())
            for property_name in riak_entity:
                prop = getattr(db.class_for_kind(kind), property_name)
                property_value = self.__create_value_for_riak_value(prop, riak_entity[property_name], binary_bucket)
                entity[property_name] = property_value
            results.append(entity)

        query.set_app(self.__app_id)
        datastore_types.SetNamespace(query, namespace)
        encoded = datastore_types.EncodeAppIdNamespace(self.__app_id, namespace)
    
        def order_compare_entities(a, b):
            """ Return a negative, zero or positive number depending on whether
            entity a is considered smaller than, equal to, or larger than b,
            according to the query's orderings. """
            cmped = 0
            for o in orders:
                prop = o.property().decode('utf-8')
        
                reverse = (o.direction() == datastore_pb.Query_Order.DESCENDING)
        
                a_val = datastore._GetPropertyValue(a, prop)
                if isinstance(a_val, list):
                    a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
        
                b_val = datastore._GetPropertyValue(b, prop)
                if isinstance(b_val, list):
                    b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
        
                cmped = order_compare_properties(a_val, b_val)
        
                if o.direction() == datastore_pb.Query_Order.DESCENDING:
                    cmped = -cmped
    
                if cmped != 0:
                    return cmped
    
            if cmped == 0:
                return cmp(a.key(), b.key())
    
        def order_compare_properties(x, y):
            """Return a negative, zero or positive number depending on whether
            property value x is considered smaller than, equal to, or larger than
            property value y. If x and y are different types, they're compared based
            on the type ordering used in the real datastore, which is based on the
            tag numbers in the PropertyValue PB.
            """
            if isinstance(x, datetime.datetime):
                x = datastore_types.DatetimeToTimestamp(x)
            if isinstance(y, datetime.datetime):
                y = datastore_types.DatetimeToTimestamp(y)
    
            x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
            y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)
    
            if x_type == y_type:
                try:
                    return cmp(x, y)
                except TypeError:
                    return 0
            else:
                return cmp(x_type, y_type)

        cursor = _Cursor(query, results, order_compare_entities)
        self.__queries[cursor.cursor] = cursor
    
        if query.has_count():
            count = query.count()
        elif query.has_limit():
            count = query.limit()
        else:
            count = _BATCH_SIZE
    
        cursor.PopulateQueryResult(query_result, count,
                                   query.offset(), compile=query.compile())
    
        if query.compile():
            compiled_query = query_result.mutable_compiled_query()
            compiled_query.set_keys_only(query.keys_only())
            compiled_query.mutable_primaryscan().set_index_name(query.Encode())
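
Example #2 interpolates three module-level JavaScript templates (_JS_MAP_FUNCTION, _JS_LIST_FILTER_FUNC and _JS_LIST_FILTER_MULTIARG_FUNC) that are not included in the listing. The sketch below is only an assumption about their shape, based on how they are formatted above and on the result loop reading the encoded key from metadata['X-Riak-Meta']['X-Riak-Meta-Key'].

# Hypothetical sketches of the JavaScript templates used above; the real
# constants are defined elsewhere in the module.

# %s slots: the optional applyFilter helper and the 'if (...)' filter condition.
_JS_MAP_FUNCTION = """
function(value, keyData, arg) {
  %s
  var metadata = value.values[0].metadata;
  var data = JSON.parse(value.values[0].data);
  %s {
    return [[metadata, data]];
  }
  return [];
}
"""

# %s slot: the comparison operator; matches entities whose list property
# contains at least one element satisfying the filter.
_JS_LIST_FILTER_FUNC = """
function applyFilter(list, value) {
  for (var i = 0; i < list.length; i++) {
    if (list[i] %s value) { return true; }
  }
  return false;
}
"""

# %s slots: the operator applied to both halves of a two-part stored value.
_JS_LIST_FILTER_MULTIARG_FUNC = """
function applyFilter(list, first, second) {
  for (var i = 0; i < list.length; i++) {
    if (list[i][0] %s first && list[i][1] %s second) { return true; }
  }
  return false;
}
"""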