def reduce(self, *args):
    """
    Start assembling a Map/Reduce operation. A shortcut for
    :meth:`~riak.mapreduce.RiakMapReduce.reduce`.

    :rtype: :class:`~riak.mapreduce.RiakMapReduce`
    """
    # Seed the operation with this object's bucket/key, then delegate.
    mapred = RiakMapReduce(self.client).add(self.bucket.name, self.key)
    return mapred.reduce(*args)
def reduce(self, *args):
    """
    Start assembling a Map/Reduce operation. A shortcut for
    :func:`RiakMapReduce.reduce`.

    :rtype: RiakMapReduce
    """
    # Build a new job keyed on this object, then hand off the phase args.
    job = RiakMapReduce(self.client)
    job.add(self.bucket.name, self.key)
    return job.reduce(*args)
def test_stream_results(self):
    """
    Streaming a map/reduce job yields the same results as a blocking run.

    Uses ``self.bucket_name`` (per-run bucket name from the test base
    class) instead of a hard-coded ``"bucket"`` so concurrent/repeated
    test runs do not collide on shared data, consistent with the other
    streaming tests in this file.
    """
    bucket = self.client.bucket(self.bucket_name)
    bucket.new('one', data=1).store()
    bucket.new('two', data=2).store()
    mr = RiakMapReduce(self.client).add(self.bucket_name, 'one')\
        .add(self.bucket_name, 'two')
    mr.map_values_json()
    results = []
    # stream() yields (phase, data) pairs; collect all phase output.
    for phase, data in mr.stream():
        results.extend(data)
    self.assertEqual(sorted(results), [1, 2])
def test_stream_results(self):
    """Streaming a map/reduce job returns all phase output."""
    bucket = self.client.bucket(self.bucket_name)
    # Store two small JSON values to feed the job.
    for key, value in (('one', 1), ('two', 2)):
        bucket.new(key, data=value).store()
    job = RiakMapReduce(self.client)
    job.add(self.bucket_name, 'one')
    job.add(self.bucket_name, 'two')
    job.map_values_json()
    collected = []
    # stream() yields (phase, data) pairs; gather every data chunk.
    for _phase, chunk in job.stream():
        collected.extend(chunk)
    self.assertEqual(sorted(collected), [1, 2])
def test_stream_cleanoperationsup(self):
    """
    Aborting a map/reduce stream mid-iteration must release the
    underlying connection so later client operations still work.

    Uses ``self.bucket_name`` (per-run bucket name from the test base
    class) instead of a hard-coded ``"bucket"``, for isolation and
    consistency with the sibling streaming tests in this file.
    """
    bucket = self.client.bucket(self.bucket_name)
    bucket.new('one', data=1).store()
    bucket.new('two', data=2).store()
    mr = RiakMapReduce(self.client).add(self.bucket_name, 'one')\
        .add(self.bucket_name, 'two')
    mr.map_values_json()
    try:
        # Bail out on the first chunk to leave the stream unconsumed.
        for phase, data in mr.stream():
            raise RuntimeError("woops")
    except RuntimeError:
        pass
    # This should not raise an exception
    obj = bucket.get('one')
    self.assertEqual(1, obj.data)
def reduce(self, *args):
    """
    Start assembling a Map/Reduce operation. A shortcut for
    :func:`RiakMapReduce.reduce`.

    :rtype: :class:`RiakMapReduce`
    """
    mr = RiakMapReduce(self)
    # apply() was removed in Python 3; argument unpacking is the
    # direct equivalent of the old apply(mr.reduce, args) call.
    return mr.reduce(*args)
def test_stream_cleanoperationsup(self):
    """Aborting a stream mid-iteration must not poison the client."""
    bucket = self.client.bucket(self.bucket_name)
    for key, value in (('one', 1), ('two', 2)):
        bucket.new(key, data=value).store()
    job = RiakMapReduce(self.client)
    job.add(self.bucket_name, 'one')
    job.add(self.bucket_name, 'two')
    job.map_values_json()
    try:
        # Abort immediately so the stream is abandoned half-read.
        for phase, data in job.stream():
            raise RuntimeError("woops")
    except RuntimeError:
        pass
    # This should not raise an exception
    obj = bucket.get('one')
    self.assertEqual(1, obj.data)
def test_stream_cleanoperationsup(self):
    """Aborting a stream mid-iteration must not poison the client."""
    bucket = self.client.bucket(self.bucket_name)
    for key, value in (('one', 1), ('two', 2)):
        bucket.new(key, data=value).store()
    job = RiakMapReduce(self.client)
    job.add(self.bucket_name, 'one')
    job.add(self.bucket_name, 'two')
    job.map_values_json()
    try:
        # Abort immediately so the stream is abandoned half-read.
        for phase, data in job.stream():
            raise RuntimeError("woops")
    except RuntimeError:
        pass
    # This should not raise an exception
    obj = bucket.get('one')
    self.assertEqual('1', obj.encoded_data)
def index(self, *args):
    """
    Start assembling a Map/Reduce operation based on secondary
    index query results.

    :rtype: :class:`RiakMapReduce`
    """
    mr = RiakMapReduce(self)
    # apply() was removed in Python 3; argument unpacking is the
    # direct equivalent of the old apply(mr.index, args) call.
    return mr.index(*args)
def add(self, arg1, arg2=None, arg3=None, bucket_type=None):
    """
    Start assembling a Map/Reduce operation. A shortcut for
    :meth:`~riak.mapreduce.RiakMapReduce.add`.

    :param arg1: the object or bucket to add
    :type arg1: RiakObject, string
    :param arg2: a key or list of keys to add (if a bucket is
        given in arg1)
    :type arg2: string, list, None
    :param arg3: key data for this input (must be convertible to
        JSON)
    :type arg3: string, list, dict, None
    :param bucket_type: Optional name of a bucket type
    :type bucket_type: string, None
    :rtype: :class:`~riak.mapreduce.RiakMapReduce`
    """
    mapred = RiakMapReduce(self.client)
    # First input is this object itself; the caller's input follows.
    mapred.add(self.bucket.name, self.key, bucket_type=bucket_type)
    return mapred.add(arg1, arg2, arg3, bucket_type=bucket_type)
def search(self, *args):
    """
    Start assembling a Map/Reduce operation based on search
    results. This command will return an error unless executed
    against a Riak Search cluster. A shortcut for
    :func:`RiakMapReduce.search`.

    :rtype: :class:`RiakMapReduce`
    """
    mr = RiakMapReduce(self)
    # apply() was removed in Python 3; argument unpacking is the
    # direct equivalent of the old apply(mr.search, args) call.
    return mr.search(*args)
def test_mr_search(self):
    """
    Try a successful map/reduce from search results.
    """
    btype = self.client.bucket_type(testrun_yz_mr['btype'])
    bucket = btype.bucket(testrun_yz_mr['bucket'])
    # Fixture documents: two "fruity" cereals and three non-fruity ones.
    cereals = [
        ("Pebbles", {"name_s": "Fruity Pebbles",
                     "maker_s": "Post",
                     "sugar_i": 9, "calories_i": 110,
                     "fruit_b": True}),
        ("Loops", {"name_s": "Froot Loops",
                   "maker_s": "Kellogg's",
                   "sugar_i": 12, "calories_i": 110,
                   "fruit_b": True}),
        ("Charms", {"name_s": "Lucky Charms",
                    "maker_s": "General Mills",
                    "sugar_i": 10, "calories_i": 110,
                    "fruit_b": False}),
        ("Count", {"name_s": "Count Chocula",
                   "maker_s": "General Mills",
                   "sugar_i": 9, "calories_i": 100,
                   "fruit_b": False}),
        ("Crunch", {"name_s": "Cap'n Crunch",
                    "maker_s": "Quaker Oats",
                    "sugar_i": 12, "calories_i": 110,
                    "fruit_b": False}),
    ]
    for key, doc in cereals:
        bucket.new(key, doc).store()
    # Wait for Solr to catch up
    wait_for_yz_index(bucket, "Crunch")
    mapred = RiakMapReduce(self.client).search(testrun_yz_mr['bucket'],
                                               'fruit_b:false')
    # Map each matching Solr doc to its calorie count.
    mapred.map("""function(v) { var solr_doc = JSON.parse(v.values[0].data); return [solr_doc["calories_i"]]; }""")
    # Reduce to the single lowest calorie count.
    reduced = mapred.reduce('function(values, arg) '
                            '{ return [values.sort()[0]]; }')
    self.assertEqual(reduced.run(), [100])
def _mapreduce_keyfilter(self, index, keyfilter, sortby=None, limit=None):
    """
    Helper function to execute a map-reduce query using the given key filter.

    :param index: name of the bucket to query (resolved via self._get_bucket)
    :param keyfilter: key-filter specification handed to Riak
    :param sortby: optional field name to sort the decoded results by
    :param limit: optional result cap; a negative value flips the sort
        direction (see the comparator below), abs(limit) rows are kept
    :return: list of decoded result documents, or None if the query failed
    """
    results = None
    log.debug("Riak query %r with key filter %r", index, keyfilter)
    mapred = RiakMapReduce(self.client)
    mapred.add_bucket(self._get_bucket(index))
    mapred.add_key_filters(keyfilter)
    # custom Riak.mapValuesJson() function that also adds the entry key
    # to the data structure
    mapred.map("""
        function(value, keyData, arg) {
            if (value.not_found) {
                return [value];
            }
            var _data, data = value["values"][0]["data"];
            if (Riak.getClassName(data) !== "Array") {
                _data = JSON.parse(data);
                _data["_key"] = value.key;
                return [_data];
            } else {
                return data
            }
        }
    """)
    if sortby is not None:
        # Negative limit means "take from the other end": invert the
        # comparison direction instead of reversing afterwards.
        comp = '<' if limit is not None and limit < 0 else '>'
        mapred.reduce_sort(
            'function(a,b){ return (a.%s || 0) %s (b.%s || 0) ? 1 : 0; }'
            % (sortby, comp, sortby))
        if limit is not None:
            mapred.reduce_limit(abs(limit))
    try:
        results = mapred.run()
    except Exception as e:  # Py3-compatible syntax ("except X, e" was Py2-only)
        log.warning("Riak MapReduce exception: %s", str(e))
        results = None
    # BUGFIX: the computed results were previously never returned.
    return results