    async def get_index(self,
                        bucket,
                        index,
                        startkey,
                        endkey=None,
                        return_terms=None,
                        max_results=None,
                        continuation=None,
                        timeout=None,
                        term_regex=None):

        req = self._encode_index_req(bucket,
                                     index,
                                     startkey,
                                     endkey,
                                     return_terms,
                                     max_results,
                                     continuation,
                                     timeout,
                                     term_regex,
                                     streaming=False)
        msg_code, resp = await self._request(messages.MSG_CODE_INDEX_REQ, req,
                                             messages.MSG_CODE_INDEX_RESP)
        if return_terms and resp.results:
            results = [(decode_index_value(index, pair.key),
                        bytes_to_str(pair.value))
                       for pair in resp.results]
        else:
            results = [bytes_to_str(key) for key in resp.keys]

        if max_results is not None and resp.HasField('continuation'):
            return results, bytes_to_str(resp.continuation)
        else:
            return results, None

    def get_index(self, bucket, index, startkey, endkey=None,
                  return_terms=None, max_results=None, continuation=None,
                  timeout=None, term_regex=None):
        if not self.pb_indexes():
            return self._get_index_mapred_emu(bucket, index, startkey, endkey)

        if term_regex and not self.index_term_regex():
            raise NotImplementedError("Secondary index term_regex is not "
                                      "supported")

        req = self._encode_index_req(bucket, index, startkey, endkey,
                                     return_terms, max_results, continuation,
                                     timeout, term_regex)

        msg_code, resp = self._request(MSG_CODE_INDEX_REQ, req,
                                       MSG_CODE_INDEX_RESP)

        if return_terms and resp.results:
            results = [(decode_index_value(index, pair.key),
                        bytes_to_str(pair.value))
                       for pair in resp.results]
        else:
            results = resp.keys[:]
            if PY3:
                results = [bytes_to_str(key) for key in resp.keys]

        if max_results is not None and resp.HasField('continuation'):
            return (results, bytes_to_str(resp.continuation))
        else:
            return (results, None)
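
Both get_index variants above return a (results, continuation) pair, so paginated
secondary-index queries follow the same loop in either transport (the async version
would simply await the call). A minimal usage sketch, assuming a transport object
that exposes the get_index shown above; the helper name is illustrative:

def iter_index_keys(transport, bucket, index, startkey, endkey=None):
    # Page through a secondary index 25 keys at a time, following the
    # continuation token until the server stops returning one.
    continuation = None
    while True:
        results, continuation = transport.get_index(
            bucket, index, startkey, endkey,
            max_results=25, continuation=continuation)
        for key in results:
            yield key
        if continuation is None:
            break
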
    def test_encode_data_for_put(self):
        c = PbufCodec()
        tsobj = TsObject(None, self.table, self.rows, None)
        msg = c.encode_timeseries_put(tsobj)
        req = riak.pb.riak_ts_pb2.TsPutReq()
        req.ParseFromString(msg.data)

        # NB: expected, actual
        self.assertEqual(self.table.name, bytes_to_str(req.table))
        self.assertEqual(len(self.rows), len(req.rows))

        r0 = req.rows[0]
        self.assertEqual(bytes_to_str(r0.cells[0].varchar_value),
                         self.rows[0][0])
        self.assertEqual(r0.cells[1].sint64_value, self.rows[0][1])
        self.assertEqual(r0.cells[2].double_value, self.rows[0][2])
        self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms)
        self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4])
        self.assertFalse(r0.cells[5].HasField('varchar_value'))

        r1 = req.rows[1]
        self.assertEqual(bytes_to_str(r1.cells[0].varchar_value),
                         self.rows[1][0])
        self.assertEqual(r1.cells[1].sint64_value, self.rows[1][1])
        self.assertEqual(r1.cells[2].double_value, self.rows[1][2])
        self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms)
        self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4])
        self.assertEqual(r1.cells[5].varchar_value, self.rows[1][5])
 def get_server_info(self):
     """
     Get information about the server
     """
     msg_code, resp = self._request(MSG_CODE_GET_SERVER_INFO_REQ,
                                    expect=MSG_CODE_GET_SERVER_INFO_RESP)
     return {'node': bytes_to_str(resp.node),
             'server_version': bytes_to_str(resp.server_version)}
Example #7
 def validate_keylist(self, streamed_keys, keylist):
     self.assertNotEqual([], keylist)
     streamed_keys += keylist
     for key in keylist:
         self.assertIsInstance(key, list)
         self.assertEqual(len(key), 3)
         self.assertEqual(bytes_to_str(key[0]), 'hash1')
         self.assertEqual(bytes_to_str(key[1]), 'user2')
         self.assertIsInstance(key[2], datetime.datetime)
Example #9
    def _decode_modfun(self, modfun):
        """
        Decodes a protobuf modfun pair into a dict with 'mod' and
        'fun' keys. Used in bucket properties.

        :param modfun: the protobuf message to decode
        :type modfun: riak_pb.RpbModFun
        :rtype: dict
        """
        return {"mod": bytes_to_str(modfun.module), "fun": bytes_to_str(modfun.function)}
Example #11
 def _decode_search_doc(self, doc):
     resultdoc = MultiDict()
     for pair in doc.fields:
         if PY2:
             ukey = unicode(pair.key, 'utf-8')    # noqa
             uval = unicode(pair.value, 'utf-8')  # noqa
         else:
             ukey = bytes_to_str(pair.key)
             uval = bytes_to_str(pair.value)
         resultdoc.add(ukey, uval)
     return resultdoc.mixed()
Example #12
    def decode_modfun(self, modfun):
        """
        Decodes a protobuf modfun pair into a dict with 'mod' and
        'fun' keys. Used in bucket properties.

        :param modfun: the protobuf message to decode
        :type modfun: riak.pb.riak_pb2.RpbModFun
        :rtype: dict
        """
        return {'mod': bytes_to_str(modfun.module),
                'fun': bytes_to_str(modfun.function)}
    def get_search_schema(self, schema):
        if not self.pb_search_admin():
            raise NotImplementedError("Search 2.0 administration is not "
                                      "supported for this version")
        req = riak_pb.RpbYokozunaSchemaGetReq(name=str_to_bytes(schema))

        msg_code, resp = self._request(MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, req,
                                       MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP)
        result = {}
        result['name'] = bytes_to_str(resp.schema.name)
        result['content'] = bytes_to_str(resp.schema.content)
        return result
Example #15
 def validate_data(self, ts_obj):
     if ts_obj.columns is not None:
         self.assertEqual(len(ts_obj.columns.names), self.numCols)
         self.assertEqual(len(ts_obj.columns.types), self.numCols)
     self.assertEqual(len(ts_obj.rows), 1)
     row = ts_obj.rows[0]
     self.assertEqual(bytes_to_str(row[0]), 'hash1')
     self.assertEqual(bytes_to_str(row[1]), 'user2')
     self.assertEqual(row[2], self.fiveMinsAgo)
     self.assertEqual(row[2].microsecond, 987000)
     self.assertEqual(bytes_to_str(row[3]), 'wind')
     self.assertIsNone(row[4])
 def test_stream_keys(self):
     table = Table(self.client, table_name)
     streamed_keys = []
     for keylist in table.stream_keys():
         self.assertNotEqual([], keylist)
         streamed_keys += keylist
         for key in keylist:
             self.assertIsInstance(key, list)
             self.assertEqual(len(key), 3)
             self.assertEqual(bytes_to_str(key[0]), 'hash1')
             self.assertEqual(bytes_to_str(key[1]), 'user2')
             self.assertIsInstance(key[2], datetime.datetime)
     self.assertGreater(len(streamed_keys), 0)
 def _parse_msg(self):
     '''Parse a protobuf message.'''
     self._msg = self._data[
         self.HEADER_LENGTH:self.HEADER_LENGTH + self._msglen]
     self.msg_code, = struct.unpack("B", self._msg[:1])
     # Compare with ==, not "is": identity checks on integers only hold
     # by accident of CPython's small-int caching.
     if self.msg_code == messages.MSG_CODE_ERROR_RESP:
         error = self._get_pb_msg(self.msg_code, self._msg[1:])
         logger.error('Riak error message received: %s',
                      bytes_to_str(error.errmsg))
         raise RiakError(bytes_to_str(error.errmsg))
     elif self.msg_code in messages.MESSAGE_CLASSES:
         logger.debug('Normal message with code %d received', self.msg_code)
         self.msg = self._get_pb_msg(self.msg_code, self._msg[1:])
     else:
         logger.error('Unknown message received [%d]', self.msg_code)
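
The slicing above relies on Riak's protocol buffers framing: a big-endian length
prefix (assuming HEADER_LENGTH == 4) that counts the one-byte message code plus
the protobuf payload, but not itself. A hedged sketch of the matching encoder,
with an illustrative name:

import struct

def frame_message(msg_code, payload=b''):
    # The length prefix counts the code byte and payload, not the prefix itself.
    return struct.pack('>I', len(payload) + 1) + struct.pack('B', msg_code) + payload
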
Example #21
    def _decode_search_index(self, index):
        """
        Fills an RpbYokozunaIndex message with the appropriate data.

        :param index: a yz index message
        :type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex
        :rtype dict
        """
        result = {}
        result['name'] = bytes_to_str(index.name)
        if index.HasField('schema'):
            result['schema'] = bytes_to_str(index.schema)
        if index.HasField('n_val'):
            result['n_val'] = index.n_val
        return result
Example #22
    def decode_index_req(self, resp, index,
                         return_terms=None, max_results=None):
        if return_terms and resp.results:
            results = [(decode_index_value(index, pair.key),
                        bytes_to_str(pair.value))
                       for pair in resp.results]
        else:
            results = resp.keys[:]
            if six.PY3:
                results = [bytes_to_str(key) for key in resp.keys]

        if max_results is not None and resp.HasField('continuation'):
            return (results, bytes_to_str(resp.continuation))
        else:
            return (results, None)
Example #27
 def test_encode_data_for_listkeys(self):
     c = PbufCodec(client_timeouts=True)
     msg = c.encode_timeseries_listkeysreq(self.table, 1234)
     req = riak.pb.riak_ts_pb2.TsListKeysReq()
     req.ParseFromString(msg.data)
     self.assertEqual(self.table.name, bytes_to_str(req.table))
     self.assertEqual(1234, req.timeout)
Example #28
    def riak_http_search_query(self,
                               solr_core,
                               solr_params,
                               count_deleted=False):
        """
        This method is for advanced SOLR queries. Riak HTTP search query endpoint,
        sends solr_params and query string as a proxy and returns solr reponse.
        
        Args:
            solr_core (str): solr core on which query will be executed
            
            solr_params (str): solr specific query params, such as rows, start, fl, df, wt etc..
            
            count_deleted (bool): ignore deleted records or not 
        
        Returns:
            (dict): dict of solr response
        
        """

        # Append the current _solr_query params; "%3A" is a URL-encoded ':'
        sq = ["%s%%3A%s" % (q[0], q[1]) for q in self._solr_query]
        if not count_deleted:
            sq.append("-deleted%3ATrue")

        search_host = "http://%s:%s/search/query/%s?wt=json&q=%s&%s" % (
            settings.RIAK_SERVER, settings.RIAK_HTTP_PORT, solr_core,
            "+AND+".join(sq), solr_params)

        return json.loads(bytes_to_str(urlopen(search_host).read()))
Example #29
    def _decode_bucket_props(self, msg):
        """
        Decodes the protobuf bucket properties message into a dict.

        :param msg: the protobuf message to decode
        :type msg: riak.pb.riak_pb2.RpbBucketProps
        :rtype: dict
        """
        props = {}

        for prop in NORMAL_PROPS:
            if msg.HasField(prop):
                props[prop] = getattr(msg, prop)
                if isinstance(props[prop], bytes):
                    props[prop] = bytes_to_str(props[prop])
        for prop in COMMIT_HOOK_PROPS:
            if getattr(msg, 'has_' + prop):
                props[prop] = self._decode_hooklist(getattr(msg, prop))
        for prop in MODFUN_PROPS:
            if msg.HasField(prop):
                props[prop] = self._decode_modfun(getattr(msg, prop))
        for prop in QUORUM_PROPS:
            if msg.HasField(prop):
                props[prop] = self._decode_quorum(getattr(msg, prop))
        if msg.HasField('repl'):
            props['repl'] = REPL_TO_PY[msg.repl]

        return props
Example #30
    def decode_timeseries(self, resp, tsobj,
                          convert_timestamp=False):
        """
        Fills an TsObject with the appropriate data and
        metadata from a TsGetResp / TsQueryResp.

        :param resp: the protobuf message from which to process data
        :type resp: riak.pb.riak_ts_pb2.TsQueryRsp or
                    riak.pb.riak_ts_pb2.TsGetResp
        :param tsobj: a TsObject
        :type tsobj: TsObject
        :param convert_timestamp: Convert timestamps to datetime objects
        :type tsobj: boolean
        """
        if resp.columns is not None:
            col_names = []
            col_types = []
            for col in resp.columns:
                col_names.append(bytes_to_str(col.name))
                col_type = self.decode_timeseries_col_type(col.type)
                col_types.append(col_type)
            tsobj.columns = TsColumns(col_names, col_types)

        tsobj.rows = []
        if resp.rows is not None:
            for row in resp.rows:
                tsobj.rows.append(
                    self.decode_timeseries_row(
                        row, resp.columns, convert_timestamp))
Example #31
    def get_buckets(self, transport, bucket_type=None, timeout=None):
        """
        get_buckets(bucket_type=None, timeout=None)

        Get the list of buckets as :class:`RiakBucket
        <riak.bucket.RiakBucket>` instances.

        .. warning:: Do not use this in production, as it requires
           traversing through all keys stored in a cluster.

        .. note:: This request is automatically retried :attr:`retries`
           times if it fails due to network error.

        :param bucket_type: the optional containing bucket type
        :type bucket_type: :class:`~riak.bucket.BucketType`
        :param timeout: a timeout value in milliseconds
        :type timeout: int
        :rtype: list of :class:`RiakBucket <riak.bucket.RiakBucket>`
                instances
        """
        _validate_timeout(timeout)
        if bucket_type:
            bucketfn = self._bucket_type_bucket_builder
        else:
            bucketfn = self._default_type_bucket_builder

        return [
            bucketfn(bytes_to_str(name), bucket_type)
            for name in transport.get_buckets(bucket_type=bucket_type,
                                              timeout=timeout)
        ]
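
A hedged usage sketch: in the riak client this method is typically wrapped so the
transport argument is injected by the retry machinery, letting callers omit it
(an assumption based on the signature above):

# Assumes `client` supplies the transport via its retry wrapper.
for bucket in client.get_buckets(timeout=5000):
    print(bucket.name)
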
Example #32
    def next(self):
        response = super(RiakPbcMapredStream, self).next()

        if response.done and not response.HasField('response'):
            raise StopIteration

        return response.phase, json.loads(bytes_to_str(response.response))
Example #33
    def next(self):
        response = super(PbufMapredStream, self).next()

        if response.done and not response.HasField('response'):
            raise StopIteration

        return response.phase, json.loads(bytes_to_str(response.response))
Example #34
    def get_index(self, bucket, index, startkey, endkey=None,
                  return_terms=None, max_results=None, continuation=None,
                  timeout=None, term_regex=None):
        """
        Performs a secondary index query.
        """
        if term_regex and not self.index_term_regex():
            raise NotImplementedError("Secondary index term_regex is not "
                                      "supported on %s" %
                                      self.server_version.vstring)

        if timeout == 'infinity':
            timeout = 0

        params = {'return_terms': return_terms, 'max_results': max_results,
                  'continuation': continuation, 'timeout': timeout,
                  'term_regex': term_regex}
        bucket_type = self._get_bucket_type(bucket.bucket_type)
        url = self.index_path(bucket.name, index, startkey, endkey,
                              bucket_type=bucket_type, **params)
        status, headers, body = self._request('GET', url)
        self.check_http_code(status, [200])
        json_data = json.loads(bytes_to_str(body))
        if return_terms and u'results' in json_data:
            results = []
            for result in json_data[u'results'][:]:
                term, key = list(result.items())[0]
                results.append((decode_index_value(index, term), key),)
        else:
            results = json_data[u'keys'][:]

        if max_results and u'continuation' in json_data:
            return (results, json_data[u'continuation'])
        else:
            return (results, None)
Example #38
    async def put(self, robj, return_body=True):
        bucket = robj.bucket

        req = riak_pb.RpbPutReq()

        if return_body:
            req.return_body = 1

        req.bucket = str_to_bytes(bucket.name)
        self._add_bucket_type(req, bucket.bucket_type)

        if robj.key:
            req.key = str_to_bytes(robj.key)
        if robj.vclock:
            req.vclock = robj.vclock.encode('binary')

        self._encode_content(robj, req.content)

        msg_code, resp = await self._request(messages.MSG_CODE_PUT_REQ, req,
                                             messages.MSG_CODE_PUT_RESP)

        if resp is not None:
            if resp.HasField('key'):
                robj.key = bytes_to_str(resp.key)
            if resp.HasField('vclock'):
                robj.vclock = VClock(resp.vclock, 'binary')
            if resp.content:
                self._decode_contents(resp.content, robj)
        elif not robj.key:
            raise RiakError("missing response object")

        return robj
Example #39
def mkpath(*segments, **query):
    """
    Constructs the path & query portion of a URI from path segments
    and a dict.
    """
    # Remove empty segments (e.g. no key specified)
    segments = [bytes_to_str(s) for s in segments if s is not None]
    # Join the segments into a path
    pathstring = '/'.join(segments)
    # Remove extra slashes
    pathstring = re.sub('/+', '/', pathstring)

    # Add the query string if it exists
    _query = {}
    for key in query:
        if query[key] in [False, True]:
            _query[key] = str(query[key]).lower()
        elif query[key] is not None:
            if PY2 and isinstance(query[key], unicode):  # noqa
                _query[key] = query[key].encode('utf-8')
            else:
                _query[key] = query[key]

    if len(_query) > 0:
        pathstring += "?" + urlencode(_query)

    if not pathstring.startswith('/'):
        pathstring = '/' + pathstring

    return pathstring
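
Illustrative calls, with results implied by the implementation above (query-parameter
order follows urlencode over the dict, so it is only stable on Python 3.7+):

mkpath('buckets', 'mybucket', 'keys')        # '/buckets/mybucket/keys'
mkpath('buckets', 'mybucket', None)          # '/buckets/mybucket' (None dropped)
mkpath('search', q='name_s:Alice', rows=10)  # '/search?q=name_s%3AAlice&rows=10'
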
Example #42
 def __init__(self, current):
     """
     GET method handler.

     Args:
         current: request/response context object.
     """
     import sys
     from pyoko.modelmeta import model_registry
     out = []
     for mdl_name in sys.PYOKO_LOGS.copy():
         try:
             mdl = model_registry.get_model(mdl_name)
         except KeyError:
             continue
         bucket_name = mdl.objects.adapter.bucket.name
         mdl.objects.adapter.bucket.set_decoder('application/json', lambda a: bytes_to_str(a))
         for k in set(sys.PYOKO_LOGS[mdl_name]):
             if k not in sys.PYOKO_LOGS['new']:
                 obj = mdl.objects.data().get(k)
                 print(obj)
                 out.append("{}/|{}/|{}".format(
                     bucket_name, k, obj[0]))
                 # print(str(mdl.objects.get(k).name))
         sys.PYOKO_LOGS[mdl_name] = []
         mdl.objects.adapter.bucket.set_decoder('application/json', binary_json_decoder)
     sys.PYOKO_LOGS['new'] = []
     current.output = {
         'response': "\n".join(out),
         'http_headers': (('Content-Type', 'text/plain; charset=utf-8'),
                          ),
     }
Example #43
 def maybe_riak_error(self, msg_code, data=None):
     if msg_code == riak.pb.messages.MSG_CODE_ERROR_RESP:
         if data is None:
             raise RiakError('no error provided!')
         else:
             err = parse_pbuf_msg(msg_code, data)
             raise RiakError(bytes_to_str(err.errmsg))
Example #47
    def _parse_body(self, robj, response, expected_statuses):
        """
        Parse the body of an object response and populate the object.
        """
        # If no response given, then return.
        if response is None:
            return None

        status, headers, data = response

        # Check if the server is down(status==0)
        if not status:
            m = 'Could not contact Riak Server: http://{0}:{1}!'.format(
                self._node.host, self._node.http_port)
            raise RiakError(m)

        # Make sure expected code came back
        self.check_http_code(status, expected_statuses)

        if 'x-riak-vclock' in headers:
            robj.vclock = VClock(headers['x-riak-vclock'], 'base64')

        # If 404(Not Found), then clear the object.
        if status == 404:
            robj.siblings = []
            return None
        # If 201 Created, we need to extract the location and set the
        # key on the object.
        elif status == 201:
            robj.key = headers['location'].strip().split('/')[-1]
        # If 300(Siblings), apply the siblings to the object
        elif status == 300:
            ctype, params = parse_header(headers['content-type'])
            if ctype == 'multipart/mixed':
                if six.PY3:
                    data = bytes_to_str(data)
                boundary = re.compile('\r?\n--%s(?:--)?\r?\n' %
                                      re.escape(params['boundary']))
                parts = [message_from_string(p)
                         for p in re.split(boundary, data)[1:-1]]
                robj.siblings = [self._parse_sibling(RiakContent(robj),
                                                     part.items(),
                                                     part.get_payload())
                                 for part in parts]

                # Invoke sibling-resolution logic
                if robj.resolver is not None:
                    robj.resolver(robj)

                return robj
            else:
                raise Exception('unexpected sibling response format: {0}'.
                                format(ctype))

        robj.siblings = [self._parse_sibling(RiakContent(robj),
                                             headers.items(),
                                             data)]

        return robj
Example #49
 def _decode_map_value(self, entries):
     out = {}
     for entry in entries:
         name = bytes_to_str(entry.field.name[:])
         dtype = MAP_FIELD_TYPES[entry.field.type]
         if dtype == 'counter':
             value = entry.counter_value
         elif dtype == 'set':
             value = self._decode_set_value(entry.set_value)
         elif dtype == 'register':
             value = bytes_to_str(entry.register_value[:])
         elif dtype == 'flag':
             value = entry.flag_value
         elif dtype == 'map':
             value = self._decode_map_value(entry.map_value)
         out[(name, dtype)] = value
     return out
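
The shape of the mapping this returns, sketched for a hypothetical map value; the
exact container for the set entry depends on _decode_set_value:

# Keys are (name, type) tuples; values vary by CRDT field type.
example_map_value = {
    ('followers', 'counter'): 10,
    ('name', 'register'): 'Alice',
    ('active', 'flag'): True,
    ('interests', 'set'): ['cycling', 'riak'],  # per _decode_set_value
}
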
Example #51
    def next(self):
        response = super(PbufIndexStream, self).next()

        if response.done and not (response.keys or response.results
                                  or response.continuation):
            raise StopIteration

        if self.return_terms and response.results:
            return [(decode_index_value(self.index, r.key),
                     bytes_to_str(r.value))
                    for r in response.results]
        elif response.keys:
            if PY2:
                return response.keys[:]
            else:
                return [bytes_to_str(key) for key in response.keys]
        elif response.continuation:
            return CONTINUATION(bytes_to_str(response.continuation))

def binary_json_encoder(obj):
    """
    Default encoder for JSON datatypes, which returns UTF-8 encoded
    JSON instead of the default, bloated \uXXXX-escaped ASCII strings.
    """
    if isinstance(obj, bytes):
        return json.dumps(bytes_to_str(obj), ensure_ascii=False).encode("utf-8")
    else:
        return json.dumps(obj, ensure_ascii=False).encode("utf-8")
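
A quick round-trip sketch covering both branches of the encoder:

binary_json_encoder({'name': 'müller'})   # b'{"name": "m\xc3\xbcller"}'
binary_json_encoder('x'.encode('utf-8'))  # b'"x"' (bytes are decoded first)
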
 def stats(self):
     """
     Gets performance statistics and server information
     """
     status, _, body = self._request("GET", self.stats_path(), {"Accept": "application/json"})
     if status == 200:
         return json.loads(bytes_to_str(body))
     else:
         return None
Example #54
    def stream_buckets(self, bucket_type=None, timeout=None):
        """
        Streams the list of buckets. This is a generator method that
        should be iterated over.

        .. warning:: Do not use this in production, as it requires
           traversing through all keys stored in a cluster.

        The caller should explicitly close the returned iterator,
        either using :func:`contextlib.closing` or calling ``close()``
        explicitly. Consuming the entire iterator will also close the
        stream; if the stream is not closed, the associated connection
        might not be returned to the pool. Example::

            from contextlib import closing

            # Using contextlib.closing
            with closing(client.stream_buckets()) as buckets:
                for bucket_list in buckets:
                    do_something(bucket_list)

            # Explicit close()
            stream = client.stream_buckets()
            for bucket_list in stream:
                 do_something(bucket_list)
            stream.close()

        :param bucket_type: the optional containing bucket type
        :type bucket_type: :class:`~riak.bucket.BucketType`
        :param timeout: a timeout value in milliseconds
        :type timeout: int
        :rtype: iterator that yields lists of :class:`RiakBucket
             <riak.bucket.RiakBucket>` instances

        """
        _validate_timeout(timeout)
        if bucket_type:
            bucketfn = self._bucket_type_bucket_builder
        else:
            bucketfn = self._default_type_bucket_builder

        resource = self._acquire()
        transport = resource.object
        stream = transport.stream_buckets(bucket_type=bucket_type,
                                          timeout=timeout)
        stream.attach(resource)
        try:
            for bucket_list in stream:
                bucket_list = [
                    bucketfn(bytes_to_str(name), bucket_type)
                    for name in bucket_list
                ]
                if len(bucket_list) > 0:
                    yield bucket_list
        finally:
            stream.close()
    def get_keys(self, bucket, timeout=None):
        """
        Lists all keys within a bucket.
        """
        keys = []
        for keylist in self.stream_keys(bucket, timeout=timeout):
            for key in keylist:
                keys.append(bytes_to_str(key))

        return keys
Example #57
    def next(self):
        response = super(RiakPbcIndexStream, self).next()

        if response.done and not (response.keys or
                                  response.results or
                                  response.continuation):
            raise StopIteration

        if self.return_terms and response.results:
            return [(decode_index_value(self.index, r.key),
                     bytes_to_str(r.value))
                    for r in response.results]
        elif response.keys:
            if PY2:
                return response.keys[:]
            else:
                return [bytes_to_str(key) for key in response.keys]
        elif response.continuation:
            return CONTINUATION(bytes_to_str(response.continuation))