def encode_get(self, robj, r=None, pr=None, timeout=None,
               basic_quorum=None, notfound_ok=None):
    """
    Builds an RpbGetReq message for fetching an object.

    :param robj: the object whose bucket/key identify the target
    :param r: read quorum
    :param pr: primary read quorum (sent only when the connection
        supports quorum controls)
    :param timeout: request timeout in ms (sent only when supported)
    :param basic_quorum: basic-quorum flag (only when supported)
    :param notfound_ok: notfound-ok flag (only when supported)
    :rtype: Msg
    """
    req = kvhosting.pb.riak_kv_pb2.RpbGetReq()
    if r:
        req.r = self.encode_quorum(r)
    if self._quorum_controls:
        # These options are gated on server capability.
        if pr:
            req.pr = self.encode_quorum(pr)
        if basic_quorum is not None:
            req.basic_quorum = basic_quorum
        if notfound_ok is not None:
            req.notfound_ok = notfound_ok
    if self._client_timeouts and timeout:
        req.timeout = timeout
    if self._tombstone_vclocks:
        # Request the vclock of deleted objects (tombstones) as well.
        req.deletedvclock = True
    target = robj.bucket
    req.bucket = str_to_bytes(target.name)
    self._add_bucket_type(req, target.bucket_type)
    req.key = str_to_bytes(robj.key)
    return Msg(kvhosting.pb.messages.MSG_CODE_GET_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_GET_RESP)
def encode_stream_mapred(self, content):
    """
    Builds an RpbMapRedReq for a streaming MapReduce job.

    :param content: the JSON-encoded MapReduce job
    :rtype: Msg
    """
    req = kvhosting.pb.riak_kv_pb2.RpbMapRedReq()
    req.request = str_to_bytes(content)
    req.content_type = str_to_bytes("application/json")
    return Msg(kvhosting.pb.messages.MSG_CODE_MAP_RED_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_MAP_RED_RESP)
def encode_bucket_props(self, props, msg):
    """
    Encodes a dict of bucket properties into the protobuf message.

    :param props: bucket properties
    :type props: dict
    :param msg: the protobuf message to fill
    :type msg: kvhosting.pb.riak_pb2.RpbSetBucketReq
    """
    # Plain scalar properties; strings must become bytes for protobuf.
    for prop in NORMAL_PROPS:
        if prop in props and props[prop] is not None:
            if isinstance(props[prop], six.string_types):
                setattr(msg.props, prop, str_to_bytes(props[prop]))
            else:
                setattr(msg.props, prop, props[prop])
    # Commit hooks: the 'has_' flag is set so an empty hook list is
    # still transmitted explicitly.
    for prop in COMMIT_HOOK_PROPS:
        if prop in props:
            setattr(msg.props, 'has_' + prop, True)
            self.encode_hooklist(props[prop], getattr(msg.props, prop))
    # Module/function pair properties.
    for prop in MODFUN_PROPS:
        if prop in props and props[prop] is not None:
            self.encode_modfun(props[prop], getattr(msg.props, prop))
    # Quorum properties: None and 'default' are omitted entirely.
    for prop in QUORUM_PROPS:
        if prop in props and props[prop] not in (None, 'default'):
            value = self.encode_quorum(props[prop])
            if value is not None:
                if isinstance(value, six.string_types):
                    setattr(msg.props, prop, str_to_bytes(value))
                else:
                    setattr(msg.props, prop, value)
    if 'repl' in props:
        msg.props.repl = REPL_TO_PB[props['repl']]
    return msg
def encode_put(self, robj, w=None, dw=None, pw=None, return_body=True,
               if_none_match=False, timeout=None):
    """
    Builds an RpbPutReq message for storing an object.

    :param robj: the object to store
    :param w: write quorum
    :param dw: durable-write quorum
    :param pw: primary-write quorum (only when quorum controls are
        supported)
    :param return_body: whether the stored object should be returned
    :param if_none_match: only store when the key does not yet exist
    :param timeout: request timeout in ms (only when supported)
    :rtype: Msg
    """
    req = kvhosting.pb.riak_kv_pb2.RpbPutReq()
    if w:
        req.w = self.encode_quorum(w)
    if dw:
        req.dw = self.encode_quorum(dw)
    if self._quorum_controls and pw:
        req.pw = self.encode_quorum(pw)
    if return_body:
        req.return_body = 1
    if if_none_match:
        req.if_none_match = 1
    if self._client_timeouts and timeout:
        req.timeout = timeout
    target = robj.bucket
    req.bucket = str_to_bytes(target.name)
    self._add_bucket_type(req, target.bucket_type)
    if robj.key:
        # The key is optional in the request.
        req.key = str_to_bytes(robj.key)
    if robj.vclock:
        req.vclock = robj.vclock.encode('binary')
    self.encode_content(robj, req.content)
    return Msg(kvhosting.pb.messages.MSG_CODE_PUT_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_PUT_RESP)
def encode_search(self, index, query, **kwargs):
    """
    Builds an RpbSearchQueryReq for the given index and query.

    :param index: the search index name
    :param query: the query string
    :param kwargs: extra query options (see encode_search_query)
    :rtype: Msg
    """
    req = kvhosting.pb.riak_search_pb2.RpbSearchQueryReq(
        index=str_to_bytes(index),
        q=str_to_bytes(query))
    # Optional options (rows, start, sort, ...) are copied separately.
    self.encode_search_query(req, **kwargs)
    return Msg(kvhosting.pb.messages.MSG_CODE_SEARCH_QUERY_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_SEARCH_QUERY_RESP)
def encode_auth(self, username, password):
    """
    Builds an RpbAuthReq carrying the security credentials.

    :param username: Riak security user name
    :param password: Riak security password
    :rtype: Msg
    """
    req = kvhosting.pb.riak_pb2.RpbAuthReq()
    req.user = str_to_bytes(username)
    req.password = str_to_bytes(password)
    return Msg(kvhosting.pb.messages.MSG_CODE_AUTH_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_AUTH_RESP)
def encode_create_search_schema(self, schema, content):
    """
    Builds an RpbYokozunaSchemaPutReq to upload a search schema.

    :param schema: the schema name
    :param content: the schema body (XML text)
    :rtype: Msg
    """
    schema_msg = kvhosting.pb.riak_yokozuna_pb2.RpbYokozunaSchema(
        name=str_to_bytes(schema),
        content=str_to_bytes(content))
    req = kvhosting.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq(
        schema=schema_msg)
    # NB: schema-put replies use the generic PUT response code.
    return Msg(kvhosting.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_PUT_RESP)
def encode_get_preflist(self, bucket, key):
    """
    Builds an RpbGetBucketKeyPreflistReq for the given bucket/key.

    :param bucket: the bucket (with its bucket type)
    :param key: the object key
    :rtype: Msg
    """
    req = kvhosting.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    req.type = str_to_bytes(bucket.bucket_type.name)
    return Msg(kvhosting.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP)
def encode_fetch_datatype(self, bucket, key, **kwargs):
    """
    Builds a DtFetchReq for the datatype stored at bucket/key.

    :param bucket: the bucket (with its bucket type)
    :param key: the datatype key
    :param kwargs: datatype fetch options (see encode_dt_options)
    :rtype: Msg
    """
    req = kvhosting.pb.riak_dt_pb2.DtFetchReq()
    req.type = str_to_bytes(bucket.bucket_type.name)
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    self.encode_dt_options(req, **kwargs)
    return Msg(kvhosting.pb.messages.MSG_CODE_DT_FETCH_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_DT_FETCH_RESP)
def encode_get_counter(self, bucket, key, **kwargs):
    """
    Builds an RpbCounterGetReq for the counter at bucket/key.

    Recognized options: ``r``, ``pr`` (quorums), ``basic_quorum``,
    ``notfound_ok`` (booleans). Options are sent only when explicitly
    provided (not None).

    :rtype: Msg
    """
    req = kvhosting.pb.riak_kv_pb2.RpbCounterGetReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    for opt in ('r', 'pr'):
        if kwargs.get(opt) is not None:
            setattr(req, opt, self.encode_quorum(kwargs[opt]))
    for opt in ('basic_quorum', 'notfound_ok'):
        if kwargs.get(opt) is not None:
            setattr(req, opt, kwargs[opt])
    return Msg(kvhosting.pb.messages.MSG_CODE_COUNTER_GET_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_COUNTER_GET_RESP)
def encode_timeseries_put(self, tsobj):
    """
    Fills a TsPutReq message with the data and metadata of a TsObject.

    :param tsobj: a TsObject
    :type tsobj: TsObject
    :rtype: Msg
    :raises NotImplementedError: if the TsObject carries column metadata
    :raises ValueError: if a row is not a list of values
    :raises RiakError: if the TsObject has no list of rows
    """
    req = kvhosting.pb.riak_ts_pb2.TsPutReq()
    req.table = str_to_bytes(tsobj.table.name)
    if tsobj.columns:
        raise NotImplementedError("columns are not implemented yet")
    if not (tsobj.rows and isinstance(tsobj.rows, list)):
        raise RiakError("TsObject requires a list of rows")
    for row in tsobj.rows:
        pb_row = req.rows.add()  # NB: type TsRow
        if not isinstance(row, list):
            raise ValueError("TsObject row must be a list of values")
        for value in row:
            # Each value becomes a typed TsCell.
            self.encode_to_ts_cell(value, pb_row.cells.add())
    return Msg(kvhosting.pb.messages.MSG_CODE_TS_PUT_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_TS_PUT_RESP)
def encode_clear_bucket_props(self, bucket):
    """
    Builds an RpbResetBucketReq that resets the bucket's properties.

    :param bucket: the bucket to reset
    :rtype: Msg
    """
    req = kvhosting.pb.riak_pb2.RpbResetBucketReq()
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    return Msg(kvhosting.pb.messages.MSG_CODE_RESET_BUCKET_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_RESET_BUCKET_RESP)
def encode_modfun(self, props, msg=None):
    """
    Encodes a dict with 'mod' and 'fun' keys into a protobuf modfun
    pair. Used in bucket properties.

    :param props: the module/function pair
    :type props: dict
    :param msg: the protobuf message to fill; a fresh RpbModFun is
        created when omitted
    :type msg: kvhosting.pb.riak_pb2.RpbModFun
    :rtype kvhosting.pb.riak_pb2.RpbModFun
    """
    target = msg if msg is not None else kvhosting.pb.riak_pb2.RpbModFun()
    target.module = str_to_bytes(props['mod'])
    target.function = str_to_bytes(props['fun'])
    return target
def encode_set_bucket_type_props(self, bucket_type, props):
    """
    Builds an RpbSetBucketTypeReq carrying bucket-type properties.

    :param bucket_type: the bucket type to update
    :param props: the properties dict (see encode_bucket_props)
    :rtype: Msg
    """
    req = kvhosting.pb.riak_pb2.RpbSetBucketTypeReq()
    req.type = str_to_bytes(bucket_type.name)
    self.encode_bucket_props(props, req)
    # NB: the reply uses the generic set-bucket response code.
    return Msg(kvhosting.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_SET_BUCKET_RESP)
def encode_update_counter(self, bucket, key, value, **kwargs):
    """
    Builds an RpbCounterUpdateReq incrementing the counter by ``value``.

    Recognized options: ``w``, ``dw``, ``pw`` (quorums) and
    ``returnvalue`` (bool); each is sent only when explicitly provided.

    :rtype: Msg
    """
    req = kvhosting.pb.riak_kv_pb2.RpbCounterUpdateReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    req.amount = value
    for opt in ('w', 'dw', 'pw'):
        if kwargs.get(opt) is not None:
            setattr(req, opt, self.encode_quorum(kwargs[opt]))
    if kwargs.get('returnvalue') is not None:
        req.returnvalue = kwargs['returnvalue']
    return Msg(kvhosting.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP)
def encode_timeseries_listkeysreq(self, table, timeout=None):
    """
    Builds a TsListKeysReq for streaming the keys of a TS table.

    :param table: the timeseries table
    :param timeout: request timeout in ms (sent only when supported)
    :rtype: Msg
    """
    req = kvhosting.pb.riak_ts_pb2.TsListKeysReq()
    req.table = str_to_bytes(table.name)
    if self._client_timeouts and timeout:
        req.timeout = timeout
    return Msg(kvhosting.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_TS_LIST_KEYS_RESP)
def encode_map_op(self, msg, ops):
    """
    Encodes a sequence of map operations into a protobuf MapOp message.

    Each op is a tuple of ('add'|'remove'|'update', (name, dtype), ...)
    where 'update' carries a third element with the nested operation.
    Unknown actions are silently ignored.
    """
    for operation in ops:
        action = operation[0]
        field_name, field_dtype = operation[1]
        pb_type = MAP_FIELD_TYPES[field_dtype]
        if action == 'add':
            entry = msg.adds.add()
            entry.name = str_to_bytes(field_name)
            entry.type = pb_type
        elif action == 'remove':
            entry = msg.removes.add()
            entry.name = str_to_bytes(field_name)
            entry.type = pb_type
        elif action == 'update':
            entry = msg.updates.add()
            entry.field.name = str_to_bytes(field_name)
            entry.field.type = pb_type
            # The nested operation is encoded per its datatype.
            self.encode_map_update(field_dtype, entry, operation[2])
def encode_create_search_index(self, index, schema=None, n_val=None,
                               timeout=None):
    """
    Builds an RpbYokozunaIndexPutReq to create a search index.

    :param index: the index name
    :param schema: optional schema name to associate with the index
    :param n_val: optional replication factor for the index
    :param timeout: optional creation timeout in ms
    :rtype: Msg
    """
    idx = kvhosting.pb.riak_yokozuna_pb2.RpbYokozunaIndex(
        name=str_to_bytes(index))
    if schema:
        idx.schema = str_to_bytes(schema)
    if n_val:
        idx.n_val = n_val
    req = kvhosting.pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx)
    if timeout is not None:
        req.timeout = timeout
    # NB: index-put replies use the generic PUT response code.
    return Msg(kvhosting.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_PUT_RESP)
def encode_timeseries_query(self, table, query, interpolations=None):
    """
    Builds a TsQueryReq message from a query string.

    :param table: the timeseries table; its name is substituted for a
        literal '{table}' placeholder in the query
    :param query: the TS query text
    :param interpolations: NOTE(review): accepted but never encoded into
        the request -- confirm whether this parameter is intentionally
        unused.
    :rtype: Msg
    """
    req = kvhosting.pb.riak_ts_pb2.TsQueryReq()
    q = query
    if '{table}' in q:
        # Convenience: substitute the table name into the query text.
        q = q.format(table=table.name)
    req.query.base = str_to_bytes(q)
    mc = kvhosting.pb.messages.MSG_CODE_TS_QUERY_REQ
    rc = kvhosting.pb.messages.MSG_CODE_TS_QUERY_RESP
    return Msg(mc, req.SerializeToString(), rc)
def encode_stream_keys(self, bucket, timeout=None):
    """
    Builds an RpbListKeysReq for streaming the keys of a bucket.

    :param bucket: the bucket (with its bucket type)
    :param timeout: request timeout in ms (sent only when supported)
    :rtype: Msg
    """
    req = kvhosting.pb.riak_kv_pb2.RpbListKeysReq()
    req.bucket = str_to_bytes(bucket.name)
    if self._client_timeouts and timeout:
        req.timeout = timeout
    self._add_bucket_type(req, bucket.bucket_type)
    return Msg(kvhosting.pb.messages.MSG_CODE_LIST_KEYS_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_LIST_KEYS_RESP)
def encode_update_datatype(self, datatype, **kwargs):
    """
    Builds a DtUpdateReq carrying the datatype's pending operation.

    :param datatype: the datatype object (provides bucket, key, context
        and its queued operation via ``to_op()``)
    :param kwargs: datatype update options (see encode_dt_options)
    :rtype: Msg
    :raises ValueError: when the datatype has no pending operation
    """
    pending_op = datatype.to_op()
    type_name = datatype.type_name
    if not pending_op:
        raise ValueError(
            "No operation to send on datatype {!r}".format(datatype))
    req = kvhosting.pb.riak_dt_pb2.DtUpdateReq()
    req.bucket = str_to_bytes(datatype.bucket.name)
    req.type = str_to_bytes(datatype.bucket.bucket_type.name)
    if datatype.key:
        req.key = str_to_bytes(datatype.key)
    if datatype._context:
        # The causal context makes the update converge correctly.
        req.context = datatype._context
    self.encode_dt_options(req, **kwargs)
    self.encode_dt_op(type_name, req, pending_op)
    return Msg(kvhosting.pb.messages.MSG_CODE_DT_UPDATE_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_DT_UPDATE_RESP)
def encode_to_ts_cell(self, cell, ts_cell):
    """
    Encodes a single Python value into a protobuf TsCell.

    A None cell leaves ts_cell empty (an unset cell). The isinstance
    chain order is significant: datetime before everything else, and
    bool before the integer check (bool is a subclass of int).

    :param cell: the Python value to encode
    :param ts_cell: the TsCell message to fill
    :raises RiakError: for unsupported value types
    """
    if cell is not None:
        if isinstance(cell, datetime.datetime):
            # Timestamps are transmitted as milliseconds since epoch.
            ts_cell.timestamp_value = unix_time_millis(cell)
        elif isinstance(cell, bool):
            ts_cell.boolean_value = cell
        elif isinstance(cell, six.binary_type):
            # Already bytes; stored as-is.
            ts_cell.varchar_value = cell
        elif isinstance(cell, six.text_type):
            ts_cell.varchar_value = str_to_bytes(cell)
        elif isinstance(cell, six.string_types):
            # NOTE(review): likely redundant -- the binary/text branches
            # above appear to cover all string types; confirm before
            # removing.
            ts_cell.varchar_value = str_to_bytes(cell)
        elif (isinstance(cell, six.integer_types)):
            ts_cell.sint64_value = cell
        elif isinstance(cell, float):
            ts_cell.double_value = cell
        else:
            t = type(cell)
            raise RiakError("can't serialize type '{}', value '{}'".format(
                t, cell))
def encode_delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None,
                  pw=None, timeout=None):
    """
    Builds an RpbDelReq message for deleting an object.

    :param robj: the object to delete (bucket/key, optional vclock)
    :param rw: delete quorum
    :param r: read quorum
    :param w: write quorum
    :param dw: durable-write quorum
    :param pr: primary-read quorum (only when quorum controls supported)
    :param pw: primary-write quorum (only when quorum controls supported)
    :param timeout: request timeout in ms (only when supported)
    :rtype: Msg
    """
    req = kvhosting.pb.riak_kv_pb2.RpbDelReq()
    for field, quorum in (('rw', rw), ('r', r), ('w', w), ('dw', dw)):
        if quorum:
            setattr(req, field, self.encode_quorum(quorum))
    if self._quorum_controls:
        if pr:
            req.pr = self.encode_quorum(pr)
        if pw:
            req.pw = self.encode_quorum(pw)
    if self._client_timeouts and timeout:
        req.timeout = timeout
    if self._tombstone_vclocks and hasattr(robj, 'vclock') and robj.vclock:
        # Include the vclock so the delete targets this exact version.
        req.vclock = robj.vclock.encode('binary')
    target = robj.bucket
    req.bucket = str_to_bytes(target.name)
    self._add_bucket_type(req, target.bucket_type)
    req.key = str_to_bytes(robj.key)
    return Msg(kvhosting.pb.messages.MSG_CODE_DEL_REQ,
               req.SerializeToString(),
               kvhosting.pb.messages.MSG_CODE_DEL_RESP)
def encode_search_query(self, req, **kwargs):
    """
    Copies search-query options from kwargs onto an RpbSearchQueryReq.

    String-valued options are converted to bytes for the protobuf
    fields; 'fl' may be a single field name or a list of field names;
    'q.op' overrides 'op' when both are given.

    :param req: the RpbSearchQueryReq to fill
    """
    if 'rows' in kwargs:
        req.rows = kwargs['rows']
    if 'start' in kwargs:
        req.start = kwargs['start']
    if 'sort' in kwargs:
        req.sort = str_to_bytes(kwargs['sort'])
    if 'filter' in kwargs:
        req.filter = str_to_bytes(kwargs['filter'])
    if 'df' in kwargs:
        req.df = str_to_bytes(kwargs['df'])
    if 'op' in kwargs:
        req.op = str_to_bytes(kwargs['op'])
    if 'q.op' in kwargs:
        # BUG FIX: this assignment previously skipped str_to_bytes,
        # unlike every other string-valued option; a text value would
        # fail against the bytes 'op' field on Python 3.
        req.op = str_to_bytes(kwargs['q.op'])
    if 'fl' in kwargs:
        if isinstance(kwargs['fl'], list):
            req.fl.extend(kwargs['fl'])
        else:
            req.fl.append(kwargs['fl'])
    if 'presort' in kwargs:
        req.presort = kwargs['presort']
def _security_auth_headers(self, username, password, headers):
    """
    Add in the requisite HTTP Authentication Headers

    :param username: Riak Security Username
    :type str
    :param password: Riak Security Password
    :type str
    :param headers: Dictionary of headers
    :type dict
    """
    credentials = username + ":" + password
    # HTTP Basic auth: base64 of "user:password", ASCII-decoded for the
    # header value.
    token = base64.b64encode(str_to_bytes(credentials)).decode("ascii")
    headers['Authorization'] = 'Basic %s' % token
def encode_hook(self, hook, msg):
    """
    Encodes a commit hook dict into the protobuf message. Used in
    bucket properties.

    :param hook: the hook to encode
    :type hook: dict
    :param msg: the protobuf message to fill
    :type msg: kvhosting.pb.riak_pb2.RpbCommitHook
    :rtype kvhosting.pb.riak_pb2.RpbCommitHook
    """
    if 'name' not in hook:
        # Not a named hook: treat it as a module/function pair.
        self.encode_modfun(hook, msg.modfun)
    else:
        msg.name = str_to_bytes(hook['name'])
    return msg
def encode_map_update(self, dtype, msg, op):
    """
    Encodes a single embedded-datatype update into a MapUpdate message.

    :param dtype: the embedded datatype name
    :param msg: the MapUpdate message to fill
    :param op: the operation for that datatype
    :raises ValueError: for datatypes a map cannot contain
    """
    if dtype == 'counter':
        # op is ('increment', some_int)
        msg.counter_op.increment = op[1]
    elif dtype == 'set':
        self.encode_set_op(msg, op)
    elif dtype == 'map':
        self.encode_map_op(msg.map_op, op)
    elif dtype == 'register':
        # op is ('assign', some_str)
        msg.register_op = str_to_bytes(op[1])
    elif dtype == 'flag':
        # Anything other than 'enable' disables the flag.
        if op == 'enable':
            msg.flag_op = kvhosting.pb.riak_dt_pb2.MapUpdate.ENABLE
        else:
            msg.flag_op = kvhosting.pb.riak_dt_pb2.MapUpdate.DISABLE
    else:
        raise ValueError('Map may not contain datatype: {}'.format(dtype))
def encode_timeseries_keyreq(self, table, key, is_delete=False):
    """
    Builds a TsGetReq (or TsDelReq when ``is_delete``) for a row key.

    :param table: the timeseries table
    :param key: the primary-key cell values, as a list
    :param is_delete: build a delete request instead of a get
    :rtype: Msg
    :raises ValueError: if key is not a list
    """
    if not isinstance(key, list):
        raise ValueError("key must be a list")
    if is_delete:
        req = kvhosting.pb.riak_ts_pb2.TsDelReq()
        mc = kvhosting.pb.messages.MSG_CODE_TS_DEL_REQ
        rc = kvhosting.pb.messages.MSG_CODE_TS_DEL_RESP
    else:
        req = kvhosting.pb.riak_ts_pb2.TsGetReq()
        mc = kvhosting.pb.messages.MSG_CODE_TS_GET_REQ
        rc = kvhosting.pb.messages.MSG_CODE_TS_GET_RESP
    req.table = str_to_bytes(table.name)
    for value in key:
        # Each key component is a typed TsCell.
        self.encode_to_ts_cell(value, req.key.add())
    return Msg(mc, req.SerializeToString(), rc)
def setUpClass(cls):
    # Test fixture: store five rows of timeseries data at fixed offsets
    # from a pinned "now", then stash the timestamps (plain and in
    # milliseconds) plus the raw and byte-encoded rows on the class for
    # the tests to compare against.
    super(TimeseriesPbufTests, cls).setUpClass()
    cls.now = datetime.datetime.utcfromtimestamp(144379690.987000)
    fiveMinsAgo = cls.now - fiveMins
    tenMinsAgo = fiveMinsAgo - fiveMins
    fifteenMinsAgo = tenMinsAgo - fiveMins
    twentyMinsAgo = fifteenMinsAgo - fiveMins
    twentyFiveMinsAgo = twentyMinsAgo - fiveMins
    client = cls.create_client()
    table = client.table(table_name)
    rows = [
        ['hash1', 'user2', twentyFiveMinsAgo, 'typhoon', 90.3],
        ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3],
        ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0],
        # NB: a None cell -- exercises unset-cell encoding.
        ['hash1', 'user2', fiveMinsAgo, 'wind', None],
        ['hash1', 'user2', cls.now, 'snow', 20.1]
    ]
    ts_obj = table.new(rows)
    result = ts_obj.store()
    if result is not True:
        raise AssertionError("expected success")
    client.close()
    cls.nowMsec = unix_time_millis(cls.now)
    cls.fiveMinsAgo = fiveMinsAgo
    cls.twentyMinsAgo = twentyMinsAgo
    cls.twentyFiveMinsAgo = twentyFiveMinsAgo
    cls.tenMinsAgoMsec = unix_time_millis(tenMinsAgo)
    cls.twentyMinsAgoMsec = unix_time_millis(twentyMinsAgo)
    cls.numCols = len(rows[0])
    cls.rows = rows
    # The same rows with strings pre-converted to bytes, mirroring what
    # the codec produces on the wire.
    encoded_rows = [
        [str_to_bytes('hash1'), str_to_bytes('user2'),
         twentyFiveMinsAgo, str_to_bytes('typhoon'), 90.3],
        [str_to_bytes('hash1'), str_to_bytes('user2'),
         twentyMinsAgo, str_to_bytes('hurricane'), 82.3],
        [str_to_bytes('hash1'), str_to_bytes('user2'),
         fifteenMinsAgo, str_to_bytes('rain'), 79.0],
        [str_to_bytes('hash1'), str_to_bytes('user2'),
         fiveMinsAgo, str_to_bytes('wind'), None],
        [str_to_bytes('hash1'), str_to_bytes('user2'),
         cls.now, str_to_bytes('snow'), 20.1]
    ]
    cls.encoded_rows = encoded_rows
def test_decode_data_from_query(self):
    # Builds a TsQueryResp by hand (five typed columns, two rows), runs
    # it through PbufCodec.decode_timeseries, and checks that column
    # names/types and cell values round-trip.
    tqr = riak.pb.riak_ts_pb2.TsQueryResp()
    c0 = tqr.columns.add()
    c0.name = str_to_bytes('col_varchar')
    c0.type = TsColumnType.Value('VARCHAR')
    c1 = tqr.columns.add()
    c1.name = str_to_bytes('col_integer')
    c1.type = TsColumnType.Value('SINT64')
    c2 = tqr.columns.add()
    c2.name = str_to_bytes('col_double')
    c2.type = TsColumnType.Value('DOUBLE')
    c3 = tqr.columns.add()
    c3.name = str_to_bytes('col_timestamp')
    c3.type = TsColumnType.Value('TIMESTAMP')
    c4 = tqr.columns.add()
    c4.name = str_to_bytes('col_boolean')
    c4.type = TsColumnType.Value('BOOLEAN')
    # Row 0 cells, one per column in order.
    r0 = tqr.rows.add()
    r0c0 = r0.cells.add()
    r0c0.varchar_value = str_to_bytes(self.rows[0][0])
    r0c1 = r0.cells.add()
    r0c1.sint64_value = self.rows[0][1]
    r0c2 = r0.cells.add()
    r0c2.double_value = self.rows[0][2]
    r0c3 = r0.cells.add()
    r0c3.timestamp_value = self.ts0ms
    r0c4 = r0.cells.add()
    r0c4.boolean_value = self.rows[0][4]
    # Row 1 cells.
    r1 = tqr.rows.add()
    r1c0 = r1.cells.add()
    r1c0.varchar_value = str_to_bytes(self.rows[1][0])
    r1c1 = r1.cells.add()
    r1c1.sint64_value = self.rows[1][1]
    r1c2 = r1.cells.add()
    r1c2.double_value = self.rows[1][2]
    r1c3 = r1.cells.add()
    r1c3.timestamp_value = self.ts1ms
    r1c4 = r1.cells.add()
    r1c4.boolean_value = self.rows[1][4]
    tsobj = TsObject(None, self.table)
    c = PbufCodec()
    c.decode_timeseries(tqr, tsobj, True)
    self.assertEqual(len(tsobj.rows), len(self.rows))
    self.assertEqual(len(tsobj.columns.names), len(tqr.columns))
    self.assertEqual(len(tsobj.columns.types), len(tqr.columns))
    # Decoded column names and (lowercased) type names.
    cn, ct = tsobj.columns
    self.assertEqual(cn[0], 'col_varchar')
    self.assertEqual(ct[0], 'varchar')
    self.assertEqual(cn[1], 'col_integer')
    self.assertEqual(ct[1], 'sint64')
    self.assertEqual(cn[2], 'col_double')
    self.assertEqual(ct[2], 'double')
    self.assertEqual(cn[3], 'col_timestamp')
    self.assertEqual(ct[3], 'timestamp')
    self.assertEqual(cn[4], 'col_boolean')
    self.assertEqual(ct[4], 'boolean')
    # NOTE(review): ts0/ts1 are not defined in this function --
    # presumably fixture attributes or module globals set up elsewhere;
    # confirm against the rest of the test class.
    r0 = tsobj.rows[0]
    self.assertEqual(bytes_to_str(r0[0]), self.rows[0][0])
    self.assertEqual(r0[1], self.rows[0][1])
    self.assertEqual(r0[2], self.rows[0][2])
    self.assertEqual(r0[3], ts0)
    self.assertEqual(r0[4], self.rows[0][4])
    r1 = tsobj.rows[1]
    self.assertEqual(bytes_to_str(r1[0]), self.rows[1][0])
    self.assertEqual(r1[1], self.rows[1][1])
    self.assertEqual(r1[2], self.rows[1][2])
    self.assertEqual(r1[3], ts1)
    self.assertEqual(r1[4], self.rows[1][4])