def encode_stream_mapred(self, content):
    """
    Encode a streaming MapReduce request.

    :param content: JSON-encoded MapReduce job description
    :rtype: Msg
    """
    req = riak.pb.riak_kv_pb2.RpbMapRedReq()
    req.request = str_to_bytes(content)
    req.content_type = str_to_bytes("application/json")
    return Msg(riak.pb.messages.MSG_CODE_MAP_RED_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_MAP_RED_RESP)
def encode_put(self, robj, w=None, dw=None, pw=None, return_body=True,
               if_none_match=False, timeout=None):
    """
    Encode a store (put) request for a RiakObject.

    Quorum options are only encoded when truthy; ``pw`` and ``timeout``
    additionally require the corresponding server capability flags.

    :rtype: Msg
    """
    bucket = robj.bucket
    req = riak.pb.riak_kv_pb2.RpbPutReq()
    if w:
        req.w = self.encode_quorum(w)
    if dw:
        req.dw = self.encode_quorum(dw)
    # pw is only valid when the server supports quorum controls.
    if self._quorum_controls and pw:
        req.pw = self.encode_quorum(pw)
    if return_body:
        req.return_body = 1
    if if_none_match:
        req.if_none_match = 1
    if self._client_timeouts and timeout:
        req.timeout = timeout
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    if robj.key:
        req.key = str_to_bytes(robj.key)
    if robj.vclock:
        req.vclock = robj.vclock.encode('binary')
    self.encode_content(robj, req.content)
    return Msg(riak.pb.messages.MSG_CODE_PUT_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_PUT_RESP)
async def update_datatype(self, datatype, **options):
    """
    Send the pending operation on *datatype* to Riak and fold the
    server's response (key, context, new value) back into it.

    :raises NotImplementedError: if the bucket uses the default bucket-type
    :raises ValueError: if the datatype has no pending operation
    :rtype: bool
    """
    if datatype.bucket.bucket_type.is_default():
        raise NotImplementedError("Datatypes cannot be used in the default"
                                  " bucket-type.")
    op = datatype.to_op()
    type_name = datatype.type_name
    if not op:
        raise ValueError(
            "No operation to send on datatype {!r}".format(datatype))
    req = riak_pb.DtUpdateReq()
    req.bucket = str_to_bytes(datatype.bucket.name)
    req.type = str_to_bytes(datatype.bucket.bucket_type.name)
    if datatype.key:
        req.key = str_to_bytes(datatype.key)
    if datatype._context:
        req.context = datatype._context
    self._encode_dt_options(req, options)
    self._encode_dt_op(type_name, req, op)
    msg_code, resp = await self._request(
        messages.MSG_CODE_DT_UPDATE_REQ, req,
        messages.MSG_CODE_DT_UPDATE_RESP)
    # Riak may assign a key (new object) and a fresh opaque context.
    if resp.HasField('key'):
        datatype.key = resp.key[:]
    if resp.HasField('context'):
        datatype._context = resp.context[:]
    datatype._set_value(self._decode_dt_value(type_name, resp))
    return True
def encode_get(self, robj, r=None, pr=None, timeout=None,
               basic_quorum=None, notfound_ok=None, head_only=False):
    """
    Encode a fetch (get) request for a RiakObject.

    :param head_only: when True, request metadata without the value
    :rtype: Msg
    """
    bucket = robj.bucket
    req = riak.pb.riak_kv_pb2.RpbGetReq()
    if r:
        req.r = self.encode_quorum(r)
    # These options depend on server-side quorum-control support.
    if self._quorum_controls:
        if pr:
            req.pr = self.encode_quorum(pr)
        if basic_quorum is not None:
            req.basic_quorum = basic_quorum
        if notfound_ok is not None:
            req.notfound_ok = notfound_ok
    if self._client_timeouts and timeout:
        req.timeout = timeout
    # Ask for vclocks of deleted objects when supported.
    if self._tombstone_vclocks:
        req.deletedvclock = True
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    req.key = str_to_bytes(robj.key)
    req.head = head_only
    return Msg(riak.pb.messages.MSG_CODE_GET_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_GET_RESP)
def encode_delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None,
                  pw=None, timeout=None):
    """
    Encode a delete request for a RiakObject, attaching the object's
    vclock when tombstone vclocks are supported.

    :rtype: Msg
    """
    req = riak.pb.riak_kv_pb2.RpbDelReq()
    if rw:
        req.rw = self.encode_quorum(rw)
    if r:
        req.r = self.encode_quorum(r)
    if w:
        req.w = self.encode_quorum(w)
    if dw:
        req.dw = self.encode_quorum(dw)
    if self._quorum_controls:
        if pr:
            req.pr = self.encode_quorum(pr)
        if pw:
            req.pw = self.encode_quorum(pw)
    if self._client_timeouts and timeout:
        req.timeout = timeout
    # Send the vclock along so the delete applies to the version we saw.
    use_vclocks = (self._tombstone_vclocks and
                   hasattr(robj, 'vclock') and robj.vclock)
    if use_vclocks:
        req.vclock = robj.vclock.encode('binary')
    bucket = robj.bucket
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    req.key = str_to_bytes(robj.key)
    return Msg(riak.pb.messages.MSG_CODE_DEL_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_DEL_RESP)
def get_counter(self, bucket, key, **params):
    """
    Fetch the value of a legacy (1.4-style) counter.

    :returns: the counter value, or None when the counter is absent
    :raises NotImplementedError: with bucket-types or unsupported servers
    """
    if not bucket.bucket_type.is_default():
        raise NotImplementedError("Counters are not "
                                  "supported with bucket-types, "
                                  "use datatypes instead.")
    if not self.counters():
        raise NotImplementedError("Counters are not supported")
    req = riak_pb.RpbCounterGetReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    if params.get('r') is not None:
        req.r = self._encode_quorum(params['r'])
    if params.get('pr') is not None:
        req.pr = self._encode_quorum(params['pr'])
    if params.get('basic_quorum') is not None:
        req.basic_quorum = params['basic_quorum']
    if params.get('notfound_ok') is not None:
        req.notfound_ok = params['notfound_ok']
    msg_code, resp = self._request(MSG_CODE_COUNTER_GET_REQ, req,
                                   MSG_CODE_COUNTER_GET_RESP)
    return resp.value if resp.HasField('value') else None
def encode_create_search_schema(self, schema, content):
    """
    Encode a Yokozuna schema-put request.

    :param schema: schema name
    :param content: schema XML content
    :rtype: Msg
    """
    pb_schema = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema(
        name=str_to_bytes(schema),
        content=str_to_bytes(content))
    req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq(
        schema=pb_schema)
    return Msg(riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_PUT_RESP)
def update_counter(self, bucket, key, value, **params):
    """
    Increment (or decrement, for negative *value*) a legacy counter.

    :returns: the new value when 'returnvalue' was requested, else True
    :raises NotImplementedError: with bucket-types or unsupported servers
    """
    if not bucket.bucket_type.is_default():
        raise NotImplementedError("Counters are not "
                                  "supported with bucket-types, "
                                  "use datatypes instead.")
    if not self.counters():
        raise NotImplementedError("Counters are not supported")
    req = riak_pb.RpbCounterUpdateReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    req.amount = value
    if params.get('w') is not None:
        req.w = self._encode_quorum(params['w'])
    if params.get('dw') is not None:
        req.dw = self._encode_quorum(params['dw'])
    if params.get('pw') is not None:
        req.pw = self._encode_quorum(params['pw'])
    if params.get('returnvalue') is not None:
        req.returnvalue = params['returnvalue']
    msg_code, resp = self._request(MSG_CODE_COUNTER_UPDATE_REQ, req,
                                   MSG_CODE_COUNTER_UPDATE_RESP)
    return resp.value if resp.HasField('value') else True
def encode_auth(self, username, password):
    """
    Encode an authentication request with the given credentials.

    :rtype: Msg
    """
    req = riak.pb.riak_pb2.RpbAuthReq()
    req.user = str_to_bytes(username)
    req.password = str_to_bytes(password)
    return Msg(riak.pb.messages.MSG_CODE_AUTH_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_AUTH_RESP)
async def update_datatype(self, datatype, **options):
    """
    Push the datatype's pending operation to Riak and apply the
    resulting key, context and value back onto the instance.

    :raises NotImplementedError: if the bucket uses the default bucket-type
    :raises ValueError: if there is no pending operation
    :rtype: bool
    """
    if datatype.bucket.bucket_type.is_default():
        raise NotImplementedError("Datatypes cannot be used in the default"
                                  " bucket-type.")
    op = datatype.to_op()
    type_name = datatype.type_name
    if not op:
        raise ValueError(
            "No operation to send on datatype {!r}".format(datatype))
    req = riak_dt_pb2.DtUpdateReq()
    req.bucket = str_to_bytes(datatype.bucket.name)
    req.type = str_to_bytes(datatype.bucket.bucket_type.name)
    if datatype.key:
        req.key = str_to_bytes(datatype.key)
    if datatype._context:
        req.context = datatype._context
    self._encode_dt_options(req, options)
    self._encode_dt_op(type_name, req, op)
    msg_code, resp = await self._request(
        messages.MSG_CODE_DT_UPDATE_REQ, req,
        messages.MSG_CODE_DT_UPDATE_RESP)
    # A server-assigned key appears for new objects; context is opaque.
    if resp.HasField('key'):
        datatype.key = resp.key[:]
    if resp.HasField('context'):
        datatype._context = resp.context[:]
    datatype._set_value(self._decode_dt_value(type_name, resp))
    return True
def _encode_bucket_props(self, props, msg):
    """
    Encode a dict of bucket properties into a protobuf message.

    Handles four property families (plain values, commit hooks,
    mod/fun pairs, quorums) plus the 'repl' enum.

    :param props: bucket properties
    :type props: dict
    :param msg: the protobuf message to fill
    :type msg: riak.pb.riak_pb2.RpbSetBucketReq
    """
    for prop in NORMAL_PROPS:
        if prop in props and props[prop] is not None:
            raw = props[prop]
            if isinstance(raw, string_types):
                setattr(msg.props, prop, str_to_bytes(raw))
            else:
                setattr(msg.props, prop, raw)
    for prop in COMMIT_HOOK_PROPS:
        if prop in props:
            # The presence flag must be set explicitly for hook lists.
            setattr(msg.props, 'has_' + prop, True)
            self._encode_hooklist(props[prop], getattr(msg.props, prop))
    for prop in MODFUN_PROPS:
        if prop in props and props[prop] is not None:
            self._encode_modfun(props[prop], getattr(msg.props, prop))
    for prop in QUORUM_PROPS:
        if prop in props and props[prop] not in (None, 'default'):
            encoded = self._encode_quorum(props[prop])
            if encoded is not None:
                if isinstance(encoded, string_types):
                    setattr(msg.props, prop, str_to_bytes(encoded))
                else:
                    setattr(msg.props, prop, encoded)
    if 'repl' in props:
        msg.props.repl = REPL_TO_PB[props['repl']]
    return msg
def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None,
           timeout=None):
    """
    Serialize and send a delete request for a RiakObject.

    :returns: self, for chaining
    """
    req = riak_pb.RpbDelReq()
    if rw:
        req.rw = self._encode_quorum(rw)
    if r:
        req.r = self._encode_quorum(r)
    if w:
        req.w = self._encode_quorum(w)
    if dw:
        req.dw = self._encode_quorum(dw)
    if self.quorum_controls():
        if pr:
            req.pr = self._encode_quorum(pr)
        if pw:
            req.pw = self._encode_quorum(pw)
    if self.client_timeouts() and timeout:
        req.timeout = timeout
    # Target the specific version we know about, when vclocks are usable.
    use_vclocks = (self.tombstone_vclocks() and
                   hasattr(robj, 'vclock') and robj.vclock)
    if use_vclocks:
        req.vclock = robj.vclock.encode('binary')
    bucket = robj.bucket
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    req.key = str_to_bytes(robj.key)
    msg_code, resp = self._request(MSG_CODE_DEL_REQ, req,
                                   MSG_CODE_DEL_RESP)
    return self
def encode_search(self, index, query, **kwargs):
    """
    Encode a search query request against the given index.

    :rtype: Msg
    """
    req = riak.pb.riak_search_pb2.RpbSearchQueryReq(
        index=str_to_bytes(index),
        q=str_to_bytes(query))
    # Optional query parameters (rows, start, sort, ...) go in here.
    self.encode_search_query(req, **kwargs)
    return Msg(riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_SEARCH_QUERY_RESP)
def _encode_bucket_props(self, props, msg):
    """
    Encodes a dict of bucket properties into the protobuf message.

    :param props: bucket properties
    :type props: dict
    :param msg: the protobuf message to fill
    :type msg: riak.pb.riak_pb2.RpbSetBucketReq
    """
    for prop in NORMAL_PROPS:
        if prop in props and props[prop] is not None:
            if isinstance(props[prop], string_types):
                setattr(msg.props, prop, str_to_bytes(props[prop]))
            else:
                setattr(msg.props, prop, props[prop])
    for prop in COMMIT_HOOK_PROPS:
        if prop in props:
            # Hook lists need their explicit presence flag set.
            setattr(msg.props, 'has_' + prop, True)
            self._encode_hooklist(props[prop], getattr(msg.props, prop))
    for prop in MODFUN_PROPS:
        if prop in props and props[prop] is not None:
            self._encode_modfun(props[prop], getattr(msg.props, prop))
    for prop in QUORUM_PROPS:
        if prop in props and props[prop] not in (None, 'default'):
            value = self._encode_quorum(props[prop])
            if value is not None:
                if isinstance(value, string_types):
                    setattr(msg.props, prop, str_to_bytes(value))
                else:
                    setattr(msg.props, prop, value)
    if 'repl' in props:
        # FIX: encoding must use the Python-value -> protobuf-enum map
        # (REPL_TO_PB), matching the sibling implementation; REPL_TO_PY
        # is the decode-direction mapping and is wrong here.
        msg.props.repl = REPL_TO_PB[props['repl']]
    return msg
async def put(self, robj, return_body=True):
    """
    Store a RiakObject and fold the response (key, vclock, contents)
    back into it.

    :raises RiakError: if no body came back and no key is known
    :rtype: RiakObject
    """
    bucket = robj.bucket
    req = riak_pb.RpbPutReq()
    if return_body:
        req.return_body = 1
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    if robj.key:
        req.key = str_to_bytes(robj.key)
    if robj.vclock:
        req.vclock = robj.vclock.encode('binary')
    self._encode_content(robj, req.content)
    msg_code, resp = await self._request(messages.MSG_CODE_PUT_REQ, req,
                                         messages.MSG_CODE_PUT_RESP)
    if resp is not None:
        if resp.HasField('key'):
            # Server-generated key for a keyless put.
            robj.key = bytes_to_str(resp.key)
        if resp.HasField('vclock'):
            robj.vclock = VClock(resp.vclock, 'binary')
        if resp.content:
            self._decode_contents(resp.content, robj)
    elif not robj.key:
        raise RiakError("missing response object")
    return robj
def get_counter(self, bucket, key, **params):
    """
    Fetch a legacy (1.4-style) counter's value.

    :returns: the counter value, or None if it does not exist
    :raises NotImplementedError: with bucket-types or unsupported servers
    """
    if not bucket.bucket_type.is_default():
        raise NotImplementedError("Counters are not "
                                  "supported with bucket-types, "
                                  "use datatypes instead.")
    if not self.counters():
        raise NotImplementedError("Counters are not supported")
    req = riak.pb.riak_kv_pb2.RpbCounterGetReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    if params.get("r") is not None:
        req.r = self._encode_quorum(params["r"])
    if params.get("pr") is not None:
        req.pr = self._encode_quorum(params["pr"])
    if params.get("basic_quorum") is not None:
        req.basic_quorum = params["basic_quorum"]
    if params.get("notfound_ok") is not None:
        req.notfound_ok = params["notfound_ok"]
    msg_code, resp = self._request(
        riak.pb.messages.MSG_CODE_COUNTER_GET_REQ, req,
        riak.pb.messages.MSG_CODE_COUNTER_GET_RESP)
    return resp.value if resp.HasField("value") else None
def encode_get_preflist(self, bucket, key):
    """
    Encode a preference-list request for a bucket/key pair.

    :rtype: Msg
    """
    req = riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    req.type = str_to_bytes(bucket.bucket_type.name)
    return Msg(riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP)
def encode_fetch_datatype(self, bucket, key, **kwargs):
    """
    Encode a datatype (CRDT) fetch request.

    :rtype: Msg
    """
    req = riak.pb.riak_dt_pb2.DtFetchReq()
    req.type = str_to_bytes(bucket.bucket_type.name)
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    # Optional read options (r, pr, include_context, ...).
    self.encode_dt_options(req, **kwargs)
    return Msg(riak.pb.messages.MSG_CODE_DT_FETCH_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_DT_FETCH_RESP)
def encode_create_search_schema(self, schema, content):
    """
    Encode a Yokozuna schema creation request.

    :param schema: schema name
    :param content: schema XML content
    :rtype: Msg
    """
    yz_schema = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema(
        name=str_to_bytes(schema),
        content=str_to_bytes(content))
    req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq(
        schema=yz_schema)
    return Msg(riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_PUT_RESP)
def create_search_schema(self, schema, content):
    """
    Create or update a search schema on the server.

    :raises NotImplementedError: if Search 2.0 admin is unsupported
    :rtype: bool
    """
    if not self.pb_search_admin():
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    yz_schema = riak_pb.RpbYokozunaSchema(name=str_to_bytes(schema),
                                          content=str_to_bytes(content))
    req = riak_pb.RpbYokozunaSchemaPutReq(schema=yz_schema)
    self._request(MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, req,
                  MSG_CODE_PUT_RESP)
    return True
def test_encode_data_for_get(self):
    """Verify the TTB codec's key-request encoding against a
    hand-built Erlang term tuple."""
    key_cells = [
        str_to_bytes('hash1'),
        str_to_bytes('user2'),
        unix_time_millis(ts0),
    ]
    # Expected wire form: (msg atom, table, key cells, undefined).
    expected = encode(
        (tsgetreq_a, str_to_bytes(table_name), key_cells, udef_a))
    codec = TtbCodec()
    msg = codec.encode_timeseries_keyreq(self.table,
                                         ['hash1', 'user2', ts0])
    self.assertEqual(expected, msg.data)
def stream_mapred(self, inputs, query, timeout=None):
    """
    Start a streaming MapReduce job and return the result stream.

    :rtype: RiakPbcMapredStream
    """
    # Build the JSON job body (timeout is folded in if given).
    content = self._construct_mapred_json(inputs, query, timeout)
    req = riak_pb.RpbMapRedReq()
    req.request = str_to_bytes(content)
    req.content_type = str_to_bytes("application/json")
    self._send_msg(MSG_CODE_MAP_RED_REQ, req)
    return RiakPbcMapredStream(self)
def create_search_index(self, index, schema=None, n_val=None):
    """
    Create a Yokozuna search index, optionally bound to a schema
    and an n_val.

    :raises NotImplementedError: if Search 2.0 admin is unsupported
    :rtype: bool
    """
    if not self.pb_search_admin():
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    index = str_to_bytes(index)
    idx = riak_pb.RpbYokozunaIndex(name=index)
    if schema:
        idx.schema = str_to_bytes(schema)
    if n_val:
        idx.n_val = n_val
    req = riak_pb.RpbYokozunaIndexPutReq(index=idx)
    self._request(MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, req,
                  MSG_CODE_PUT_RESP)
    return True
def encode_create_search_index(self, index, schema=None, n_val=None,
                               timeout=None):
    """
    Encode a Yokozuna index creation request.

    :rtype: Msg
    """
    index = str_to_bytes(index)
    idx = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index)
    if schema:
        idx.schema = str_to_bytes(schema)
    if n_val:
        idx.n_val = n_val
    req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx)
    if timeout is not None:
        req.timeout = timeout
    return Msg(riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_PUT_RESP)
async def put(self, robj, w=None, dw=None, pw=None, return_body=True,
              if_none_match=False, timeout=None):
    """
    Store a RiakObject with optional quorum/timeout settings and fold
    the response (key, vclock, contents) back into the object.

    :raises RiakError: if no body came back and no key is known
    :rtype: RiakObject
    """
    bucket = robj.bucket
    req = riak_kv_pb2.RpbPutReq()
    if w:
        req.w = self._encode_quorum(w)
    if dw:
        req.dw = self._encode_quorum(dw)
    if pw:
        req.pw = self._encode_quorum(pw)
    if return_body:
        req.return_body = 1
    if if_none_match:
        req.if_none_match = 1
    if timeout:
        req.timeout = timeout
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    if robj.key:
        req.key = str_to_bytes(robj.key)
    if robj.vclock:
        req.vclock = robj.vclock.encode('binary')
    self._encode_content(robj, req.content)
    msg_code, resp = await self._request(messages.MSG_CODE_PUT_REQ, req,
                                         messages.MSG_CODE_PUT_RESP)
    if resp is not None:
        if resp.HasField('key'):
            # Server-generated key for a keyless put.
            robj.key = bytes_to_str(resp.key)
        if resp.HasField('vclock'):
            robj.vclock = VClock(resp.vclock, 'binary')
        if resp.content:
            self._decode_contents(resp.content, robj)
    elif not robj.key:
        raise RiakError("missing response object")
    return robj
async def delete(self, robj):
    """
    Delete a RiakObject, sending its vclock when one is present.

    :returns: self, for chaining
    """
    req = riak_kv_pb2.RpbDelReq()
    # Target the known version when the object carries a vclock.
    if hasattr(robj, 'vclock') and robj.vclock:
        req.vclock = robj.vclock.encode('binary')
    bucket = robj.bucket
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    req.key = str_to_bytes(robj.key)
    msg_code, resp = await self._request(messages.MSG_CODE_DEL_REQ, req,
                                         messages.MSG_CODE_DEL_RESP)
    return self
def encode_get_counter(self, bucket, key, **kwargs):
    """
    Encode a legacy counter fetch request.

    :rtype: Msg
    """
    req = riak.pb.riak_kv_pb2.RpbCounterGetReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    if kwargs.get('r') is not None:
        req.r = self.encode_quorum(kwargs['r'])
    if kwargs.get('pr') is not None:
        req.pr = self.encode_quorum(kwargs['pr'])
    if kwargs.get('basic_quorum') is not None:
        req.basic_quorum = kwargs['basic_quorum']
    if kwargs.get('notfound_ok') is not None:
        req.notfound_ok = kwargs['notfound_ok']
    return Msg(riak.pb.messages.MSG_CODE_COUNTER_GET_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_COUNTER_GET_RESP)
def encode_set_bucket_type_props(self, bucket_type, props):
    """
    Encode a bucket-type properties update request.

    :rtype: Msg
    """
    req = riak.pb.riak_pb2.RpbSetBucketTypeReq()
    req.type = str_to_bytes(bucket_type.name)
    self.encode_bucket_props(props, req)
    return Msg(riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_SET_BUCKET_RESP)
def _auth(self):
    """
    Perform an authorization request against Riak.

    Returns True upon success, False otherwise.

    Note: Riak will sleep for a short period of time upon a failed
    auth request/response to prevent denial of service attacks.
    """
    req = riak_pb.RpbAuthReq()
    credentials = self._client._credentials
    req.user = str_to_bytes(credentials.username)
    req.password = str_to_bytes(credentials.password)
    msg_code, _ = self._non_connect_request(MSG_CODE_AUTH_REQ, req,
                                            MSG_CODE_AUTH_RESP)
    return msg_code == MSG_CODE_AUTH_RESP
async def delete(self, robj):
    """
    Delete a RiakObject, attaching its vclock when available.

    :returns: self, for chaining
    """
    req = riak_pb.RpbDelReq()
    # Send the vclock so the delete targets the version we hold.
    if hasattr(robj, 'vclock') and robj.vclock:
        req.vclock = robj.vclock.encode('binary')
    bucket = robj.bucket
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    req.key = str_to_bytes(robj.key)
    msg_code, resp = await self._request(
        messages.MSG_CODE_DEL_REQ, req,
        messages.MSG_CODE_DEL_RESP)
    return self
def encode_update_counter(self, bucket, key, value, **kwargs):
    """
    Encode a legacy counter update (increment/decrement) request.

    :param value: the (possibly negative) amount to add
    :rtype: Msg
    """
    req = riak.pb.riak_kv_pb2.RpbCounterUpdateReq()
    req.bucket = str_to_bytes(bucket.name)
    req.key = str_to_bytes(key)
    req.amount = value
    if kwargs.get('w') is not None:
        req.w = self.encode_quorum(kwargs['w'])
    if kwargs.get('dw') is not None:
        req.dw = self.encode_quorum(kwargs['dw'])
    if kwargs.get('pw') is not None:
        req.pw = self.encode_quorum(kwargs['pw'])
    if kwargs.get('returnvalue') is not None:
        req.returnvalue = kwargs['returnvalue']
    return Msg(riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP)
def encode_clear_bucket_props(self, bucket):
    """
    Encode a request that resets a bucket's properties to defaults.

    :rtype: Msg
    """
    req = riak.pb.riak_pb2.RpbResetBucketReq()
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    return Msg(riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ,
               req.SerializeToString(),
               riak.pb.messages.MSG_CODE_RESET_BUCKET_RESP)
def _encode_modfun(self, props, msg=None):
    """
    Encode a dict with 'mod' and 'fun' keys into a protobuf modfun
    pair. Used in bucket properties.

    :param props: the module/function pair
    :type props: dict
    :param msg: the protobuf message to fill (created when omitted)
    :type msg: riak.pb.riak_pb2.RpbModFun
    :rtype: riak.pb.riak_pb2.RpbModFun
    """
    if msg is None:
        msg = riak.pb.riak_pb2.RpbModFun()
    msg.module = str_to_bytes(props['mod'])
    msg.function = str_to_bytes(props['fun'])
    return msg
def _encode_timeseries_put(self, tsobj, req):
    """
    Fill a TsPutReq message with data and metadata from a TsObject.

    :param tsobj: a TsObject
    :type tsobj: TsObject
    :param req: the protobuf message to fill
    :type req: riak.pb.riak_ts_pb2.TsPutReq
    :raises NotImplementedError: if column metadata is supplied
    :raises ValueError: if a row is not a list
    :raises RiakError: if the object has no row list
    """
    req.table = str_to_bytes(tsobj.table.name)
    if tsobj.columns:
        raise NotImplementedError("columns are not implemented yet")
    if not (tsobj.rows and isinstance(tsobj.rows, list)):
        raise RiakError("TsObject requires a list of rows")
    for row in tsobj.rows:
        ts_row = req.rows.add()  # NB: type TsRow
        if not isinstance(row, list):
            raise ValueError("TsObject row must be a list of values")
        for cell in row:
            ts_cell = ts_row.cells.add()  # NB: type TsCell
            self._encode_to_ts_cell(cell, ts_cell)
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None,
        notfound_ok=None):
    """
    Serialize a get request, send it, and deserialize the response
    into *robj*.

    :rtype: RiakObject
    """
    bucket = robj.bucket
    req = riak_pb.RpbGetReq()
    if r:
        req.r = self._encode_quorum(r)
    if self.quorum_controls():
        if pr:
            req.pr = self._encode_quorum(pr)
        if basic_quorum is not None:
            req.basic_quorum = basic_quorum
        if notfound_ok is not None:
            req.notfound_ok = notfound_ok
    if self.client_timeouts() and timeout:
        req.timeout = timeout
    if self.tombstone_vclocks():
        req.deletedvclock = True
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    req.key = str_to_bytes(robj.key)
    msg_code, resp = self._request(MSG_CODE_GET_REQ, req,
                                   MSG_CODE_GET_RESP)
    if resp is not None:
        if resp.HasField('vclock'):
            robj.vclock = VClock(resp.vclock, 'binary')
        # Decode even when there are no contents, i.e. the object
        # is tombstoned.
        self._decode_contents(resp.content, robj)
    else:
        # "not found" returns an empty message, so clear the siblings.
        robj.siblings = []
    return robj
def _encode_map_op(self, msg, ops):
    """
    Encode a sequence of map-CRDT operations ('add', 'remove',
    'update') into the protobuf map-op message.
    """
    for op in ops:
        action = op[0]
        name, dtype = op[1]
        ftype = MAP_FIELD_TYPES[dtype]
        if action == 'add':
            entry = msg.adds.add()
            entry.name = str_to_bytes(name)
            entry.type = ftype
        elif action == 'remove':
            entry = msg.removes.add()
            entry.name = str_to_bytes(name)
            entry.type = ftype
        elif action == 'update':
            entry = msg.updates.add()
            entry.field.name = str_to_bytes(name)
            entry.field.type = ftype
            # Recurse into the nested datatype's own update encoding.
            self._encode_map_update(dtype, entry, op[2])
def _encode_map_op(self, msg, ops):
    """
    Encode map-CRDT operations ('add', 'remove', 'update') into the
    protobuf map-op message using the shared codec field-type table.
    """
    for op in ops:
        action = op[0]
        name, dtype = op[1]
        ftype = codec.MAP_FIELD_TYPES[dtype]
        if action == 'add':
            entry = msg.adds.add()
            entry.name = str_to_bytes(name)
            entry.type = ftype
        elif action == 'remove':
            entry = msg.removes.add()
            entry.name = str_to_bytes(name)
            entry.type = ftype
        elif action == 'update':
            entry = msg.updates.add()
            entry.field.name = str_to_bytes(name)
            entry.field.type = ftype
            # Delegate the nested value change to the per-type encoder.
            self._encode_map_update(dtype, entry, op[2])