def next(self):
    """
    Return the next decoded chunk of the 2i result stream.

    Raises RiakError if the server reported an error; otherwise
    returns keys, decoded term/key pairs, or a continuation marker,
    depending on which field the JSON chunk carries.
    """
    chunk = super(HttpIndexStream, self).next()
    payload = json.loads(chunk.get_payload())
    if u'error' in payload:
        raise RiakError(payload[u'error'])
    if u'keys' in payload:
        return payload[u'keys']
    if u'results' in payload:
        # Format is {"results":[{"2ikey":"primarykey"}, ...]}
        pairs = []
        for result in payload[u'results']:
            pairs.append(self._decode_pair(list(result.items())[0]))
        return pairs
    if u'continuation' in payload:
        return CONTINUATION(payload[u'continuation'])
def get_bucket_props(self, bucket):
    """
    Get properties for a bucket

    :param bucket: the bucket whose properties are fetched
    :rtype: dict
    :raises RiakError: if the server does not return HTTP 200
    """
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.bucket_properties_path(bucket.name,
                                      bucket_type=bucket_type)
    status, headers, body = self._request('GET', url)
    if status == 200:
        # Decode the body to str before parsing, for consistency with
        # the sibling endpoints (get_bucket_type_props, get_buckets,
        # get_keys) which all call bytes_to_str on the response body.
        props = json.loads(bytes_to_str(body))
        return props['props']
    else:
        raise RiakError('Error getting bucket properties.')
def get_bucket_type_props(self, bucket_type):
    """
    Get properties for a bucket-type

    :param bucket_type: the bucket-type whose properties are fetched
    :rtype: dict
    """
    self._check_bucket_types(bucket_type)
    url = self.bucket_type_properties_path(bucket_type.name)
    status, headers, body = self._request('GET', url)
    if status != 200:
        raise RiakError('Error getting bucket-type properties.')
    return json.loads(bytes_to_str(body))['props']
def get_buckets(self, bucket_type=None, timeout=None):
    """
    Fetch a list of all buckets

    :param bucket_type: optional bucket-type to list within
    :param timeout: optional server-side timeout
    :rtype: list
    """
    bucket_type = self._get_bucket_type(bucket_type)
    url = self.bucket_list_path(bucket_type=bucket_type,
                                timeout=timeout)
    status, headers, body = self._request('GET', url)
    if status != 200:
        raise RiakError('Error getting buckets.')
    return json.loads(bytes_to_str(body))['buckets']
def recv_msg(self, conn, expect):
    """
    Receive one protocol-buffers message from the connection and
    decode it.

    :param conn: the connection to read from (passed to recv_pkt)
    :param expect: message code the caller expects, or falsy to skip
        the check
    :returns: (msg_code, decoded message or None)
    :raises Exception: if the server sent an error response or an
        unknown message code
    :raises RiakError: if the code does not match ``expect``
    """
    self.recv_pkt(conn)
    msg_code, = struct.unpack("B", self._inbuf[:1])
    # Server-side errors are surfaced immediately, before the expect
    # check, matching the original control flow.
    if msg_code == MSG_CODE_ERROR_RESP:
        msg = riak_pb.RpbErrorResp()
        msg.ParseFromString(self._inbuf[1:])
        raise Exception(msg.errmsg)
    # Responses with no payload decode to None.
    no_payload = (MSG_CODE_PING_RESP, MSG_CODE_SET_CLIENT_ID_RESP,
                  MSG_CODE_DEL_RESP, MSG_CODE_SET_BUCKET_RESP)
    # Dispatch table replaces the long elif chain: each payload-bearing
    # code maps to its protobuf message class.
    decoders = {
        MSG_CODE_GET_SERVER_INFO_RESP: riak_pb.RpbGetServerInfoResp,
        MSG_CODE_GET_CLIENT_ID_RESP: riak_pb.RpbGetClientIdResp,
        MSG_CODE_GET_RESP: riak_pb.RpbGetResp,
        MSG_CODE_PUT_RESP: riak_pb.RpbPutResp,
        MSG_CODE_LIST_KEYS_RESP: riak_pb.RpbListKeysResp,
        MSG_CODE_LIST_BUCKETS_RESP: riak_pb.RpbListBucketsResp,
        MSG_CODE_GET_BUCKET_RESP: riak_pb.RpbGetBucketResp,
        MSG_CODE_MAPRED_RESP: riak_pb.RpbMapRedResp,
        MSG_CODE_INDEX_RESP: riak_pb.RpbIndexResp,
        MSG_CODE_SEARCH_QUERY_RESP: riak_pb.RpbSearchQueryResp,
    }
    if msg_code in no_payload:
        msg = None
    elif msg_code in decoders:
        msg = decoders[msg_code]()
        msg.ParseFromString(self._inbuf[1:])
    else:
        raise Exception("unknown msg code %s" % msg_code)
    if expect and msg_code != expect:
        raise RiakError("unexpected protocol buffer message code: %d" %
                        msg_code)
    return msg_code, msg
def put(self, robj, w=None, dw=None, pw=None, return_body=True,
        if_none_match=False, timeout=None):
    """
    Serialize put request and deserialize response

    :param robj: the RiakObject to store
    :param w: write quorum value
    :param dw: durable-write quorum value
    :param pw: primary-write quorum value (only sent when the server
        supports quorum controls)
    :param return_body: when true, ask the server to echo the stored
        object back and refresh robj from it
    :param if_none_match: when true, only store if the key does not
        already exist
    :param timeout: server-side timeout (only sent when supported)
    :rtype: RiakObject
    """
    bucket = robj.bucket
    req = riak_pb.RpbPutReq()
    # Optional quorum/flag fields are only set when provided, so the
    # server falls back to its defaults otherwise.
    if w:
        req.w = self._encode_quorum(w)
    if dw:
        req.dw = self._encode_quorum(dw)
    if self.quorum_controls() and pw:
        req.pw = self._encode_quorum(pw)
    if return_body:
        req.return_body = 1
    if if_none_match:
        req.if_none_match = 1
    if self.client_timeouts() and timeout:
        req.timeout = timeout
    req.bucket = bucket.name
    # Key may be omitted — the server then generates one and returns
    # it in the response.
    if robj.key:
        req.key = robj.key
    if robj.vclock:
        req.vclock = robj.vclock.encode('binary')
    self._encode_content(robj, req.content)
    msg_code, resp = self._request(MSG_CODE_PUT_REQ, req,
                                   MSG_CODE_PUT_RESP)
    if resp is not None:
        # Refresh the local object from whatever the server echoed.
        if resp.HasField('key'):
            robj.key = resp.key
        if resp.HasField("vclock"):
            robj.vclock = VClock(resp.vclock, 'binary')
        if resp.content:
            self._decode_contents(resp.content, robj)
    elif not robj.key:
        # No response body AND no key: we cannot know where the data
        # was stored, so this is an error.
        raise RiakError("missing response object")
    return robj
def stream_mapred(self, inputs, query, timeout=None):
    """
    Start a chunked (streaming) MapReduce request over HTTP and
    return an iterator over the result phases.
    """
    payload = self._construct_mapred_json(inputs, query, timeout)
    status, headers, response = self._request(
        'POST', self.mapred_path(chunked=True),
        {'Content-Type': 'application/json'}, payload, stream=True)
    if status != 200:
        raise RiakError(
            'Error running MapReduce operation. Headers: %s Body: %s' %
            (repr(headers), repr(response.read())))
    return RiakHttpMapReduceStream(response)
def get_keys(self, bucket, timeout=None):
    """
    Fetch a list of keys for the bucket

    :param bucket: the bucket whose keys are listed
    :param timeout: optional server-side timeout
    :rtype: list
    """
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.key_list_path(bucket.name, bucket_type=bucket_type,
                             timeout=timeout)
    status, _, body = self._request('GET', url)
    if status != 200:
        raise RiakError('Error listing keys.')
    return json.loads(bytes_to_str(body))['keys']
def _recv(self, msglen):
    """
    Read exactly ``msglen`` bytes from the socket into a new buffer.

    Uses recv_into on a sliding memoryview so each chunk lands
    directly in the target bytearray without intermediate copies.

    :param msglen: number of bytes to read
    :rtype: bytearray
    :raises ConnectionClosed: if the peer closes before msglen bytes
        arrive
    """
    # TODO FUTURE re-use buffer
    # http://stackoverflow.com/a/15964489
    msgbuf = bytearray(msglen)
    view = memoryview(msgbuf)
    nread = 0
    toread = msglen
    while toread:
        # recv_into may return fewer bytes than requested; loop until
        # the full message is in.
        nbytes = self._socket.recv_into(view, toread)
        # https://docs.python.org/2/howto/sockets.html#using-a-socket
        # https://github.com/basho/riak-python-client/issues/399
        if nbytes == 0:
            # Zero bytes from a blocking recv means the peer closed.
            msg = 'socket recv returned zero bytes unexpectedly, ' \
                  'expected {}'.format(toread)
            ex = RiakError(msg)
            raise ConnectionClosed(ex)
        view = view[nbytes:]  # slicing views is cheap
        toread -= nbytes
        nread += nbytes
    # Defensive check; the loop above only exits normally when
    # toread reaches zero, i.e. nread == msglen.
    if nread != msglen:
        raise RiakError("Socket returned short packet %d - expected %d"
                        % (nread, msglen))
    return msgbuf
def get_search_index(self, index):
    """
    Fetch a Yokozuna search index by name over protocol buffers.

    :param index: name of the index to fetch
    :raises RiakError: with 'notfound' if the index does not exist
    """
    if not self.pb_search_admin():
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq(
        name=str_to_bytes(index))
    msg_code, resp = self._request(
        riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req,
        riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP)
    if not len(resp.index) > 0:
        raise RiakError('notfound')
    return self._decode_search_index(resp.index[0])
def search_schema_path(self, index, **options):
    """
    Builds a Yokozuna search Solr schema URL.

    :param index: a name of a yz solr schema
    :type index: string
    :param options: optional list of additional arguments
    :type options: dict
    :rtype URL string
    """
    if not self.yz_wm_schema:
        raise RiakError("Yokozuna search is unsupported by this Riak node")
    # quote_plus protects schema names containing reserved URL chars.
    return mkpath(self.yz_wm_schema, "schema", quote_plus(index),
                  **options)
def _recv_pkt(self):
    """
    Read one length-prefixed packet from the socket into self._inbuf.

    The wire format is a 4-byte big-endian length followed by that
    many payload bytes. Raises RiakError on a short read of either
    the prefix or the payload.
    """
    # Read the 4-byte length prefix, looping in case it arrives
    # fragmented.
    nmsglen = self._socket.recv(4)
    while len(nmsglen) < 4:
        x = self._socket.recv(4 - len(nmsglen))
        if not x:
            # Peer closed mid-prefix; fall through to the length check.
            break
        nmsglen += x
    if len(nmsglen) != 4:
        raise RiakError(
            "Socket returned short packet length %d - expected 4"
            % len(nmsglen))
    msglen, = struct.unpack('!i', nmsglen)
    self._inbuf_len = msglen
    # NOTE(review): accumulating recv() results onto '' assumes
    # Python 2 str sockets; on Python 3 recv returns bytes and this
    # concatenation would fail — confirm this module is Py2-only.
    self._inbuf = ''
    while len(self._inbuf) < msglen:
        # Read in chunks of at most 8 KiB until the payload is full.
        want_len = min(8192, msglen - len(self._inbuf))
        recv_buf = self._socket.recv(want_len)
        if not recv_buf:
            break
        self._inbuf += recv_buf
    if len(self._inbuf) != self._inbuf_len:
        raise RiakError("Socket returned short packet %d - expected %d"
                        % (len(self._inbuf), self._inbuf_len))
def put(self, robj, w=None, dw=None, pw=None, return_body=True,
        if_none_match=False, timeout=None):
    """
    Store a RiakObject via protocol buffers.

    :param robj: the RiakObject to store
    :param w: write quorum value
    :param dw: durable-write quorum value
    :param pw: primary-write quorum value (only sent when the server
        supports quorum controls)
    :param return_body: when true, refresh robj from the server's
        response
    :param if_none_match: when true, only store if the key does not
        already exist
    :param timeout: server-side timeout (only sent when supported)
    :rtype: RiakObject
    """
    bucket = robj.bucket
    req = riak.pb.riak_kv_pb2.RpbPutReq()
    # Optional fields are only set when provided so the server uses
    # its defaults otherwise.
    if w:
        req.w = self._encode_quorum(w)
    if dw:
        req.dw = self._encode_quorum(dw)
    if self.quorum_controls() and pw:
        req.pw = self._encode_quorum(pw)
    if return_body:
        req.return_body = 1
    if if_none_match:
        req.if_none_match = 1
    if self.client_timeouts() and timeout:
        req.timeout = timeout
    req.bucket = str_to_bytes(bucket.name)
    self._add_bucket_type(req, bucket.bucket_type)
    # Key may be omitted — the server then generates one and returns
    # it in the response.
    if robj.key:
        req.key = str_to_bytes(robj.key)
    if robj.vclock:
        req.vclock = robj.vclock.encode('binary')
    self._encode_content(robj, req.content)
    msg_code, resp = self._request(riak.pb.messages.MSG_CODE_PUT_REQ,
                                   req,
                                   riak.pb.messages.MSG_CODE_PUT_RESP)
    if resp is not None:
        # Refresh the local object from whatever the server echoed.
        if resp.HasField('key'):
            robj.key = bytes_to_str(resp.key)
        if resp.HasField("vclock"):
            robj.vclock = VClock(resp.vclock, 'binary')
        if resp.content:
            self._decode_contents(resp.content, robj)
    elif not robj.key:
        # No response body AND no key: cannot know where the data
        # landed, so treat as an error.
        raise RiakError("missing response object")
    return robj
def search_index_path(self, index=None, **options):
    """
    Builds a Yokozuna search index URL.

    :param index: optional name of a yz index
    :type index: string
    :param options: optional list of additional arguments
    :type options: dict
    :rtype URL string
    :raises RiakError: if the node does not support Yokozuna
    """
    if not self.yz_wm_index:
        raise RiakError("Yokozuna search is unsupported by this Riak node")
    if index:
        # Bug fix: the quote_plus result was previously discarded, so
        # an index name containing reserved URL characters produced a
        # malformed, unescaped path.
        index = quote_plus(index)
    return mkpath(self.yz_wm_index, "index", index, **options)
def decode_timeseries_col_type(self, col_type):
    """
    Translate a protobuf timeseries column type into its lowercase
    name.

    :param col_type: a TsColumnType enum value
    :rtype: str
    :raises RiakError: if the value matches no known column type
    """
    # NB: these match the atom names for column types
    for name in ('VARCHAR', 'SINT64', 'DOUBLE', 'TIMESTAMP', 'BOOLEAN'):
        if col_type == TsColumnType.Value(name):
            return name.lower()
    raise RiakError('could not decode column type: {}'.format(col_type))
def set_client_id(self, client_id):
    """
    Set the client id used by this connection

    :param client_id: the new client id
    :rtype: bool (True on success)
    :raises RiakError: if the server replies with an unexpected
        message code
    """
    req = riakclient_pb2.RpbSetClientIdReq()
    req.client_id = client_id
    self.maybe_connect()
    self.send_msg(MSG_CODE_SET_CLIENT_ID_REQ, req)
    msg_code, resp = self.recv_msg()
    if msg_code == MSG_CODE_SET_CLIENT_ID_RESP:
        return True
    else:
        # Bug fix: the code was previously passed as a stray second
        # exception argument; interpolate it into the message instead
        # (matching the style used elsewhere in this file).
        raise RiakError("unexpected protocol buffer message code: %d" %
                        msg_code)
def index_path(self, bucket, index, start, finish=None,
               bucket_type=None, **options):
    """
    Build the URL path for a secondary-index query.

    :param bucket: bucket name
    :param index: secondary index name
    :param start: start of the range (or exact-match value)
    :param finish: optional end of the range
    :param bucket_type: optional bucket-type name
    :raises RiakError: if the node does not support indexes
    """
    if not self.riak_kv_wm_buckets:
        raise RiakError("Indexes are unsupported by this Riak node")
    end = None if finish is None else quote_plus(str(finish))
    tail = ["index", quote_plus(index), quote_plus(str(start)), end]
    if self.riak_kv_wm_bucket_type and bucket_type:
        head = ["/types", quote_plus(bucket_type),
                "buckets", quote_plus(bucket)]
    else:
        head = ["/buckets", quote_plus(bucket)]
    return mkpath(*(head + tail), **options)
def set_bucket_type_props(self, bucket_type, props):
    """
    Set the properties on the bucket-type

    :param bucket_type: the bucket-type to modify
    :param props: dict of properties to set
    :rtype: bool (True on success)
    """
    self._check_bucket_types(bucket_type)
    url = self.bucket_type_properties_path(bucket_type.name)
    body = json.dumps({'props': props})
    # Run the request...
    status, _, _ = self._request('PUT', url,
                                 {'Content-Type': 'application/json'},
                                 body)
    if status != 204:
        raise RiakError('Error setting bucket-type properties.')
    return True
def next(self):
    """
    Return the next decoded JSON chunk's payload from the stream.

    Buffers input until a complete JSON object (terminated by '}')
    is available, decodes it, and returns the field named by
    ``self._json_field``.

    :raises RiakError: if the server reported an error in the chunk
    :raises StopIteration: when the response is exhausted
    """
    while '}' not in self.buffer and not self.response_done:
        self._read()
    if '}' not in self.buffer:
        raise StopIteration
    # Bug fix / idiom: use the str method instead of the long-
    # deprecated string-module function string.index. Behavior is
    # identical; the guard above ensures '}' is present.
    idx = self.buffer.index('}') + 1
    chunk = self.buffer[:idx]
    self.buffer = self.buffer[idx:]
    jsdict = json.loads(chunk)
    if 'error' in jsdict:
        self.close()
        raise RiakError(jsdict['error'])
    return jsdict[self._json_field]
def stream_index(self, bucket, index, startkey, endkey=None,
                 return_terms=None, max_results=None,
                 continuation=None, timeout=None, term_regex=None):
    """
    Streams a secondary index query.

    :param bucket: the bucket to query
    :param index: name of the secondary index
    :param startkey: start of the range (or exact-match value)
    :param endkey: optional end of the range
    :param return_terms: whether matched terms are returned with keys
    :param max_results: limit forwarded to the server as a query param
    :param continuation: opaque marker from a previous page of results
    :param timeout: per-request timeout, or the string 'infinity'
    :param term_regex: server-side term filter pattern
    :rtype: RiakHttpIndexStream
    """
    if not self.stream_indexes():
        raise NotImplementedError("Secondary index streaming is not "
                                  "supported on %s" %
                                  self.server_version.vstring)
    if term_regex and not self.index_term_regex():
        raise NotImplementedError("Secondary index term_regex is not "
                                  "supported on %s" %
                                  self.server_version.vstring)
    # 'infinity' is translated to 0 before being sent as a query
    # parameter — presumably the server treats 0 as "no timeout";
    # TODO confirm against server docs.
    if timeout == 'infinity':
        timeout = 0
    params = {'return_terms': return_terms, 'stream': True,
              'max_results': max_results, 'continuation': continuation,
              'timeout': timeout, 'term_regex': term_regex}
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.index_path(bucket.name, index, startkey, endkey,
                          bucket_type=bucket_type, **params)
    status, headers, response = self._request('GET', url, stream=True)
    if status == 200:
        return RiakHttpIndexStream(response, index, return_terms)
    else:
        raise RiakError('Error streaming secondary index.')
def stream_buckets(self, bucket_type=None, timeout=None):
    """
    Stream list of buckets through an iterator

    :param bucket_type: optional bucket-type to list within
    :param timeout: optional server-side timeout
    :rtype: RiakHttpBucketStream
    """
    if not self.bucket_stream():
        raise NotImplementedError('Streaming list-buckets is not '
                                  "supported on %s" %
                                  self.server_version.vstring)
    bucket_type = self._get_bucket_type(bucket_type)
    url = self.bucket_list_path(bucket_type=bucket_type,
                                buckets="stream", timeout=timeout)
    status, headers, response = self._request('GET', url, stream=True)
    if status != 200:
        raise RiakError('Error listing buckets.')
    return RiakHttpBucketStream(response)
def set_bucket_props(self, bucket, props):
    """
    Set the properties on the bucket object given

    :param bucket: the bucket to modify
    :param props: dict of properties to set
    :rtype: bool (True on success)
    :raises SecurityError: if the server returns 401
    """
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.bucket_properties_path(bucket.name,
                                      bucket_type=bucket_type)
    body = json.dumps({'props': props})
    # Run the request...
    status, _, _ = self._request('PUT', url,
                                 {'Content-Type': 'application/json'},
                                 body)
    if status == 401:
        raise SecurityError('Not authorized to set bucket properties.')
    if status != 204:
        raise RiakError('Error setting bucket properties.')
    return True
def _populate(self, result): """ Populate the object based on the return from get. If None returned, then object is not found If a tuple of vclock, contents then one or more whole revisions of the key were found If a list of vtags is returned there are multiple sibling that need to be retrieved with get. """ if result is None or result is self: return self elif type(result) is RiakObject: self.clear() self.__dict__ = result.__dict__.copy() else: raise RiakError("do not know how to handle type %s" % type(result))
def get_encoded_data(self):
    """
    Get the data encoded for storing

    Returns the raw data when encoding is disabled; otherwise uses
    the bucket's encoder for the content type, falling back to
    ``str.encode`` for string data.

    :raises RiakError: if no encoder exists for non-string data
    """
    if not self._encode_data:
        # Encoding disabled: return the data untouched.
        # (Was `== True`, an equality anti-pattern; _encode_data is
        # used as a boolean flag.)
        return self._data
    content_type = self.get_content_type()
    encoder = self._bucket.get_encoder(content_type)
    if encoder is not None:
        return encoder(self._data)
    if isinstance(self._data, basestring):
        return self._data.encode()
    # Bug fix: the format placeholder was `${0}`, which emitted a
    # literal '$' before the content type.
    raise RiakError("No encoder for non-string data "
                    "with content type {0}".format(content_type))
def clear_bucket_props(self, bucket):
    """
    reset the properties on the bucket object given

    :param bucket: the bucket whose properties are cleared
    :rtype: bool (True on success, False if the server does not
        support DELETE on bucket properties)
    :raises RiakError: on any other status
    """
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    # Bug fix: a second `url = self.bucket_properties_path(bucket.name)`
    # assignment previously rebuilt the URL without the bucket type,
    # so typed buckets cleared the wrong resource.
    url = self.bucket_properties_path(bucket.name,
                                      bucket_type=bucket_type)
    headers = {'Content-Type': 'application/json'}
    # Run the request...
    status, _, _ = self._request('DELETE', url, headers, None)
    if status == 204:
        return True
    elif status == 405:
        return False
    else:
        raise RiakError('Error %s clearing bucket properties.'
                        % status)
def add_index(self, field, value):
    """
    Tag this object with the specified field/value pair for
    indexing.

    :param field: The index field.
    :type field: string
    :param value: The index value.
    :type value: string or integer
    :rtype: RiakObject
    """
    suffix = field[-4:]
    if suffix != "_bin" and suffix != "_int":
        raise RiakError("Riak 2i fields must end with either '_bin'"
                        " or '_int'.")
    self.indexes.add((field, value))
    return self
def run(self, timeout=None):
    """
    Run the map/reduce operation synchronously. Returns a list of
    results, or a list of links if the last phase is a link phase.
    Shortcut for :meth:`riak.client.RiakClient.mapred`.

    :param timeout: Timeout in milliseconds
    :type timeout: integer, None
    :rtype: list
    """
    query, link_results_flag = self._normalize_query()
    try:
        result = self._client.mapred(self._inputs, query, timeout)
    except RiakError as e:
        # A worker_startup_failed error combined with a string-typed
        # Erlang phase function suggests a strfun was submitted to a
        # cluster that disallows them; re-raise with that hint.
        if 'worker_startup_failed' in e.value:
            for phase in self._phases:
                if phase._language == 'erlang':
                    if type(phase._function) is str:
                        raise RiakError('May have tried erlang strfun '
                                        'when not allowed\n'
                                        'original error: ' + e.value)
        raise e
    # If the last phase is NOT a link phase, then return the result.
    if not (link_results_flag
            or isinstance(self._phases[-1], RiakLinkPhase)):
        return result
    # If there are no results, then return an empty list.
    if result is None:
        return []
    # Otherwise, if the last phase IS a link phase, then convert the
    # results to link tuples.
    a = []
    for r in result:
        # Pad 2-tuples with a None tag so every link is a 3-tuple.
        if (len(r) == 2):
            link = (r[0], r[1], None)
        elif (len(r) == 3):
            link = (r[0], r[1], r[2])
        a.append(link)
    return a
def _encode_content(self, robj, rpb_content):
    """
    Fills an RpbContent message with the appropriate data and
    metadata from a RiakObject.

    :param robj: a RiakObject
    :type robj: RiakObject
    :param rpb_content: the protobuf message to fill
    :type rpb_content: riak.pb.riak_pb2.RpbContent
    """
    # Content metadata is only set when present on the object.
    if robj.content_type:
        rpb_content.content_type = str_to_bytes(robj.content_type)
    if robj.charset:
        rpb_content.charset = str_to_bytes(robj.charset)
    if robj.content_encoding:
        rpb_content.content_encoding = \
            str_to_bytes(robj.content_encoding)
    # User metadata is a flat key/value mapping.
    for uk in robj.usermeta:
        pair = rpb_content.usermeta.add()
        pair.key = str_to_bytes(uk)
        pair.value = str_to_bytes(robj.usermeta[uk])
    # Links are (bucket, key, tag) triples; reject anything else.
    for link in robj.links:
        pb_link = rpb_content.links.add()
        try:
            bucket, key, tag = link
        except ValueError:
            raise RiakError("Invalid link tuple %s" % link)
        pb_link.bucket = str_to_bytes(bucket)
        pb_link.key = str_to_bytes(key)
        if tag:
            pb_link.tag = str_to_bytes(tag)
        else:
            # A missing tag is transmitted as the empty string.
            pb_link.tag = str_to_bytes('')
    # Secondary indexes: values are always stringified on the wire.
    for field, value in robj.indexes:
        pair = rpb_content.indexes.add()
        pair.key = str_to_bytes(field)
        pair.value = str_to_bytes(str(value))
    # Python 2.x data is stored in a string
    if PY2:
        rpb_content.value = str(robj.encoded_data)
    else:
        rpb_content.value = robj.encoded_data
def set_bucket_props(self, bucket, props):
    """
    Serialize set bucket property request and deserialize response

    :param bucket: the bucket to modify
    :param props: dict of properties; only 'n_val' and 'allow_mult'
        are transmitted over this protocol
    :rtype: self
    :raises RiakError: if the server replies with an unexpected
        message code
    """
    req = riakclient_pb2.RpbSetBucketReq()
    req.bucket = bucket.get_name()
    # Only the two protobuf-supported properties are sent; anything
    # else in props is silently ignored here.
    if 'n_val' in props:
        req.props.n_val = props['n_val']
    if 'allow_mult' in props:
        req.props.allow_mult = props['allow_mult']
    self.maybe_connect()
    self.send_msg(MSG_CODE_SET_BUCKET_REQ, req)
    msg_code, resp = self.recv_msg()
    if msg_code != MSG_CODE_SET_BUCKET_RESP:
        # Bug fix: the code was previously passed as a stray second
        # exception argument; interpolate it into the message instead
        # (matching the style used elsewhere in this file).
        raise RiakError("unexpected protocol buffer message code: %d" %
                        msg_code)
    return self
def _encode_to_ts_cell(self, cell, ts_cell):
    """
    Copy a Python value into a timeseries protobuf cell.

    A ``None`` cell leaves the message untouched. NOTE: the
    isinstance order below is significant — datetime is checked
    first, and bool before int, because ``bool`` is a subclass of
    ``int`` in Python.

    :param cell: the Python value to encode
    :param ts_cell: the protobuf cell message to fill
    :raises RiakError: if the value's type cannot be serialized
    """
    if cell is not None:
        if isinstance(cell, datetime.datetime):
            ts_cell.timestamp_value = self._unix_time_millis(cell)
        elif isinstance(cell, bool):
            ts_cell.boolean_value = cell
        elif isinstance(cell, string_types):
            logging.debug("cell -> str: '%s'", cell)
            ts_cell.varchar_value = str_to_bytes(cell)
        elif (isinstance(cell, int) or
                (PY2 and isinstance(cell, long))):  # noqa
            logging.debug("cell -> int/long: '%s'", cell)
            ts_cell.sint64_value = cell
        elif isinstance(cell, float):
            ts_cell.double_value = cell
        else:
            t = type(cell)
            raise RiakError("can't serialize type '{}', value '{}'".format(
                t, cell))
def __init__(self, value):
    """Initialize the exception with *value* as its message."""
    # Initialize both bases explicitly: socket.error with an EIO
    # errno so handlers treating this as an OS-level error see a
    # sensible code, and RiakError so riak-specific handlers match.
    socket.error.__init__(self, errno.EIO, value)
    RiakError.__init__(self, value)