def maybe_riak_error(self, msg_code, data=None):
    if msg_code == kvhosting.pb.messages.MSG_CODE_ERROR_RESP:
        if data is None:
            raise RiakError('no error provided!')
        else:
            err = parse_pbuf_msg(msg_code, data)
            raise RiakError(bytes_to_str(err.errmsg))
def remove_index(self, field=None, value=None):
    """
    remove_index(field=None, value=None)

    Remove the specified field/value pair as an index on this
    object.

    :param field: The index field.
    :type field: string
    :param value: The index value.
    :type value: string or integer
    :rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
    """
    if not field and not value:
        self.indexes.clear()
    elif field and not value:
        for index in [x for x in self.indexes if x[0] == field]:
            self.indexes.remove(index)
    elif field and value:
        self.indexes.remove((field, value))
    else:
        raise RiakError("Cannot pass value without a field"
                        " name while removing index")
    return self._robject
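
# Standalone sketch of the branching in remove_index above, run against a
# plain set instead of a live RiakObject (the index pairs are made up):
demo_indexes = {('field1_bin', 'val1a'), ('field1_bin', 'val1b'),
                ('field2_int', 1000)}
demo_field = 'field1_bin'
# field given but no value: every pair under that field is removed
for pair in [x for x in demo_indexes if x[0] == demo_field]:
    demo_indexes.remove(pair)
assert demo_indexes == {('field2_int', 1000)}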
def stream_index(self, bucket, index, startkey, endkey=None,
                 return_terms=None, max_results=None, continuation=None,
                 timeout=None, term_regex=None):
    """
    Streams a secondary index query.
    """
    if not self.stream_indexes():
        raise NotImplementedError("Secondary index streaming is not "
                                  "supported on %s" %
                                  self.server_version.vstring)

    if term_regex and not self.index_term_regex():
        raise NotImplementedError("Secondary index term_regex is not "
                                  "supported on %s" %
                                  self.server_version.vstring)

    if timeout == 'infinity':
        timeout = 0

    params = {'return_terms': return_terms, 'stream': True,
              'max_results': max_results, 'continuation': continuation,
              'timeout': timeout, 'term_regex': term_regex}
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.index_path(bucket.name, index, startkey, endkey,
                          bucket_type=bucket_type, **params)
    status, headers, response = self._request('GET', url, stream=True)

    if status == 200:
        return HttpIndexStream(response, index, return_terms)
    else:
        raise RiakError('Error streaming secondary index.')
def encode_timeseries_put(self, tsobj):
    """
    Fills a TsPutReq message with the appropriate data and
    metadata from a TsObject.

    :param tsobj: a TsObject
    :type tsobj: TsObject
    :rtype: Msg
    """
    req = kvhosting.pb.riak_ts_pb2.TsPutReq()
    req.table = str_to_bytes(tsobj.table.name)

    if tsobj.columns:
        raise NotImplementedError("columns are not implemented yet")

    if tsobj.rows and isinstance(tsobj.rows, list):
        for row in tsobj.rows:
            tsr = req.rows.add()  # NB: type TsRow
            if not isinstance(row, list):
                raise ValueError("TsObject row must be a list of values")
            for cell in row:
                tsc = tsr.cells.add()  # NB: type TsCell
                self.encode_to_ts_cell(cell, tsc)
    else:
        raise RiakError("TsObject requires a list of rows")

    mc = kvhosting.pb.messages.MSG_CODE_TS_PUT_REQ
    rc = kvhosting.pb.messages.MSG_CODE_TS_PUT_RESP
    return Msg(mc, req.SerializeToString(), rc)
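
# Shape sketch for the encoder above: tsobj.rows must be a list of lists,
# one inner list per row, one cell per column. The table contents below are
# hypothetical, not part of this module:
#
#     rows = [
#         ['hash1', 'user2', 1420113600000, 'wind', 102.3],
#         ['hash1', 'user2', 1420117200000, 'rain', 79.0],
#     ]
#
# Each inner list becomes one TsRow and each element one TsCell; a non-list
# row raises ValueError, and a missing or non-list rows attribute raises
# RiakError before any row is encoded.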
def datatypes_path(self, bucket_type, bucket, key=None, **options):
    if not self.bucket_types():
        raise RiakError("Datatypes are unsupported by this Riak node")
    if key:
        key = quote_plus(key)
    return mkpath("/types", quote_plus(bucket_type), "buckets",
                  quote_plus(bucket), "datatypes", key, **options)
def solr_select_path(self, index, query, **options):
    if not self.riak_solr_searcher_wm and not self.yz_wm_search:
        raise RiakError("Search is unsupported by this Riak node")
    qs = {'q': query, 'wt': 'json', 'fl': '*,score'}
    qs.update(options)
    if index:
        index = quote_plus(index)
    return mkpath("/solr", index, "select", **qs)
def ts_delete(self, table, key):
    msg_code = kvhosting.pb.messages.MSG_CODE_TS_DEL_REQ
    codec = self._get_codec(msg_code)
    msg = codec.encode_timeseries_keyreq(table, key, is_delete=True)
    resp_code, resp = self._request(msg, codec)
    if resp is not None:
        return True
    else:
        raise RiakError("missing response object")
def stream_keys(self, bucket, timeout=None):
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.key_list_path(bucket.name, bucket_type=bucket_type,
                             keys='stream', timeout=timeout)
    status, headers, response = self._request('GET', url, stream=True)

    if status == 200:
        return HttpKeyStream(response)
    else:
        raise RiakError('Error listing keys.')
def decode_put(self, robj, resp):
    if resp is not None:
        if resp.HasField('key'):
            robj.key = bytes_to_str(resp.key)
        if resp.HasField("vclock"):
            robj.vclock = VClock(resp.vclock, 'binary')
        if resp.content:
            self.decode_contents(resp.content, robj)
    elif not robj.key:
        raise RiakError("missing response object")
    return robj
def _recv(self, msglen):
    # TODO FUTURE re-use buffer
    # http://stackoverflow.com/a/15964489
    msgbuf = bytearray(msglen)
    view = memoryview(msgbuf)
    nread = 0
    toread = msglen
    while toread:
        nbytes = self._socket.recv_into(view, toread)
        # https://docs.python.org/2/howto/sockets.html#using-a-socket
        # https://github.com/basho/riak-python-client/issues/399
        if nbytes == 0:
            ex = RiakError('recv_into returned zero bytes unexpectedly')
            raise BadResource(ex)
        view = view[nbytes:]  # slicing views is cheap
        toread -= nbytes
        nread += nbytes
    if nread != msglen:
        raise RiakError("Socket returned short packet %d - expected %d"
                        % (nread, msglen))
    return msgbuf
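
# Standalone sketch of the same exact-length read loop, exercised over a
# socketpair so it runs without a Riak node (recv_exactly is a demo helper,
# not part of this module):
import socket

def recv_exactly(sock, msglen):
    msgbuf = bytearray(msglen)
    view = memoryview(msgbuf)
    toread = msglen
    while toread:
        nbytes = sock.recv_into(view, toread)
        if nbytes == 0:
            raise ConnectionError('peer closed mid-message')
        view = view[nbytes:]  # advance past the bytes already filled in
        toread -= nbytes
    return msgbuf

a, b = socket.socketpair()
a.sendall(b'0123456789')
assert recv_exactly(b, 10) == bytearray(b'0123456789')
a.close()
b.close()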
def get_search_index(self, index):
    if not self.pb_search_admin():
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    msg_code = kvhosting.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ
    codec = self._get_codec(msg_code)
    msg = codec.encode_get_search_index(index)
    resp_code, resp = self._request(msg, codec)
    if len(resp.index) > 0:
        return codec.decode_search_index(resp.index[0])
    else:
        raise RiakError('notfound')
def _to_link_header(self, link):
    """
    Convert the link tuple to a link header string. Used internally.
    """
    try:
        bucket, key, tag = link
    except ValueError:
        raise RiakError("Invalid link tuple %s" % link)
    tag = tag if tag is not None else bucket
    url = self.object_path(bucket, key)
    header = '<%s>; riaktag="%s"' % (url, tag)
    return header
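
# Illustrative output (the exact URL shape depends on object_path; the
# bucket/key/tag values below are made up):
#
#     _to_link_header(('users', 'jane', 'friend'))
#     -> '</buckets/users/keys/jane>; riaktag="friend"'
#
#     _to_link_header(('users', 'bob', None))  # tag falls back to the bucket
#     -> '</buckets/users/keys/bob>; riaktag="users"'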
def get_bucket_type_props(self, bucket_type):
    """
    Get properties for a bucket-type
    """
    self._check_bucket_types(bucket_type)
    url = self.bucket_type_properties_path(bucket_type.name)
    status, headers, body = self._request('GET', url)

    if status == 200:
        props = json.loads(bytes_to_str(body))
        return props['props']
    else:
        raise RiakError('Error getting bucket-type properties.')
def next(self):
    message = super(HttpIndexStream, self).next()
    payload = json.loads(message.get_payload())
    if u'error' in payload:
        raise RiakError(payload[u'error'])
    elif u'keys' in payload:
        return payload[u'keys']
    elif u'results' in payload:
        structs = payload[u'results']
        # Format is {"results":[{"2ikey":"primarykey"}, ...]}
        return [self._decode_pair(list(d.items())[0]) for d in structs]
    elif u'continuation' in payload:
        return CONTINUATION(payload[u'continuation'])
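
# The three well-formed chunk shapes the method above distinguishes; a
# runnable check with made-up sample payloads, no Riak connection needed:
import json

for chunk in ('{"keys": ["key1", "key2"]}',
              '{"results": [{"age_int": "key1"}]}',
              '{"continuation": "g20AAAAEa2V5MQ=="}'):
    payload = json.loads(chunk)
    assert len(payload) == 1  # exactly one of keys/results/continuation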
def search_schema_path(self, index, **options):
    """
    Builds a Yokozuna search Solr schema URL.

    :param index: a name of a yz solr schema
    :type index: string
    :param options: optional list of additional arguments
    :type options: dict
    :rtype: URL string
    """
    if not self.yz_wm_schema:
        raise RiakError("Yokozuna search is unsupported by this Riak node")
    return mkpath(self.yz_wm_schema, "schema", quote_plus(index),
                  **options)
def get_buckets(self, bucket_type=None, timeout=None):
    """
    Fetch a list of all buckets
    """
    bucket_type = self._get_bucket_type(bucket_type)
    url = self.bucket_list_path(bucket_type=bucket_type, timeout=timeout)
    status, headers, body = self._request('GET', url)

    if status == 200:
        props = json.loads(bytes_to_str(body))
        return props['buckets']
    else:
        raise RiakError('Error getting buckets.')
def stream_mapred(self, inputs, query, timeout=None):
    content = self._construct_mapred_json(inputs, query, timeout)

    url = self.mapred_path(chunked=True)
    reqheaders = {'Content-Type': 'application/json'}
    status, headers, response = self._request('POST', url, reqheaders,
                                              content, stream=True)

    if status == 200:
        return HttpMapReduceStream(response)
    else:
        raise RiakError(
            'Error running MapReduce operation. Headers: %s Body: %s' %
            (repr(headers), repr(response.read())))
def get_keys(self, bucket, timeout=None):
    """
    Fetch a list of keys for the bucket
    """
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.key_list_path(bucket.name, bucket_type=bucket_type,
                             timeout=timeout)
    status, _, body = self._request('GET', url)

    if status == 200:
        props = json.loads(bytes_to_str(body))
        return props['keys']
    else:
        raise RiakError('Error listing keys.')
def decode_timeseries_col_type(self, col_type):
    # NB: these match the atom names for column types
    if col_type == TsColumnType.Value('VARCHAR'):
        return 'varchar'
    elif col_type == TsColumnType.Value('SINT64'):
        return 'sint64'
    elif col_type == TsColumnType.Value('DOUBLE'):
        return 'double'
    elif col_type == TsColumnType.Value('TIMESTAMP'):
        return 'timestamp'
    elif col_type == TsColumnType.Value('BOOLEAN'):
        return 'boolean'
    else:
        msg = 'could not decode column type: {}'.format(col_type)
        raise RiakError(msg)
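
# Design note: the chain above is equivalent to a table lookup. A sketch of
# the same mapping (same five atoms; TsColumnType comes from the generated
# riak_ts_pb2 module) would be:
#
#     names = {TsColumnType.Value(n): n.lower()
#              for n in ('VARCHAR', 'SINT64', 'DOUBLE',
#                        'TIMESTAMP', 'BOOLEAN')}
#     if col_type not in names:
#         raise RiakError('could not decode column type: {}'.format(col_type))
#     return names[col_type]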
def search_index_path(self, index=None, **options):
    """
    Builds a Yokozuna search index URL.

    :param index: optional name of a yz index
    :type index: string
    :param options: optional list of additional arguments
    :type options: dict
    :rtype: URL string
    """
    if not self.yz_wm_index:
        raise RiakError("Yokozuna search is unsupported by this Riak node")
    if index:
        index = quote_plus(index)
    return mkpath(self.yz_wm_index, "index", index, **options)
def set_bucket_type_props(self, bucket_type, props):
    """
    Set the properties on the bucket-type
    """
    self._check_bucket_types(bucket_type)
    url = self.bucket_type_properties_path(bucket_type.name)
    headers = {'Content-Type': 'application/json'}
    content = json.dumps({'props': props})

    # Run the request...
    status, _, _ = self._request('PUT', url, headers, content)

    if status != 204:
        raise RiakError('Error setting bucket-type properties.')
    return True
def next(self):
    # Python 2.x Version
    while '}' not in self.buffer and not self.response_done:
        self._read()

    if '}' in self.buffer:
        idx = self.buffer.index('}') + 1
        chunk = self.buffer[:idx]
        self.buffer = self.buffer[idx:]
        jsdict = json.loads(chunk)
        if 'error' in jsdict:
            self.close()
            raise RiakError(jsdict['error'])
        field = jsdict[self._json_field]
        return field
    else:
        raise StopIteration
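
# Standalone sketch of the '}'-delimited scan above. The simple delimiter
# search is safe only because these streams emit flat, unnested JSON objects
# such as {"keys": [...]} (sample data below is made up):
import json

demo_buffer = '{"keys": ["a"]}{"keys": ["b", "c"]}'
demo_chunks = []
while '}' in demo_buffer:
    end = demo_buffer.index('}') + 1
    demo_chunks.append(json.loads(demo_buffer[:end]))
    demo_buffer = demo_buffer[end:]
assert demo_chunks == [{'keys': ['a']}, {'keys': ['b', 'c']}]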
def stream_buckets(self, bucket_type=None, timeout=None):
    """
    Stream list of buckets through an iterator
    """
    if not self.bucket_stream():
        raise NotImplementedError("Streaming list-buckets is not "
                                  "supported on %s" %
                                  self.server_version.vstring)
    bucket_type = self._get_bucket_type(bucket_type)
    url = self.bucket_list_path(bucket_type=bucket_type,
                                buckets="stream", timeout=timeout)
    status, headers, response = self._request('GET', url, stream=True)

    if status == 200:
        return HttpBucketStream(response)
    else:
        raise RiakError('Error listing buckets.')
def set_bucket_props(self, bucket, props):
    """
    Set the properties on the bucket object given
    """
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.bucket_properties_path(bucket.name,
                                      bucket_type=bucket_type)
    headers = {'Content-Type': 'application/json'}
    content = json.dumps({'props': props})

    # Run the request...
    status, _, body = self._request('PUT', url, headers, content)

    if status == 401:
        raise SecurityError('Not authorized to set bucket properties.')
    elif status != 204:
        raise RiakError('Error setting bucket properties.')
    return True
def encode_content(self, robj, rpb_content):
    """
    Fills an RpbContent message with the appropriate data and
    metadata from a RiakObject.

    :param robj: a RiakObject
    :type robj: RiakObject
    :param rpb_content: the protobuf message to fill
    :type rpb_content: kvhosting.pb.riak_pb2.RpbContent
    """
    if robj.content_type:
        rpb_content.content_type = str_to_bytes(robj.content_type)
    if robj.charset:
        rpb_content.charset = str_to_bytes(robj.charset)
    if robj.content_encoding:
        rpb_content.content_encoding = str_to_bytes(robj.content_encoding)
    for uk in robj.usermeta:
        pair = rpb_content.usermeta.add()
        pair.key = str_to_bytes(uk)
        pair.value = str_to_bytes(robj.usermeta[uk])
    for link in robj.links:
        pb_link = rpb_content.links.add()
        try:
            bucket, key, tag = link
        except ValueError:
            raise RiakError("Invalid link tuple %s" % link)
        pb_link.bucket = str_to_bytes(bucket)
        pb_link.key = str_to_bytes(key)
        if tag:
            pb_link.tag = str_to_bytes(tag)
        else:
            pb_link.tag = str_to_bytes('')
    for field, value in robj.indexes:
        pair = rpb_content.indexes.add()
        pair.key = str_to_bytes(field)
        pair.value = str_to_bytes(str(value))

    # Python 2.x data is stored in a string
    if six.PY2:
        rpb_content.value = str(robj.encoded_data)
    else:
        rpb_content.value = robj.encoded_data
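
# Input shape sketch for the encoder above (values are made up): links are
# (bucket, key, tag) tuples and indexes are (field, value) pairs, e.g.:
#
#     robj.links = [('users', 'jane', 'friend'), ('users', 'bob', None)]
#     robj.indexes = {('email_bin', 'jane@example.com'), ('age_int', 34)}
#
# Integer index values pass through str() above, so both pairs encode as
# binary field/value strings on the wire.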
def run(self, timeout=None):
    """
    Run the map/reduce operation synchronously. Returns a list of
    results, or a list of links if the last phase is a link phase.
    Shortcut for :meth:`riak.client.RiakClient.mapred`.

    :param timeout: Timeout in milliseconds
    :type timeout: integer, None
    :rtype: list
    """
    query, link_results_flag = self._normalize_query()

    try:
        result = self._client.mapred(self._inputs, query, timeout)
    except RiakError as e:
        if 'worker_startup_failed' in e.value:
            for phase in self._phases:
                if phase._language == 'erlang':
                    if type(phase._function) is str:
                        raise RiakError('May have tried erlang strfun '
                                        'when not allowed\n'
                                        'original error: ' + e.value)
        raise e

    # If the last phase is NOT a link phase, then return the result.
    if not (link_results_flag or
            isinstance(self._phases[-1], RiakLinkPhase)):
        return result

    # If there are no results, then return an empty list.
    if result is None:
        return []

    # Otherwise, if the last phase IS a link phase, then convert the
    # results to link tuples.
    a = []
    for r in result:
        if (len(r) == 2):
            link = (r[0], r[1], None)
        elif (len(r) == 3):
            link = (r[0], r[1], r[2])
        a.append(link)
    return a
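
# Usage sketch (hypothetical client, bucket, and key names; assumes the
# RiakMapReduce construction API this method is written against):
#
#     mr = RiakMapReduce(client)
#     mr.add('mybucket', 'somekey')
#     mr.map('function(v) { return [v.key]; }')
#     keys = mr.run(timeout=5000)  # synchronous; returns last-phase results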
def index_path(self, bucket, index, start, finish=None, bucket_type=None,
               **options):
    if not self.riak_kv_wm_buckets:
        raise RiakError("Indexes are unsupported by this Riak node")
    if finish is not None:
        finish = quote_plus(str(finish))
    if self.riak_kv_wm_bucket_type and bucket_type:
        return mkpath("/types", quote_plus(bucket_type),
                      "buckets", quote_plus(bucket),
                      "index", quote_plus(index),
                      quote_plus(str(start)), finish, **options)
    else:
        return mkpath("/buckets", quote_plus(bucket),
                      "index", quote_plus(index),
                      quote_plus(str(start)), finish, **options)
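
# Illustrative paths (shapes follow the two mkpath branches above; the
# bucket, index, and type names are made up):
#
#     index_path('users', 'age_int', 21, 65, bucket_type='indexed')
#     -> '/types/indexed/buckets/users/index/age_int/21/65'
#
#     index_path('users', 'email_bin', 'foo@example.com')
#     -> '/buckets/users/index/email_bin/foo%40example.com'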
def mapred(self, inputs, query, timeout=None):
    """
    Run a MapReduce query.
    """
    # Construct the job, optionally set the timeout...
    content = self._construct_mapred_json(inputs, query, timeout)

    # Do the request...
    url = self.mapred_path()
    headers = {'Content-Type': 'application/json'}
    status, headers, body = self._request('POST', url, headers, content)

    # Make sure the expected status code came back...
    if status != 200:
        raise RiakError(
            'Error running MapReduce operation. Headers: %s Body: %s' %
            (repr(headers), repr(body)))

    result = json.loads(bytes_to_str(body))
    return result
def add_index(self, field, value):
    """
    add_index(field, value)

    Tag this object with the specified field/value pair for
    indexing.

    :param field: The index field.
    :type field: string
    :param value: The index value.
    :type value: string or integer
    :rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
    """
    if field[-4:] not in ("_bin", "_int"):
        raise RiakError("Riak 2i fields must end with either '_bin'"
                        " or '_int'.")

    self.indexes.add((field, value))

    return self._robject
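
# Usage sketch (hypothetical object and values; secondary indexes also
# require a backend with 2i support, e.g. leveldb):
#
#     obj.add_index('email_bin', 'jane@example.com')
#     obj.add_index('age_int', 34)       # accepted: ends in '_int'
#     obj.add_index('nickname', 'jd')    # raises RiakError: bad suffix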
def clear_bucket_props(self, bucket):
    """
    Reset the properties on the bucket object given
    """
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.bucket_properties_path(bucket.name,
                                      bucket_type=bucket_type)
    headers = {'Content-Type': 'application/json'}

    # Run the request...
    status, _, _ = self._request('DELETE', url, headers, None)

    if status == 204:
        return True
    elif status == 405:
        return False
    else:
        raise RiakError('Error %s clearing bucket properties.' % status)