Example 1
0
class PQ(object):
    """Convenient queue manager."""

    table = 'queue'

    template_path = os.path.dirname(__file__)

    def __init__(self, *args, **kwargs):
        # The queue implementation may be swapped via the ``queue_class``
        # keyword; remaining args/kwargs are forwarded to each queue.
        self.queue_class = kwargs.pop('queue_class', Queue)
        self.params = args, kwargs
        self.queues = WeakValueDictionary()

    def __getitem__(self, name):
        """Return the queue for *name*, creating and caching it on demand."""
        queue = self.queues.get(name)
        if queue is None:
            args, kwargs = self.params
            queue = self.queues.setdefault(
                name, self.queue_class(name, *args, **kwargs))
        return queue

    def close(self):
        """Close the shared connection via the unnamed default queue."""
        self[''].close()

    def create(self):
        """Run the bundled ``create.sql`` template to set up the table."""
        queue = self['']
        script = os.path.join(self.template_path, 'create.sql')

        with open(script, 'r') as sql_file:
            sql = sql_file.read()

        with queue._transaction() as cursor:
            cursor.execute(sql, {'name': Literal(queue.table)})
Example 2
0
File: __init__.py Project: omad/pq
class PQ(object):
    """Convenient queue manager."""

    table = 'queue'

    # Default queue implementation; overridable on the class, per instance,
    # or through the ``queue_class`` keyword argument at construction time.
    queue_class = Queue

    template_path = os.path.dirname(__file__)

    def __init__(self, *args, **kwargs):
        self.queue_class = kwargs.pop('queue_class', self.queue_class)
        self.params = args, kwargs
        self.queues = WeakValueDictionary()

    def __getitem__(self, name):
        """Return the queue for *name*, creating and caching it on demand."""
        queue = self.queues.get(name)
        if queue is None:
            args, kwargs = self.params
            queue = self.queues.setdefault(
                name, self.queue_class(name, *args, **kwargs))
        return queue

    def close(self):
        """Close the shared connection via the unnamed default queue."""
        self[''].close()

    def create(self):
        """Run the bundled ``create.sql`` template to set up the table."""
        queue = self['']
        script = os.path.join(self.template_path, 'create.sql')

        with open(script, 'r') as sql_file:
            sql = sql_file.read()

        with queue._transaction() as cursor:
            cursor.execute(sql, {'name': Literal(queue.table)})
Example 3
0
class PQ(object):
    """Convenient queue manager."""

    table = 'queue'

    def __init__(self, *args, **kwargs):
        # Args/kwargs are forwarded verbatim to every Queue constructed.
        self.params = args, kwargs
        self.queues = WeakValueDictionary()

    def __getitem__(self, name):
        """Return the queue for *name*, creating and caching it on demand."""
        queue = self.queues.get(name)
        if queue is None:
            args, kwargs = self.params
            queue = self.queues.setdefault(name, Queue(name, *args, **kwargs))
        return queue

    def close(self):
        """Close the shared connection via the unnamed default queue."""
        self[''].close()

    def create(self):
        """Run the ``create`` SQL template to set up the table."""
        queue = self['']
        conn = queue._conn()
        sql = _read_sql('create')
        with transaction(conn) as cursor:
            cursor.execute(sql, {'name': Literal(queue.table)})
Example 4
0
class Throttle:
    """Limit request concurrency globally and per host.

    One semaphore caps the total number of in-flight requests; each host
    additionally gets its own semaphore capping requests to that host.
    """

    def __init__(self, concurrency: int, concurrency_per_host: int,
                 loop: asyncio.AbstractEventLoop) -> None:
        # Global cap on concurrent requests across all hosts.
        # NOTE(review): the ``loop`` argument to asyncio.Semaphore was
        # deprecated in Python 3.8 and removed in 3.10 -- confirm the
        # Python version this targets.
        self._concurrency_semaphore = asyncio.Semaphore(concurrency, loop=loop)
        # Factory producing a fresh per-host semaphore on demand.
        self._host_semaphore_factory = partial(asyncio.Semaphore,
                                               concurrency_per_host,
                                               loop=loop)
        # Weak values: a host's semaphore is discarded automatically once
        # no in-flight request holds a reference to it.
        self._hosts = WeakValueDictionary()

    @asynccontextmanager
    async def request(self, host: str) -> None:
        # Used as ``async with throttle.request(host):``.  ``setdefault``
        # returns the live semaphore for *host* if one exists, otherwise it
        # stores and returns the freshly built one; the local references
        # keep the weak value alive for the duration of the request.
        semaphore = self._host_semaphore_factory()
        host_semaphore = self._hosts.setdefault(host, semaphore)
        async with self._concurrency_semaphore, host_semaphore:
            yield
Example 5
0
class RiakClient(RiakMapReduceChain, RiakClientOperations):
    """
    The ``RiakClient`` object holds information necessary to connect
    to Riak. Requests can be made to Riak directly through the client
    or by using the methods on related objects.
    """

    #: The supported protocols
    PROTOCOLS = ['http', 'pbc']

    def __init__(self,
                 protocol='pbc',
                 transport_options=None,
                 nodes=None,
                 credentials=None,
                 multiget_pool_size=None,
                 multiput_pool_size=None,
                 **kwargs):
        """
        Construct a new ``RiakClient`` object.

        :param protocol: the preferred protocol, defaults to 'pbc'
        :type protocol: string
        :param nodes: a list of node configurations,
           where each configuration is a dict containing the keys
           'host', 'http_port', and 'pb_port'
        :type nodes: list
        :param transport_options: Optional key-value args to pass to
                                  the transport constructor
        :type transport_options: dict
        :param credentials: optional object of security info
        :type credentials: :class:`~riak.security.SecurityCreds` or dict
        :param multiget_pool_size: the number of threads to use in
           :meth:`multiget` operations. Defaults to a factor of the number of
           CPUs in the system
        :type multiget_pool_size: int
        :param multiput_pool_size: the number of threads to use in
           :meth:`multiput` operations. Defaults to a factor of the number of
           CPUs in the system
        :type multiput_pool_size: int
        """
        # A mutable default argument ({}) is shared between all calls;
        # use None as the sentinel and build a fresh dict per instance.
        if transport_options is None:
            transport_options = {}

        # Copy so the caller's kwargs dict is not consumed by _create_node.
        kwargs = kwargs.copy()

        if nodes is None:
            # With no explicit node list, the remaining keyword arguments
            # are treated as a single node configuration.
            self.nodes = [
                self._create_node(kwargs),
            ]
        else:
            self.nodes = [self._create_node(n) for n in nodes]

        self._multiget_pool_size = multiget_pool_size
        self._multiput_pool_size = multiput_pool_size
        self.protocol = protocol or 'pbc'
        self._resolver = None
        self._credentials = self._create_credentials(credentials)
        self._http_pool = HttpPool(self, **transport_options)
        self._tcp_pool = TcpPool(self, **transport_options)
        self._closed = False

        # Content-type codecs differ between Python 2 and 3: on Python 3
        # payloads are bytes, so JSON and plain text go through the binary
        # helpers.
        if PY2:
            self._encoders = {
                'application/json': default_encoder,
                'text/json': default_encoder,
                'text/plain': str
            }
            self._decoders = {
                'application/json': json.loads,
                'text/json': json.loads,
                'text/plain': str
            }
        else:
            self._encoders = {
                'application/json': binary_json_encoder,
                'text/json': binary_json_encoder,
                'text/plain': str_to_bytes,
                'binary/octet-stream': binary_encoder_decoder
            }
            self._decoders = {
                'application/json': binary_json_decoder,
                'text/json': binary_json_decoder,
                'text/plain': bytes_to_str,
                'binary/octet-stream': binary_encoder_decoder
            }
        # Weak caches: entries disappear once callers drop their references.
        self._buckets = WeakValueDictionary()
        self._bucket_types = WeakValueDictionary()
        self._tables = WeakValueDictionary()

    def __del__(self):
        # Best-effort cleanup at garbage collection; close() is idempotent
        # thanks to the self._closed guard.
        self.close()

    def _get_protocol(self):
        return self._protocol

    def _set_protocol(self, value):
        # Reject anything outside the supported protocol set early.
        if value not in self.PROTOCOLS:
            raise ValueError("protocol option is invalid, must be one of %s" %
                             repr(self.PROTOCOLS))
        self._protocol = value

    protocol = property(_get_protocol,
                        _set_protocol,
                        doc="""
                        Which protocol to prefer, one of
                        :attr:`PROTOCOLS
                        <riak.client.RiakClient.PROTOCOLS>`. Please
                        note that when one protocol is selected, the
                        other protocols MAY NOT attempt to connect.
                        Changing to another protocol will cause a
                        connection on the next request.

                        Some requests are only valid over ``'http'``,
                        and will always be sent via
                        those transports, regardless of which protocol
                        is preferred.
                         """)

    def _get_resolver(self):
        # Fall back to the library default when no resolver was set.
        return self._resolver or default_resolver

    def _set_resolver(self, value):
        # Accept None (reset to default) or any callable.
        if value is None or callable(value):
            self._resolver = value
        else:
            raise TypeError("resolver is not a function")

    resolver = property(
        _get_resolver,
        _set_resolver,
        doc=""" The sibling-resolution function for this client.
                        Defaults to :func:`riak.resolver.default_resolver`.""")

    def _get_client_id(self):
        with self._transport() as transport:
            return transport.client_id

    def _set_client_id(self, client_id):
        # Propagate the new id to every pooled connection of both kinds.
        for http in self._http_pool:
            http.client_id = client_id
        for pb in self._tcp_pool:
            pb.client_id = client_id

    client_id = property(_get_client_id,
                         _set_client_id,
                         doc="""The client ID for this client instance""")

    def get_encoder(self, content_type):
        """
        Get the encoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :rtype: function
        """
        return self._encoders.get(content_type)

    def set_encoder(self, content_type, encoder):
        """
        Set the encoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :param encoder: an encoding function, takes a single object
            argument and returns encoded data
        :type encoder: function
        """
        self._encoders[content_type] = encoder

    def get_decoder(self, content_type):
        """
        Get the decoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :rtype: function
        """
        return self._decoders.get(content_type)

    def set_decoder(self, content_type, decoder):
        """
        Set the decoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :param decoder: a decoding function, takes encoded data and
            returns a Python type
        :type decoder: function
        """
        self._decoders[content_type] = decoder

    def bucket(self, name, bucket_type='default'):
        """
        Get the bucket by the specified name. Since buckets always exist,
        this will always return a
        :class:`RiakBucket <riak.bucket.RiakBucket>`.

        If you are using a bucket that is contained in a bucket type, it is
        preferable to access it from the bucket type object::

            # Preferred:
            client.bucket_type("foo").bucket("bar")

            # Equivalent, but not preferred:
            client.bucket("bar", bucket_type="foo")

        :param name: the bucket name
        :type name: str
        :param bucket_type: the parent bucket-type
        :type bucket_type: :class:`BucketType <riak.bucket.BucketType>`
              or str
        :rtype: :class:`RiakBucket <riak.bucket.RiakBucket>`

        """
        if not isinstance(name, string_types):
            raise TypeError('Bucket name must be a string')

        if isinstance(bucket_type, string_types):
            bucket_type = self.bucket_type(bucket_type)
        elif not isinstance(bucket_type, BucketType):
            raise TypeError('bucket_type must be a string '
                            'or riak.bucket.BucketType')

        # Weak cache keyed by (type, name); setdefault returns the live
        # bucket if one already exists.
        return self._buckets.setdefault((bucket_type, name),
                                        RiakBucket(self, name, bucket_type))

    def bucket_type(self, name):
        """
        Gets the bucket-type by the specified name. Bucket-types do
        not always exist (unlike buckets), but this will always return
        a :class:`BucketType <riak.bucket.BucketType>` object.

        :param name: the bucket-type name
        :type name: str
        :rtype: :class:`BucketType <riak.bucket.BucketType>`
        """
        if not isinstance(name, string_types):
            raise TypeError('BucketType name must be a string')

        if name in self._bucket_types:
            return self._bucket_types[name]
        else:
            btype = BucketType(self, name)
            self._bucket_types[name] = btype
            return btype

    def table(self, name):
        """
        Gets the table by the specified name. Tables do
        not always exist (unlike buckets), but this will always return
        a :class:`Table <riak.table.Table>` object.

        :param name: the table name
        :type name: str
        :rtype: :class:`Table <riak.table.Table>`
        """
        if not isinstance(name, string_types):
            raise TypeError('Table name must be a string')

        if name in self._tables:
            return self._tables[name]
        else:
            table = Table(self, name)
            self._tables[name] = table
            return table

    def close(self):
        """
        Iterate through all of the connections and close each one.
        """
        if not self._closed:
            self._closed = True
            self._stop_multi_pools()
            if self._http_pool is not None:
                self._http_pool.clear()
                self._http_pool = None
            if self._tcp_pool is not None:
                self._tcp_pool.clear()
                self._tcp_pool = None

    def _stop_multi_pools(self):
        # Stop and drop the multi-get/put worker pools, if running.
        # NOTE(review): reading the lazy_property appears to create a pool
        # on first access when a size was configured -- confirm intended.
        if self._multiget_pool:
            self._multiget_pool.stop()
            self._multiget_pool = None
        if self._multiput_pool:
            self._multiput_pool.stop()
            self._multiput_pool = None

    def _create_node(self, n):
        """Coerce a node spec (RiakNode, 3-tuple, or dict) to a RiakNode."""
        if isinstance(n, RiakNode):
            return n
        # Compare with ==, not 'is': identity comparison of ints relies on
        # CPython's small-int caching and is a SyntaxWarning on 3.8+.
        elif isinstance(n, tuple) and len(n) == 3:
            host, http_port, pb_port = n
            return RiakNode(host=host, http_port=http_port, pb_port=pb_port)
        elif isinstance(n, dict):
            return RiakNode(**n)
        else:
            raise TypeError("%s is not a valid node configuration" % repr(n))

    def _create_credentials(self, n):
        """
        Create security credentials, if necessary.
        """
        if not n:
            return n
        elif isinstance(n, SecurityCreds):
            return n
        elif isinstance(n, dict):
            return SecurityCreds(**n)
        else:
            raise TypeError("%s is not a valid security configuration" %
                            repr(n))

    def _choose_node(self, nodes=None):
        """
        Chooses a random node from the list of nodes in the client,
        taking into account each node's recent error rate.
        :rtype RiakNode
        """
        if not nodes:
            nodes = self.nodes

        # Prefer nodes which have gone a reasonable time without
        # errors
        def _error_rate(node):
            return node.error_rate.value()

        good = [n for n in nodes if _error_rate(n) < 0.1]

        if not good:
            # Fall back to a minimally broken node
            return min(nodes, key=_error_rate)
        else:
            return random.choice(good)

    @lazy_property
    def _multiget_pool(self):
        # Thread pool for multiget; only built when a size was configured.
        if self._multiget_pool_size:
            return MultiGetPool(self._multiget_pool_size)
        else:
            return None

    @lazy_property
    def _multiput_pool(self):
        # Thread pool for multiput; only built when a size was configured.
        if self._multiput_pool_size:
            return MultiPutPool(self._multiput_pool_size)
        else:
            return None

    def __hash__(self):
        # Two clients hash equal iff they target the same set of nodes.
        return hash(
            frozenset([(n.host, n.http_port, n.pb_port) for n in self.nodes]))

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return hash(self) == hash(other)
        else:
            return False

    def __ne__(self, other):
        if isinstance(other, self.__class__):
            return hash(self) != hash(other)
        else:
            return True
class RiakClient(RiakMapReduceChain, RiakClientOperations):
    """
    The ``RiakClient`` object holds information necessary to connect
    to Riak. Requests can be made to Riak directly through the client
    or by using the methods on related objects.
    """

    #: The supported protocols
    PROTOCOLS = ['http', 'pbc']

    def __init__(self, protocol='pbc', transport_options=None,
                 nodes=None, credentials=None,
                 multiget_pool_size=None, multiput_pool_size=None,
                 **kwargs):
        """
        Construct a new ``RiakClient`` object.

        :param protocol: the preferred protocol, defaults to 'pbc'
        :type protocol: string
        :param nodes: a list of node configurations,
           where each configuration is a dict containing the keys
           'host', 'http_port', and 'pb_port'
        :type nodes: list
        :param transport_options: Optional key-value args to pass to
                                  the transport constructor
        :type transport_options: dict
        :param credentials: optional object of security info
        :type credentials: :class:`~riak.security.SecurityCreds` or dict
        :param multiget_pool_size: the number of threads to use in
           :meth:`multiget` operations. Defaults to a factor of the number of
           CPUs in the system
        :type multiget_pool_size: int
        :param multiput_pool_size: the number of threads to use in
           :meth:`multiput` operations. Defaults to a factor of the number of
           CPUs in the system
        :type multiput_pool_size: int
        """
        # A mutable default argument ({}) is shared between all calls;
        # use None as the sentinel and build a fresh dict per instance.
        if transport_options is None:
            transport_options = {}

        # Copy so the caller's kwargs dict is not consumed by _create_node.
        kwargs = kwargs.copy()

        if nodes is None:
            # With no explicit node list, the remaining keyword arguments
            # are treated as a single node configuration.
            self.nodes = [self._create_node(kwargs), ]
        else:
            self.nodes = [self._create_node(n) for n in nodes]

        self._multiget_pool_size = multiget_pool_size
        self._multiput_pool_size = multiput_pool_size
        self.protocol = protocol or 'pbc'
        self._resolver = None
        self._credentials = self._create_credentials(credentials)
        self._http_pool = HttpPool(self, **transport_options)
        self._tcp_pool = TcpPool(self, **transport_options)

        # Content-type codecs differ between Python 2 and 3: on Python 3
        # payloads are bytes, so JSON and plain text go through the binary
        # helpers.
        if PY2:
            self._encoders = {'application/json': default_encoder,
                              'text/json': default_encoder,
                              'text/plain': str}
            self._decoders = {'application/json': json.loads,
                              'text/json': json.loads,
                              'text/plain': str}
        else:
            self._encoders = {'application/json': binary_json_encoder,
                              'text/json': binary_json_encoder,
                              'text/plain': str_to_bytes,
                              'binary/octet-stream': binary_encoder_decoder}
            self._decoders = {'application/json': binary_json_decoder,
                              'text/json': binary_json_decoder,
                              'text/plain': bytes_to_str,
                              'binary/octet-stream': binary_encoder_decoder}
        # Weak caches: entries disappear once callers drop their references.
        self._buckets = WeakValueDictionary()
        self._bucket_types = WeakValueDictionary()
        self._tables = WeakValueDictionary()

    def _get_protocol(self):
        return self._protocol

    def _set_protocol(self, value):
        # Reject anything outside the supported protocol set early.
        if value not in self.PROTOCOLS:
            raise ValueError("protocol option is invalid, must be one of %s" %
                             repr(self.PROTOCOLS))
        self._protocol = value

    protocol = property(_get_protocol, _set_protocol,
                        doc="""
                        Which protocol to prefer, one of
                        :attr:`PROTOCOLS
                        <riak.client.RiakClient.PROTOCOLS>`. Please
                        note that when one protocol is selected, the
                        other protocols MAY NOT attempt to connect.
                        Changing to another protocol will cause a
                        connection on the next request.

                        Some requests are only valid over ``'http'``,
                        and will always be sent via
                        those transports, regardless of which protocol
                        is preferred.
                         """)

    def _get_resolver(self):
        # Fall back to the library default when no resolver was set.
        return self._resolver or default_resolver

    def _set_resolver(self, value):
        # Accept None (reset to default) or any callable.
        if value is None or callable(value):
            self._resolver = value
        else:
            raise TypeError("resolver is not a function")

    resolver = property(_get_resolver, _set_resolver,
                        doc=""" The sibling-resolution function for this client.
                        Defaults to :func:`riak.resolver.default_resolver`.""")

    def _get_client_id(self):
        with self._transport() as transport:
            return transport.client_id

    def _set_client_id(self, client_id):
        # Propagate the new id to every pooled connection of both kinds.
        for http in self._http_pool:
            http.client_id = client_id
        for pb in self._tcp_pool:
            pb.client_id = client_id

    client_id = property(_get_client_id, _set_client_id,
                         doc="""The client ID for this client instance""")

    def get_encoder(self, content_type):
        """
        Get the encoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :rtype: function
        """
        return self._encoders.get(content_type)

    def set_encoder(self, content_type, encoder):
        """
        Set the encoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :param encoder: an encoding function, takes a single object
            argument and returns encoded data
        :type encoder: function
        """
        self._encoders[content_type] = encoder

    def get_decoder(self, content_type):
        """
        Get the decoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :rtype: function
        """
        return self._decoders.get(content_type)

    def set_decoder(self, content_type, decoder):
        """
        Set the decoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :param decoder: a decoding function, takes encoded data and
            returns a Python type
        :type decoder: function
        """
        self._decoders[content_type] = decoder

    def bucket(self, name, bucket_type='default'):
        """
        Get the bucket by the specified name. Since buckets always exist,
        this will always return a
        :class:`RiakBucket <riak.bucket.RiakBucket>`.

        If you are using a bucket that is contained in a bucket type, it is
        preferable to access it from the bucket type object::

            # Preferred:
            client.bucket_type("foo").bucket("bar")

            # Equivalent, but not preferred:
            client.bucket("bar", bucket_type="foo")

        :param name: the bucket name
        :type name: str
        :param bucket_type: the parent bucket-type
        :type bucket_type: :class:`BucketType <riak.bucket.BucketType>`
              or str
        :rtype: :class:`RiakBucket <riak.bucket.RiakBucket>`

        """
        if not isinstance(name, string_types):
            raise TypeError('Bucket name must be a string')

        if isinstance(bucket_type, string_types):
            bucket_type = self.bucket_type(bucket_type)
        elif not isinstance(bucket_type, BucketType):
            raise TypeError('bucket_type must be a string '
                            'or riak.bucket.BucketType')

        # Weak cache keyed by (type, name); setdefault returns the live
        # bucket if one already exists.
        return self._buckets.setdefault((bucket_type, name),
                                        RiakBucket(self, name, bucket_type))

    def bucket_type(self, name):
        """
        Gets the bucket-type by the specified name. Bucket-types do
        not always exist (unlike buckets), but this will always return
        a :class:`BucketType <riak.bucket.BucketType>` object.

        :param name: the bucket-type name
        :type name: str
        :rtype: :class:`BucketType <riak.bucket.BucketType>`
        """
        if not isinstance(name, string_types):
            raise TypeError('BucketType name must be a string')

        if name in self._bucket_types:
            return self._bucket_types[name]
        else:
            btype = BucketType(self, name)
            self._bucket_types[name] = btype
            return btype

    def table(self, name):
        """
        Gets the table by the specified name. Tables do
        not always exist (unlike buckets), but this will always return
        a :class:`Table <riak.table.Table>` object.

        :param name: the table name
        :type name: str
        :rtype: :class:`Table <riak.table.Table>`
        """
        if not isinstance(name, string_types):
            raise TypeError('Table name must be a string')

        if name in self._tables:
            return self._tables[name]
        else:
            table = Table(self, name)
            self._tables[name] = table
            return table

    def close(self):
        """
        Iterate through all of the connections and close each one.
        """
        # NOTE(review): unlike other revisions of this class, this close()
        # has no idempotence guard and does not stop the multi pools.
        if self._http_pool is not None:
            self._http_pool.clear()
        if self._tcp_pool is not None:
            self._tcp_pool.clear()

    def _create_node(self, n):
        """Coerce a node spec (RiakNode, 3-tuple, or dict) to a RiakNode."""
        if isinstance(n, RiakNode):
            return n
        # Compare with ==, not 'is': identity comparison of ints relies on
        # CPython's small-int caching and is a SyntaxWarning on 3.8+.
        elif isinstance(n, tuple) and len(n) == 3:
            host, http_port, pb_port = n
            return RiakNode(host=host,
                            http_port=http_port,
                            pb_port=pb_port)
        elif isinstance(n, dict):
            return RiakNode(**n)
        else:
            raise TypeError("%s is not a valid node configuration"
                            % repr(n))

    def _create_credentials(self, n):
        """
        Create security credentials, if necessary.
        """
        if not n:
            return n
        elif isinstance(n, SecurityCreds):
            return n
        elif isinstance(n, dict):
            return SecurityCreds(**n)
        else:
            raise TypeError("%s is not a valid security configuration"
                            % repr(n))

    def _choose_node(self, nodes=None):
        """
        Chooses a random node from the list of nodes in the client,
        taking into account each node's recent error rate.
        :rtype RiakNode
        """
        if not nodes:
            nodes = self.nodes

        # Prefer nodes which have gone a reasonable time without
        # errors
        def _error_rate(node):
            return node.error_rate.value()

        good = [n for n in nodes if _error_rate(n) < 0.1]

        if not good:
            # Fall back to a minimally broken node
            return min(nodes, key=_error_rate)
        else:
            return random.choice(good)

    @lazy_property
    def _multiget_pool(self):
        # Thread pool for multiget; only built when a size was configured.
        if self._multiget_pool_size:
            return MultiGetPool(self._multiget_pool_size)
        else:
            return None

    @lazy_property
    def _multiput_pool(self):
        # Thread pool for multiput; only built when a size was configured.
        if self._multiput_pool_size:
            return MultiPutPool(self._multiput_pool_size)
        else:
            return None

    def __hash__(self):
        # Two clients hash equal iff they target the same set of nodes.
        return hash(frozenset([(n.host, n.http_port, n.pb_port)
                               for n in self.nodes]))

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return hash(self) == hash(other)
        else:
            return False

    def __ne__(self, other):
        if isinstance(other, self.__class__):
            return hash(self) != hash(other)
        else:
            return True
Example 7
0
class RiakClient:
    '''
    The ``RiakClient`` object holds information necessary to connect
    to Riak. Requests can be made to Riak directly through the client
    or by using the methods on related objects.
    '''
    def __init__(self, host='localhost', port=8087, loop=None):
        self._host = host
        self._port = port
        self._loop = loop
        # Weak caches: BucketType/Bucket instances are shared between
        # callers while referenced, but are not kept alive by the client.
        self._bucket_types = WeakValueDictionary()
        self._buckets = WeakValueDictionary()
        self._resolver = None
        # Content-type -> codec tables consulted by get_decoder() /
        # get_encoder().
        self._decoders = {
            'application/json': binary_json_decoder,
            'text/json': binary_json_decoder,
            'text/plain': bytes_to_str,
            'binary/octet-stream': binary_encoder_decoder
        }
        self._encoders = {
            'application/json': binary_json_encoder,
            'text/json': binary_json_encoder,
            'text/plain': str_to_bytes,
            'binary/octet-stream': binary_encoder_decoder
        }

    def get_decoder(self, content_type):
        '''
        Get the decoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :rtype: function
        '''
        return self._decoders.get(content_type)

    def get_encoder(self, content_type):
        '''
        Get the encoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :rtype: function
        '''
        return self._encoders.get(content_type)

    def set_encoder(self, content_type, encoder):
        '''
        Set the encoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :param encoder: an encoding function, takes a single object
            argument and returns encoded data
        :type encoder: function
        '''
        self._encoders[content_type] = encoder

    def set_decoder(self, content_type, decoder):
        '''
        Set the decoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :param decoder: a decoding function, takes encoded data and
            returns a Python type
        :type decoder: function
        '''
        self._decoders[content_type] = decoder

    def _get_resolver(self):
        # Fall back to the library default when no custom resolver is set.
        return self._resolver or default_resolver

    def _set_resolver(self, value):
        # Accept None (reset to default) or any callable.
        if value is None or callable(value):
            self._resolver = value
        else:
            raise TypeError('resolver is not a function')

    resolver = property(_get_resolver,
                        _set_resolver,
                        doc='''The sibling-resolution function for this client.
                        Defaults to :func:`riak.resolver.default_resolver`.''')

    def close(self):
        # NOTE(review): ``self._transport`` is only assigned by
        # _create_transport(); closing a client that was not built via
        # RiakClient.create() raises AttributeError -- confirm intended.
        self._transport.close()

    async def _create_transport(self):
        # Establish the underlying protocol-buffers transport connection.
        self._transport = await create_transport(self._host, self._port,
                                                 self._loop)

    @classmethod
    async def create(cls, host='localhost', port=8087, loop=None):
        '''
        Return initialized instance of RiakClient since
        RiakClient.__init__() can't be async.

        :Example:

        .. code-block:: python

            import asyncio
            from aioriak import RiakClient
            loop = asyncio.get_event_loop()
            async def go():
                client = await RiakClient.create('localhost', 8087, loop)
            loop.run_until_complete(go())

        :param host: Hostname or ip address of Riak instance
        :type host: str
        :param port: Port of riak instance
        :type port: int
        :param loop: asyncio event loop
        :rtype: :class:`~aioriak.client.RiakClient`
        '''
        client = cls(host, port, loop)
        await client._create_transport()
        return client

    async def fetch_datatype(self, bucket, key):
        '''
        Fetches the value of a Riak Datatype.

        :param bucket: the bucket of the datatype, which must belong to a
            :class:`~aioriak.bucket.BucketType`
        :type bucket: :class:`~aioriak.bucket.Bucket`
        :param key: the key of the datatype
        :type key: string
        :rtype: :class:`~aioriak.datatypes.Datatype`
        '''
        dtype, value, context = await self._fetch_datatype(bucket, key)

        # Reify the raw (type, value, context) triple into the matching
        # Datatype subclass via the TYPES registry.
        return TYPES[dtype](bucket=bucket,
                            key=key,
                            value=value,
                            context=context)

    async def _fetch_datatype(self, bucket, key):
        '''
        _fetch_datatype(bucket, key)

        Fetches the value of a Riak Datatype as raw data. This is used
        internally to update already reified Datatype objects. Use the
        public version to fetch a reified type.

        :param bucket: the bucket of the datatype, which must belong to a
            :class:`~aioriak.BucketType`
        :type bucket: RiakBucket
        :param key: the key of the datatype
        :type key: string, None
        :rtype: tuple of type, value and context
        '''
        return await self._transport.fetch_datatype(bucket, key)

    async def ping(self):
        '''
        Check if the Riak server for this ``RiakClient`` instance is alive.

        :rtype: boolean
        '''
        return await self._transport.ping()

    # ``is_alive`` is an alias of :meth:`ping`.
    is_alive = ping

    async def get_client_id(self):
        '''
        Get client ID for this RiakClient instance

        :rtype: bytes
        '''
        return await self._transport.get_client_id()

    async def set_client_id(self, id):
        '''
        Set Client ID for this RiakClient instance
        '''
        # ``id`` shadows the builtin, but renaming it would break
        # keyword-argument callers, so it is kept as-is.
        return await self._transport.set_client_id(id)

    async def get_buckets(self, bucket_type=None):
        '''
        Get the list of buckets as :class:`Bucket
        <aioriak.bucket.Bucket>` instances.

        .. warning:: Do not use this in production, as it requires
            traversing through all keys stored in a cluster.

        :param bucket_type: the optional containing bucket type
        :type bucket_type: :class:`~aioriak.bucket.BucketType`
        :rtype: list of :class:`Bucket <aioriak.bucket.Bucket>`
            instances
        '''
        # Buckets inside a bucket type are constructed through that type
        # so they keep their containing-type association.
        if bucket_type:
            maker = bucket_type.bucket
        else:
            maker = self.bucket

        # Transport returns bucket names as bytes; decode before wrapping.
        return [
            maker(name.decode()) for name in await self._transport.get_buckets(
                bucket_type=bucket_type)
        ]

    def bucket_type(self, name):
        '''
        Gets the bucket-type by the specified name. Bucket-types do
        not always exist (unlike buckets), but this will always return
        a :class:`BucketType <aioriak.bucket.BucketType>` object.

        :param name: the bucket-type name
        :type name: str
        :rtype: :class:`BucketType <aioriak.bucket.BucketType>`
        '''
        if not isinstance(name, str):
            raise TypeError('Bucket name must be a string')

        # Serve from the weak cache when a live instance already exists.
        if name in self._bucket_types:
            return self._bucket_types[name]
        else:
            btype = BucketType(self, name)
            self._bucket_types[name] = btype
            return btype

    def bucket(self, name, bucket_type='default'):
        '''
        Get the bucket by the specified name. Since buckets always exist,
        this will always return a
        :class:`Bucket <aioriak.bucket.Bucket>`.
        If you are using a bucket that is contained in a bucket type, it is
        preferable to access it from the bucket type object::

            # Preferred:
            client.bucket_type("foo").bucket("bar")
            # Equivalent, but not preferred:
            client.bucket("bar", bucket_type="foo")

        :param name: the bucket name
        :type name: str
        :param bucket_type: the parent bucket-type
        :type bucket_type: :class:`BucketType <aioriak.bucket.BucketType>`
            or str
        :rtype: :class:`Bucket <aioriak.bucket.Bucket>`
        '''
        if not isinstance(name, str):
            raise TypeError('Bucket name must be a string')

        if isinstance(bucket_type, str):
            bucket_type = self.bucket_type(bucket_type)
        elif not isinstance(bucket_type, BucketType):
            raise TypeError('bucket_type must be a string '
                            'or aioriak.bucket.BucketType')

        # Note: the Bucket argument is constructed eagerly here, even when
        # the key is already present in the weak cache.
        return self._buckets.setdefault((bucket_type, name),
                                        Bucket(self, name, bucket_type))

    async def get_bucket_type_props(self, bucket_type):
        '''
        Fetches properties for the given bucket-type.

        :param bucket_type: the bucket-type whose properties will be fetched
        :type bucket_type: BucketType
        :rtype: dict
        '''
        return await self._transport.get_bucket_type_props(bucket_type)

    async def set_bucket_type_props(self, bucket_type, props):
        '''
        Sets properties for the given bucket-type.

        :param bucket_type: the bucket-type whose properties will be set
        :type bucket_type: BucketType
        :param props: the properties to set
        :type props: dict
        '''
        return await self._transport.set_bucket_type_props(bucket_type, props)

    async def get_bucket_props(self, bucket):
        '''
        Fetches bucket properties for the given bucket.

        :param bucket: the bucket whose properties will be fetched
        :type bucket: Bucket
        :rtype: dict
        '''
        return await self._transport.get_bucket_props(bucket)

    async def set_bucket_props(self, bucket, props):
        '''
        Sets bucket properties for the given bucket.

        :param bucket: the bucket whose properties will be set
        :type bucket: Bucket
        :param props: the properties to set
        :type props: dict
        '''
        return await self._transport.set_bucket_props(bucket, props)

    async def get_keys(self, bucket):
        '''
        Lists all keys in a bucket.

        .. warning:: Do not use this in production, as it requires
           traversing through all keys stored in a cluster.

        :param bucket: the bucket whose keys are fetched
        :type bucket: Bucket
        :rtype: list
        '''
        return await self._transport.get_keys(bucket)

    async def get(self, robj):
        '''
        Fetches the contents of a Riak object.

        :param robj: the object to fetch
        :type robj: RiakObject
        :raises TypeError: if the object's key is not a string
        '''
        if not isinstance(robj.key, str):
            raise TypeError('key must be a string, instead got {0}'.format(
                repr(robj.key)))

        return await self._transport.get(robj)

    async def put(self, robj, return_body):
        '''
        Stores an object in the Riak cluster.

        :param return_body: whether to return the resulting object
            after the write
        :type return_body: boolean
        :param robj: the object to store
        :type robj: RiakObject
        '''
        return await self._transport.put(robj, return_body=return_body)

    async def delete(self, robj):
        '''
        Deletes an object from Riak.

        :param robj: the object to delete
        :type robj: RiakObject
        '''
        return await self._transport.delete(robj)

    async def update_datatype(self, datatype, **params):
        '''
        Sends an update to a Riak Datatype to the server.

        :param datatype: the datatype with pending updates
        :type datatype: :class:`~aioriak.datatypes.Datatype`
        :rtype: tuple of datatype, opaque value and opaque context
        '''

        return await self._transport.update_datatype(datatype, **params)
Esempio n. 8
0
class BuildRequestMerger(config.ReconfigurableServiceMixin, service.Service):
    """
    Merges new buildrequests into compatible, already-running build
    requests before they are inserted into the database.

    NOTE: this is Python 2 / Twisted code (``dict.iteritems``,
    ``defer.inlineCallbacks``).
    """

    # Basic list of properties that must match for a buildrequest to be merged
    # BuilderConfigs can define additional properties (see _propertiesMatch)
    BASE_MERGE_PROPERTIES = ['force_rebuild', 'force_chain_rebuild']

    def __init__(self, master):
        self.master = master
        # Buildset-properties cache, filled manually in
        # _getBuildsetsProperties (hence miss_fn=None).
        self.properties_cache = AsyncLRUCache(
            miss_fn=None,  # Cache contents are handled manually
            max_size=20000)

        # Locks to indicate that merged builds are being added
        self.build_merging_locks = WeakValueDictionary()

    def getMergingLocks(self, build_request_ids):
        # One DeferredLock per buildrequest id; the WeakValueDictionary
        # makes concurrent callers share the same lock object while any
        # of them still holds a reference.
        return [
            self.build_merging_locks.setdefault(brid, defer.DeferredLock())
            for brid in build_request_ids
        ]

    @defer.inlineCallbacks
    def addBuildset(self,
                    sourcestampsetid,
                    reason,
                    properties,
                    triggeredbybrid=None,
                    builderNames=None,
                    external_idstring=None,
                    _reactor=reactor):
        """
        Wrapper around buildsets.addBuildset that does merging before
        buildrequests hit the db.

        ..seealso:: self._getMergeBrDict
            For more documentation on merge conditions

        ..seealso:: buildsets.addBuildset
            For parameter details
        """
        builderNames = sorted(builderNames)

        # ``buildsetLog`` accumulates timing/diagnostic data and is
        # emitted as a single JSON log line at the end.
        start = time.time()
        buildsetLog = {
            'name': 'addBuildset',
            'description':
            'Log merges within a chain while adding new buildsets',
            'sourcestampsetid': sourcestampsetid,
            'builderNames': builderNames,
        }
        # For every builderName in this buildset, check which ones can be merged
        brDictsToMerge = {}

        # Don't read sourcestamp information yet, since we might not need it
        sourcestamps = None

        for builderName in builderNames:
            builderMergeStart = time.time()

            if 'selected_slave' in properties:
                # Never merge if a build request has a selected_slave
                # This might happen when a user wants to test the same build in different
                # slaves to look for instabilities
                mergeBrDict = None
            else:
                # Look for a builder that matches the configured properties
                mergeProperties = self.master.botmaster.builders[
                    builderName].config.mergeProperties

                # And sourcestamps (only need to read them once)
                if sourcestamps is None:
                    sourcestamps = yield self.master.db.sourcestamps.getSimpleSourceStamps(
                        sourcestampsetid)

                mergeBrDict = yield self._getMergeBrDict(
                    builderName, sourcestamps, properties, mergeProperties)

            # If we found one, add it to our merge map
            if mergeBrDict:
                brDictsToMerge[builderName] = mergeBrDict

            buildsetLog[builderName] = {
                'elapsed': time.time() - builderMergeStart,
                'mergeBrid':
                brDictsToMerge.get(builderName, {}).get('brid', None)
            }

        buildsetLog['elapsed_merge'] = time.time() - start

        # Finally add the buildset passing the map of `brDictsToMerge`
        # This method will make sure that all new breqs will enter the db
        # marked as merged, and will not run.
        _master_objectid = yield self.master.getObjectId()

        # Create a lock on every build we can merge into.
        acquiring_locks_start = time.time()
        build_merging_locks = {
            builderName : self.getMergingLocks([brDict['brid']])[0]
            for builderName, brDict in brDictsToMerge.iteritems()
        }
        for builderName, lock in build_merging_locks.iteritems():
            yield lock.acquire()
            buildsetLog[builderName]['elapsed_acquiring_lock'] = \
                time.time() - acquiring_locks_start
        buildsetLog['elapsed_acquiring_locks'] = time.time() - acquiring_locks_start
        using_locks_start = time.time()

        # Some builds might have finished before we locked into them, so release those.
        finishedBrDicts = yield self.master.db.buildrequests.getBuildRequests(
            brids=[brDict['brid'] for brDict in brDictsToMerge.values()],
            complete=True)
        for brDict in finishedBrDicts:
            brDictsToMerge.pop(brDict['buildername'])
            build_merging_locks.pop(brDict['buildername']).release()

        # Add buildset
        try:
            result = yield self.master.db.buildsets.addBuildset(
                sourcestampsetid=sourcestampsetid,
                reason=reason,
                properties=properties,
                triggeredbybrid=triggeredbybrid,
                builderNames=builderNames,
                brDictsToMerge=brDictsToMerge,
                external_idstring=external_idstring,
                _reactor=_reactor,
                _master_objectid=_master_objectid)
        finally:
            # Always release the remaining merge-target locks, even if
            # the db insert raised.
            for lock in build_merging_locks.itervalues():
                lock.release()
            buildsetLog['elapsed_using_locks'] = time.time() - using_locks_start

        # Log more ids
        (bsid, brids) = result
        buildsetLog['buildsetid'] = bsid
        for builderName, brid in brids.iteritems():
            buildsetLog[builderName]['brid'] = brid
        buildsetLog['elapsed_total'] = time.time() - start
        log.msg(json.dumps(buildsetLog))

        defer.returnValue(result)


    @defer.inlineCallbacks
    def _getMergeBrDict(self, builderName, sourcestamps, properties,
                        mergeProperties):
        """
        Looks for a buildrequest we can merge into.

        It must match `builderName`, `sourcestamps` and all properties defined
        in `mergeProperties`.

        This will only merge against builds that have already been claimed
        and are currently running (unfinished builds).

        :return BrDict or None:
            Buildrequest dictionary for a request that can be merged into.
            `None` if no match was found.
        """
        matchingBrDicts = yield self.master.db.buildrequests.getBuildRequests(
            buildername=builderName,
            complete=False,
            claimed=True,
            sourcestamps=sourcestamps,
            mergebrids="exclude")

        # Get properties for matching breqs (done in a single query for optimization)
        otherProperties = yield self._getBuildsetsProperties(
            [brdict['buildsetid'] for brdict in matchingBrDicts])

        # Check if relevant properties match
        # Sort list of build requests to ensure we always merge against smallest id possible
        for brdict in sorted(matchingBrDicts, key=lambda b: int(b['brid'])):
            if self._propertiesMatch(properties,
                                     otherProperties[brdict['buildsetid']],
                                     mergeProperties):

                # If they match, fetch the build number and merge against this buildrequest
                brdict[
                    'build_number'] = yield self.master.db.builds.getBuildNumberForRequest(
                        brdict['brid'])
                defer.returnValue(brdict)
                return

        # If we can't find any match, return None
        defer.returnValue(None)

    def _propertiesMatch(self, properties, otherProperties, mergeProperties):
        """
        :param dict(str,str) properties:
        :param dict(str,str) otherProperties:
        :param list(str) mergeProperties:
            List of properties that must match
        :return bool:
            True if `properties` and `otherProperties` match for a list of
            `mergeProperties` to be checked
        """
        # A build pinned to a specific slave is never a merge target
        # (mirrors the selected_slave check in addBuildset).
        if 'selected_slave' in otherProperties:
            return False

        for propertyName in self.BASE_MERGE_PROPERTIES + mergeProperties:
            if properties.get(propertyName, None) != otherProperties.get(
                    propertyName, None):
                return False

        return True

    @defer.inlineCallbacks
    def _getBuildsetsProperties(self, buildsetids):
        """
        Fetch properties for the given buildsets, serving from the LRU
        cache when possible and reading the rest from the database in a
        single query.
        """
        properties = {}

        # Fill in properties from cache
        for buildsetid in buildsetids:
            if buildsetid in self.properties_cache.cache:
                properties[buildsetid] = yield self.properties_cache.get(
                    buildsetid)

        # Read missing properties in a single query
        missing_buildsetids = set(buildsetids).difference(properties.keys())
        if missing_buildsetids:
            db_properties = yield self.master.db.buildsets.getBuildsetsProperties(
                missing_buildsetids)
            properties.update(db_properties)

        # Populate cache with new values
        for buildsetid in missing_buildsetids:
            bs_properties = PropertiesDict(properties[buildsetid])
            self.properties_cache.put_new(buildsetid, bs_properties)

        defer.returnValue(properties)
Esempio n. 9
0
class DiskStorage(BaseStorage):
    """File-system backed cache storage.

    Each entry is stored as one file under ``<path>/cache``; aggregate
    size/count totals are persisted in a small binary info file
    (``data.bin``) so they survive restarts.

    NOTE: this is Python 2 code (uses ``StandardError``).
    """

    INFO_FILE = 'data.bin'

    class _Info(object):
        """Persists the cache's total size and entry count.

        The info file holds two little-endian unsigned 64-bit integers:
        total size in bytes at offset 0, entry count at offset 8.
        """

        _int_fmt = '<Q'
        _int_length = 8

        def __init__(self, p):
            # NOTE(review): 'r+b' normally requires the file to exist --
            # presumably make_direct_open creates it; confirm.
            self._info_file = make_direct_open(p, 'r+b', buffering=0)
            self._total_size = 0
            self._total_count = 0
            self._lock = threading.RLock()

        @classmethod
        def load_file(cls, file_path):
            """Build an _Info from an existing info file on disk.

            :raises StandardError: when the file contents cannot be
                unpacked (truncated/corrupt); the caller falls back to a
                fresh _Info (see DiskStorage._load_info_file).
            """
            obj = cls(file_path)
            with obj._lock:
                obj._info_file.seek(0)
                raw = obj._info_file.read(cls._int_length * 2)
                try:
                    raw_size = raw[:cls._int_length]
                    raw_count = raw[cls._int_length:]
                    obj._total_size = struct.unpack(cls._int_fmt, raw_size)[0]
                    obj._total_count = struct.unpack(cls._int_fmt,
                                                     raw_count)[0]
                except struct.error:
                    raise StandardError(repr(raw))
            return obj

        def _write_num(self, i, n):
            """Write integer *n* into slot *i* (0 = size, 1 = count)."""
            with self._lock:
                self._info_file.seek(i * self._int_length)
                bin_str = struct.pack(self._int_fmt, n)
                self._info_file.write(bin_str)

        def reset(self):
            """Zero both counters, in memory and on disk.

            Bug fix: the original only cleared the in-memory values and
            flushed, never writing the zeros to the info file, so stale
            totals reappeared on the next load_file(). Assigning through
            the property setters persists the zeros via _write_num.
            """
            with self._lock:  # RLock, so the setters may re-acquire it
                self.size = 0
                self.count = 0

        @property
        def size(self):
            # Total bytes currently stored.
            return self._total_size

        @size.setter
        def size(self, v):
            self._write_num(0, v)
            self._total_size = v

        @property
        def count(self):
            # Number of cache entries currently stored.
            return self._total_count

        @count.setter
        def count(self, v):
            self._write_num(1, v)
            self._total_count = v

    def __init__(self, path, path_func=None):
        """
        :param path: base directory; entries live under ``path/cache``
        :param path_func: optional key -> relative-path mapping,
            defaults to the identity function
        """
        self._storage_path = os.path.join(path, "cache")
        self._path_func = path_func or (lambda _: _)

        # Per-file locks; the weak dict drops a lock once no operation
        # references it any more.
        self._file_locks = WeakValueDictionary()
        self._load_info_file()

    def _load_info_file(self):
        """Load the persisted counters, starting fresh if unreadable."""
        info_path = os.path.join(self._storage_path, self.INFO_FILE)
        if os.path.isfile(info_path):
            try:
                self._info = self._Info.load_file(info_path)
            except StandardError:
                # Corrupt info file: start over with zeroed counters.
                self._info = self._Info(info_path)
        else:
            self._info = self._Info(info_path)

    def _get_file_lock(self, path):
        # Share one RLock per cache-file path across concurrent callers.
        return self._file_locks.setdefault(path, threading.RLock())

    def full_path(self, key):
        """Return the absolute path of the cache file for *key*."""
        return os.path.join(self._storage_path, self._path_func(key))

    def has(self, key):
        """Return True if *key* currently exists in the cache."""
        return os.path.isfile(self.full_path(key))

    def set(self, key, value):
        """Atomically store *value* (bytes) under *key*.

        Writes to a ``.tmp`` file first, then renames over the target so
        readers never observe a partial file.

        :return bool: True on success, False if the temporary file could
            not be written.
        """
        full_path = self.full_path(key)
        tmp_path = full_path + ".tmp"

        with self._get_file_lock(full_path):
            try:
                with make_direct_open(tmp_path, 'wb') as fp:
                    fp.write(value)
            except IOError:
                return False
            else:
                # Remove any existing entry first (and adjust counters)
                # so the rename cannot fail because the target exists.
                self.delete(key)
                os.rename(tmp_path, full_path)
                self._info.count += 1
                self._info.size += len(value)
                return True

    def get(self, key, default=None):
        """Return the stored bytes for *key*, or *default* if absent."""
        full_path = self.full_path(key)
        if os.path.isfile(full_path):
            with self._get_file_lock(full_path):
                # Re-check under the lock: a concurrent delete() may have
                # removed the file after the first check.
                if os.path.isfile(full_path):
                    with open(full_path, 'rb') as fp:
                        return fp.read()
        return default

    def clear(self):
        """Drop the cache directory and reset the counters.

        NOTE(review): ``os.rmdir`` only removes *empty* directories, so
        this raises OSError when any entry exists -- confirm whether
        ``shutil.rmtree`` was intended.
        """
        os.rmdir(self._storage_path)
        self._info.reset()

    def delete(self, key):
        """Remove *key*'s file (if present) and adjust the counters."""
        full_path = self.full_path(key)
        if os.path.isfile(full_path):
            with self._get_file_lock(full_path):
                if os.path.isfile(full_path):
                    self._info.size -= os.path.getsize(full_path)
                    self._info.count -= 1
                    os.remove(full_path)

    @property
    def size(self):
        """Total bytes currently accounted for in the cache."""
        return self._info.size

    @property
    def count(self):
        """Number of entries currently accounted for in the cache."""
        return self._info.count
Esempio n. 10
0
class RiakClient:
    '''
    The ``RiakClient`` object holds information necessary to connect
    to Riak. Requests can be made to Riak directly through the client
    or by using the methods on related objects.
    '''
    def __init__(self, host='localhost', port=8087, loop=None):
        self._host = host
        self._port = port
        self._loop = loop
        self._bucket_types = WeakValueDictionary()
        self._buckets = WeakValueDictionary()
        self._resolver = None
        self._decoders = {'application/json': binary_json_decoder,
                          'text/json': binary_json_decoder,
                          'text/plain': bytes_to_str,
                          'binary/octet-stream': binary_encoder_decoder}
        self._encoders = {'application/json': binary_json_encoder,
                          'text/json': binary_json_encoder,
                          'text/plain': str_to_bytes,
                          'binary/octet-stream': binary_encoder_decoder}

    def get_decoder(self, content_type):
        '''
        Get the decoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :rtype: function
        '''
        return self._decoders.get(content_type)

    def get_encoder(self, content_type):
        '''
        Get the encoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :rtype: function
        '''
        return self._encoders.get(content_type)

    def set_encoder(self, content_type, encoder):
        '''
        Set the encoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :param encoder: an encoding function, takes a single object
            argument and returns encoded data
        :type encoder: function
        '''
        self._encoders[content_type] = encoder

    def set_decoder(self, content_type, decoder):
        '''
        Set the decoding function for the provided content type.

        :param content_type: the requested media type
        :type content_type: str
        :param decoder: a decoding function, takes encoded data and
            returns a Python type
        :type decoder: function
        '''
        self._decoders[content_type] = decoder

    def _get_resolver(self):
        return self._resolver or default_resolver

    def _set_resolver(self, value):
        if value is None or callable(value):
            self._resolver = value
        else:
            raise TypeError('resolver is not a function')

    resolver = property(_get_resolver, _set_resolver,
                        doc='''The sibling-resolution function for this client.
                        Defaults to :func:`riak.resolver.default_resolver`.''')

    def close(self):
        self._transport.close()

    async def _create_transport(self):
        self._transport = await create_transport(
            self._host, self._port, self._loop)

    @classmethod
    async def create(cls, host='localhost', port=8087, loop=None):
        '''
        Return initialized instance of RiakClient since
        RiakClient.__init__() can't be async.

        :Example:

        .. code-block:: python

            import asyncio
            from aioriak import RiakClient
            loop = asyncio.get_event_loop()
            async def go():
                client = await RiakClient.create('localhost', 8087, loop)
            loop.run_until_complete(go())

        :param host: Hostname or ip address of Riak instance
        :type host: str
        :param port: Port of riak instance
        :type port: int
        :param loop: asyncio event loop
        :rtype: :class:`~aioriak.client.RiakClient`
        '''
        client = cls(host, port, loop)
        await client._create_transport()
        return client

    async def fetch_datatype(self, bucket, key):
        '''
        Fetches the value of a Riak Datatype.

        :param bucket: the bucket of the datatype, which must belong to a
            :class:`~aioriak.bucket.BucketType`
        :type bucket: :class:`~aioriak.bucket.Bucket`
        :param key: the key of the datatype
        :type key: string
        :rtype: :class:`~aioriak.datatypes.Datatype`
        '''
        dtype, value, context = await self._fetch_datatype(bucket, key)

        return TYPES[dtype](bucket=bucket, key=key, value=value,
                            context=context)

    async def _fetch_datatype(self, bucket, key):
        '''
        _fetch_datatype(bucket, key)

        Fetches the value of a Riak Datatype as raw data. This is used
        internally to update already reified Datatype objects. Use the
        public version to fetch a reified type.

        :param bucket: the bucket of the datatype, which must belong to a
            :class:`~aioriak.BucketType`
        :type bucket: RiakBucket
        :param key: the key of the datatype
        :type key: string, None
        :rtype: tuple of type, value and context
        '''
        return await self._transport.fetch_datatype(bucket, key)

    async def ping(self):
        '''
        Check if the Riak server for this ``RiakClient`` instance is alive.

        :rtype: boolean
        '''
        return await self._transport.ping()

    is_alive = ping

    async def get_client_id(self):
        '''
        Get client ID for this RiakClient instance

        :rtype: bytes
        '''
        return await self._transport.get_client_id()

    async def set_client_id(self, id):
        '''
        Set Client ID for this RiakClient instance'
        '''
        return await self._transport.set_client_id(id)

    async def get_buckets(self, bucket_type=None):
        '''
        Get the list of buckets as :class:`Bucket
        <aioriak.bucket.Bucket>` instances.

        .. warning:: Do not use this in production, as it requires
            traversing through all keys stored in a cluster.

        :param bucket_type: the optional containing bucket type
        :type bucket_type: :class:`~aioriak.bucket.BucketType`
        :rtype: list of :class:`Bucket <aioriak.bucket.Bucket>`
            instances
        '''
        if bucket_type:
            maker = bucket_type.bucket
        else:
            maker = self.bucket

        return [maker(name.decode())
                for name in await self._transport.get_buckets(
                    bucket_type=bucket_type)]

    def bucket_type(self, name):
        '''
        Get the bucket-type with the given name. Bucket-types do not
        always exist (unlike buckets), but this will always return a
        :class:`BucketType <aioriak.bucket.BucketType>` object.

        :param name: the bucket name
        :type name: str
        :rtype: :class:`BucketType <aioriak.bucket.BucketType>`
        :raises TypeError: if *name* is not a string
        '''
        if not isinstance(name, str):
            raise TypeError('Bucket name must be a string')

        # EAFP cache lookup: reuse a previously created BucketType so
        # repeated requests for the same name share one object.
        try:
            return self._bucket_types[name]
        except KeyError:
            btype = BucketType(self, name)
            self._bucket_types[name] = btype
            return btype

    def bucket(self, name, bucket_type='default'):
        '''
        Get the bucket with the given name. Since buckets always exist,
        this will always return a
        :class:`Bucket <aioriak.bucket.Bucket>`.
        If you are using a bucket that is contained in a bucket type, it is
        preferable to access it from the bucket type object::

            # Preferred:
            client.bucket_type("foo").bucket("bar")
            # Equivalent, but not preferred:
            client.bucket("bar", bucket_type="foo")

        :param name: the bucket name
        :type name: str
        :param bucket_type: the parent bucket-type
        :type bucket_type: :class:`BucketType <aioriak.bucket.BucketType>`
            or str
        :rtype: :class:`Bucket <aioriak.bucket.Bucket>`
        :raises TypeError: if *name* is not a string, or *bucket_type*
            is neither a string nor a ``BucketType``
        '''
        if not isinstance(name, str):
            raise TypeError('Bucket name must be a string')

        # A plain string names the bucket-type; resolve it to an object.
        if isinstance(bucket_type, str):
            btype = self.bucket_type(bucket_type)
        elif isinstance(bucket_type, BucketType):
            btype = bucket_type
        else:
            raise TypeError('bucket_type must be a string '
                            'or aioriak.bucket.BucketType')

        cache_key = (btype, name)
        return self._buckets.setdefault(cache_key,
                                        Bucket(self, name, btype))

    async def get_bucket_type_props(self, bucket_type):
        '''
        Fetch the properties of the given bucket-type from the server.

        :param bucket_type: the bucket-type whose properties will be fetched
        :type bucket_type: BucketType
        :rtype: dict
        '''
        transport = self._transport
        return await transport.get_bucket_type_props(bucket_type)

    async def set_bucket_type_props(self, bucket_type, props):
        '''
        Store properties for the given bucket-type on the server.

        :param bucket_type: the bucket-type whose properties will be set
        :type bucket_type: BucketType
        :param props: the properties to set
        :type props: dict
        '''
        transport = self._transport
        return await transport.set_bucket_type_props(bucket_type, props)

    async def get_bucket_props(self, bucket):
        '''
        Fetch the properties of the given bucket from the server.

        :param bucket: the bucket whose properties will be fetched
        :type bucket: Bucket
        :rtype: dict
        '''
        transport = self._transport
        return await transport.get_bucket_props(bucket)

    async def set_bucket_props(self, bucket, props):
        '''
        Store properties for the given bucket on the server.

        :param bucket: the bucket whose properties will be set
        :type bucket: Bucket
        :param props: the properties to set
        :type props: dict
        '''
        transport = self._transport
        return await transport.set_bucket_props(bucket, props)

    async def get_keys(self, bucket):
        '''
        List all keys stored in a bucket.

        .. warning:: Do not use this in production, as it requires
           traversing through all keys stored in a cluster.

        :param bucket: the bucket whose keys are fetched
        :type bucket: Bucket
        :rtype: list
        '''
        transport = self._transport
        return await transport.get_keys(bucket)

    async def get(self, robj):
        '''
        Fetch the contents of a Riak object from the cluster.

        :param robj: the object to fetch
        :type robj: RiakObject
        :raises TypeError: if the object's key is not a string
        '''
        # Reject non-string keys up front with a descriptive error instead
        # of letting the transport fail in an obscure way.
        key = robj.key
        if not isinstance(key, str):
            raise TypeError(
                'key must be a string, instead got {0}'.format(repr(key)))

        return await self._transport.get(robj)

    async def put(self, robj, return_body):
        '''
        Store an object in the Riak cluster.

        :param robj: the object to store
        :type robj: RiakObject
        :param return_body: whether to return the resulting object
            after the write
        :type return_body: boolean
        '''
        transport = self._transport
        return await transport.put(robj, return_body=return_body)

    async def delete(self, robj):
        '''
        Remove an object from Riak.

        :param robj: the object to delete
        :type robj: RiakObject
        '''
        transport = self._transport
        return await transport.delete(robj)

    async def update_datatype(self, datatype, **params):
        '''
        Send the pending updates of a Riak Datatype to the server.

        :param datatype: the datatype with pending updates
        :type datatype: :class:`~aioriak.datatypes.Datatype`
        :rtype: tuple of datatype, opaque value and opaque context
        '''
        transport = self._transport
        return await transport.update_datatype(datatype, **params)