Example #1
    def _delete(self, url, headers=None, callback=None):
        # merge per-request headers over the client defaults
        h = HTTPHeaders()
        h.update(self._default_headers)
        if headers:
            h.update(headers)

        req = HTTPRequest(url, headers=h, method="DELETE")
        self._client.fetch(req, callback)
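
A quick aside on the header merge this helper relies on: `HTTPHeaders.update` overwrites existing names, so per-request headers win over the client defaults. A minimal sketch (header values are placeholders):

# Illustration of the merge order assumed by _delete; values are placeholders.
from tornado.httputil import HTTPHeaders

defaults = HTTPHeaders({'Accept': 'application/json'})
h = HTTPHeaders()
h.update(defaults)                                   # start from the client defaults
h.update({'Accept': 'text/xml', 'X-Trace': 'abc'})   # per-request headers override
print(h['Accept'])   # text/xml
print(h['X-Trace'])  # abc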
Example #2
def make_post_request(url,
                      data='',
                      headers=None,
                      files=None,
                      content_type=None,
                      connect_timeout=None,
                      request_timeout=None,
                      follow_redirects=True):
    if files:
        body, content_type = make_mfd(data, files)
    else:
        body = make_body(data)

    headers = HTTPHeaders() if headers is None else HTTPHeaders(headers)
    if content_type is None:
        content_type = headers.get('Content-Type',
                                   'application/x-www-form-urlencoded')

    headers.update({
        'Content-Type': content_type,
        'Content-Length': str(len(body))
    })

    return HTTPRequest(url=_encode(url),
                       body=body,
                       method='POST',
                       headers=headers,
                       follow_redirects=follow_redirects,
                       connect_timeout=connect_timeout,
                       request_timeout=request_timeout)
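
A hedged usage sketch for the helper above, following the legacy callback style these examples use. The endpoint is a placeholder, and it is assumed that `make_body` urlencodes a dict payload (that helper is not shown here):

# Illustrative only: the endpoint is a placeholder and make_body's handling
# of dict payloads is an assumption, since that helper is not shown above.
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import IOLoop

def send_form():
    request = make_post_request('http://example.com/submit',
                                data={'name': 'alice', 'age': '30'},
                                request_timeout=5)

    def on_response(response):
        print('%s %s' % (response.code, response.body))
        IOLoop.current().stop()

    AsyncHTTPClient().fetch(request, callback=on_response)

IOLoop.current().add_callback(send_form)
IOLoop.current().start()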
Example #3
def set_stub(http_client,
             url,
             request_method='GET',
             response_function=None,
             response_file=None,
             response_body='',
             response_code=httpcodes.OK,
             response_headers=None,
             response_body_processor=safe_template,
             **kwargs):
    """Set response stub for requested url.

    :param str url: url to be stubbed. Url can contain PEP-292 placeholders
        (see https://docs.python.org/3/library/string.html#template-strings) which will be replaced
        with `kwargs` values.
    :param str request_method: 'GET', 'POST' or any other request method.
    :param callable response_function: function that takes the `HTTPRequest` instance passed to `fetch_impl`
        and must return an instance of `HTTPResponse` instead of making actual HTTP request.
        If `response_function` is defined, all other response_* arguments are ignored.
    :param str response_file: filename containing response body. If `response_file` is specified,
        `response_body` argument is ignored.
    :param str response_body: a string containing response body.
    :param int response_code: response code of the stub response.
    :param dict response_headers: stub response headers.
    :param callable response_body_processor: a function that takes response body
        (loaded from `response_file` or specified in `response_body`) and `kwargs`.
        It can be used to make any kind of modifications to response body, like templating or
        gzipping. By default the same templating function that is used for replacing PEP-292 placeholders
        in `url` is called.
    :param kwargs: parameters that are passed to `url` templating function and to `response_body_processor`.
    """
    url = safe_template(url, **kwargs)

    if response_function is not None:
        _add_route(http_client, url, request_method, response_function)
        return

    if response_file is not None:
        headers = _guess_headers(response_file)
        content = _get_stub(response_file)
    else:
        headers = HTTPHeaders()
        content = response_body

    if callable(response_body_processor):
        content = response_body_processor(content, **kwargs)

    if response_headers is not None:
        headers.update(response_headers)

    def _response_function(request):
        return get_response_stub(request,
                                 code=response_code,
                                 headers=headers,
                                 buffer=content,
                                 effective_url=url)

    _add_route(http_client, url, request_method, _response_function)
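
A sketch of how `set_stub` might be called from a test. The `http_client` object, URL, stub file and `user_id` value are all illustrative; the only requirement implied by the code above is that `_add_route` knows how to attach the stub to that client:

# Illustrative only: http_client, the URL, the stub file and user_id are placeholders.
set_stub(http_client,
         'http://backend.local/users/$user_id',   # PEP-292 placeholder, filled from kwargs
         request_method='GET',
         response_file='stubs/user.json',         # body is read from this file
         response_code=200,
         response_headers={'Content-Type': 'application/json'},
         user_id=42)                              # substituted into the url and the body template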
Example #4
File: client.py Project: purplecow/doppler
    def _get(self, url, headers=None, callback=None):
        """
        A `GET` request to the solr.
        """
        h = HTTPHeaders()
        h.update(self._default_headers)
        if headers:
            h.update(headers)

        req = HTTPRequest(url, headers=h)
        self._client.fetch(req, callback)
Example #5
File: client.py Project: sojoner/dopplr
    def _get(self, url, headers=None, callback=None):
        """
        A `GET` request to the solr.
        """
        h = HTTPHeaders()
        h.update(self._default_headers)
        if headers:
            h.update(headers)

        req = HTTPRequest(url, headers=h)
        self._client.fetch(req, callback)
Example #6
def make_post_request(url, data='', headers=None, files=None, content_type=None,
                      connect_timeout=None, request_timeout=None, follow_redirects=True):
    if files:
        body, content_type = make_mfd(data, files)
    else:
        body = make_body(data)

    headers = HTTPHeaders() if headers is None else HTTPHeaders(headers)
    if content_type is None:
        content_type = headers.get('Content-Type', 'application/x-www-form-urlencoded')

    headers.update({'Content-Type': content_type, 'Content-Length': str(len(body))})

    return HTTPRequest(
        url=url,
        body=body,
        method='POST',
        headers=headers,
        follow_redirects=follow_redirects,
        connect_timeout=connect_timeout,
        request_timeout=request_timeout
    )
Example #7
File: client.py Project: purplecow/doppler
class SolrClient(object):
    """
    Apache Solr Client class.
    """

    def __init__(self, search_host, update_host=None, default_headers=None,
            required_query_params=[], client_args={}, select_path='/select',
            update_path='/update/json', mlt_path='/mlt',
            document_verifier=None, ioloop=None):
        """
        Initialize me.
        """
        self._ioloop = ioloop or IOLoop.instance()

        self._search_url = '%s%s' % (search_host, select_path)
        self._mlt_url = '%s%s' % (search_host, mlt_path)
        uhost = update_host or search_host
        self._update_url = '%s%s' % (uhost, update_path)

        self._required_query_params = required_query_params
        if len([k for (k,v) in self._required_query_params if k=="wt"]) == 0:
            self._required_query_params.append(('wt', 'json'))

        self._document_verifier = document_verifier

        self._default_headers = HTTPHeaders()
        if default_headers:
            self._default_headers.update(default_headers)
        self._client = AsyncHTTPClient(self._ioloop, **client_args)

    def _get(self, url, headers=None, callback=None):
        """
        A `GET` request to the solr.
        """
        h = HTTPHeaders()
        h.update(self._default_headers)
        if headers:
            h.update(headers)

        req = HTTPRequest(url, headers=h)
        self._client.fetch(req, callback)

    def _post(self, url, body, headers=None, callback=None):
        """
        A `POST` request to the solr.
        """
        h = headers or HTTPHeaders()
        h.update(self._default_headers)
        h["Content-type"] = "application/json"
        request = HTTPRequest(url, headers=h, method="POST",
            body=json.dumps(body))
        self._client.fetch(request, callback)

    def search(self, querybuilder, callback=None):
        """
        Search the Solr with `querybuilder.get_params()` as query parameter.
        """
        query_params = querybuilder.get_params()
        for p in self._required_query_params:
            if p not in query_params:
                query_params.append(p)

        log.debug('Searching solr with params: %s' % query_params)
        qs = urllib.urlencode(query_params)
        final_url = "?".join([self._search_url, qs])
        log.debug('Final search URL: %s' % final_url)

        self._get(final_url, headers=querybuilder.headers,
                callback=handle_search_response(querybuilder, callback))

    def more_like_this(self, querybuilder, callback=None, match_include=True,
            match_offset=None, interestingTerms=None):
        """
        `interestingTerms` can be one of: 'list', 'details', 'none'.
        """
        query_params = querybuilder.get_params()
        for p in self._required_query_params:
            if p not in query_params:
                query_params.append(p)

        if match_include and isinstance(match_include, types.BooleanType):
            query_params.append(('mlt.match.include', str(match_include).lower()))
        if match_offset:
            query_params.append(('mlt.match.offset', str(match_offset)))
        if interestingTerms:
            query_params.append(('mlt.interestingTerms', interestingTerms))

        log.debug('MoreLikeThis with params: %s' % query_params)
        qs = urllib.urlencode(query_params)
        final_url = '?'.join([self._mlt_url, qs])
        log.debug('Final MLT URL: %s' % final_url)

        self._get(final_url, headers=querybuilder.headers,
            callback=handle_search_response(querybuilder, callback))

    def index_document(self, doc, callback=None, commit=False):
        """
        Index a `doc` into Solr. The `callback` will be called from within
        `self._handle_indexing_response`. If `commit is True`, then a `commit`
        request is sent to Solr.
        """
        verification = self._document_verifier(doc)
        if 'error' in verification:
            callback({'error': 'document refused', 'reason': verification,
                'doc': doc})
            return

        to_index = {'add': {'doc': doc}}
        if commit:
            final_url = "%s?commit=true" % self._update_url
        else:
            final_url = self._update_url

        self._post(final_url, to_index,
                callback=handle_indexing_response(callback))

    def commit(self, callback=None):
        """
        Commit any pending changes within Solr.
        """
        to_commit = {}
        final_url = "%s?commit=true" % self._update_url

        self._post(final_url, to_commit,
                callback=handle_indexing_response(callback))

    def remove_by_id(self, doc_id, callback=None, commit=False):
        """
        Remove the document with id `doc_id`.

        If `commit=True` the change will be committed immediately.
        The `callback` is called from within the
        `self._handle_indexing_response` method.
        """
        to_remove = {'delete': {'id': doc_id}}

        if commit:
            final_url = "%s?commit=true" % self._update_url
        else:
            final_url = self._update_url

        self._post(final_url, to_remove,
                callback=handle_indexing_response(callback))

    def remove_by_query(self, query, callback=None, commit=False):
        """
        Remove any documents matching the given query.

        The query must be of the form `field:value`.
        """
        to_remove = {'delete': {'query': query}}

        if commit:
            final_url = "%s?commit=true" % self._update_url
        else:
            final_url = self._update_url

        self._post(final_url, to_remove,
                callback=handle_indexing_response(callback))
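
A hedged usage sketch for this client: index a single document with an immediate commit. The Solr host and field names are placeholders, a permissive document_verifier is passed explicitly because this variant has no default one, and handle_indexing_response is assumed to invoke the callback with the parsed Solr reply:

# Illustrative only: host, core path and fields are placeholders.
from tornado.ioloop import IOLoop

client = SolrClient('http://localhost:8983/solr/collection1',
                    default_headers={'User-Agent': 'dopplr-example'},
                    document_verifier=lambda doc: {})   # accept every document

def on_indexed(result):
    print('indexing result: %s' % result)
    IOLoop.instance().stop()

client.index_document({'id': 'doc-1', 'title': 'hello world'},
                      callback=on_indexed,
                      commit=True)
IOLoop.instance().start()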
Example #8
class SolrClient(object):
    """
    Apache Solr Client class.
    """

    def __init__(self, search_host, update_host=None, default_headers=None,
            required_query_params=[], client_args={}, select_path='/select',
            update_path='/update/json', mlt_path='/mlt', get_path='/get',
            suggest_path='/suggest', document_verifier=None, ioloop=None):
        """
        Initialize me.
        """
        self._ioloop = ioloop or IOLoop.instance()

        self._search_url = '%s%s' % (search_host, select_path)
        self._mlt_url = '%s%s' % (search_host, mlt_path)
        self._get_url = '%s%s' % (search_host, get_path)
        self._termsuggest_url = '%s%s' % (search_host, suggest_path)
        uhost = update_host or search_host
        self._update_url = '%s%s' % (uhost, update_path)

        self._required_query_params = required_query_params
        if len([k for (k,v) in self._required_query_params if k=="wt"]) == 0:
            self._required_query_params.append(('wt', 'json'))

        self._document_verifier = document_verifier or \
            default_document_verifier

        self._default_headers = HTTPHeaders()
        if default_headers:
            self._default_headers.update(default_headers)
        self._client = AsyncHTTPClient(self._ioloop, **client_args)

    def _get(self, url, headers=None, callback=None):
        """
        A `GET` request to the solr.
        """
        h = HTTPHeaders()
        h.update(self._default_headers)
        if headers:
            h.update(headers)

        req = HTTPRequest(url, headers=h)
        self._client.fetch(req, callback)

    def _post(self, url, body, headers=None, callback=None):
        """
        A `POST` request to the solr.
        """
        h = headers or HTTPHeaders()
        h.update(self._default_headers)
        if type(body) == str:
            body, h["Content-type"] = body, "application/x-www-form-urlencoded"
        else:
            body, h["Content-type"] = json.dumps(body), "application/json"
        request = HTTPRequest(url, headers=h, method="POST", body=body)
        self._client.fetch(request, callback)

    def _get_params(self, querybuilder):
        query_params = querybuilder.get_params()
        for p in self._required_query_params:
            if p not in query_params:
                query_params.append(p)

        return query_params

    def search(self, querybuilder, callback=None):
        """
        Search the Solr with `querybuilder.get_params()` as query parameters.
        Use GET by default, but switch to POST in case of very long urls.
        """
        query_params = self._get_params(querybuilder)

        log.debug('Searching solr with params: %s' % query_params)
        qs = urllib.urlencode(query_params)

        final_url = "?".join([self._search_url, qs])
        # use POST if the final url is very long
        final_url, use_post = (self._search_url, True) if len(final_url) > 2000 \
                                                    else (final_url, False)
        log.debug('Final search URL: %s' % final_url)

        if use_post:
            self._post(final_url, qs, headers=querybuilder.headers,
                callback=handle_search_response(querybuilder, callback))
        else:
            self._get(final_url, headers=querybuilder.headers,
                callback=handle_search_response(querybuilder, callback))

    def more_like_this(self, querybuilder, callback=None, match_include=True,
            match_offset=None, interestingTerms=None):
        """
        `interestingTerms` can be one of: 'list', 'details', 'none'.
        """
        query_params = self._get_params(querybuilder)

        if match_include and isinstance(match_include, types.BooleanType):
            query_params.append(('mlt.match.include', str(match_include).lower()))
        if match_offset:
            query_params.append(('mlt.match.offset', str(match_offset)))
        if interestingTerms:
            query_params.append(('mlt.interestingTerms', interestingTerms))

        log.debug('MoreLikeThis with params: %s' % query_params)
        qs = urllib.urlencode(query_params)
        final_url = '?'.join([self._mlt_url, qs])

        final_url, use_post = (self._mlt_url, True) if len(final_url) > 2000 \
                                                    else (final_url, False)
        log.debug('Final MLT URL: %s' % final_url)

        if use_post:
            self._post(final_url, qs, headers=querybuilder.headers,
                callback=handle_search_response(querybuilder, callback))
        else:
            self._get(final_url, headers=querybuilder.headers,
                callback=handle_search_response(querybuilder, callback))

    def term_suggest(self, querybuilder, callback=None):
        """
        Simple query against the /term_suggest request handler of Solr.
        """
        query_params = self._get_params(querybuilder)

        log.debug('term_suggest with params: %s' % query_params)
        qs = urllib.urlencode(query_params)
        final_url = '?'.join([self._termsuggest_url, qs])
        log.debug('Final suggest URL: %s' % final_url)

        self._get(final_url, headers=querybuilder.headers,
            callback=handle_suggest_response(querybuilder, callback))

    def get_ids(self, ids, fields=None, response_mapper=None, callback=None):
        """
        Using the Solr 4.X realtime /get handler:
        https://wiki.apache.org/solr/RealTimeGet
        """
        log.debug('realtime get with ids: %s' % ids)
        params = [('ids', ','.join(map(unicode, ids)))]
        if fields:
            params.append(('fl', ','.join(fields)))
        qs = urllib.urlencode(params)
        final_url = '?'.join([self._get_url, qs])
        log.debug('Final get URL: %s' % final_url)
        qb = QueryBuilder(response_mapper=(response_mapper or (lambda x: x)))

        self._get(final_url, callback=handle_search_response(qb, callback))

    def index_document(self, doc, callback=None, commit=False,
                       commitWithin=None, softCommit=None, overwrite=None,
                       boost=None):
        """
        Index a `doc` into Solr. The `callback` will be called from within
        `self._handle_indexing_response`. If `commit is True`, then a `commit`
        request is sent to Solr.
        """
        verification = self._document_verifier(doc)
        if 'error' in verification:
            callback({'error': 'document refused', 'reason': verification,
                      'doc': doc})
            return

        to_index = {'add': {'doc': doc}}
        if boost:
            to_index['add']['boost'] = boost

        params = []
        if commitWithin:
            params.append(('commitWithin', str(commitWithin)))
        if overwrite is not None:
            if not overwrite:
                params.append(('overwrite', 'false'))
        if commit:
            params.append(('commit', 'true'))
        if softCommit:
            params.append(('softCommit', 'true'))
        final_url = '%s?%s' % (self._update_url, urllib.urlencode(params))

        self._post(final_url, to_index,
                   callback=handle_indexing_response(callback))

    def index_documents(self, list_of_docs, callback=None, commit=False,
                        commitWithin=None, softCommit=None, overwrite=None,
                        boost=None):
        """
        Index a `list_of_docs` into Solr. Only available in Solr 4+.
        """
        error_reasons, error_docs = [], []
        for doc in list_of_docs:
            verification = self._document_verifier(doc)
            if 'error' in verification:
                error_reasons.append(verification)
                error_docs.append(doc)
        if error_reasons:
            callback({'error': 'document(s) refused', 'reasons':
                     list(error_reasons), 'docs': list(error_docs)})
            return

        params = []
        if commitWithin:
            params.append(('commitWithin', str(commitWithin)))
        if overwrite is not None:
            if not overwrite:
                params.append(('overwrite', 'false'))
        if commit:
            params.append(('commit', 'true'))
        if softCommit:
            params.append(('softCommit', 'true'))
        final_url = '%s?%s' % (self._update_url, urllib.urlencode(params))

        self._post(final_url, list_of_docs,
                   callback=handle_indexing_response(callback))

    def commit(self, callback=None):
        """
        Commit any pending changes within Solr.
        """
        to_commit = {}
        final_url = "%s?commit=true" % self._update_url

        self._post(final_url, to_commit,
                callback=handle_indexing_response(callback))

    def remove_by_id(self, doc_id, callback=None, commit=False,
            commitWithin=None, softCommit=None):
        """
        Remove the document with id `doc_id`.

        If `commit=True` the change will be committed immediately.
        The `callback` is called from within the
        `self._handle_indexing_response` method.
        """
        to_remove = {'delete': {'id': doc_id}}

        params = []
        if commitWithin:
            params.append(('commitWithin', str(commitWithin)))
        if commit:
            params.append(('commit', 'true'))
        if softCommit:
            params.append(('softCommit', 'true'))
        final_url = '%s?%s' % (self._update_url, urllib.urlencode(params))

        self._post(final_url, to_remove,
                callback=handle_indexing_response(callback))

    def remove_by_query(self, query, callback=None, commit=False,
            commitWithin=None, softCommit=None):
        """
        Remove any documents matching the given query.

        The query must be of the form `field:value`.
        """
        to_remove = {'delete': {'query': query}}

        params = []
        if commitWithin:
            params.append(('commitWithin', str(commitWithin)))
        if commit:
            params.append(('commit', 'true'))
        if softCommit:
            params.append(('softCommit', 'true'))
        final_url = '%s?%s' % (self._update_url, urllib.urlencode(params))

        self._post(final_url, to_remove,
                callback=handle_indexing_response(callback))
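
A sketch of the realtime-get path this variant adds, assuming the same illustrative Solr host as before and that QueryBuilder and the handle_*_response helpers come from the same package:

# Illustrative only: host and document ids are placeholders.
from tornado.ioloop import IOLoop

client = SolrClient('http://localhost:8983/solr/collection1')

def on_docs(result):
    print('fetched: %s' % result)
    IOLoop.instance().stop()

# Uses the Solr 4.x realtime /get handler; 'fl' limits the returned fields.
client.get_ids(['doc-1', 'doc-2'], fields=['id', 'title'], callback=on_docs)
IOLoop.instance().start()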
Example #9
class Configuration(object):
    """Configuration object for requests at a given URI"""

    HTTP_VERBS = [
        'delete',
        'get',
        'head',
        'options',
        'patch',
        'post',
        'put',
        'trace'
    ]

    FLAVORS = {
        'json': {
            'content-type': 'application/json',
            'accept':       'application/json',
        },
        'xml': {
            'content-type': 'application/xml',
            'accept':       'application/xml',
        },
        'plain': {
            'content-type': 'text/plain',
            'accept':       'text/plain',
        },

        # POST and PUT only flavors
        'form': {
            'content-type': 'application/x-www-form-urlencoded',
        },
        'multipart': {
            'content-type': 'multipart/form-data; boundary=AaB03x',
        },
    }

    # Default tornado timeout
    TIMEOUT = 20

    def __init__(self, uri, flavors=None, chain=None, compress=False,
                 ca_certs=None, connect_timeout=None, request_timeout=None):
        """Initialize the configuration for requests at the given URI"""
        self.uri         = uri
        self.headers     = HTTPHeaders()
        self.flavors     = flavors or ['json', 'xml']
        self.processors  = chain or tornado_chain
        self.credentials = {}
        self.verb        = None
        self.use_gzip    = compress
        self.ca_certs    = ca_certs

        # Request extra arguments
        self.progress_callback = None
        self.connect_timeout   = connect_timeout or self.TIMEOUT
        self.request_timeout   = request_timeout or self.TIMEOUT

    def __iter__(self):
        """Iterate over properties"""
        prop_filter = lambda x: x[0][0] != '_'
        return itertools.ifilter(prop_filter, self.__dict__.iteritems())

    def __getattr__(self, value):
        """
        Perform an HTTP request. This method supports calls to the following
        methods: delete, get, head, options, patch, post, put, trace

        Once the HTTP call is performed, a response is returned (unless the
        async method is used).
        """
        if (value not in self.HTTP_VERBS):
            raise AttributeError(value)

        # store current verb to be passed to Request
        self.verb = value.upper()

        # set accept if it wasn't set previously
        if 'accept' not in self.headers:
            for flavor in self.flavors:
                if 'accept' in self.FLAVORS[flavor]:
                    self.headers.add('accept', self.FLAVORS[flavor]['accept'])

        # set the form content type as default if none is present
        verb_allowed = self.verb in ('POST', 'PUT', 'PATCH')
        if verb_allowed and 'content-type' not in self.headers:
            self.headers['content-type'] = self.FLAVORS['form']['content-type']

        # Debug helper
        if __debug__:
            sys.stderr.write("=" * 70)
            sys.stderr.write("\nRequest:{0} {1}".format(self.verb, self.uri))

            sys.stderr.write("\nHeaders:")
            sys.stderr.write("\n  Accept:'{0}'".format(self.headers['accept']))
            if 'content-type' in self.headers:
                ctype = self.headers['content-type']
                sys.stderr.write("\n  Content-Type:'{0}'".format(ctype))
                sys.stderr.write("\n  Compressed:'{0}'".format(self.use_gzip))
            if self.uri.startswith("https"):
                sys.stderr.write("\nCerts:'{0}'".format(self.ca_certs))
            sys.stderr.write("\n{0}\n".format("=" * 70))

        return Request(self)

    def use(self, feature):
        """Register a feature (processor) at this configuration"""
        self.processors.insert(0, feature)
        return self

    def secure(self, value=None, port=None, ca_certs=None):
        """Force connection using https protocol at port specified"""
        if isinstance(value, bool):
            scheme = 'http' if not value else 'https'
            self.uri = _PROT_RE.sub(scheme + r"://\g<url>", self.uri)
        if isinstance(port, int):
            regx_str = r"\g<proto>\g<host>:{0}\g<url>".format(port)
            self.uri = _PORT_RE.sub(regx_str, self.uri)
        if isinstance(ca_certs, basestring):
            self.ca_certs = ca_certs
        return self

    def compress(self, compress=True):
        """Notify server that we will be zipping request"""
        self.use_gzip = bool(compress)
        return self

    def progress(self, progress_callback):
        """
        Define a progress callback for the operation. The callback takes
        two arguments: the total length (if any) and the number of bytes
        already transferred.
        """
        self.progress_callback = progress_callback
        return self

    def until(self, connect_timeout=None, request_timeout=None):
        """Set current timeout in seconds for every call"""
        self.connect_timeout = connect_timeout or self.connect_timeout
        self.request_timeout = request_timeout or self.request_timeout
        return self

    def as_(self, flavor):
        """Set up the Content-Type"""
        if flavor is not None:
            # Just use default flavors in case we pass a None param
            if flavor in self.FLAVORS:
                self.headers.update(self.FLAVORS[flavor])
            else:
                self.headers["accept"] = flavor
                self.headers["content-type"] = flavor
        return self

    def accepts(self, flavor):
        """Configure the accepted response format"""
        if flavor is not None:
            if flavor in self.FLAVORS:
                flavor = self.FLAVORS[flavor]['accept']
            self.headers.add('accept', flavor)
        return self

    def auth(self, credentials, path="*", method='plain'):
        """Authentication feature. It does simple HTTP auth"""
        # already defined ?
        if path in self.credentials or method is None:
            return self
        # process a regex valid for path
        expr = "%s.*" if path.endswith('*') else "%s$"
        rmatch = re.compile(expr % path.rsplit('*', 1)[0])
        # now store it
        self.credentials[path] = (rmatch, method, credentials,)
        return self
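
A hedged sketch of how this fluent configuration is meant to be chained. The URI and credentials are placeholders, _PROT_RE, _PORT_RE, tornado_chain and Request live in the same module (not shown here), and accessing .get at the end goes through __getattr__, which records the verb and returns a Request bound to this configuration:

# Illustrative only: URI and credentials are placeholders.
config = (Configuration('http://api.example.com/v1/items')
          .accepts('json')                         # Accept: application/json
          .as_('json')                             # Content-Type as well
          .secure(True, port=8443)                 # rewrite scheme and port via the module regexes
          .auth(('user', 's3cret'), path='/v1/*')  # simple HTTP auth for matching paths
          .until(connect_timeout=5, request_timeout=10))

request = config.get   # __getattr__ sets verb='GET' and returns Request(config)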
Example #10
File: client.py Project: sojoner/dopplr
class SolrClient(object):
    """
    Apache Solr Client class.
    """
    def __init__(self,
                 search_host,
                 update_host=None,
                 default_headers=None,
                 required_query_params=[],
                 client_args={},
                 select_path='/select',
                 update_path='/update/json',
                 mlt_path='/mlt',
                 suggest_path='/suggest',
                 document_verifier=None,
                 ioloop=None):
        """
        Initialize me.
        """
        self._ioloop = ioloop or IOLoop.instance()

        self._search_url = '%s%s' % (search_host, select_path)
        self._mlt_url = '%s%s' % (search_host, mlt_path)
        self._termsuggest_url = '%s%s' % (search_host, suggest_path)
        uhost = update_host or search_host
        self._update_url = '%s%s' % (uhost, update_path)

        self._required_query_params = required_query_params
        if len([k for (k, v) in self._required_query_params
                if k == "wt"]) == 0:
            self._required_query_params.append(('wt', 'json'))

        self._document_verifier = document_verifier or \
            default_document_verifier

        self._default_headers = HTTPHeaders()
        if default_headers:
            self._default_headers.update(default_headers)
        self._client = AsyncHTTPClient(self._ioloop, **client_args)

    def _get(self, url, headers=None, callback=None):
        """
        A `GET` request to the solr.
        """
        h = HTTPHeaders()
        h.update(self._default_headers)
        if headers:
            h.update(headers)

        req = HTTPRequest(url, headers=h)
        self._client.fetch(req, callback)

    def _post(self, url, body, headers=None, callback=None):
        """
        A `POST` request to the solr.
        """
        h = headers or HTTPHeaders()
        h.update(self._default_headers)
        h["Content-type"] = "application/json"
        request = HTTPRequest(url,
                              headers=h,
                              method="POST",
                              body=json.dumps(body))
        self._client.fetch(request, callback)

    def _get_params(self, querybuilder):
        query_params = querybuilder.get_params()
        for p in self._required_query_params:
            if p not in query_params:
                query_params.append(p)

        return query_params

    def search(self, querybuilder, callback=None):
        """
        Search the Solr with `querybuilder.get_params()` as query parameters.
        Use GET by default, but switch to POST in case of very long urls.
        """
        query_params = self._get_params(querybuilder)

        log.debug('Searching solr with params: %s' % query_params)
        qs = urllib.urlencode(query_params)

        final_url = "?".join([self._search_url, qs])
        # use POST if the final url is very long
        final_url, use_post = (self._search_url, True) if len(final_url) > 2000 \
                                                    else (final_url, False)
        log.debug('Final search URL: %s' % final_url)

        if use_post:
            self._post(final_url,
                       qs,
                       headers=querybuilder.headers,
                       callback=handle_search_response(querybuilder, callback))
        else:
            self._get(final_url,
                      headers=querybuilder.headers,
                      callback=handle_search_response(querybuilder, callback))

    def more_like_this(self,
                       querybuilder,
                       callback=None,
                       match_include=True,
                       match_offset=None,
                       interestingTerms=None):
        """
        `interestingTerms` can be one of: 'list', 'details', 'none'.
        """
        query_params = self._get_params(querybuilder)

        if match_include and isinstance(match_include, types.BooleanType):
            query_params.append(
                ('mlt.match.include', str(match_include).lower()))
        if match_offset:
            query_params.append(('mlt.match.offset', str(match_offset)))
        if interestingTerms:
            query_params.append(('mlt.interestingTerms', interestingTerms))

        log.debug('MoreLikeThis with params: %s' % query_params)
        qs = urllib.urlencode(query_params)
        final_url = '?'.join([self._mlt_url, qs])
        log.debug('Final MLT URL: %s' % final_url)

        self._get(final_url,
                  headers=querybuilder.headers,
                  callback=handle_search_response(querybuilder, callback))

    def term_suggest(self, querybuilder, callback=None):
        """
        Simple query against the /term_suggest request handler of Solr.
        """
        query_params = self._get_params(querybuilder)

        log.debug('term_suggest with params: %s' % query_params)
        qs = urllib.urlencode(query_params)
        final_url = '?'.join([self._termsuggest_url, qs])
        log.debug('Final suggest URL: %s' % final_url)

        self._get(final_url,
                  headers=querybuilder.headers,
                  callback=handle_search_response(querybuilder, callback))

    def index_document(self,
                       doc,
                       callback=None,
                       commit=False,
                       commitWithin=None,
                       overwrite=None,
                       boost=None):
        """
        Index a `doc` into Solr. The `callback` will be called from within
        `self._handle_indexing_response`. If `commit is True`, then a `commit`
        request is sent to Solr.
        """
        verification = self._document_verifier(doc)
        if 'error' in verification:
            callback({
                'error': 'document refused',
                'reason': verification,
                'doc': doc
            })
            return

        to_index = {'add': {'doc': doc}}
        if boost:
            to_index['add']['boost'] = boost

        params = []
        if commitWithin:
            params.append(('commitWithin', str(commitWithin)))
        if overwrite is not None:
            if not overwrite:
                params.append(('overwrite', 'false'))
        if commit:
            params.append(('commit', 'true'))
        final_url = '%s?%s' % (self._update_url, urllib.urlencode(params))

        self._post(final_url,
                   to_index,
                   callback=handle_indexing_response(callback))

    def commit(self, callback=None):
        """
        Commit any pending changes within Solr.
        """
        to_commit = {}
        final_url = "%s?commit=true" % self._update_url

        self._post(final_url,
                   to_commit,
                   callback=handle_indexing_response(callback))

    def remove_by_id(self, doc_id, callback=None, commit=False):
        """
        Remove the document with id `doc_id`.

        If `commit=True` the change will be committed immediately.
        The `callback` is called from within the
        `self._handle_indexing_response` method.
        """
        to_remove = {'delete': {'id': doc_id}}

        if commit:
            final_url = "%s?commit=true" % self._update_url
        else:
            final_url = self._update_url

        self._post(final_url,
                   to_remove,
                   callback=handle_indexing_response(callback))

    def remove_by_query(self, query, callback=None, commit=False):
        """
        Remove any documents matching the given query.

        The query must be of the form `field:value`.
        """
        to_remove = {'delete': {'query': query}}

        if commit:
            final_url = "%s?commit=true" % self._update_url
        else:
            final_url = self._update_url

        self._post(final_url,
                   to_remove,
                   callback=handle_indexing_response(callback))