Esempio n. 1
0
 def _prepare_date(self, value):
     """Render a date value (or an iterable of date values) in Solr's
     string date format.
     """
     if not is_iter(value):
         return str(scorched.dates.solr_date(value))
     return [str(scorched.dates.solr_date(item)) for item in value]
Esempio n. 2
0
 def _prepare_date(self, value):
     """Prepare a value of type date.

     A scalar date is converted to its Solr string form; an iterable
     is converted element-wise.
     """
     def _convert(item):
         return str(scorched.dates.solr_date(item))

     if is_iter(value):
         return [_convert(item) for item in value]
     return _convert(value)
Esempio n. 3
0
 def _prepare_docs(self, docs):
     """Normalise *docs* in place for submission to Solr.

     ``None``-valued fields are removed entirely; fields recognised as
     date fields have their value rendered as a Solr date string.
     Returns the (mutated) *docs* sequence.
     """
     for doc in docs:
         # Snapshot the items so we can pop keys while iterating.
         for name, value in list(doc.items()):
             # XXX remove all None fields this is needed for adding date
             # fields
             if value is None:
                 doc.pop(name)
                 continue
             # The original if/elif branches had identical bodies;
             # merged into a single short-circuiting condition
             # (evaluation order unchanged).
             if name in self._datefields or name.endswith(self._datefields):
                 doc[name] = str(scorched.dates.solr_date(value))
     return docs
Esempio n. 4
0
    def from_json(cls, jsonmsg, unique_key, datefields=()):
        """Build a response object from a raw Solr JSON message.

        :param jsonmsg: raw JSON response body returned by Solr
        :param unique_key: name of the schema's unique-key field, used to
                           match highlighting entries to result documents
        :param datefields: field names to treat as dates; passed through
                           to the sub-result parsers
        :returns: the populated response instance
        :raises ValueError: if the response header status is non-zero
        """
        self = cls()
        self.original_json = jsonmsg
        doc = json.loads(jsonmsg)
        details = doc['responseHeader']
        # Copy the standard header attributes; missing ones become None.
        for attr in ["QTime", "params", "status"]:
            setattr(self, attr, details.get(attr))
        if self.status != 0:
            raise ValueError("Response indicates an error")
        self.result = SolrResult()
        if doc.get('response'):
            self.result = SolrResult.from_json(doc['response'], datefields)
        # TODO mlt/ returns match what should we do with it ?
        # if doc.get('match'):
        #    self.result = SolrResult.from_json(doc['match'], datefields)
        self.facet_counts = SolrFacetCounts.from_json(doc)
        self.spellcheck = doc.get("spellcheck", {})
        # 'params' comes from the response header and may be absent.
        if self.params is not None:
            self.group_field = self.params.get('group.field')
        else:
            self.group_field = None
        self.groups = {}
        if self.group_field is not None:
            self.groups = SolrGroupResult.from_json(doc['grouped'],
                                                    self.group_field,
                                                    datefields)
        self.highlighting = doc.get("highlighting", {})
        if self.highlighting:
            # Add highlighting info to the individual documents.
            if doc.get('response'):
                for d in self.result.docs:
                    k = str(d[unique_key])
                    if k in self.highlighting:
                        d['solr_highlights'] = self.highlighting[k]
            elif doc.get('grouped'):
                # Grouped results: attach highlights inside each group's
                # doclist instead of the flat docs list.
                for group in getattr(self.groups, self.group_field)['groups']:
                    for d in group['doclist']['docs']:
                        k = str(d[unique_key])
                        if k in self.highlighting:
                            d['solr_highlights'] = self.highlighting[k]

        self.debug = doc.get('debug', {})
        self.next_cursor_mark = doc.get('nextCursorMark')
        self.more_like_these = dict(
            (k, SolrResult.from_json(v, datefields))
            for (k, v) in list(doc.get('moreLikeThis', {}).items()))
        self.term_vectors = self.parse_term_vectors(doc.get('termVectors', []))
        # can be computed by MoreLikeThisHandler
        self.interesting_terms = doc.get('interestingTerms', None)
        self.stats = SolrStats.from_json(doc)
        return self
Esempio n. 5
0
    def from_json(cls, jsonmsg, unique_key, datefields=()):
        """Parse a raw Solr JSON response into a response object.

        :param jsonmsg: raw JSON response body returned by Solr
        :param unique_key: schema unique-key field name; used to attach
                           highlighting entries to their documents
        :param datefields: field names to treat as dates; forwarded to
                           the sub-result parsers
        :returns: the populated response instance
        :raises ValueError: if the response header status is non-zero
        """
        self = cls()
        self.original_json = jsonmsg
        doc = json.loads(jsonmsg)
        details = doc['responseHeader']
        # Header attributes default to None when absent.
        for attr in ["QTime", "params", "status"]:
            setattr(self, attr, details.get(attr))
        if self.status != 0:
            raise ValueError("Response indicates an error")
        self.result = SolrResult()
        if doc.get('response'):
            self.result = SolrResult.from_json(doc['response'], datefields)
        # TODO mlt/ returns match what should we do with it ?
        # if doc.get('match'):
        #    self.result = SolrResult.from_json(doc['match'], datefields)
        self.facet_counts = SolrFacetCounts.from_json(doc)
        self.spellcheck = doc.get("spellcheck", {})
        # self.params may be None when the header carried no 'params'.
        if self.params is not None:
            self.group_field = self.params.get('group.field')
        else:
            self.group_field = None
        self.groups = {}
        if self.group_field is not None:
            self.groups = SolrGroupResult.from_json(
                doc['grouped'], self.group_field, datefields)
        self.highlighting = doc.get("highlighting", {})
        if self.highlighting:
            # Add highlighting info to the individual documents.
            if doc.get('response'):
                for d in self.result.docs:
                    k = str(d[unique_key])
                    if k in self.highlighting:
                        d['solr_highlights'] = self.highlighting[k]
            elif doc.get('grouped'):
                # For grouped responses the documents live inside each
                # group's doclist.
                for group in getattr(self.groups, self.group_field)['groups']:
                    for d in group['doclist']['docs']:
                        k = str(d[unique_key])
                        if k in self.highlighting:
                            d['solr_highlights'] = self.highlighting[k]

        self.debug = doc.get('debug', {})
        self.next_cursor_mark = doc.get('nextCursorMark')
        self.more_like_these = dict(
            (k, SolrResult.from_json(v, datefields))
            for (k, v) in list(doc.get('moreLikeThis', {}).items()))
        self.term_vectors = self.parse_term_vectors(doc.get('termVectors', []))
        # can be computed by MoreLikeThisHandler
        self.interesting_terms = doc.get('interestingTerms', None)
        self.stats = SolrStats.from_json(doc)
        return self
Esempio n. 6
0
 def options(self):
     """Return the Solr request parameters for the configured facet
     queries, or an empty dict when no queries are set.
     """
     if not self.queries:
         return {}
     return {'facet': True,
             'facet.query': [str(query) for query in self.queries]}
Esempio n. 7
0
    def delete_by_query(self, query, **kwargs):
        """Delete all index entries matching *query*.

        :param query: criteria describing which entries should be deleted
        :type query: LuceneQuery
        """
        message = json.dumps({"delete": {"query": str(query)}})
        self.conn.update(message, **kwargs)
Esempio n. 8
0
 def _prepare_docs(self, docs):
     """Return copies of *docs* ready for Solr: ``None``-valued fields
     are dropped and datetime fields are rendered as Solr date strings
     (element-wise for multivalued fields).
     """
     def _render(name, value):
         # Non-date fields pass through untouched.
         if not scorched.dates.is_datetime_field(name, self._datefields):
             return value
         if is_iter(value):
             return [str(scorched.dates.solr_date(v)) for v in value]
         return str(scorched.dates.solr_date(value))

     # XXX drop all None fields; this is needed for adding date fields.
     return [{name: _render(name, value)
              for name, value in doc.items() if value is not None}
             for doc in docs]
Esempio n. 9
0
def is_solr_available(dsn=None):
    """Return True when a GET against *dsn* (or $SOLR_URL, or the local
    default URL) succeeds within one second; False otherwise.
    """
    url = dsn if dsn else os.environ.get("SOLR_URL",
                                         "http://localhost:8983/solr")
    if url is not None:
        try:
            requests.get(url, timeout=1)
            return True
        except Exception as e:
            print("Connection error:%s" % str(e))
    return False
Esempio n. 10
0
def is_solr_available(dsn=None):
    """Return True if a Solr server answers at *dsn* within one second.

    Falls back to the SOLR_URL environment variable, then to the
    default local Solr URL. Connection failures are reported on stdout
    and yield False.
    """
    if not dsn:
        dsn = os.environ.get("SOLR_URL",
                             "http://localhost:8983/solr")
    # dsn cannot be None past this point: os.environ.get above always
    # returns a string because a default is supplied, so the original
    # 'if dsn is not None' guard was dead code and has been removed.
    try:
        requests.get(dsn, timeout=1)
        return True
    except Exception as e:
        print("Connection error:%s" % str(e))
        return False
Esempio n. 11
0
def skip_unless_solr(func):
    """Decorator: skip the test *func* unless a working Solr connection
    (given by the SOLR_URL environment variable) is available.
    """
    if not is_solr_available():
        msg = "Test needs a running Solr connection (SOLR_URL)"
        warnings.warn(msg + str(func))
        return unittest.skip(msg)(func)
    return func
Esempio n. 12
0
    def delete_by_query(self, query, **kwargs):
        """
        :param query: criteria describing which entries should be deleted
        :type query: LuceneQuery
        :returns: SolrUpdateResponse  -- A Solr response object.

        Delete entries by a given query
        """
        # Solr's JSON update format: {"delete": {"query": "<lucene query>"}}
        delete_message = json.dumps({"delete": {"query": str(query)}})
        ret = scorched.response.SolrUpdateResponse.from_json(
            self.conn.update(delete_message, **kwargs))
        return ret
Esempio n. 13
0
    def delete_by_query(self, query, **kwargs):
        """Delete entries matching *query* and return the parsed response.

        :param query: criteria describing which entries should be deleted
        :type query: LuceneQuery
        :returns: SolrUpdateResponse -- a Solr response object.
        """
        body = json.dumps({"delete": {"query": str(query)}})
        raw = self.conn.update(body, **kwargs)
        return scorched.response.SolrUpdateResponse.from_json(raw)
Esempio n. 14
0
 def _prepare_docs(self, docs):
     """Return copies of *docs* ready for Solr submission.

     ``None``-valued fields are dropped (needed for adding date
     fields). Datetime fields are rendered as Solr date strings;
     multivalued (iterable) date fields are now converted element-wise
     instead of passing the whole sequence to the date formatter.
     """
     prepared_docs = []
     for doc in docs:
         new_doc = {}
         for name, value in list(doc.items()):
             # XXX remove all None fields this is needed for adding date
             # fields
             if value is None:
                 continue
             if scorched.dates.is_datetime_field(name, self._datefields):
                 if is_iter(value):
                     # Multivalued date field: convert each entry.
                     value = [str(scorched.dates.solr_date(v))
                              for v in value]
                 else:
                     value = str(scorched.dates.solr_date(value))
             new_doc[name] = value
         prepared_docs.append(new_doc)
     return prepared_docs
Esempio n. 15
0
def params_from_dict(**kwargs):
    """Flatten keyword arguments into a sorted list of UTF-8 encoded
    ``(key, value)`` pairs suitable for a Solr request.
    """
    encoded = []
    for key, values in kwargs.items():
        if isinstance(key, bytes):
            key = key.decode('utf-8')
        # We allow for multivalued options with lists.
        values = values if is_iter(values) else [values]
        for value in values:
            if isinstance(value, bool):
                value = b"true" if value else b"false"
            if isinstance(value, str):
                value = value.encode('utf-8')
            if isinstance(value, numbers.Number):
                value = str(value).encode('utf-8')
            encoded.append((key, value))
    return sorted(encoded)
Esempio n. 16
0
def params_from_dict(**kwargs):
    """Turn keyword arguments into a sorted list of UTF-8 encoded
    ``(key, value)`` request parameters.
    """
    def _encode(v):
        # Booleans first: bool is a Number subclass, so check it early.
        if isinstance(v, bool):
            return b"true" if v else b"false"
        if isinstance(v, str):
            return v.encode('utf-8')
        if isinstance(v, numbers.Number):
            return str(v).encode('utf-8')
        return v

    utf8_params = []
    for key, raw in kwargs.items():
        if isinstance(key, bytes):
            key = key.decode('utf-8')
        # We allow for multivalued options with lists.
        for v in (raw if is_iter(raw) else [raw]):
            utf8_params.append((key, _encode(v)))
    return sorted(utf8_params)
Esempio n. 17
0
 def test_solr_date_from_str(self):
     """solr_date built from an ISO string compares equal to itself."""
     # str here is original str from python
     self.assertIn("'str'", repr(str))
     parsed = solr_date(str("2009-07-23T03:24:34.000376Z"))
     self.assertEqual(parsed, solr_date(parsed))
     self.assertTrue(parsed == parsed)
Esempio n. 18
0
 def options(self):
     """Build the facet query options; empty when no queries are set."""
     opts = {}
     if self.queries:
         opts['facet'] = True
         opts['facet.query'] = [str(q) for q in self.queries]
     return opts
Esempio n. 19
0
 def to_solr(self, value):
     """Render *value* as the string form Solr expects.

     Booleans map to ``"true"``/``"false"``, datetimes go through the
     Solr date formatter, everything else falls back to ``str``.
     """
     if isinstance(value, bool):
         result = u"true" if value else u"false"
     elif isinstance(value, datetime.datetime):
         result = str(scorched.dates.solr_date(value))
     else:
         result = str(value)
     return result
Esempio n. 20
0
 def term_or_phrase(self, arg, force=None):
     """Classify *arg* as 'terms' (matches the default term pattern)
     or 'phrases' (anything else).
     """
     if self.default_term_re.match(str(arg)):
         return 'terms'
     return 'phrases'
Esempio n. 21
0
 def __str__(self):
     """Delegate the string representation to the ``char`` attribute."""
     return str(self.char)
Esempio n. 22
0
 def __str__(self):
     """Delegate the string representation to the ``char`` attribute."""
     return str(self.char)
Esempio n. 23
0
def check_solr_date_from_date(s, date, canonical_date):
    """Assert that *date* serialises to *s*, then round-trip the string
    form against *canonical_date*.
    """
    # py2/py3 compatibility shim; on py3 this is the builtin str.
    from scorched.compat import str
    rendered = str(solr_date(date))
    assert rendered == s, "Unequal representations of %r: %r and %r" % (
        date, rendered, s)
    check_solr_date_from_string(s, canonical_date)
Esempio n. 24
0
 def test_solr_date_from_str(self):
     """solr_date accepts an ISO-format string; the result is stable
     under re-wrapping and equal to itself.
     """
     # str here is original str from python
     self.assertTrue("'str'" in repr(str))
     s = solr_date(str("2009-07-23T03:24:34.000376Z"))
     # Wrapping an existing solr_date must be a no-op for equality.
     self.assertEqual(s, solr_date(s))
     self.assertTrue(s == s)
Esempio n. 25
0
 def term_or_phrase(self, arg, force=None):
     """Return 'terms' when *arg* matches the default term pattern,
     otherwise 'phrases'.
     """
     matched = self.default_term_re.match(str(arg))
     return 'terms' if matched else 'phrases'
Esempio n. 26
0
def check_solr_date_from_date(s, date, canonical_date):
    """Assert that formatting *date* yields exactly *s*, then verify the
    string form parses back to *canonical_date*.
    """
    # py2/py3 compatibility shim; on py3 this is the builtin str.
    from scorched.compat import str
    assert str(
        solr_date(date)) == s, "Unequal representations of %r: %r and %r" % (
            date, str(solr_date(date)), s)
    check_solr_date_from_string(s, canonical_date)
Esempio n. 27
0
    def url_for_update(self, commit=None, commitWithin=None, softCommit=None,
                       optimize=None, waitSearcher=None, expungeDeletes=None,
                       maxSegments=None):
        """
        :param commit: optional -- commit actions
        :type commit: bool
        :param commitWithin: optional -- document will be added within that
                             time
        :type commitWithin: int
        :param softCommit: optional -- performant commit without "on-disk"
                           guarantee
        :type softCommit: bool
        :param optimize: optional -- optimize forces all of the index segments
                         to be merged into a single segment first.
        :type optimize: bool
        :param waitSearcher: optional -- block until a new searcher is opened
                             and registered as the main query searcher,
        :type waitSearcher: bool
        :param expungeDeletes: optional -- merge segments with deletes away
        :type expungeDeletes: bool
        :param maxSegments: optional -- optimizes down to at most this number
                            of segments
        :type maxSegments: int
        :returns: str -- url with all extra parameters set

        This function sets all extra parameters for the ``optimize`` and
        ``commit`` functions.

        :raises ValueError: for a non-numeric or negative ``commitWithin``,
                            a non-numeric or non-positive ``maxSegments``,
                            ``expungeDeletes`` without ``commit``, or
                            ``maxSegments`` without ``optimize``.
        """
        extra_params = {}
        if commit is not None:
            extra_params['commit'] = "true" if commit else "false"
        if commitWithin is not None:
            try:
                extra_params['commitWithin'] = int(commitWithin)
            except (TypeError, ValueError):
                raise ValueError(
                    "commitWithin should be a number in milliseconds")
            if extra_params['commitWithin'] < 0:
                raise ValueError(
                    "commitWithin should be a number in milliseconds")
            extra_params['commitWithin'] = str(extra_params['commitWithin'])
        if softCommit is not None:
            extra_params['softCommit'] = "true" if softCommit else "false"
        if optimize is not None:
            extra_params['optimize'] = "true" if optimize else "false"
        if waitSearcher is not None:
            extra_params['waitSearcher'] = "true" if waitSearcher else "false"
        if expungeDeletes is not None:
            extra_params[
                'expungeDeletes'] = "true" if expungeDeletes else "false"
        if maxSegments is not None:
            try:
                extra_params['maxSegments'] = int(maxSegments)
            except (TypeError, ValueError):
                # The original raised a bare "maxSegments" message, which
                # gave the caller no hint about what was wrong.
                raise ValueError("maxSegments should be a positive number")
            if extra_params['maxSegments'] <= 0:
                raise ValueError("maxSegments should be a positive number")
            extra_params['maxSegments'] = str(extra_params['maxSegments'])
        # Some parameters are only meaningful in combination.
        if 'expungeDeletes' in extra_params and 'commit' not in extra_params:
            raise ValueError("Can't do expungeDeletes without commit")
        if 'maxSegments' in extra_params and 'optimize' not in extra_params:
            raise ValueError("Can't do maxSegments without optimize")
        if extra_params:
            return "%s?%s" % (self.update_url, scorched.compat.urlencode(
                sorted(extra_params.items())))
        else:
            return self.update_url
Esempio n. 28
0
 def __str__(self):
     """Delegate the string representation to the ``result`` attribute."""
     return str(self.result)
Esempio n. 29
0
    def url_for_update(self,
                       commit=None,
                       commitWithin=None,
                       softCommit=None,
                       optimize=None,
                       waitSearcher=None,
                       expungeDeletes=None,
                       maxSegments=None):
        """
        :param commit: optional -- commit actions
        :type commit: bool
        :param commitWithin: optional -- document will be added within that
                             time
        :type commitWithin: int
        :param softCommit: optional -- performant commit without "on-disk"
                           guarantee
        :type softCommit: bool
        :param optimize: optional -- optimize forces all of the index segments
                         to be merged into a single segment first.
        :type optimize: bool
        :param waitSearcher: optional -- block until a new searcher is opened
                             and registered as the main query searcher,
        :type waitSearcher: bool
        :param expungeDeletes: optional -- merge segments with deletes away
        :type expungeDeletes: bool
        :param maxSegments: optional -- optimizes down to at most this number
                            of segments
        :type maxSegments: int
        :returns: str -- url with all extra parameters set
        :raises ValueError: for invalid numeric arguments, or for
                            ``expungeDeletes`` without ``commit`` /
                            ``maxSegments`` without ``optimize``

        This function sets all extra parameters for the ``optimize`` and
        ``commit`` functions.
        """
        extra_params = {}
        if commit is not None:
            extra_params['commit'] = "true" if commit else "false"
        if commitWithin is not None:
            # Must be a non-negative number of milliseconds.
            try:
                extra_params['commitWithin'] = int(commitWithin)
            except (TypeError, ValueError):
                raise ValueError(
                    "commitWithin should be a number in milliseconds")
            if extra_params['commitWithin'] < 0:
                raise ValueError(
                    "commitWithin should be a number in milliseconds")
            extra_params['commitWithin'] = str(extra_params['commitWithin'])
        if softCommit is not None:
            extra_params['softCommit'] = "true" if softCommit else "false"
        if optimize is not None:
            extra_params['optimize'] = "true" if optimize else "false"
        if waitSearcher is not None:
            extra_params['waitSearcher'] = "true" if waitSearcher else "false"
        if expungeDeletes is not None:
            extra_params[
                'expungeDeletes'] = "true" if expungeDeletes else "false"
        if maxSegments is not None:
            # Must be a positive number of segments.
            try:
                extra_params['maxSegments'] = int(maxSegments)
            except (TypeError, ValueError):
                raise ValueError("maxSegments")
            if extra_params['maxSegments'] <= 0:
                raise ValueError("maxSegments should be a positive number")
            extra_params['maxSegments'] = str(extra_params['maxSegments'])
        # Some parameters are only meaningful in combination.
        if 'expungeDeletes' in extra_params and 'commit' not in extra_params:
            raise ValueError("Can't do expungeDeletes without commit")
        if 'maxSegments' in extra_params and 'optimize' not in extra_params:
            raise ValueError("Can't do maxSegments without optimize")
        if extra_params:
            return "%s?%s" % (self.update_url,
                              scorched.compat.urlencode(
                                  sorted(extra_params.items())))
        else:
            return self.update_url
Esempio n. 30
0
 def to_solr(self, value):
     """Serialise *value* for Solr.

     Datetimes are formatted via the Solr date helper, booleans become
     ``"true"``/``"false"``, and any other value uses its str() form.
     """
     if isinstance(value, datetime.datetime):
         return str(scorched.dates.solr_date(value))
     if isinstance(value, bool):
         return u"true" if value else u"false"
     return str(value)
Esempio n. 31
0
 def __str__(self):
     """Delegate the string representation to the ``result`` attribute."""
     return str(self.result)