Beispiel #1
0
    def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300):
        """
        Set up the server pool, request session, and JSON machinery.

        :arg urls: A URL or iterable of URLs of ES nodes
        :arg timeout: Number of seconds to wait for each request before raising
            Timeout
        :arg max_retries: How many other servers to try, in series, after a
            request times out or a connection fails
        :arg revival_delay: Number of seconds for which to avoid a server after
            it times out or is uncontactable
        """
        # Accept a bare string as shorthand for a single-server pool, then
        # normalize away trailing slashes so paths can be joined uniformly.
        url_list = [urls] if isinstance(urls, basestring) else list(urls)
        url_list = [url.rstrip('/') for url in url_list]

        self.servers = DowntimePronePool(url_list, revival_delay)
        self.revival_delay = revival_delay
        self.timeout = timeout
        self.max_retries = max_retries
        self.logger = getLogger('pyelasticsearch')
        self.session = requests.session()

        # Bind the converter to a local so the nested encoder class closes
        # over it rather than over ``self``.
        convert = self.from_python

        class DateSavvyJsonEncoder(json.JSONEncoder):
            def default(self, value):
                """Convert more Python data types to ES-understandable JSON."""
                return convert(value)

        self.json_encoder = DateSavvyJsonEncoder
Beispiel #2
0
def inicializar(archivo, **kwargs):
    """
    Initialize the search backend's Elasticsearch server pool.

    :arg archivo: Path to an INI config file whose ``[buscador]`` section
        supplies an ``elasticsearch.url`` option, or None to fall back to
        ``kwargs``.
    :arg kwargs: May supply ``elasticsearch.url`` directly (a URL string or
        an iterable of URLs) when no config file is given.
    :raises Exception: If neither source provides a URL.
    """
    if archivo is not None:
        config = ConfigParser.ConfigParser()
        with open(archivo) as fp:
            config.readfp(fp)
        urls = config.get('buscador', 'elasticsearch.url')
    elif 'elasticsearch.url' in kwargs:
        # ``kwargs`` is always a dict (possibly empty), so the previous
        # ``kwargs is not None`` guard was redundant; the membership test
        # alone is sufficient.
        urls = kwargs['elasticsearch.url']
    else:
        raise Exception('No hay ninguna URL especificada para el buscador')

    # Accept a bare string as a one-element list, then strip trailing
    # slashes so URL joining elsewhere stays uniform.
    if isinstance(urls, basestring):
        urls = [urls]
    urls = [u.rstrip('/') for u in urls]
    SEConn.servers = DowntimePronePool(urls, 300)
Beispiel #3
0
 def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300):
     """
     Build the server pool and request session for this client.

     :arg urls: A URL or iterable of URLs of ES nodes. These are full URLs
         with port numbers, like ``http://elasticsearch.example.com:9200``.
     :arg timeout: Number of seconds to wait for each request before raising
         Timeout
     :arg max_retries: How many other servers to try, in series, after a
         request times out or a connection fails
     :arg revival_delay: Number of seconds for which to avoid a server after
         it times out or is uncontactable
     """
     # A lone string means a single-server pool; trailing slashes are
     # stripped so paths can be appended uniformly later.
     server_urls = [urls] if isinstance(urls, string_types) else list(urls)
     server_urls = [url.rstrip('/') for url in server_urls]

     self.servers = DowntimePronePool(server_urls, revival_delay)
     self.revival_delay = revival_delay
     self.timeout = timeout
     self.max_retries = max_retries
     self.logger = getLogger('pyelasticsearch')
     self.session = requests.session()
     self.json_encoder = JsonEncoder