def results(chunks, size=100):
    def fetch():
        fetch.cache = []
        fetch.cache.extend(l for l in itertools.islice(chunks, size) if l)
    fetch()
    first = True
    while fetch.cache:
        for chunk in fetch.cache:
            if is_msgpack:
                if first:
                    first = False
                    if ord(chunk[-1]) & 0xf0 == 0x90:
                        chunk = chunk[:-1] + '\x90'
                    elif chunk[-3] == '\xdc':
                        chunk = chunk[:-3] + '\x90'
                    elif chunk[-5] == '\xdd':
                        chunk = chunk[:-5] + '\x90'
                    else:
                        raise IOError("Unexpected chunk!")
                yield msgpack.loads(chunk)
            elif is_json:
                if first:
                    first = False
                    if chunk.rstrip().endswith('['):
                        chunk += ']}}'
                    else:
                        raise IOError("Unexpected chunk!")
                elif chunk.lstrip().startswith(']'):
                    continue
                else:
                    chunk = chunk.rstrip().rstrip(',')
                yield json.loads(chunk)
            else:
                yield chunk
        fetch()
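# --- Illustration (not part of the client) ---
# A minimal, self-contained sketch of the fetch()/islice batching pattern used
# by results() above: pull up to `size` items from an iterator into a cache,
# drain the cache, then refill until the source is exhausted. The name
# `batches` is hypothetical and only for illustration.
import itertools

def batches(chunks, size=100):
    chunks = iter(chunks)
    while True:
        cache = [c for c in itertools.islice(chunks, size) if c]
        if not cache:
            break
        for chunk in cache:
            yield chunk

# Falsy items (such as empty strings) are filtered out, just as the `if l`
# guard in fetch() does:
assert list(batches(['a', '', 'b', 'c', 'd'], size=2)) == ['a', 'b', 'c', 'd']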
def results(chunks, size=100):
    def fetch():
        fetch.cache = []
        fetch.cache.extend(l for l in itertools.islice(chunks, size) if l)
    fetch()
    first = True
    while fetch.cache:
        for chunk in fetch.cache:
            if is_msgpack:
                if first:
                    first = False
                    for o, m, v in ((-1, 0xf0, 0x90), (-3, 0xff, 0xdc), (-5, 0xff, 0xdd)):
                        if ord(chunk[o]) & m == v:
                            try:
                                yield msgpack.loads(chunk[:o] + '\x90')
                                break
                            except Exception:
                                pass
                    else:
                        raise IOError("Unexpected chunk!")
                else:
                    yield msgpack.loads(chunk)
            elif is_json:
                if first:
                    first = False
                    if chunk.rstrip().endswith('['):
                        chunk += ']}}'
                    else:
                        raise IOError("Unexpected chunk!")
                elif chunk.lstrip().startswith(']'):
                    continue
                else:
                    chunk = chunk.rstrip().rstrip(',')
                yield json.loads(chunk)
            else:
                yield chunk
        fetch()
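# --- Illustration (not part of the client) ---
# Why the loop above probes offsets -1, -3 and -5 with masks 0xf0/0xff:
# msgpack encodes an array header as a fixarray (one byte, 0x90..0x9f),
# array16 (0xdc followed by a 2-byte length) or array32 (0xdd followed by a
# 4-byte length). Replacing that trailing header with '\x90' turns the first
# chunk into a complete object that ends in an empty array.
import msgpack

small = msgpack.dumps(list(range(3)))    # fixarray: first byte is 0x93
large = msgpack.dumps(list(range(200)))  # array16: first byte is 0xdc
assert ord(small[0:1]) & 0xf0 == 0x90
assert ord(large[0:1]) == 0xdc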
def results(chunks, size=100):
    def fetch():
        fetch.cache = []
        fetch.cache.extend(l for l in itertools.islice(chunks, size) if l)
    fetch()
    total = None
    if is_msgpack:
        while fetch.cache:
            for num, chunk in enumerate(fetch.cache):
                if num == 0:
                    # Probe the trailing byte(s) of the first chunk for the msgpack
                    # array header (fixarray/array16/array32) to get the total
                    # number of items that will follow.
                    for o, m, v in ((-1, 0xf0, 0x90), (-3, 0xff, 0xdc), (-5, 0xff, 0xdd)):
                        if ord(chunk[o]) & m == v:
                            try:
                                if v == 0xdd:
                                    total = msgpack.loads('\xce' + chunk[o + 1:]) + 1
                                elif v == 0xdc:
                                    total = msgpack.loads('\xcd' + chunk[o + 1:]) + 1
                                else:
                                    total = msgpack.loads(chr(ord(chunk[o]) & 0x0f)) + 1
                                chunk = chunk[:o] + '\x90' + msgpack.dumps({RESPONSE_TOOK: 0.0})[1:]
                                break
                            except Exception:
                                pass
                    else:
                        raise IOError("Unexpected chunk!")
                if total == 0:
                    # Add single-item dictionary:
                    meta['_']._update(msgpack.loads('\x81' + chunk))
                else:
                    obj = msgpack.loads(chunk)
                    yield obj
                total -= 1
            fetch()
    elif is_json:
        while fetch.cache:
            for num, chunk in enumerate(fetch.cache):
                if num == 0:
                    if chunk.rstrip().endswith('['):
                        chunk += ']}}'
                    else:
                        raise IOError("Unexpected chunk!")
                elif chunk.lstrip().startswith(']'):
                    # Remove "],}" and use as single-item dictionary:
                    chunk = '{' + chunk.lstrip()[1:].lstrip()[1:].lstrip()[1:]
                    total = 0
                else:
                    chunk = chunk.rstrip().rstrip(',')
                if total == 0:
                    meta['_']._update(json.loads(chunk))
                else:
                    obj = json.loads(chunk)
                    yield obj
            fetch()
    else:
        while fetch.cache:
            for num, chunk in enumerate(fetch.cache):
                yield chunk
            fetch()
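# --- Illustration (not part of the client) ---
# Why the first JSON chunk gets ']}}' appended: assuming (hypothetically) the
# streamed body opens with two nested objects and an unterminated array such
# as '{"#meta": {"#hits": [', closing the array and both objects makes that
# prefix parseable on its own. The field names below are made up.
import json

first_chunk = '{"#meta": {"#hits": ['
if first_chunk.rstrip().endswith('['):
    assert json.loads(first_chunk + ']}}') == {"#meta": {"#hits": []}}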
def _send_request(self, action_request, index, host=None, port=None, nodename=None, id=None, body=None, default=NA, **kwargs):
    """
    :arg action_request: Action to perform for the request: index, delete, search, stats, patch or head
    :arg query: Query to process on xapiand
    :arg index: Index path
    :arg host: Address to connect to xapiand
    :arg port: Port to connect to xapiand
    :arg nodename: Node name; if empty, it is assigned randomly
    :arg id: Document ID
    :arg body: File or dictionary with the body of the request
    """
    method, stream, key = self._methods[action_request]
    url = self._build_url(action_request, index, host, port, nodename, id)

    # Normalize params: '__' in keys becomes '.', booleans become 1/0, and
    # falsy 'commit'/'volatile'/'pretty'/'indent' flags are dropped.
    params = kwargs.pop('params', None)
    if params is not None:
        kwargs['params'] = dict(
            (k.replace('__', '.'), (v and 1 or 0) if isinstance(v, bool) else v)
            for k, v in params.items()
            if k not in ('commit', 'volatile', 'pretty', 'indent') or v)

    stream = kwargs.pop('stream', stream)
    if stream is not None:
        kwargs['stream'] = stream
    kwargs.setdefault('allow_redirects', False)

    headers = kwargs.setdefault('headers', {})
    accept = headers.setdefault('accept', self.default_accept)
    headers.setdefault('accept-encoding', self.default_accept_encoding)

    # Explicit 'json'/'msgpack' keyword arguments take the body and force the
    # content type; otherwise the content type is negotiated from the headers.
    if 'json' in kwargs:
        body = kwargs.pop('json')
        headers['content-type'] = 'application/json'
        is_msgpack = False
        is_json = True
    elif 'msgpack' in kwargs:
        body = kwargs.pop('msgpack')
        headers['content-type'] = 'application/x-msgpack'
        is_msgpack = True
        is_json = False
    else:
        content_type = headers.setdefault('content-type', accept)
        is_msgpack = 'application/x-msgpack' in content_type
        is_json = 'application/json' in content_type

    if body is not None:
        if isinstance(body, dict):
            if '_schema' in body:
                # Prefix the schema endpoint with the client's configured prefix.
                body = body.copy()
                schema = body['_schema']
                if isinstance(schema, dict):
                    schema['_endpoint'] = '{}{}'.format(self.prefix, schema['_endpoint'].strip('/'))
                else:
                    schema = '{}{}'.format(self.prefix, schema.strip('/'))
                body['_schema'] = schema
        if isinstance(body, (dict, list)):
            if is_msgpack:
                body = msgpack.dumps(body)
            elif is_json:
                body = json.dumps(body, ensure_ascii=True)
        elif os.path.isfile(body):
            body = open(body, 'r')
        res = method(url, body, **kwargs)
    else:
        data = kwargs.get('data')
        if data:
            if is_msgpack:
                kwargs['data'] = msgpack.dumps(data)
            elif is_json:
                kwargs['data'] = json.dumps(data, ensure_ascii=True)
        res = method(url, **kwargs)

    # For patch/delete/get, a 404 either raises DoesNotExist or returns the
    # caller-supplied default; any other error status raises via requests.
    if res.status_code == 404 and action_request in ('patch', 'delete', 'get'):
        if default is NA:
            raise self.DoesNotExist("Matching query does not exist.")
        return default
    else:
        res.raise_for_status()

    content_type = res.headers.get('content-type', '')
    is_msgpack = 'application/x-msgpack' in content_type
    is_json = 'application/json' in content_type

    if stream:
        meta = {}

        def results(chunks, size=100):
            def fetch():
                fetch.cache = []
                fetch.cache.extend(l for l in itertools.islice(chunks, size) if l)
            fetch()
            total = None
            if is_msgpack:
                while fetch.cache:
                    for num, chunk in enumerate(fetch.cache):
                        if num == 0:
                            # Probe the trailing byte(s) of the first chunk for the
                            # msgpack array header to get the total item count.
                            for o, m, v in ((-1, 0xf0, 0x90), (-3, 0xff, 0xdc), (-5, 0xff, 0xdd)):
                                if ord(chunk[o]) & m == v:
                                    try:
                                        if v == 0xdd:
                                            total = msgpack.loads('\xce' + chunk[o + 1:]) + 1
                                        elif v == 0xdc:
                                            total = msgpack.loads('\xcd' + chunk[o + 1:]) + 1
                                        else:
                                            total = msgpack.loads(chr(ord(chunk[o]) & 0x0f)) + 1
                                        chunk = chunk[:o] + '\x90' + msgpack.dumps({RESPONSE_TOOK: 0.0})[1:]
                                        break
                                    except Exception:
                                        pass
                            else:
                                raise IOError("Unexpected chunk!")
                        if total == 0:
                            # Add single-item dictionary:
                            meta['_']._update(msgpack.loads('\x81' + chunk))
                        else:
                            obj = msgpack.loads(chunk)
                            yield obj
                        total -= 1
                    fetch()
            elif is_json:
                while fetch.cache:
                    for num, chunk in enumerate(fetch.cache):
                        if num == 0:
                            if chunk.rstrip().endswith('['):
                                chunk += ']}}'
                            else:
                                raise IOError("Unexpected chunk!")
                        elif chunk.lstrip().startswith(']'):
                            # Remove "],}" and use as single-item dictionary:
                            chunk = '{' + chunk.lstrip()[1:].lstrip()[1:].lstrip()[1:]
                            total = 0
                        else:
                            chunk = chunk.rstrip().rstrip(',')
                        if total == 0:
                            meta['_']._update(json.loads(chunk))
                        else:
                            obj = json.loads(chunk)
                            yield obj
                    fetch()
            else:
                while fetch.cache:
                    for num, chunk in enumerate(fetch.cache):
                        yield chunk
                    fetch()

        results = results(res.iter_content(chunk_size=None))
        meta.update(next(results))
    else:
        meta = {}
        if is_msgpack:
            content = msgpack.loads(res.content)
        elif is_json:
            content = json.loads(res.content)
        else:
            content = res.content
        results = [content]

    results = Results(meta, results)
    if key == 'result':
        results = results.next()
    for k, v in res.headers.items():
        dict.__setattr__(results, k.replace('-', '_'), v)
    return results
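# --- Illustration (not part of the client) ---
# What the params normalization in _send_request() does: double underscores in
# keys become dots, booleans become 1/0, and falsy 'commit'/'volatile'/
# 'pretty'/'indent' flags are dropped. The parameter names in this standalone
# re-creation are made up.
params = {'query__limit': 10, 'commit': False, 'pretty': True}
normalized = dict(
    (k.replace('__', '.'), (v and 1 or 0) if isinstance(v, bool) else v)
    for k, v in params.items()
    if k not in ('commit', 'volatile', 'pretty', 'indent') or v)
assert normalized == {'query.limit': 10, 'pretty': 1}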
def loads(self, serialized):
    try:
        return json.loads(serialized, object_pairs_hook=self.object_pairs_hook)
    except (ValueError, TypeError) as e:
        raise SerializationError(serialized, e)
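# --- Illustration (not part of the client) ---
# A hedged sketch of how loads() might pair with a matching dumps() in a JSON
# serializer class. The class name, the OrderedDict hook and the stand-in
# SerializationError defined here are assumptions for illustration; only the
# loads() body is taken from the snippet above.
import json
from collections import OrderedDict

class SerializationError(Exception):
    """Stand-in for the client's real SerializationError."""

class JSONSerializer(object):
    object_pairs_hook = OrderedDict

    def dumps(self, data):
        try:
            return json.dumps(data, ensure_ascii=True)
        except (ValueError, TypeError) as e:
            raise SerializationError(data, e)

    def loads(self, serialized):
        try:
            return json.loads(serialized, object_pairs_hook=self.object_pairs_hook)
        except (ValueError, TypeError) as e:
            raise SerializationError(serialized, e)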
def _send_request(self, action_request, index, host=None, port=None, nodename=None, id=None, body=None, default=NA, **kwargs):
    """
    :arg action_request: Action to perform for the request: index, delete, search, stats, patch or head
    :arg query: Query to process on xapiand
    :arg index: Index path
    :arg host: Address to connect to xapiand
    :arg port: Port to connect to xapiand
    :arg nodename: Node name; if empty, it is assigned randomly
    :arg id: Document ID
    :arg body: File or dictionary with the body of the request
    """
    method, stream, key = self._methods[action_request]
    url = self._build_url(action_request, index, host, port, nodename, id, body)

    params = kwargs.pop('params', None)
    if params is not None:
        kwargs['params'] = dict(
            (k.replace('__', '.'), (v and 1 or 0) if isinstance(v, bool) else v)
            for k, v in params.items() if k not in ('commit', 'pretty') or v)

    stream = kwargs.pop('stream', stream)
    if stream is not None:
        kwargs['stream'] = stream
    kwargs.setdefault('allow_redirects', False)

    headers = kwargs.setdefault('headers', {})
    accept = headers.setdefault('accept', self.default_accept)
    accept_encoding = headers.setdefault('accept-encoding', self.default_accept_encoding)
    content_type = headers.setdefault('content-type', accept)
    is_msgpack = 'application/x-msgpack' in content_type
    is_json = 'application/json' in content_type

    if body is not None:
        if isinstance(body, (dict, list)):
            if is_msgpack:
                body = msgpack.dumps(body)
            elif is_json:
                body = json.dumps(body)
        elif os.path.isfile(body):
            body = open(body, 'r')
        res = method(url, body, **kwargs)
    else:
        data = kwargs.get('data')
        if data:
            if is_msgpack:
                kwargs['data'] = msgpack.dumps(data)
            elif is_json:
                kwargs['data'] = json.dumps(data)
        res = method(url, **kwargs)

    if res.status_code == 404 and action_request in ('patch', 'delete', 'get'):
        if default is NA:
            raise self.DoesNotExist
        return default
    else:
        res.raise_for_status()

    content_type = res.headers.get('content-type', '')
    is_msgpack = 'application/x-msgpack' in content_type
    is_json = 'application/json' in content_type

    if stream:
        def results(chunks, size=100):
            def fetch():
                fetch.cache = []
                fetch.cache.extend(l for l in itertools.islice(chunks, size) if l)
            fetch()
            first = True
            while fetch.cache:
                for chunk in fetch.cache:
                    if is_msgpack:
                        if first:
                            first = False
                            if ord(chunk[-1]) & 0xf0 == 0x90:
                                chunk = chunk[:-1] + '\x90'
                            elif chunk[-3] == '\xdc':
                                chunk = chunk[:-3] + '\x90'
                            elif chunk[-5] == '\xdd':
                                chunk = chunk[:-5] + '\x90'
                            else:
                                raise IOError("Unexpected chunk!")
                        yield msgpack.loads(chunk)
                    elif is_json:
                        if first:
                            first = False
                            if chunk.rstrip().endswith('['):
                                chunk += ']}}'
                            else:
                                raise IOError("Unexpected chunk!")
                        elif chunk.lstrip().startswith(']'):
                            continue
                        else:
                            chunk = chunk.rstrip().rstrip(',')
                        yield json.loads(chunk)
                    else:
                        yield chunk
                fetch()

        results = results(res.iter_content(chunk_size=None))
        meta = next(results)
    else:
        if is_msgpack:
            content = msgpack.loads(res.content)
        elif is_json:
            content = json.loads(res.content)
        else:
            content = res.content
        results = [content]
        meta = {}

    results = Results(meta, results)
    if key == 'result':
        results = results.next()
    for k, v in res.headers.items():
        dict.__setattr__(results, k.replace('-', '_'), v)
    return results
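# --- Illustration (not part of the client) ---
# A hedged sketch of how _send_request() might be driven by a higher-level
# helper. The wrapper name and the query shape are assumptions for
# illustration; 'search' is assumed to be a key of self._methods, and the
# keyword arguments mirror _send_request's signature above.
def search(client, index, query, **kwargs):
    # body may be a dict/list (serialized to msgpack or JSON by
    # _send_request) or a path to a file on disk.
    return client._send_request('search', index, body=query, **kwargs)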