def test_complex_object(self):
    # An object with two scalar properties gets a "$object" marker and
    # each property wrapped as a "$value".
    data = wrap({"s": 0, "r": 5})
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "s": {"$value": 0}, "r": {"$value": 5}}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_null(self):
    # None encodes as a bare typed value of null.
    data = None
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = '{"$value": null}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_empty_dict(self):
    # A nested empty object still gets its own "$object" marker.
    data = wrap({"match_all": wrap({})})
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "match_all": {"$object": "."}}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_true(self):
    # Booleans encode as a bare typed value.
    data = True
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = '{"$value": true}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_escaped_string_value(self):
    # A quote character inside a string must survive JSON escaping.
    data = "\""
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = '{"$value": "\\""}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_number_value(self):
    # A bare integer encodes as a typed value.
    data = 42
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = '{"$value": 42}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_empty_list2(self):
    # An empty list property stays a plain [], while the sibling scalar
    # still gets wrapped.
    data = wrap({"a": [], "b": 1})
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "a": [], "b": {"$value": 1}}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_date(self):
    # A datetime.date encodes as a unix timestamp typed value.
    # NOTE(review): the expected epoch (1384318800) looks timezone
    # dependent — confirm the encoder pins a zone.
    data = {"test": datetime.date(2013, 11, 13)}
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "test": {"$value": 1384318800}}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_list_value(self):
    # A top-level list wraps each element as a typed value.
    data = [42]
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'[{"$value": 42}]'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_unicode1(self):
    # Non-ASCII punctuation (em dash) passes through unescaped.
    data = {"comment": u"Open all links in the current tab, except the pages opened from external apps — open these ones in new windows"}
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "comment": {"$value": "Open all links in the current tab, except the pages opened from external apps — open these ones in new windows"}}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_list_of_objects(self):
    # Mixed list contents: empty dict gets "$object", string gets "$value".
    data = {"a": [{}, "b"]}
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "a": [{"$object": "."}, {"$value": "b"}]}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_empty_list_value(self):
    # A top-level empty list is left untouched.
    data = []
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'[]'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_empty_list(self):
    # An empty list property is emitted as a plain [].
    data = {"value": []}
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "value": []}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_nested(self):
    # Sibling empty objects each get their own "$object" marker.
    data = {"a": {}, "b": {}}
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "a": {"$object": "."}, "b": {"$object": "."}}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_double(self):
    # Small floats keep their scientific-notation formatting.
    data = {"value": 5.2025595183536973e-07}
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "value": {"$value": 5.202559518353697e-7}}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_unicode3(self):
    # Latin-extended accented characters pass through unescaped.
    data = {"comment": u"testing accented char ŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"}
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "comment": {"$value": "testing accented char ŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"}}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def test_unicode2(self): value = {"comment": b"testing accented char àáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"} test1 = typed_encode(value) test2 = json2typed(pypy_json_encode(value)) expected = u'{"$object": ".", "comment": {"$value": "testing accented char àáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"}}' self.assertEqual(test1, expected) self.assertEqual(test2, expected)
def test_list(self):
    # Every scalar element of a list gets its own "$value" wrapper.
    data = {"value": [23, 42]}
    encoded = typed_encode(data)
    converted = json2typed(pypy_json_encode(data))
    expected = u'{"$object": ".", "value": [{"$value": 23}, {"$value": 42}]}'
    self.assertEqual(encoded, expected)
    self.assertEqual(converted, expected)
def extend(self, records): """ records - MUST HAVE FORM OF [{"value":value}, ... {"value":value}] OR [{"json":json}, ... {"json":json}] OPTIONAL "id" PROPERTY IS ALSO ACCEPTED """ if self.settings.read_only: Log.error("Index opened in read only mode, no changes allowed") lines = [] try: for r in records: id = r.get("id") if id == None: id = random_id() if "json" in r: json_bytes = r["json"].encode("utf8") elif "value" in r: json_bytes = convert.value2json(r["value"]).encode("utf8") else: json_bytes = None Log.error("Expecting every record given to have \"value\" or \"json\" property") lines.append(b'{"index":{"_id": ' + convert.value2json(id).encode("utf8") + b'}}') if self.settings.tjson: lines.append(json2typed(json_bytes.decode('utf8')).encode('utf8')) else: lines.append(json_bytes) del records if not lines: return with Timer("Add {{num}} documents to {{index}}", {"num": len(lines) / 2, "index":self.settings.index}, debug=self.debug): try: data_bytes = b"\n".join(l for l in lines) + b"\n" except Exception, e: Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e) response = self.cluster.post( self.path + "/_bulk", data=data_bytes, headers={"Content-Type": "text"}, timeout=self.settings.timeout, retry=self.settings.retry ) items = response["items"] fails = [] if self.cluster.version.startswith("0.90."): for i, item in enumerate(items): if not item.index.ok: fails.append(i) elif any(map(self.cluster.version.startswith, ["1.4.", "1.5.", "1.6.", "1.7."])): for i, item in enumerate(items): if item.index.status not in [200, 201]: fails.append(i) else: Log.error("version not supported {{version}}", version=self.cluster.version) if fails: Log.error("Problems with insert", cause=[ Except( template="{{status}} {{error}} (and {{some}} others) while loading line id={{id}} into index {{index|quote}}:\n{{line}}", status=items[i].index.status, error=items[i].index.error, some=len(fails) - 1, line=strings.limit(lines[fails[0] * 2 + 1], 500 if not 
self.debug else 100000), index=self.settings.index, id=items[i].index._id ) for i in fails ]) except Exception, e: if e.message.startswith("sequence item "): Log.error("problem with {{data}}", data=repr(lines[int(e.message[14:16].strip())]), cause=e) Log.error("problem sending to ES", e)
def extend(self, records): """ records - MUST HAVE FORM OF [{"value":value}, ... {"value":value}] OR [{"json":json}, ... {"json":json}] OPTIONAL "id" PROPERTY IS ALSO ACCEPTED """ if self.settings.read_only: Log.error("Index opened in read only mode, no changes allowed") lines = [] try: for r in records: id = r.get("id") if id == None: id = random_id() if "json" in r: json_bytes = r["json"].encode("utf8") elif "value" in r: json_bytes = convert.value2json(r["value"]).encode("utf8") else: json_bytes = None Log.error("Expecting every record given to have \"value\" or \"json\" property") lines.append(b'{"index":{"_id": ' + convert.value2json(id).encode("utf8") + b'}}') if self.settings.tjson: lines.append(json2typed(json_bytes.decode('utf8')).encode('utf8')) else: lines.append(json_bytes) del records if not lines: return try: data_bytes = b"\n".join(l for l in lines) + b"\n" except Exception, e: Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e) response = self.cluster.post( self.path + "/_bulk", data=data_bytes, headers={"Content-Type": "text"}, timeout=self.settings.timeout, retry=self.settings.retry ) items = response["items"] fails = [] if self.cluster.version.startswith("0.90."): for i, item in enumerate(items): if not item.index.ok: fails.append(i) elif any(map(self.cluster.version.startswith, ["1.4.", "1.5.", "1.6.", "1.7."])): for i, item in enumerate(items): if item.index.status not in [200, 201]: fails.append(i) else: Log.error("version not supported {{version}}", version=self.cluster.version) if fails: Log.error("Problems with insert", cause=[ Except( template="{{status}} {{error}} (and {{some}} others) while loading line id={{id}} into index {{index|quote}}:\n{{line}}", status=items[i].index.status, error=items[i].index.error, some=len(fails) - 1, line=strings.limit(lines[fails[0] * 2 + 1], 500 if not self.debug else 100000), index=self.settings.index, id=items[i].index._id ) for i in fails ]) if self.debug: 
Log.note("{{num}} documents added", num=len(items))
def extend(self, records): """ records - MUST HAVE FORM OF [{"value":value}, ... {"value":value}] OR [{"json":json}, ... {"json":json}] OPTIONAL "id" PROPERTY IS ALSO ACCEPTED """ if self.settings.read_only: Log.error("Index opened in read only mode, no changes allowed") lines = [] try: for r in records: id = r.get("id") if id == None: id = random_id() if "json" in r: # if id != coalesce(wrap(convert.json2value(r["json"])).value._id, id): # Log.error("expecting _id to match") json = r["json"] elif "value" in r: # if id != coalesce(wrap(r).value._id, id): # Log.error("expecting _id to match") json = convert.value2json(r["value"]) else: json = None Log.error("Expecting every record given to have \"value\" or \"json\" property") lines.append('{"index":{"_id": ' + convert.value2json(id) + '}}') if self.settings.tjson: lines.append(json2typed(json)) else: lines.append(json) del records if not lines: return try: data_bytes = "\n".join(lines) + "\n" data_bytes = data_bytes.encode("utf8") except Exception, e: Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e) response = self.cluster.post( self.path + "/_bulk", data=data_bytes, headers={"Content-Type": "text"}, timeout=self.settings.timeout ) items = response["items"] for i, item in enumerate(items): if self.cluster.version.startswith("0.90."): if not item.index.ok: Log.error( "{{error}} while loading line:\n{{line}}", error=item.index.error, line=lines[i * 2 + 1] ) elif any(map(self.cluster.version.startswith, ["1.4.", "1.5.", "1.6.", "1.7."])): if item.index.status not in [200, 201]: Log.error( "{{num}} {{error}} while loading line id={{id}} into index {{index|quote}}:\n{{line}}", num=item.index.status, error=item.index.error, line=strings.limit(lines[i * 2 + 1], 300), index=self.settings.index, id=item.index._id ) else: Log.error("version not supported {{version}}", version=self.cluster.version) if self.debug: Log.note("{{num}} documents added", num=len(items))