Example #1
def test_wrap_2():
    switch = [
        lambda: {"i": Random.int(2000)},
        lambda: Data(i=Random.int(2000)),
        lambda: FlatList([{"i": Random.int(2000)}]),
        lambda: [{"i": Random.int(2000)}]
    ]

    inputs = [switch[min(len(switch) - 1, int(floor(-log(Random.float(), 2))))]() for i in range(NUM_INPUT)]

    for i in range(NUM_REPEAT):
        results = []
        gc.collect()
        with Profiler("more dict: slow_wrap"):
            for v in inputs:
                results.append(slow_wrap(v))

        results = []
        gc.collect()
        with Profiler("more dict: wrap"):
            for v in inputs:
                results.append(wrap(v))

        results = []
        gc.collect()
        with Profiler("more dict: baseline"):
            for v in inputs:
                results.append(baseline(v))

        Log.note("Done {{i}} of {{num}}", {"i": i, "num": NUM_REPEAT})
Example #2
    def test_aes(self):
        crypto.DEBUG = True

        crypto.encrypt("this is a test", Random.bytes(32))
        crypto.encrypt("this is a longer test with more than 16bytes", Random.bytes(32))
        crypto.encrypt("", Random.bytes(32))
        crypto.encrypt(convert.latin12unicode(b"testing accented char àáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"), Random.bytes(32))
        crypto.encrypt("testing accented char àáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ", Random.bytes(32))
Example #3
    def commit(self):
        with self.lock:
            if self.closed:
                Log.error("Queue is closed, commit not allowed")

            try:
                self._add_pending({"add": {"status.start": self.start}})
                for i in range(self.db.status.start, self.start):
                    self._add_pending({"remove": str(i)})

                if (
                    self.db.status.end - self.start < 10 or Random.range(0, 1000) == 0
                ):  # FORCE RE-WRITE TO LIMIT FILE SIZE
                    # SIMPLY RE-WRITE FILE
                    if DEBUG:
                        Log.note(
                            "Re-write {{num_keys}} keys to persistent queue", num_keys=self.db.status.end - self.start
                        )

                        for k in self.db.keys():
                            if k == "status" or int(k) >= self.db.status.start:
                                continue
                            Log.error("Not expecting {{key}}", key=k)
                    self._commit()
                    self.file.write(convert.value2json({"add": self.db}) + "\n")
                else:
                    self._commit()
            except Exception as e:
                raise e
Example #4
    def output(*args):
        with cache_store.locker:
            if using_self:
                self = args[0]
                args = args[1:]
            else:
                self = cache_store

            now = Date.now()
            try:
                _cache = getattr(self, attr_name)
            except Exception:
                _cache = {}
                setattr(self, attr_name, _cache)

            if Random.int(100) == 0:
                # REMOVE OLD CACHE
                _cache = {
                    k: v
                    for k, v in _cache.items() if v[0] == None or v[0] > now
                }
                setattr(self, attr_name, _cache)

            timeout, key, value, exception = _cache.get(
                args, (Null, Null, Null, Null))
Example #5
    def test_request(self):
        # MAKE SOME DATA
        data = {
            "constant": "this is a test",
            "random-data": convert.bytes2base64(Random.bytes(100))
        }

        client = Client(settings.url, unwrap(settings.hawk))  # unwrap() DUE TO BUG https://github.com/kumar303/mohawk/issues/21
        link, id = client.send(data)
        Log.note("Success!  Located at {{link}} id={{id}}", link=link, id=id)

        # FILL THE REST OF THE FILE
        Log.note("Add ing {{num}} more...", num=99-id)
        for i in range(id + 1, storage.BATCH_SIZE):
            l, k = client.send(data)
            if l != link:
                Log.error("Expecting rest of data to have same link")

        # TEST LINK HAS DATA
        raw_content = requests.get(link).content
        content = convert.zip2bytes(raw_content)
        for line in convert.utf82unicode(content).split("\n"):
            data = convert.json2value(line)
            if data.etl.id == id:
                Log.note("Data {{id}} found", id=id)
                break
        else:
            Log.error("Expecting to find data at link")
Example #6
    def test_public_request_too_big(self):
        # MAKE SOME DATA
        data = {
            "a": {  # MATCHES SERVER PATTERN
                "b": "good",
                "c": [
                    {"k": "good", "m": 1},
                    {"k": 2, "m": 2}
                ]
            },
            "constant": "this is a test",
            "random-data": convert.bytes2base64(Random.bytes(500))
        }

        content = json.dumps(data)

        def poster():
            response = requests.post(
                url=settings.url,
                data=content,
                headers={
                    'Content-Type': CONTENT_TYPE
                }
            )

            self.assertEqual(response.status_code, 200, "Expecting 200")

        self.assertRaises(Exception, poster)
Example #7
    def test_public_request(self):
        # MAKE SOME DATA
        data = {
            "a": {  # MATCHES SERVER PATTERN
                "b": "good",
                "c": [
                    {"k": "good", "m": 1},
                    {"k": 2, "m": 2}
                ]
            },
            "constant": "this is a test",
            "random-data": convert.bytes2base64(Random.bytes(100))
        }

        content = json.dumps(data)

        response = requests.post(
            url=settings.url,
            data=content,
            headers={
                'Content-Type': CONTENT_TYPE
            }
        )

        self.assertEqual(response.status_code, 200, "Expecting 200")

        about = json.loads(response.content)
        link, id = about['link'], about['etl']['id']
        Log.note("Data located at {{link}} id={{id}}", link=link, id=id)
        return link, id
Example #8
    def extend(self, records):
        """
        records - MUST HAVE FORM OF
            [{"value":value}, ... {"value":value}] OR
            [{"json":json}, ... {"json":json}]
            OPTIONAL "id" PROPERTY IS ALSO ACCEPTED
        """
        lines = []
        try:
            for r in records:
                id = r.get("id")
                if id == None:
                    id = Random.hex(40)

                if "json" in r:
                    json = r["json"]
                elif "value" in r:
                    json = convert.value2json(r["value"])
                else:
                    json = None
                    Log.error("Expecting every record given to have \"value\" or \"json\" property")

                lines.append('{"index":{"_id": ' + convert.value2json(id) + '}}')
                lines.append(json)
            del records

            if not lines:
                return

            try:
                data_bytes = "\n".join(lines) + "\n"
                data_bytes = data_bytes.encode("utf8")
            except Exception as e:
                Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e)


            response = self.cluster._post(
                self.path + "/_bulk",
                data=data_bytes,
                headers={"Content-Type": "text"},
                timeout=self.settings.timeout
            )
            items = response["items"]

            for i, item in enumerate(items):
                if self.cluster.version.startswith("0.90."):
                    if not item.index.ok:
                        Log.error("{{error}} while loading line:\n{{line}}",
                            error= item.index.error,
                            line= lines[i * 2 + 1])
                elif self.cluster.version.startswith("1.4."):
                    if item.index.status not in [200, 201]:
                        Log.error("{{error}} while loading line:\n{{line}}",
                            error= item.index.error,
                            line= lines[i * 2 + 1])
                else:
                    Log.error("version not supported {{version}}",  version=self.cluster.version)

            if self.debug:
                Log.note("{{num}} documents added",  num= len(items))
Example #9
    def commit(self):
        with self.lock:
            if self.closed:
                Log.error("Queue is closed, commit not allowed")

            try:
                self._add_pending({"add": {"status.start": self.start}})
                for i in range(self.db.status.start, self.start):
                    self._add_pending({"remove": str(i)})

                if self.db.status.end - self.start < 10 or Random.range(
                        0, 1000) == 0:  # FORCE RE-WRITE TO LIMIT FILE SIZE
                    # SIMPLY RE-WRITE FILE
                    if DEBUG:
                        Log.note(
                            "Re-write {{num_keys}} keys to persistent queue",
                            num_keys=self.db.status.end - self.start)

                        for k in self.db.keys():
                            if k == "status" or int(k) >= self.db.status.start:
                                continue
                            Log.error("Not expecting {{key}}", key=k)
                    self._commit()
                    self.file.write(
                        convert.value2json({"add": self.db}) + "\n")
                else:
                    self._commit()
            except Exception as e:
                raise e
Example #10
def main():
    settings = startup.read_settings()
    Log.start(settings.debug)
    try:
        with Multithread(update_repo, threads=10, outbound=False) as multi:
            for repo in Random.combination(settings.param.repos):
                multi.execute([{"repos": repo, "settings": settings}])
    finally:
        Log.stop()
Example #11
    def test_missing_auth(self):
        # MAKE SOME DATA
        data = {
            "constant": "this is a test",
            "random-data": convert.bytes2base64(Random.bytes(100))
        }

        response = requests.post(settings.bad_url, data=convert.unicode2utf8(convert.value2json(data)))
        self.assertEqual(response.status_code, 403)
Example #12
def main():
    settings = startup.read_settings()
    Log.start(settings.debug)
    try:
        with Multithread(update_repo, threads=10, outbound=False) as multi:
            for repo in Random.combination(settings.param.repos):
                multi.execute([{"repos": repo, "settings": settings}])
    finally:
        Log.stop()
Example #13
    def test_floor_mod_identity_w_ints(self):
        for i in range(100):
            x = Random.float()*200 - 100.0
            m = floor(abs(random.gauss(0, 5)))

            if m == 0:
                self.assertEqual(Math.floor(x, m), None)
                self.assertEqual(Math.mod(x, m), None)
            else:
                self.assertAlmostEqual(Math.floor(x, m)+Math.mod(x, m), x, places=7)
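The invariant under test is x == floor(x, m) + mod(x, m), where floor rounds x down to the nearest multiple of m and mod returns the remainder in [0, m). A minimal sketch of the same identity with plain floats, assuming m > 0:

import math

def floor_to(x, m):
    # largest multiple of m not exceeding x
    return math.floor(x / m) * m

def mod_to(x, m):
    # remainder in [0, m)
    return x - floor_to(x, m)

assert abs(floor_to(-7.5, 2) + mod_to(-7.5, 2) - (-7.5)) < 1e-7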
Example #14
def fix(rownum, line, source, sample_only_filter, sample_size):
    # ES SCHEMA IS STRICTLY TYPED, USE "code" FOR TEXT IDS
    line = line.replace('{"id": "bb"}',
                        '{"code": "bb"}').replace('{"id": "tc"}',
                                                  '{"code": "tc"}')

    # ES SCHEMA IS STRICTLY TYPED, THE SUITE OBJECT CAN NOT BE HANDLED
    if source.name.startswith("active-data-test-result"):
        # "suite": {"flavor": "plain-chunked", "name": "mochitest"}
        found = strings.between(line, '"suite": {', '}')
        if found:
            suite_json = '{' + found + "}"
            if suite_json:
                suite = convert.json2value(suite_json)
                suite = convert.value2json(suite.name)
                line = line.replace(suite_json, suite)

    if rownum == 0:
        value = convert.json2value(line)
        if len(line) > 100000:
            value.result.subtests = [
                s for s in value.result.subtests if s.ok is False
            ]
            value.result.missing_subtests = True

        _id, value = _fix(value)
        row = {"id": _id, "value": value}
        if sample_only_filter and Random.int(
                int(1.0 / coalesce(sample_size, 0.01))) != 0 and jx.filter(
                    [value], sample_only_filter):
            # INDEX etl.id==0, BUT NO MORE
            if value.etl.id != 0:
                Log.error("Expecting etl.id==0")
            return row, True
    elif len(line) > 100000:
        value = convert.json2value(line)
        value.result.subtests = [
            s for s in value.result.subtests if s.ok is False
        ]
        value.result.missing_subtests = True
        _id, value = _fix(value)
        row = {"id": _id, "value": value}
    elif line.find("\"resource_usage\":") != -1:
        value = convert.json2value(line)
        _id, value = _fix(value)
        row = {"id": _id, "value": value}
    else:
        # FAST
        _id = strings.between(line, "\"_id\": \"", "\"")  # AVOID DECODING JSON
        row = {"id": _id, "json": line}

    return row, False
Example #15
def seen_nonce(sender_id, nonce, timestamp):
    global seen
    key = '{id}:{nonce}:{ts}'.format(
        id=sender_id,
        nonce=nonce,
        ts=timestamp
    )

    if Random.int(1000) == 0:
        old = (Date.now() - HOUR).unix
        seen = {k: v for k, v in seen.items() if v["timestamp"] >= old}

    if seen.get(key):
        return True
    else:
        seen[key] = {"timestamp": timestamp}
        return False
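seen_nonce has the replay-guard shape mohawk documents for its seen_nonce hook: return True for a previously seen (sender, nonce, timestamp) triple, record it and return False otherwise; the Random.int(1000) branch opportunistically evicts entries older than an hour. A minimal self-check, assuming the module-level seen = {} dict is initialized (the timestamp is illustrative):

ts = 1600000000
assert seen_nonce("client-1", "abc", ts) is False  # first sighting, recorded
assert seen_nonce("client-1", "abc", ts) is True   # replay detected
assert seen_nonce("client-1", "xyz", ts) is False  # a new nonce is fresh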
Example #16
def fix(rownum, line, source, sample_only_filter, sample_size):
    # ES SCHEMA IS STRICTLY TYPED, USE "code" FOR TEXT IDS
    line = line.replace('{"id": "bb"}', '{"code": "bb"}').replace('{"id": "tc"}', '{"code": "tc"}')

    # ES SCHEMA IS STRICTLY TYPED, THE SUITE OBJECT CAN NOT BE HANDLED
    if source.name.startswith("active-data-test-result"):
        # "suite": {"flavor": "plain-chunked", "name": "mochitest"}
        found = strings.between(line, '"suite": {', '}')
        if found:
            suite_json = '{' + found + "}"
            if suite_json:
                suite = convert.json2value(suite_json)
                suite = convert.value2json(suite.name)
                line = line.replace(suite_json, suite)

    if rownum == 0:
        value = convert.json2value(line)
        if len(line) > 100000:
            value.result.subtests = [s for s in value.result.subtests if s.ok is False]
            value.result.missing_subtests = True

        _id, value = _fix(value)
        row = {"id": _id, "value": value}
        if sample_only_filter and Random.int(int(1.0/coalesce(sample_size, 0.01))) != 0 and jx.filter([value], sample_only_filter):
            # INDEX etl.id==0, BUT NO MORE
            if value.etl.id != 0:
                Log.error("Expecting etl.id==0")
            return row, True
    elif len(line) > 100000:
        value = convert.json2value(line)
        value.result.subtests = [s for s in value.result.subtests if s.ok is False]
        value.result.missing_subtests = True
        _id, value = _fix(value)
        row = {"id": _id, "value": value}
    elif line.find("\"resource_usage\":") != -1:
        value = convert.json2value(line)
        _id, value = _fix(value)
        row = {"id": _id, "value": value}
    else:
        # FAST
        _id = strings.between(line, "\"_id\": \"", "\"")  # AVOID DECODING JSON
        row = {"id": _id, "json": line}

    return row, False
Example #17
    def copy(self, keys, source, sample_only_filter=None, sample_size=None):
        num_keys = 0
        for key in keys:
            try:
                for rownum, line in enumerate(
                        source.read_lines(strip_extension(key))):
                    if rownum == 0:
                        value = convert.json2value(line)
                        if len(line) > 1000000:
                            # Log.warning("Line {{num}} for key {{key}} is too long ({{length|comma}} bytes, {{num_tests}} subtests)", key=key, length=len(line), num=rownum, num_tests=len(value.result.subtests))
                            value.result.subtests = None
                            value.result.missing_subtests = True

                        _id, value = _fix(value)
                        row = {"id": _id, "value": value}
                        if sample_only_filter and Random.int(
                                int(1.0 / coalesce(
                                    sample_size, 0.01))) != 0 and qb.filter(
                                        [value], sample_only_filter):
                            # INDEX etl.id==0, BUT NO MORE
                            if value.etl.id != 0:
                                Log.error("Expecting etl.id==0")
                            num_keys += 1
                            self.queue.add(row)
                            break
                    elif len(line) > 1000000:
                        value = convert.json2value(line)
                        # Log.warning("Line {{num}} for key {{key}} is too long ({{length|comma}} bytes, {{num_tests}} subtests).", key=key, length=len(line), num=rownum, num_tests=len(value.result.subtests))
                        value.result.subtests = None
                        value.result.missing_subtests = True
                        _id, value = _fix(value)
                        row = {"id": _id, "value": value}
                    else:
                        #FAST
                        _id = strings.between(line, "_id\": \"",
                                              "\"")  # AVOID DECODING JSON
                        row = {"id": _id, "json": line}
                    num_keys += 1
                    self.queue.add(row)
            except Exception as e:
                Log.warning("Could not get queue for {{key}}", key=key, cause=e)
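The sampling guard in copy() fires when Random.int(int(1.0 / sample_size)) != 0, in which case only the etl.id == 0 row of a key is indexed; only about a sample_size fraction of keys (defaulting to 1%) keep all their rows. A sketch of the same gate, assuming Random.int(n) draws uniformly from [0, n):

import random

def skip_to_sample_row(sample_size=0.01):
    # True with probability (n - 1) / n; the original keeps only the
    # etl.id == 0 row in that case, so roughly sample_size of the keys
    # are indexed in full
    n = int(1.0 / sample_size)
    return random.randrange(n) != 0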
Example #18
def encrypt(text, _key, salt=None):
    """
    RETURN JSON OF ENCRYPTED DATA   {"salt":s, "length":l, "data":d}
    """
    from pyLibrary.queries import jx

    if not isinstance(text, unicode):
        Log.error("only unicode is encrypted")
    if _key is None:
        Log.error("Expecting a key")
    if isinstance(_key, str):
        _key = bytearray(_key)
    if salt is None:
        salt = Random.bytes(16)

    data = bytearray(text.encode("utf8"))

    # Initialize encryption using key and iv
    key_expander_256 = key_expander.KeyExpander(256)
    expanded_key = key_expander_256.expand(_key)
    aes_cipher_256 = aes_cipher.AESCipher(expanded_key)
    aes_cbc_256 = cbc_mode.CBCMode(aes_cipher_256, 16)
    aes_cbc_256.set_iv(salt)

    output = Data()
    output.type = "AES256"
    output.salt = convert.bytes2base64(salt)
    output.length = len(data)

    encrypted = bytearray()
    for _, d in jx.groupby(data, size=16):
        encrypted.extend(aes_cbc_256.encrypt_block(d))
    output.data = convert.bytes2base64(encrypted)
    json = convert.value2json(output)

    if DEBUG:
        test = decrypt(json, _key)
        if test != text:
            Log.error("problem with encryption")

    return json
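A hedged usage sketch of encrypt() above: the key must be 32 bytes (AES-256), the plaintext must be unicode, and the result is a JSON string carrying the salt, plaintext length, and base64 ciphertext; decrypt (exercised by the DEBUG branch) reverses it:

key = Random.bytes(32)                   # 256-bit key
token = encrypt(u"this is a test", key)  # -> '{"type": "AES256", "salt": ..., ...}'
assert decrypt(token, key) == u"this is a test"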
Example #19
def encrypt(text, _key, salt=None):
    """
    RETURN JSON OF ENCRYPTED DATA   {"salt":s, "length":l, "data":d}
    """
    from pyLibrary.queries import jx

    if not isinstance(text, unicode):
        Log.error("only unicode is encrypted")
    if _key is None:
        Log.error("Expecting a key")
    if isinstance(_key, str):
        _key = bytearray(_key)
    if salt is None:
        salt = Random.bytes(16)

    data = bytearray(text.encode("utf8"))

    # Initialize encryption using key and iv
    key_expander_256 = key_expander.KeyExpander(256)
    expanded_key = key_expander_256.expand(_key)
    aes_cipher_256 = aes_cipher.AESCipher(expanded_key)
    aes_cbc_256 = cbc_mode.CBCMode(aes_cipher_256, 16)
    aes_cbc_256.set_iv(salt)

    output = Dict()
    output.type = "AES256"
    output.salt = convert.bytes2base64(salt)
    output.length = len(data)

    encrypted = bytearray()
    for _, d in jx.groupby(data, size=16):
        encrypted.extend(aes_cbc_256.encrypt_block(d))
    output.data = convert.bytes2base64(encrypted)
    json = convert.value2json(output)

    if DEBUG:
        test = decrypt(json, _key)
        if test != text:
            Log.error("problem with encryption")

    return json
Example #20
    def output(*args):
        with cache_store.locker:
            if using_self:
                self = args[0]
                args = args[1:]
            else:
                self = cache_store

            now = Date.now()
            try:
                _cache = getattr(self, attr_name)
            except Exception:
                _cache = {}
                setattr(self, attr_name, _cache)

            if Random.int(100) == 0:
                # REMOVE OLD CACHE
                _cache = {k: v for k, v in _cache.items() if v[0] == None or v[0] > now}
                setattr(self, attr_name, _cache)

            timeout, key, value, exception = _cache.get(args, (Null, Null, Null, Null))
Example #21
    def output(*args):
        with cache_store.locker:
            if using_self:
                self = args[0]
                args = args[1:]
            else:
                self = cache_store

            now = Date.now()
            try:
                _cache = getattr(self, attr_name)
            except Exception:
                _cache = {}
                setattr(self, attr_name, _cache)

            if Random.int(100) == 0:
                # REMOVE OLD CACHE
                _cache = {k: v for k, v in _cache.items() if v[0] == None or v[0] > now}
                setattr(self, attr_name, _cache)

            timeout, key, value, exception = _cache.get(args, (Null, Null, Null, Null))

            if now > timeout:
                value = func(self, *args)
                _cache[args] = (now + cache_store.timeout, args, value, None)
                return value

            if value == None:
                if exception == None:
                    try:
                        value = func(self, *args)
                        _cache[args] = (now + cache_store.timeout, args, value, None)
                        return value
                    except Exception as e:
                        e = Except.wrap(e)
                        _cache[args] = (now + cache_store.timeout, args, None, e)
                        raise e
                else:
                    raise exception
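Each cache slot above is a 4-tuple (timeout, key, value, exception): a timeout still in the future means the entry is live, a stored exception is replayed, and anything else recomputes. A minimal standalone sketch of the same policy, simplified to a (timeout, value, exception) triple and time.time():

import time

CACHE_TTL = 60.0
_cache = {}

def cached_call(func, *args):
    now = time.time()
    timeout, value, exc = _cache.get(args, (0.0, None, None))
    if now <= timeout:
        if exc is not None:
            raise exc  # replay the cached failure
        return value
    try:
        value = func(*args)
        _cache[args] = (now + CACHE_TTL, value, None)
        return value
    except Exception as e:
        _cache[args] = (now + CACHE_TTL, None, e)
        raise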
Example #22
    def copy(self, keys, source, sample_only_filter=None, sample_size=None):
        num_keys = 0
        for key in keys:
            try:
                for rownum, line in enumerate(source.read_lines(strip_extension(key))):
                    if rownum == 0:
                        value = convert.json2value(line)
                        if len(line) > 1000000:
                            # Log.warning("Line {{num}} for key {{key}} is too long ({{length|comma}} bytes, {{num_tests}} subtests)", key=key, length=len(line), num=rownum, num_tests=len(value.result.subtests))
                            value.result.subtests = None
                            value.result.missing_subtests = True

                        _id, value = _fix(value)
                        row = {"id": _id, "value": value}
                        if sample_only_filter and Random.int(int(1.0/coalesce(sample_size, 0.01))) != 0 and qb.filter([value], sample_only_filter):
                            # INDEX etl.id==0, BUT NO MORE
                            if value.etl.id != 0:
                                Log.error("Expecting etl.id==0")
                            num_keys += 1
                            self.queue.add(row)
                            break
                    elif len(line) > 1000000:
                        value = convert.json2value(line)
                        # Log.warning("Line {{num}} for key {{key}} is too long ({{length|comma}} bytes, {{num_tests}} subtests).", key=key, length=len(line), num=rownum, num_tests=len(value.result.subtests))
                        value.result.subtests = None
                        value.result.missing_subtests = True
                        _id, value = _fix(value)
                        row = {"id": _id, "value": value}
                    else:
                        #FAST
                        _id = strings.between(line, "_id\": \"", "\"")  # AVOID DECODING JSON
                        row = {"id": _id, "json": line}
                    num_keys += 1
                    self.queue.add(row)
            except Exception as e:
                Log.warning("Could not get queue for {{key}}", key=key, cause=e)
Example #23
    def test_floor_mod_identity(self):
        for i in range(100):
            x = Random.float()*200 - 100.0
            m = abs(random.gauss(0, 5))

            self.assertAlmostEqual(Math.floor(x, m)+Math.mod(x, m), x, places=7)
Example #24
def unique_name():
    return Random.string(20)
Example #25
def unique_name():
    return Random.string(20)
Example #26
def random_id():
    return Random.hex(40)
Example #27
def random_id():
    return Random.hex(40)
Example #28
import requests

from pyLibrary import convert, jsons
from pyLibrary.debugs.logs import Log
from pyLibrary.dot import unwrap
from pyLibrary.maths.randoms import Random
from modatasubmission import Client


settings = jsons.ref.get("file://~/MoDataSubmissionClient.json")


data = {
    "constant": "this is a test",
    "random-data": convert.bytes2base64(Random.bytes(100))
}
link, id = Client(settings.url, unwrap(settings.hawk)).send(data)
Log.note("Success!  Located at {{link}} id={{id}}", link=link, id=id)


data = convert.unicode2utf8(convert.value2json(settings.example))

response = requests.post(
    settings.url,
    data=data,
    headers={
        'Content-Type': b'application/json'
    }
)
if response.status_code != 200:
    Log.error("Expecting a 200 response, got {{code}}", code=response.status_code)
Example #29
    def extend(self, records):
        """
        records - MUST HAVE FORM OF
            [{"value":value}, ... {"value":value}] OR
            [{"json":json}, ... {"json":json}]
            OPTIONAL "id" PROPERTY IS ALSO ACCEPTED
        """
        lines = []
        try:
            for r in records:
                id = r.get("id")

                if id == None:
                    id = Random.hex(40)

                if "json" in r:
                    # if id != coalesce(wrap(convert.json2value(r["json"])).value._id, id):
                    #     Log.error("expecting _id to match")
                    json = r["json"]
                elif "value" in r:
                    # if id != coalesce(wrap(r).value._id, id):
                    #     Log.error("expecting _id to match")
                    json = convert.value2json(r["value"])
                else:
                    json = None
                    Log.error(
                        "Expecting every record given to have \"value\" or \"json\" property"
                    )

                lines.append('{"index":{"_id": ' + convert.value2json(id) +
                             '}}')
                lines.append(json)
            del records

            if not lines:
                return

            try:
                data_bytes = "\n".join(lines) + "\n"
                data_bytes = data_bytes.encode("utf8")
            except Exception as e:
                Log.error("can not make request body from\n{{lines|indent}}",
                          lines=lines,
                          cause=e)

            response = self.cluster._post(self.path + "/_bulk",
                                          data=data_bytes,
                                          headers={"Content-Type": "text"},
                                          timeout=self.settings.timeout)
            items = response["items"]

            for i, item in enumerate(items):
                if self.cluster.version.startswith("0.90."):
                    if not item.index.ok:
                        Log.error("{{error}} while loading line:\n{{line}}",
                                  error=item.index.error,
                                  line=lines[i * 2 + 1])
                elif any(map(self.cluster.version.startswith,
                             ["1.4.", "1.5."])):
                    if item.index.status not in [200, 201]:
                        Log.error(
                            "{{num}} {{error}} while loading line into {{index}}:\n{{line}}",
                            num=item.index.status,
                            error=item.index.error,
                            line=lines[i * 2 + 1],
                            index=self.settings.index)
                else:
                    Log.error("version not supported {{version}}",
                              version=self.cluster.version)

            if self.debug:
                Log.note("{{num}} documents added", num=len(items))