class SubdocSinglePathTests(SubdocBaseTest):
    """Single-path sub-document tests backed by a direct memcached client."""

    def setUp(self):
        super(SubdocSinglePathTests, self).setUp()
        self.client = MemcachedClient(host=self.server.ip)
        # Template copied for every generated nesting level.
        self.jsonSchema = {
            "id": "0",
            "number": 0,
            "array": [],
            "child": {},
            "isDict": True,
            "padding": None
        }

    def tearDown(self):
        super(SubdocSinglePathTests, self).tearDown()

    def insertBinaryDocument(self, key):
        # Placeholder -- binary document insertion is not implemented.
        pass

    def _createNestedJson(self, ref_key, ref):
        """Recursively build a nested document in place.

        ref is a dict carrying 'doc' (the document being built) and
        'levels' (remaining nesting depth).  Each level stores ref_key
        as 'id', the current depth as 'number', and [0..levels-1] as
        'array', then recurses into a fresh 'child'.
        """
        if ref['levels'] == 0:
            return
        if ref['doc'] == {}:
            ref['doc'] = copy.copy(self.jsonSchema)
            # BUGFIX: copy.copy is shallow, so 'array' aliased the
            # template's list and appends below mutated self.jsonSchema.
            # Give the level its own list (as the else branch already does).
            ref['doc']['array'] = []
        else:
            ref['doc']['child'] = copy.copy(self.jsonSchema)
            ref['doc']['child']['array'] = []
            ref['doc'] = ref['doc']['child']

        ref['doc']['id'] = ref_key
        ref['doc']['number'] = ref['levels']
        # Fill the level's array with 0..levels-1.
        ref['doc']['array'].extend(range(ref['levels']))
        return self._createNestedJson(ref_key, {
            'doc': ref['doc'],
            'levels': ref['levels'] - 1
        })

    def insertJsonDocument(self, key, levels, expiry, size=512):
        """Build a document nested `levels` deep and store it under key.

        size is accepted for interface compatibility but currently unused.
        """
        doc_ref = {'doc': {}, 'levels': levels}
        self._createNestedJson(key, doc_ref)
        jsonDump = json.dumps(doc_ref['doc'])
        self.client.set(key, expiry, 0, jsonDump)

    def deleteDoc(self, key):
        """Remove the document stored under key."""
        self.client.delete(key)

    def _load_all_docs(self, key=None, levels=0, num_items=0):
        """Generate num_items documents from a nested template and load them."""
        doc_ref = {'doc': {}, 'levels': levels}
        self._createNestedJson(key, doc_ref)
        template = doc_ref['doc']
        gen_load = SubdocDocumentGenerator(key,
                                           template,
                                           start=0,
                                           end=num_items)

        print("Inserting json data into bucket")
        self._load_all_buckets(self.server, gen_load, "create", 0)
        self._wait_for_stats_all_buckets([self.server])
class SubdocSinglePathTests(SubdocBaseTest):
    """Single-path sub-document tests backed by a direct memcached client.

    NOTE(review): this re-definition shadows the earlier class of the
    same name in this file; kept as-is pending deduplication.
    """

    def setUp(self):
        super(SubdocSinglePathTests, self).setUp()
        self.client = MemcachedClient(host=self.server.ip)
        # Template copied for every generated nesting level.
        self.jsonSchema = {
            "id": "0",
            "number": 0,
            "array": [],
            "child": {},
            "isDict": True,
            "padding": None
        }

    def tearDown(self):
        super(SubdocSinglePathTests, self).tearDown()

    def insertBinaryDocument(self, key):
        # Placeholder -- binary document insertion is not implemented.
        pass

    def _createNestedJson(self, ref_key, ref):
        """Recursively build a nested document in place.

        ref carries 'doc' (the document under construction) and
        'levels' (remaining depth); each level records ref_key,
        the depth, and [0..levels-1], then recurses into 'child'.
        """
        if ref['levels'] == 0:
            return
        if ref['doc'] == {}:
            ref['doc'] = copy.copy(self.jsonSchema)
            # BUGFIX: copy.copy is shallow -- without a fresh list the
            # appends below would mutate self.jsonSchema['array'].
            ref['doc']['array'] = []
        else:
            ref['doc']['child'] = copy.copy(self.jsonSchema)
            ref['doc']['child']['array'] = []
            ref['doc'] = ref['doc']['child']

        ref['doc']['id'] = ref_key
        ref['doc']['number'] = ref['levels']

        # range() rather than the Python-2-only xrange(), matching the
        # print()-style code elsewhere in this file.
        for level in range(0, ref['levels']):
            ref['doc']['array'].append(level)
        return self._createNestedJson(ref_key,
                                      {'doc': ref['doc'],
                                       'levels': ref['levels'] - 1})

    def insertJsonDocument(self, key, levels, expiry, size=512):
        """Build a document nested `levels` deep and store it under key.

        size is accepted for interface compatibility but currently unused.
        """
        doc_ref = {'doc': {}, 'levels': levels}
        self._createNestedJson(key, doc_ref)
        jsonDump = json.dumps(doc_ref['doc'])
        self.client.set(key, expiry, 0, jsonDump)

    def deleteDoc(self, key):
        """Remove the document stored under key."""
        self.client.delete(key)

    def _load_all_docs(self, key=None, levels=0, num_items=0):
        """Generate num_items documents from a nested template and load them."""
        doc_ref = {'doc': {}, 'levels': levels}
        self._createNestedJson(key, doc_ref)
        template = doc_ref['doc']
        gen_load = SubdocDocumentGenerator(key, template, start=0, end=num_items)

        print("Inserting json data into bucket")
        self._load_all_buckets(self.server, gen_load, "create", 0)
        self._wait_for_stats_all_buckets([self.server])
Example #3
0
class MemcachedHelper(object):
    """Thin wrapper around MemcachedClient for single-key JSON access."""

    def __init__(self,
                 serverip="localhost",
                 port=11211,
                 bucket="default",
                 password=""):
        # Connect and SASL-authenticate against the target bucket.
        self.client = MemcachedClient(serverip, port)
        self.client.sasl_auth_plain(bucket, password)

    def write_one_json(self, key, doc):
        """Store doc (JSON-serialized) under key, retrying on TMP_OOM.

        Retries up to 60 times with a 5-second pause on temporary
        out-of-memory (status 134); gives up immediately on
        "not my vbucket" (status 7) or any other memcached error.
        """
        count = 0
        loaded = False
        while count < 60 and not loaded:
            try:
                self.client.set(key, 0, 0, json.dumps(doc))
                loaded = True
            except MemcachedError as error:
                if error.status == 134:  # TMP_OOM: transient, retry
                    print("Memcached TMP_OOM, Retrying in 5 seconds...")
                    count += 1
                    time.sleep(5)
                elif error.status == 7:  # NOT_MY_VBUCKET: topology mismatch
                    print("Not my vbucket error. If on MAC, please specify vbuckets as 64.")
                    print("If rebalance is in progress. Please wait for it to finish.\n")
                    break
                else:
                    print(error)
                    break

    def read_one_json(self, key):
        """Return the raw stored value for key, or "" on any memcached error."""
        doc = ""
        try:
            _, _, doc = self.client.get(key)
        except MemcachedError as error:
            print(error)

        return doc

    def write_batch(self, batch_id, data):
        # Intentionally a no-op: memcached has no batch-insert API here.
        print("Batch Insert not implemented for Memcached")
class MemcachedHelper(object):
    """Thin wrapper around MemcachedClient for single-key access.

    NOTE(review): unlike the earlier MemcachedHelper in this file, this
    variant stores `doc` as-is (the caller serializes); this
    re-definition also shadows the earlier class of the same name.
    """

    def __init__(self,
                 serverip="localhost",
                 port=11211,
                 bucket="default",
                 password=""):
        # Connect and SASL-authenticate against the target bucket.
        self.client = MemcachedClient(serverip, port)
        self.client.sasl_auth_plain(bucket, password)

    def write_one_json(self, key, doc):
        """Store doc under key as-is, retrying on TMP_OOM.

        doc is written without json.dumps -- callers pass a
        pre-serialized value.  Retries up to 60 times with a 5-second
        pause on temporary out-of-memory (status 134); gives up
        immediately on "not my vbucket" (status 7) or any other error.
        """
        count = 0
        loaded = False
        while count < 60 and not loaded:
            try:
                self.client.set(key, 0, 0, doc)
                loaded = True
            except MemcachedError as error:
                if error.status == 134:  # TMP_OOM: transient, retry
                    print("Memcached TMP_OOM, Retrying in 5 seconds...")
                    count += 1
                    time.sleep(5)
                elif error.status == 7:  # NOT_MY_VBUCKET: topology mismatch
                    print("Not my vbucket error. If on MAC, please specify vbuckets as 64.")
                    print("If rebalance is in progress. Please wait for it to finish.\n")
                    break
                else:
                    print(error)
                    break

    def read_one_json(self, key):
        """Return the raw stored value for key, or "" on any memcached error."""
        doc = ""
        try:
            _, _, doc = self.client.get(key)
        except MemcachedError as error:
            print(error)

        return doc

    def write_batch(self, batch_id, data):
        # Intentionally a no-op: memcached has no batch-insert API here.
        print("Batch Insert not implemented for Memcached")