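# NOTE: assumed imports for this module. The standard-library imports are
# certain; the framework helper import paths below are assumptions and may
# differ in the actual repository.
import unittest
from random import randint

# Framework helpers used below (import paths are assumptions):
# import logger
# from couchbase_helper.cluster import Cluster
# from subdoc_helper import SubdocHelper, SimpleDataSet, DeeplyNestedDataSet

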
class SubdocSanityTests(unittest.TestCase):
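    """Sanity tests for sub-document (subdoc) commands.

    Each test loads a dataset through SubdocHelper and exercises a single
    subdoc operation (get, dict upsert/add, remove, exists, replace, array
    push/add-unique, counter, multi-lookup) against a set of document paths.
    """
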
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SubdocHelper(self, "default")
        self.helper.setup_cluster()
        self.cluster = Cluster()
        self.servers = self.helper.servers

    def tearDown(self):
        self.helper.cleanup_cluster()

    def test_simple_dataset_sanity(self):
        self.test_simple_dataset_get()
        self.test_deep_nested_dataset_get_dict()
        self.test_deep_nested_dataset_get_array()
        self.test_simple_dataset_dict_upsert()
        self.test_simple_dataset_dict_add()
        self.test_simple_dataset_remove()
        self.test_simple_dataset_exists()
        self.test_simple_dataset_replace()
        self.test_simple_dataset_array_push_last()
        self.test_simple_dataset_array_push_first()
        self.test_simple_dataset_counter()
        self.test_simple_dataset_array_add_unique()
        self.test_simple_dataset_counter()

    def test_simple_dataset_get(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple get sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        data_set.get_all_docs(inserted_keys, path='isDict')
        data_set.get_all_docs(inserted_keys, path='geometry.coordinates[0]')
        data_set.get_all_docs(inserted_keys, path='dict_value.name')
        data_set.get_all_docs(inserted_keys, path='array[0]')
        data_set.get_all_docs(inserted_keys, path='array[-1]')
        ''' This should go into ErrorTesting '''
        #self.assertFalse(data_set.get_all_docs(inserted_keys, path='array[-5]'))
        #self.assertFalse(data_set.get_all_docs(inserted_keys, path='  '))

    def test_deep_nested_dataset_get_dict(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Issue  get sub doc on deep nested single path on dictionaries "
            "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()
        '''Top level element'''
        #data_set.get_all_docs(inserted_keys, path = 'number', check_data=levels)
        data_set.get_all_docs(inserted_keys, path='array')
        data_set.get_all_docs(inserted_keys, path='array[0]')
        '''Last element Dictionary'''
        self.log.info('Testing last element dictionary')
        data_set.get_all_docs(inserted_keys,
                              path=self._get_path('child', levels - 1))
        '''Last element Dict.Array'''
        self.log.info('Testing Dict.Array')
        data_set.get_all_docs(inserted_keys,
                              path=self._get_path('child', levels - 2) +
                              '.array[0]')
        '''Intermediate element Dict.Array'''
        self.log.info('Testing Intermediate element Dict. Array')
        data_set.get_all_docs(inserted_keys,
                              path=self._get_path('child', levels // 2) +
                              '.array[0]')

    def test_deep_nested_dataset_get_array(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Issue  get sub doc on deep nested single path on dictionaries "
            "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()
        '''Top level element'''
        data_set.get_all_docs(inserted_keys, path='number')
        data_set.get_all_docs(inserted_keys, path='array')
        data_set.get_all_docs(inserted_keys, path='array[0]')
        '''Last element Array'''
        last_path = 'child'
        for i in range(levels - 1):
            last_path += '.child'
        data_set.get_all_docs(inserted_keys, path=last_path)
        '''Last element Array of Array'''
        last_path = 'child'
        for i in range(levels - 3):
            last_path += '.child'
        last_path += '.array[-1][-1][-1]'
        data_set.get_all_docs(inserted_keys, path=last_path)
        '''Intermediate element Array'''
        last_path = 'child'
        for i in range(levels // 2):
            last_path += '.child'
        last_path += '.array[0][-1]'
        data_set.get_all_docs(inserted_keys, path=last_path)

    def test_simple_dataset_dict_upsert(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Issue simple upsert dict sub doc single path "
            "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' Randomly generate a 1000-character string to replace existing path values '''
        replace_string = self.generate_string(1000)

        data_set.upsert_all_docs(inserted_keys, replace_string, path='isDict')
        data_set.upsert_all_docs(inserted_keys,
                                 replace_string,
                                 path='geometry.coordinates[0]')
        data_set.upsert_all_docs(inserted_keys,
                                 replace_string,
                                 path='dict_value.name')
        data_set.upsert_all_docs(inserted_keys, "999", path='height')
        data_set.upsert_all_docs(inserted_keys,
                                 replace_string,
                                 path='array[-1]')

    def test_simple_dataset_dict_add(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Issue simple add dict sub doc single path "
            "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' Randomly generate a 1000-character string to replace existing path values '''
        replace_string = self.generate_string(1000)

        #data_set.add_all_docs(inserted_keys, replace_string, path='isDict')
        #data_set.add_all_docs(inserted_keys, replace_string, path='geometry.coordinates[0]')
        data_set.add_all_docs(inserted_keys, replace_string, path='dict_value')
        #data_set.add_all_docs(inserted_keys, "999", path='height')
        #data_set.add_all_docs(inserted_keys, replace_string, path='array[-1]')

    def test_simple_dataset_remove(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple remove sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        data_set.remove_all_docs(inserted_keys, path='isDict')
        data_set.remove_all_docs(inserted_keys, path='geometry.coordinates[0]')
        data_set.remove_all_docs(inserted_keys, path='dict_value.name')
        data_set.remove_all_docs(inserted_keys, path='array[0]')
        data_set.remove_all_docs(inserted_keys, path='array[-1]')

    def test_simple_dataset_exists(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple exists sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' TODO: add test code to accept Bool values without erroring out '''
        data_set.exists_all_docs(inserted_keys, path='isDict')
        data_set.exists_all_docs(inserted_keys, path='geometry.coordinates[0]')
        data_set.exists_all_docs(inserted_keys, path='dict_value.name')
        data_set.exists_all_docs(inserted_keys, path='array[0]')
        data_set.exists_all_docs(inserted_keys, path='array[-1]')

    def test_simple_dataset_replace(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple replace sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' Randomly generate a 10-character string to replace existing path values '''
        replace_string = self.generate_string(10)

        data_set.replace_all_docs(inserted_keys, replace_string, path='isDict')
        data_set.replace_all_docs(inserted_keys,
                                  replace_string,
                                  path='geometry.coordinates[0]')
        data_set.replace_all_docs(inserted_keys,
                                  replace_string,
                                  path='dict_value.name')
        data_set.replace_all_docs(inserted_keys, "999", path='height')
        data_set.replace_all_docs(inserted_keys,
                                  replace_string,
                                  path='array[-1]')

    def test_simple_dataset_array_push_last(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Issue simple array_push_last sub doc single path "
            "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' Randomly generate a 10-character string to replace existing path values '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.array_push_last(inserted_keys, replace_string, path='isDict')
        data_set.array_push_last(inserted_keys,
                                 replace_string,
                                 path='geometry.coordinates')
        #data_set.array_push_last(inserted_keys, replace_string, path='dict_value.name')
        #data_set.array_push_last(inserted_keys, "999", path='height')
        data_set.array_push_last(inserted_keys, replace_string, path='array')

    def test_simple_dataset_array_push_first(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Issue simple array_push_first sub doc single path "
            "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' Randomly generate a 10-character string to replace existing path values '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.array_push_last(inserted_keys, replace_string, path='isDict')
        data_set.array_push_first(inserted_keys,
                                  replace_string,
                                  path='geometry.coordinates')
        #data_set.array_push_last(inserted_keys, replace_string, path='dict_value.name')
        #data_set.array_push_last(inserted_keys, "999", path='height')
        data_set.array_push_first(inserted_keys, replace_string, path='array')

    def test_simple_dataset_counter(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple counter sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' Randomly generate a 10-character string to replace existing path values '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.array_push_last(inserted_keys, replace_string, path='isDict')
        data_set.counter_all_paths(inserted_keys,
                                   path='geometry.coordinates[0]')
        #data_set.array_push_last(inserted_keys, replace_string, path='dict_value.name')
        data_set.counter_all_paths(inserted_keys, path='height')
        #data_set.counter_all_paths(inserted_keys, path='array')

    def test_simple_dataset_array_add_unique(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Issue simple add array unique sub doc single path "
            "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' Randomly generate a 10-character string to replace existing path values '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.array_push_last(inserted_keys, replace_string, path='isDict')
        data_set.array_add_unique(inserted_keys,
                                  replace_string,
                                  path='geometry.coordinates')
        #data_set.array_push_last(inserted_keys, replace_string, path='dict_value.name')
        #data_set.counter_all_paths(inserted_keys, 1, path='height')
        #data_set.counter_all_paths(inserted_keys, replace_string, path='array')

    def test_simple_dataset_multi_lookup(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Issue simple multi lookup sub doc single path "
            "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        ''' Randomly generate a 10-character string to replace existing path values '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.array_push_last(inserted_keys, replace_string, path='isDict')
        data_set.multi_lookup_all_paths(inserted_keys,
                                        path='geometry.coordinates')
        #data_set.array_push_last(inserted_keys, replace_string, path='dict_value.name')
        #data_set.counter_all_paths(inserted_keys, 1, path='height')
        #data_set.counter_all_paths(inserted_keys, replace_string, path='array')

    def test_simple_dataset_multi_lookup2(self):
        pass

    def generate_string(self, range_val=100):
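        """Return a random lowercase ASCII string of length range_val,
        wrapped in double quotes so it forms a valid JSON string literal."""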
        long_string = ''.join(
            chr(97 + randint(0, 25)) for i in range(range_val))
        return '"' + long_string + '"'

    def _get_path(self, subdoc_elt=None, levels=None):
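        """Build a dotted subdoc path by repeating subdoc_elt `levels` times,
        e.g. _get_path('child', 3) -> 'child.child.child'."""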
        subdoc_path = subdoc_elt
        for i in range(levels - 1):
            subdoc_path += '.' + subdoc_elt
        return subdoc_path
class SubdocErrorTests(SubdocSanityTests):
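    """Negative tests for sub-document (subdoc) commands.

    Each error_* test issues subdoc operations against invalid, missing,
    malformed, or over-long paths and verifies that the expected memcached
    error is raised; mismatches are collected in a result dict and asserted
    at the end of the test.
    """
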
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SubdocHelper(self, "default")
        self.helper.setup_cluster()
        self.cluster = Cluster()
        self.servers = self.helper.servers

    def tearDown(self):
        self.helper.cleanup_cluster()

    def error_test_simple_dataset_get(self):
        result = {}
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Error testing path error for CMD_GET on simple_dataset"
            "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()
        '''invalid path'''
        self.log.info('Testing invalid path ')
        self.error_gets(inserted_keys,
                        path='array[-5]',
                        error="Memcached error #194 'Invalid path'",
                        field='Testing invalid path ',
                        result=result)
        '''path does not exist'''
        self.log.info('Testing path does not exist')
        self.error_gets(inserted_keys,
                        path='  ',
                        error="Memcached error #192 'Path not exists'",
                        field='path does not exist',
                        result=result)
        self.assertTrue(len(result) > 0, result)

    def error_test_deep_nested_dataset_get(self):
        result = {}
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Error testing path error for CMD_GET for deep nested dataset "
            "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()
        '''path does not exist'''
        self.log.info('Testing last+1 element dictionary')
        self.error_gets(inserted_keys,
                        path=self._get_path('child', levels + 1),
                        error="Memcached error #192 'Path not exists'",
                        field='Testing last+1 element dictionary',
                        result=result)
        '''Invalid path'''
        self.log.info('Testing Dict.Array')
        self.error_gets(inserted_keys,
                        path=self._get_path('child', levels - 2) +
                        '.array[-5]',
                        error="Memcached error #194 'Invalid path'",
                        field='Testing Dict.Array',
                        result=result)
        '''path too big'''
        self.log.info('Testing Intermediate element Dict. Array')
        self.error_gets(inserted_keys,
                        path=self._get_path('child', 40),
                        error="Memcached error #195 'Path too big'",
                        field='Testing Intermediate element Dict. Array',
                        result=result)
        '''Malformed path'''
        self.log.info('Testing Malformed path Dict. Array')
        self.error_gets(inserted_keys,
                        path=self._get_path('child', levels - 2) +
                        '.`array[0]`',
                        error="Memcached error #192 'Path not exists'",
                        field='Testing Malformed path Dict. Array',
                        result=result)
        '''Invalid Path'''
        self.log.info('Testing ENOENT')
        self.error_gets(inserted_keys,
                        path=self._get_path('child', levels - 2) +
                        '.array[100]',
                        error="Memcached error #192 'Path not exists'",
                        field='Testing ENOENT',
                        result=result)
        '''Path too long'''
        data_set_long = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys_long, levels_long = data_set_long.load(long_path=True)
        self.log.info('Testing long path ')
        self.error_gets(inserted_keys_long,
                        path=self._get_path(
                            'child12345678901234567890123456789', levels_long),
                        error="Memcached error #192 'Path too long'",
                        field='Path too long',
                        result=result)
        self.assertTrue(len(result) > 0, result)

    def error_test_deep_nested_dataset_exists(self):
        result = {}
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Error testing path error for CMD_EXISTS for deep nested dataset "
            "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()
        '''path does not exist'''
        self.log.info('Testing last+1 element dictionary')
        self.error_exists(inserted_keys,
                          path=self._get_path('child', levels + 1),
                          error="Memcached error #192 'Path not exists'",
                          field='path does not exist',
                          result=result)
        '''Invalid path'''
        self.log.info('Testing Dict.Array')
        self.error_exists(inserted_keys,
                          path=self._get_path('child', levels - 2) +
                          '.array[-5]',
                          error="Memcached error #194 'Invalid path'",
                          field='Invalid path',
                          result=result)
        '''path too big'''
        self.log.info('Testing Intermediate element Dict. Array')
        self.error_exists(inserted_keys,
                          path=self._get_path('child', 40),
                          error="Memcached error #195 'Path too big'",
                          field='path too big',
                          result=result)
        '''Malformed path'''
        self.log.info('Testing Malformed path Dict. Array')
        self.error_exists(inserted_keys,
                          path=self._get_path('child', levels - 2) +
                          '.`array[0]`',
                          error="Memcached error #192 'Path not exists'",
                          field='Malformed path',
                          result=result)
        '''Invalid Path'''
        self.log.info('Testing ENOENT')
        self.error_exists(inserted_keys,
                          path=self._get_path('child', levels - 2) +
                          '.array[100]',
                          error="Memcached error #192 'Path not exists'",
                          field='Invalid Path',
                          result=result)
        '''Path too long'''
        data_set_long = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys_long, levels_long = data_set_long.load(long_path=True)
        self.log.info('Testing long path ')
        self.error_exists(inserted_keys_long,
                          path=self._get_path(
                              'child12345678901234567890123456789',
                              levels_long),
                          error="Memcached error #192 'Path too long'",
                          field='Path too long',
                          result=result)
        self.assertTrue(len(result) > 0, result)

    ''' TODO: change error behaviour; there is something wrong with the call below '''

    def error_test_deep_nested_dataset_dict_add(self):
        result = {}
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Error testing path error for CMD_DICT_ADD for deep nested dataset "
            "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()
        '''path does not exist'''
        self.log.info('Testing empty path for dictionary')
        self.error_add_dict(inserted_keys,
                            add_str='child',
                            path=self._get_path('child', levels - 2),
                            error="Memcached error #197 'Cant insert'",
                            field='Testing empty path for dictionary',
                            result=result)
        '''path does not exist'''
        self.log.info('Testing empty path for dictionary')
        self.error_add_dict(inserted_keys,
                            add_str="new_value",
                            path=self._get_path('child', levels - 2),
                            error="Memcached error #197 'Cant insert'",
                            field='path does not exist',
                            result=result)
        self.assertTrue(len(result) > 0, result)
        ''' TODO: change error behaviour; there is something wrong with the call '''

    def error_test_deep_nested_dataset_dict_upsert(self):
        result = {}
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Error testing path error for CMD_DICT_UPSERT for deep nested dataset "
            "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()
        '''path does not exist'''
        self.log.info('Testing empty path for dictionary')
        self.error_upsert_dict(inserted_keys,
                               add_str='child',
                               path=self._get_path('child', levels - 2),
                               error="Memcached error #197 'Cant insert'",
                               field='path does not exist',
                               result=result)
        '''path does not exist'''
        self.log.info('Testing empty path for dictionary')
        self.error_upsert_dict(inserted_keys,
                               add_str="new_value",
                               path=self._get_path('child', levels - 2),
                               error="Memcached error #197 'Cant insert'",
                               field='path does not exist',
                               result=result)
        '''document does not exist'''
        self.log.info('Document does not exist')
        self.error_upsert_dict(['key_does_not_exist'],
                               add_str="new_value",
                               path="does_not_matter",
                               error="Memcached error #197 'Cant insert'",
                               field='Document does not exist',
                               result=result)
        self.assertTrue(len(result) == 0, result)

    def error_test_deep_nested_dataset_delete(self):
        result = {}
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Error testing path error for CMD_delete for deep nested dataset "
            "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()
        '''path does not exist'''
        self.log.info('Testing path not exists')
        self.error_delete(inserted_keys,
                          path=self._get_path('child', levels) + '.child',
                          error="Memcached error #192 'Path not exists'",
                          field='path does not exist',
                          result=result)
        '''Last element Array of Array'''
        last_path = 'child'
        for i in range(levels - 3):
            last_path += '.child'
        last_path += '.array[-1]'
        #data_set.get_all_docs(inserted_keys, path = last_path)
        '''path does not exist on array'''
        self.log.info('Testing path not exists on dict.array with array [-5]')
        self.error_delete(inserted_keys,
                          path=last_path + '[-1]',
                          error="Memcached error #193 'Path mismatch'",
                          field='path does not exist on array - Path mismatch',
                          result=result)
        self.error_delete(inserted_keys,
                          path=last_path + '[-5]',
                          error="Memcached error #194 'Invalid path'",
                          field='path does not exist on array - Invalid path',
                          result=result)
        '''path missing - CHECK whether this error is expected'''
        self.log.info('Testing path missing delete')
        self.error_delete(inserted_keys,
                          path='',
                          error="Memcached error #4 'Invalid'",
                          field='Testing path missing delete',
                          result=result)
        self.assertTrue(len(result) > 0, result)

    def error_test_deep_nested_dataset_replace(self):
        result = {}
        num_docs = self.helper.input.param("num-docs")
        self.log.info(
            "description : Error testing path error for CMD_REPLACE for deep nested dataset "
            "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()
        '''path does not exist'''
        self.log.info('Testing path not exists')
        self.error_delete(inserted_keys,
                          path=self._get_path('child', levels) + '.child',
                          error="Memcached error #192 'Path not exists'",
                          field='path does not exist',
                          result=result)
        '''Last element Array of Array'''
        last_path = 'child'
        for i in range(levels - 3):
            last_path += '.child'
        last_path += '.array[-1]'
        #data_set.get_all_docs(inserted_keys, path = last_path)
        '''path does not exist on array'''
        self.log.info('Testing path not exists on dict.array with array [-1]')
        self.error_replace(inserted_keys,
                           path=last_path + '[-1]',
                           error="Memcached error #193 'Path mismatch'",
                           replace_str=1000000,
                           field='path does not exist on array',
                           result=result)
        '''path missing: replace string on array (Memcached error #4 'Invalid' / 'Cant insert')'''
        self.log.info('Testing path not exists on dict.array with array [-1]')
        self.error_replace(inserted_keys,
                           path=last_path + '[-1]',
                           error="Memcached error #193 'Path mismatch'",
                           replace_str='abc',
                           field='path missing replace string array',
                           result=result)
        self.assertTrue(len(result) > 0, result)

    def error_gets(self, inserted_keys, path, error, field="field", result={}):
        for in_key in inserted_keys:
            try:
                opaque, cas, data = self.helper.client.get_sd(in_key, path)
                print(data)
            except Exception as ex:
                if str(ex).find(error) == -1:
                    result[field] = ("Error is incorrect. Actual: %s. "
                                     "Expected: %s." % (str(ex), error))
            else:
                result[field] = "There were no errors. Error expected: %s" % error

    def error_exists(self,
                     inserted_keys,
                     path,
                     error,
                     field="field",
                     result={}):
        for in_key in inserted_keys:
            try:
                opaque, cas, data = self.helper.client.exists_sd(in_key, path)
                print(data)
            except Exception as ex:
                if str(ex).find(error) == -1:
                    result[field] = ("Error is incorrect. Actual: %s. "
                                     "Expected: %s." % (str(ex), error))
            else:
                result[field] = "There were no errors. Error expected: %s" % error

    def error_add_dict(self,
                       inserted_keys,
                       add_str,
                       path,
                       error,
                       field="field",
                       result={}):
        for in_key in inserted_keys:
            try:
                opaque, cas, data = self.helper.client.dict_add_sd(
                    in_key, path, add_str)
                print(data)
            except Exception as ex:
                if str(ex).find(error) == -1:
                    result[field] = ("Error is incorrect. Actual: %s. "
                                     "Expected: %s." % (str(ex), error))
            else:
                result[field] = "There were no errors. Error expected: %s" % error

    def error_upsert_dict(self,
                          inserted_keys,
                          add_str,
                          path,
                          error,
                          field="field",
                          result={}):
        for in_key in inserted_keys:
            try:
                opaque, cas, data = self.helper.client.dict_upsert_sd(
                    in_key, path, add_str)
                result[field] = "There were no errors. Error expected: %s" % error
            except Exception as ex:
                self.log.info(str(ex))
                if str(ex).find(error) == -1:
                    result[field] = ("Error is incorrect. Actual: %s. "
                                     "Expected: %s." % (str(ex), error))

    def error_delete(self,
                     inserted_keys,
                     path,
                     error,
                     field="field",
                     result={}):
        for in_key in inserted_keys:
            try:
                opaque, cas, data = self.helper.client.delete_sd(in_key, path)
                print(data)
            except Exception as ex:
                # record a mismatch if the raised error is not the expected one
                if str(ex).find(error) == -1:
                    result[field] = ("Error is incorrect. Actual: %s. "
                                     "Expected: %s." % (str(ex), error))
            else:
                result[field] = "There were no errors. Error expected: %s" % error

    def error_replace(self,
                      inserted_keys,
                      path,
                      error,
                      replace_str,
                      field="field",
                      result={}):
        for in_key in inserted_keys:
            try:
                opaque, cas, data = self.helper.client.replace_sd(
                    in_key, path, replace_str)
                print(data)
            except Exception as ex:
                if str(ex).find(error) == -1:
                    result[field] = ("Error is incorrect. Actual: %s. "
                                     "Expected: %s." % (str(ex), error))
            else:
                result[field] = "There were no errors. Error expected: %s" % error