Example #1
    def test_empty_dataverses_are_not_imported(self):

        self.log.info('Create dataverses')
        self.cbas_util.create_dataverse_on_cbas(self.dataverse_1)
        self.cbas_util.create_dataverse_on_cbas(self.dataverse_2)

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.cbas_node)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Validate metadata for %s dataverse' % self.dataverse)
        self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=0, index_count=0)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_1)
        self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=0, dataset_count=0, index_count=0)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
        self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=0, dataset_count=0, index_count=0)
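A note on the helper used throughout these examples: `validate_metadata` is not shown in the source. Below is a minimal sketch of what it presumably checks, by counting catalog entries in the Analytics `Metadata` dataverse; the exact statements and result handling here are assumptions, not taken from the source:

    def validate_metadata(self, dataverse, dataset, index, dataverse_count, dataset_count, index_count):
        # Hypothetical reconstruction: count catalog entries in the Metadata dataverse
        checks = [
            ('select count(*) from Metadata.`Dataverse` where DataverseName = "%s"' % dataverse, dataverse_count),
            ('select count(*) from Metadata.`Dataset` where DataverseName = "%s" and DatasetName = "%s"' % (dataverse, dataset), dataset_count),
            ('select count(*) from Metadata.`Index` where DataverseName = "%s" and IndexName = "%s"' % (dataverse, index), index_count),
        ]
        for statement, expected in checks:
            status, _, _, results, _ = self.cbas_util.execute_statement_on_cbas_util(statement)
            self.assertEqual(status, 'success', 'Metadata query failed')
            self.assertEqual(results[0]['$1'], expected, 'Unexpected catalog entry count')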
Example #2
    def test_analytics_multiple_bucket_backup_and_restore_using_cbbackupmgr(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master, include_buckets=[self.beer_sample_bucket, self.travel_sample_bucket])
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.cbas_node)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Connect to Local link')
        self.cbas_util.connect_link()

        self.log.info('Connect to Local link on custom dataverse')
        self.cbas_util.connect_link(link_name=self.dataverse_1 + '.Local')
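        # connect_link presumably issues 'CONNECT LINK Local' (or '<dataverse>.Local');
        # restore alone leaves links disconnected, so each dataverse is reconnected here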

        self.log.info('Validate metadata for %s dataverse' % self.dataverse)
        self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_1)
        self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
        self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=1, dataset_count=1, index_count=1)
Example #3
    def test_disabling_analytics_with_cbbackupmgr_restore(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop custom dataverse')
        self.cbas_util.disconnect_link(link_name=self.dataverse_1 + '.Local')
        self.cbas_util.drop_dataverse_on_cbas(dataverse_name=self.dataverse_1)

        self.log.info('Restore Analytics metadata using cbbackupmgr with analytics disabled')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.cbas_node, disable_analytics=True)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')
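        # With Analytics disabled (presumably cbbackupmgr's --disable-analytics flag),
        # the restore skips Analytics metadata, so the dropped dataverse_1 stays gone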

        self.log.info('Verify bucket state')
        self.build_bucket_status_map()
        self.assertEqual(self.dataverse_bucket_map[self.dataverse][self.beer_sample_bucket], 'connected')
        self.assertFalse(self.dataverse_1 in self.dataverse_bucket_map)
        self.assertEqual(self.dataverse_bucket_map[self.dataverse_2][self.travel_sample_bucket], 'connected')

        self.log.info('Validate metadata for %s dataverse' % self.dataverse)
        self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
        self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=1, dataset_count=1, index_count=1)
Example #4
    def test_disabling_analytics_with_cbbackupmgr_backup(self):

        self.log.info('Load documents in KV, create dataverses, datasets, index and validate')
        self.create_ds_index_and_validate_count()

        self.log.info('Backup using cbbackupmgr with analytics disabled')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master, disable_analytics=True)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')
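        # The backup itself excluded Analytics metadata, so the dataset dropped below
        # cannot be brought back by the restore (dataset_count=0 is asserted at the end)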

        self.log.info('Drop dataset on default bucket')
        self.assertTrue(self.cbas_util.drop_dataset(self.dataset), msg='Failed to drop dataset')

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        o = shell.restore_backup(self.cbas_node)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Verify bucket state post restore')
        self.build_bucket_status_map()
        self.assertFalse(self.dataverse in self.dataverse_bucket_map)
        self.assertEqual(self.dataverse_bucket_map[self.dataverse_1][self.beer_sample_bucket], 'connected')
        self.assertEqual(self.dataverse_bucket_map[self.dataverse_2][self.travel_sample_bucket], 'connected')

        self.log.info('Validate metadata for default dataverse')
        self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=0, index_count=0)
Example #5
    def test_cbbackupmgr_always_backs_up_full_metadata(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Create additional dataset on Default bucket')
        self.dataset_1_post_backup = self.dataset + "_1"
        self.cbas_util.create_dataset_on_bucket(self.beer_sample_bucket, self.dataset_1_post_backup)
        
        self.log.info('Create secondary index on %s dataset and %s bucket' % (self.dataset_1_post_backup, self.beer_sample_bucket))
        create_idx_statement = 'create index {0} if not exists on {1}({2})'.format(self.index_name, self.dataset_1_post_backup, self.index_field)
        status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(create_idx_statement)
        self.assertTrue(status == 'success', 'Create Index query failed')

        self.log.info('Back up Analytics metadata again - this backs up everything, not just the newly created dataset and index')
        o = shell.create_backup(self.master, skip_configure_bkup=True)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backing up again was unsuccessful')
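        # skip_configure_bkup=True presumably reuses the existing repo (an incremental
        # backup); Analytics metadata is still expected to be captured in full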

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.cbas_node)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Connect to Local link')
        self.cbas_util.connect_link()

        self.log.info('Connect to Local link on custom dataverse')
        self.cbas_util.connect_link(link_name=self.dataverse_1 + '.Local')
        self.cbas_util.connect_link(link_name=self.dataverse_2 + '.Local')

        self.log.info('Validate metadata for %s dataverse' % self.dataverse)
        self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)
        self.validate_metadata(self.dataverse, self.dataset_1_post_backup, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_1)
        self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
        self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=1, dataset_count=1, index_count=1)
Example #6
    def test_analytics_backup_restore_using_cbbackupmgr(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.cbas_node)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Verify bucket state')
        self.build_bucket_status_map()
        if not self.skip_ds_index_creation_default:
            self.assertEqual(self.dataverse_bucket_map[self.dataverse][self.beer_sample_bucket], 'disconnected')
            self.cbas_util.connect_link()
        
        if not self.skip_ds_index_creation_dv1:
            self.assertEqual(self.dataverse_bucket_map[self.dataverse_1][self.beer_sample_bucket], 'disconnected')
            self.cbas_util.connect_link(link_name=self.dataverse_1 + '.Local')
            
        if not self.skip_ds_index_creation_dv2:
            self.assertEqual(self.dataverse_bucket_map[self.dataverse_2][self.travel_sample_bucket], 'disconnected')
            self.cbas_util.connect_link(link_name=self.dataverse_2 + '.Local')

        self.log.info('Validate metadata for %s dataverse' % self.dataverse)
        if not self.skip_ds_index_creation_default:
            self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)
        else:
            self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=0, index_count=0)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_1)
        if not self.skip_ds_index_creation_dv1:
            self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=1, dataset_count=1, index_count=1)
        else:
            self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=0, dataset_count=0, index_count=0)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
        if not self.skip_ds_index_creation_dv2:
            self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=1, dataset_count=1, index_count=1)
        else:
            self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=0, dataset_count=0, index_count=0)

        if self.compress_dataset:
            self.assertEqual(self.cbas_util.get_ds_compression_type(self.dataset), "snappy",
                             "%s dataset not compressed with type snappy" % self.dataset)
            self.assertEqual(self.cbas_util.get_ds_compression_type(self.dataset_1), "snappy",
                             "%s dataset not compressed with type snappy" % self.dataset_1)
            self.assertEqual(self.cbas_util.get_ds_compression_type(self.dataset_2), "snappy",
                             "%s dataset not compressed with type snappy" % self.dataset_2)
Example #7
    def test_cbbackupmgr_backup_bucket_only_if_user_has_bucket_permission(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()
        
        self.log.info('Create user with backup permission on beer-sample')
        user = '******'
        self.rbac_util = rbac_utils(self.master)
        self.rbac_util._create_user_and_grant_role(user, 'data_backup[beer-sample]')
        self.sleep(2)
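        # The user only holds data_backup on beer-sample, so travel-sample (and with it
        # self.dataverse_2's metadata) is expected to be missing from the backup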

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master, username=user)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.cbas_node, username=user)

        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Verify bucket state')
        self.build_bucket_status_map()
        self.assertEqual(self.dataverse_bucket_map[self.dataverse][self.beer_sample_bucket], 'disconnected')
        self.assertEqual(self.dataverse_bucket_map[self.dataverse_1][self.beer_sample_bucket], 'disconnected')
        self.assertFalse(self.dataverse_2 in self.dataverse_bucket_map)

        self.log.info('Connect to Local link')
        self.cbas_util.connect_link()

        self.log.info('Connect to Local link on custom dataverse')
        self.cbas_util.connect_link(link_name=self.dataverse_1 + '.Local')

        self.log.info('Validate metadata for %s dataverse' % self.dataverse)
        self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_1)
        self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
        self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=0, dataset_count=0, index_count=0)
Example #8
    def test_cbbackupmgr_fail_if_impacted_kv_bucket_is_deleted(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()
        
        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()
        
        self.log.info('Delete %s bucket on KV' % self.beer_sample_bucket)
        self.delete_bucket_or_assert(self.master, bucket=self.beer_sample_bucket)

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.cbas_node)
        self.assertTrue('Error restoring cluster' in ''.join(o), msg='Restore must fail')
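        # Restore is expected to abort because the backup references beer-sample,
        # which no longer exists in KV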
Example #9
    def test_backuprestore_on_node_having_no_analytics_service(self):
        
        self.log.info('Add an extra KV node')
        self.add_node(self.servers[1], rebalance=True)
        
        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()

        self.log.info('Restore Analytics metadata using cbbackupmgr on KV node')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.servers[1])
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')
Example #10
    def test_cbbackupmgr_while_analytics_node_unreachable(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()
        
        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')
        
        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()

        self.log.info('Shutdown analytics node')
        shell = RemoteMachineShellConnection(self.cbas_node)
        shell.stop_couchbase()
              
        self.log.info('Verify restore metadata fails')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.master)
        self.assertTrue('Error restoring cluster' in ''.join(o), msg='Restore must be unsuccessful')
        
        self.log.info('Start back analytics node')
        shell = RemoteMachineShellConnection(self.cbas_node)
        shell.start_couchbase()
        self.cbas_util.wait_for_cbas_to_recover()

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.master)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')
        
        self.log.info('Validate metadata for %s dataverse' % self.dataverse)
        self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_1)
        self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=1, dataset_count=1, index_count=1)

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
        self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=1, dataset_count=1, index_count=1)
Example #11
    def test_cbbackupmgr_does_not_fail_if_non_impacted_kv_bucket_is_deleted(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()
        
        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()
        
        self.log.info('Delete %s bucket on KV' % self.beer_sample_bucket)
        self.delete_bucket_or_assert(self.master, bucket=self.beer_sample_bucket)

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
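        # Excluding the deleted bucket (presumably cbbackupmgr's --exclude-buckets)
        # lets the restore succeed; only travel-sample-backed metadata is validated below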
        o = shell.restore_backup(self.cbas_node, exclude_bucket=[self.beer_sample_bucket])
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
        self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=1, dataset_count=1, index_count=1)
Example #12
    def test_cbbackupmgr_restore_with_dataverse_already_present(self):

        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')
        
        for x in range(2):
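            # Restore twice: the second pass runs with the dataverses, datasets and
            # indexes already present, and must succeed without duplicating them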
            self.log.info('Restore Analytics metadata using cbbackupmgr')
            shell = RemoteMachineShellConnection(self.master)
            o = shell.restore_backup(self.cbas_node)
            self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')
    
            self.log.info('Verify bucket state')
            self.build_bucket_status_map()
            self.assertEqual(self.dataverse_bucket_map[self.dataverse][self.beer_sample_bucket], 'disconnected')
            self.assertEqual(self.dataverse_bucket_map[self.dataverse_1][self.beer_sample_bucket], 'disconnected')
            self.assertEqual(self.dataverse_bucket_map[self.dataverse_2][self.travel_sample_bucket], 'disconnected')
    
            self.log.info('Connect to Local link')
            self.cbas_util.connect_link()
    
            self.log.info('Connect to Local link on custom dataverse')
            self.cbas_util.connect_link(link_name=self.dataverse_1 + '.Local')
            self.cbas_util.connect_link(link_name=self.dataverse_2 + '.Local')
    
            self.log.info('Validate metadata for %s dataverse' % self.dataverse)
            self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)
    
            self.log.info('Validate metadata for %s dataverse' % self.dataverse_1)
            self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=1, dataset_count=1, index_count=1)
    
            self.log.info('Validate metadata for %s dataverse' % self.dataverse_2)
            self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=1, dataset_count=1, index_count=1)
Example #13
    def test_cbbackupmgr_metadata_with_escape_characters_and_dataset_conflicts(self):

        self.log.info('Create dataset on default dataverse')
        dataset_name_escape_characters = '`ds-beer`'
        index_name_escape_characters = '`idx-name`'
        dataverse_name_escape_characters = '`dataverse-custom`'
        self.cbas_util.create_dataset_on_bucket(self.beer_sample_bucket, dataset_name_escape_characters, where_field='type', where_value='beer')

        self.log.info('Create index on default dataverse')
        create_idx_statement = 'create index {0} if not exists on {1}({2})'.format(index_name_escape_characters, dataset_name_escape_characters, self.index_field_composite)
        status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(create_idx_statement)
        self.assertTrue(status == 'success', 'Create Index query failed')

        self.log.info('Connect link Local')
        self.cbas_util.connect_link()
        
        self.log.info("Create primary index")
        query = "CREATE PRIMARY INDEX ON `{0}` using gsi".format(self.beer_sample_bucket)
        self.rest.query_tool(query)
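        # A primary GSI index is required so the N1QL count query below can be served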

        self.log.info('Validate dataset count on default dataverse')
        count_n1ql = self.rest.query_tool('select count(*) from `%s` where type = "beer"' % self.beer_sample_bucket)['results'][0]['$1']
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(dataset_name_escape_characters, count_n1ql), msg='Count mismatch on CBAS')

        self.log.info('Create custom dataverse')
        self.cbas_util.create_dataverse_on_cbas(dataverse_name_escape_characters)

        self.log.info('Create dataset on custom dataverse')
        self.cbas_util.create_dataset_on_bucket(self.beer_sample_bucket, dataset_name_escape_characters, dataverse=dataverse_name_escape_characters)

        self.log.info('Create index on custom dataverse')
        create_idx_statement = 'create index {0} if not exists on {1}({2})'.format(index_name_escape_characters,
                                                                                   dataverse_name_escape_characters + "." + dataset_name_escape_characters,
                                                                                   self.index_field)
        status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(create_idx_statement)
        self.assertTrue(status == 'success', 'Create Index query failed')

        self.log.info('Connect link Local on custom dataverse')
        self.cbas_util.execute_statement_on_cbas_util('connect link %s.Local' % dataverse_name_escape_characters)

        self.log.info('Validate dataset count on custom dataverse')
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(dataverse_name_escape_characters + "." + dataset_name_escape_characters, self.beer_sample_docs_count),
                        msg='Count mismatch on CBAS')

        self.log.info('Backup Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.create_backup(self.master)
        self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')

        self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
        self.cleanup_cbas()
        
        self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
        self.create_ds_index_and_validate_count()
        
        self.log.info('Verify bucket state')
        self.build_bucket_status_map()

        self.assertEqual(self.dataverse_bucket_map[self.dataverse][self.beer_sample_bucket], 'connected')
        self.assertEqual(self.dataverse_bucket_map[self.dataverse_1][self.beer_sample_bucket], 'connected')
        self.assertEqual(self.dataverse_bucket_map[self.dataverse_2][self.travel_sample_bucket], 'connected')
        
        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.master)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Connect link Local on custom dataverse')
        self.cbas_util.execute_statement_on_cbas_util('connect link %s.Local' % dataverse_name_escape_characters)

        self.log.info('Connect link Local')
        self.cbas_util.connect_link()

        self.log.info('Validate dataset count post restore')
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(dataset_name_escape_characters, count_n1ql), msg='Count mismatch on CBAS')
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(dataverse_name_escape_characters + "." + dataset_name_escape_characters, self.beer_sample_docs_count),
                        msg='Count mismatch on CBAS')
        
        self.log.info('Drop dataset on default dataverse')
        self.cbas_util.drop_dataset(dataset_name_escape_characters)

        self.log.info('Create a dataset on default dataverse')
        dataset_name = 'ds'
        self.cbas_util.create_dataset_on_bucket(self.beer_sample_bucket, dataset_name)
        
        self.log.info('Create index on default dataverse')
        create_idx_statement = 'create index {0} if not exists on {1}({2})'.format(self.index_name, dataset_name, self.index_field_composite)
        status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(create_idx_statement)
        self.assertTrue(status == 'success', 'Create Index query failed')
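        # The restore below must cope with a dataverse that already holds a
        # conflicting dataset and index created after the backup was taken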

        self.log.info('Restore Analytics metadata using cbbackupmgr')
        shell = RemoteMachineShellConnection(self.master)
        o = shell.restore_backup(self.master)
        self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')

        self.log.info('Validate metadata for %s dataverse' % self.dataverse)
        self.validate_metadata(self.dataverse, dataset_name_escape_characters, index_name_escape_characters, dataverse_count=1, dataset_count=1, index_count=1)
        self.validate_metadata(self.dataverse, dataset_name, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)
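The `create_backup` and `restore_backup` shell helpers are also not shown in the source. Below is a minimal sketch of the cbbackupmgr invocations they presumably wrap over SSH; the binary path, archive path, repo name and password are assumptions, and flag placement (config time vs. backup/restore time) varies across cbbackupmgr versions, so treat this as illustrative:

    def create_backup(self, server, username='Administrator', disable_analytics=False,
                      include_buckets=None, skip_configure_bkup=False):
        # Assumed binary location, archive path and repo name
        cbm, archive, repo = '/opt/couchbase/bin/cbbackupmgr', '/tmp/backup', 'example'
        if not skip_configure_bkup:
            # One-time 'config' step that creates the backup repository
            self.execute_command('%s config --archive %s --repo %s' % (cbm, archive, repo))
        cmd = ('%s backup --archive %s --repo %s --cluster couchbase://%s '
               '--username %s --password password' % (cbm, archive, repo, server.ip, username))
        if disable_analytics:
            cmd += ' --disable-analytics'  # skip the Analytics service entirely
        if include_buckets:
            cmd += ' --include-buckets %s' % ','.join(include_buckets)
        output, _ = self.execute_command(cmd)
        return output

    def restore_backup(self, server, username='Administrator', disable_analytics=False,
                       exclude_bucket=None):
        cbm, archive, repo = '/opt/couchbase/bin/cbbackupmgr', '/tmp/backup', 'example'
        cmd = ('%s restore --archive %s --repo %s --cluster couchbase://%s '
               '--username %s --password password' % (cbm, archive, repo, server.ip, username))
        if disable_analytics:
            cmd += ' --disable-analytics'
        if exclude_bucket:
            cmd += ' --exclude-buckets %s' % ','.join(exclude_bucket)
        output, _ = self.execute_command(cmd)
        return output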