def test_version(self):
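     # Print the shell version via the --version flag and via the \HELP VERSION command.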
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         o = shell.execute_commands_inside('%s/go_cbq --version' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'','','','','','' )
         print o
         o = shell.execute_commands_inside('%s/go_cbq -s="\HELP VERSION"' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'','','','','','' )
         print o
 def test_push_pop_set(self):
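     # Grow the parameter stack with \SET/\push, run a prepared statement against it, then unwind it with \pop.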
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             i=1
             pushqueries =[]
             while(i<100):
                 pushqueries.append('\SET -args [7, 0,1,2011];')
                 pushqueries.append('\push;')
                 pushqueries.append('\SET -$join_day %s;' %i)
                 pushqueries.append('\push -$join_day %s;' %i)
                 pushqueries.append('\push -args [8,1,2,2011];')
                 pushqueries.append('\SET -$project "AB";')
                 pushqueries.append('\push;')
                 pushqueries.append('prepare temp from select name, tasks_points.task1 AS task from bucketname where join_day>=$join_day and  join_mo>$1 GROUP BY tasks_points.task1 HAVING COUNT(tasks_points.task1) > $2 AND  (MIN(join_day)=$3 OR MAX(join_yr=$4));')
                 pushqueries.append('execute temp;')
                 pushqueries.append('\set;')
                 i=i+1
                 o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',pushqueries,'','',bucket.name,'' )
                 print o
             i=1
             popqueries =[]
             while(i<10):
                 popqueries.append('\SET;')
                 popqueries.append('\pop;')
                 popqueries.append('\pop -args;')
                 popqueries.append('\pop -$join_day;')
                 popqueries.append('\pop -$project;')
                 popqueries.append('\SET;')
                 popqueries.append('prepare temp from select name, tasks_points.task1 AS task from bucketname where join_day>=$join_day and  join_mo>$1 GROUP BY tasks_points.task1 HAVING COUNT(tasks_points.task1) > $2 AND  (MIN(join_day)=$3 OR MAX(join_yr=$4));')
                 popqueries.append('execute temp;')
                 i=i+1
                 o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',popqueries,'','',bucket.name,'' )
                 print o
 def test_url(self):
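     # URLs with stray trailing characters (@, :, [, ]) should make cbq fail with status:FAIL.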
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside(
                     '%s/cbq  -u=Administrator -p=password http://localhost:8091@'
                     % (self.path), '', '', '', '', '', '')
                 self.assertTrue('status:FAIL' in o)
                 o = shell.execute_commands_inside(
                     '%s/cbq  -u=Administrator -p=password http://localhost:8091:'
                     % (self.path), '', '', '', '', '', '')
                 self.assertTrue('status:FAIL' in o)
                 o = shell.execute_commands_inside(
                     '%s/cbq  -u=Administrator -p=password http://localhost:8091['
                     % (self.path), '', '', '', '', '', '')
                 self.assertTrue('status:FAIL' in o)
                 o = shell.execute_commands_inside(
                     '%s/cbq  -u=Administrator -p=password http://localhost:8091]'
                     % (self.path), '', '', '', '', '', '')
                 self.assertTrue('status:FAIL' in o)
                 o = shell.execute_commands_inside(
                     '%s/cbq  -u=Administrator -p=password http://localhost:8091:'
                     % (self.path), '', '', '', '', '', '')
                 self.assertTrue('status:FAIL' in o)
             finally:
                 shell.disconnect()
 def test_version(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         o = shell.execute_commands_inside('%s/cbq --version' % (self.path),'','','','','','' )
         print o
         o = shell.execute_commands_inside('%s/cbq -s="\HELP VERSION"' % (self.path),'','','','','','' )
         print o
 def test_connect_disconnect(self):
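     # \connect to valid and invalid endpoints (wrong port, bad host, missing scheme) and \disconnect with and without arguments.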
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = ['\connect http://localhost:8091;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\connect http://localhost:8093;','drop primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             # wrong disconnect
             queries = ['\disconnect http://localhost:8093;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             #wrong port
             queries = ['\connect http://localhost:8097;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             #wrong url including http
             queries = ['\connect http://localhost345:8097;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             #wrong url not including http
             queries = ['\connect localhost3458097;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\disconnect','drop primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\disconnect','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\connect http://localhost:8091;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
 def test_engine_ne(self):
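     # -ne starts the shell without an engine: queries should report that the shell is not connected, but \SET should still work.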
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside('%s/go_cbq  -q -ne' % (self.path),'select * from %s' % bucket.name,'','','','','')
                 print o
                 self.assertTrue('NotConnectedtoanyinstance' in o)
                 o = shell.execute_commands_inside('%s/go_cbq -q -ne' % (self.path),'\SET','','','','','')
                 print o
             finally:
                 shell.disconnect()
 def test_redirect(self):
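     # \redirect abc writes query output to the file "abc"; read it back over SFTP to verify, then \redirect off.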
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = ['\\redirect abc;','create primary index on bucketname;','select name,tasks_points.task1,skills from bucketname;','\\redirect off;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             sftp = shell._ssh_client.open_sftp()
             fileout = sftp.open("abc",'r')
             filedata = fileout.read()
             print filedata
             queries = ['drop primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,True )
             print o
    def test_ipv6(self):
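        # Build scheme/port combinations for the IPv6 loopback address and verify cbq connects to the expected port.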
        prefixes = ['http://', 'https://', 'couchbase://', 'couchbases://']
        ips = ['[::1]']
        ports = [':8091', ':8093', ':18091', ':18093']

        pass_urls = []

        # creates url, port tuples that should be valid.
        # port will be used to verify it connected to the proper endpoint
        for prefix in prefixes:
            for ip in ips:
                pass_urls.append((ip, '8091'))
                if prefix == 'couchbase://':
                    pass_urls.append((prefix+ip, '8091'))
                if prefix == 'couchbases://':
                    pass_urls.append((prefix+ip, '18091'))
                for port in ports:
                    if prefix == 'http://' and port in [':8091', ':8093']:
                        pass_urls.append((prefix+ip+port, port))
                    if prefix == 'https://' and port in [':18091', ':18093']:
                        pass_urls.append((prefix+ip+port, port))

        # run through all servers and try to connect cbq to the given url
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            try:
                for url in pass_urls:
                    cmd = self.path+'cbq  -u=Administrator -p=password -e='+url[0]+' -no-ssl-verify=true'
                    o = shell.execute_commands_inside(cmd, '', ['select * from system:nodes;', '\quit;'], '', '', '', '')
                    self.assertTrue(url[1] in o)
            finally:
                shell.disconnect()
 def test_alias_and_echo(self):
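     # Create aliases with \ALIAS, invoke them with \\, list and \echo them, then remove them with \UNALIAS.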
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = ["\ALIAS tempcommand create primary index on bucketname;","\\\\tempcommand;",'\ALIAS tempcommand2 select * from bucketname limit 10;',"\\\\tempcommand2;",'\ALIAS;','\echo tempcommand1;','\echo tempcommand2;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\ALIAS tempcommand drop primary index on bucketname;','\\\\tempcommand;','\ALIAS tempcommand create primary index on bucketname;','\ALIAS tempcommand2 drop primary index on bucketname;','\\\\tempcommand;','\\\\tempcommand2;','\ALIAS;','\echo tempcommand;','\echo tempcommand2;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\UNALIAS tempcommand drop primary index on bucketname;','\\\\tempcommand;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\UNALIAS tempcommand;','\\\\tempcommand;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
 def test_timeout(self):
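     # A 10ms limit set via \set -timeout should make the queries fail with a timeout error.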
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         username = self.rest.username
         password = self.rest.password
         for bucket in self.buckets:
             try:
                 queries = [
                     '\set -timeout "10ms";',
                     "create primary index on bucketname;",
                     "select * from bucketname;"
                 ]
                 o = shell.execute_commands_inside(
                     '%scbq -q -u %s -p %s' %
                     (self.path, username, password), '', queries,
                     bucket.name, '', bucket.name, '')
                 self.assertTrue("timeout" in o)
                 if self.analytics:
                     self.query = '\set -timeout "10ms"'
                     self.run_cbq_query()
                     self.query = 'select * from %s ' % bucket.name
                     o = self.run_cbq_query()
                     print o
                 self.assertTrue("timeout" in o)
             finally:
                 shell.disconnect()
 def test_positional_params(self):
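     # \SET -args supplies positional parameters $1..$4 to a prepared statement.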
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = ['\SET -args [7, 0,1,2011];','prepare temp from SELECT tasks_points.task1 AS task from bucketname WHERE join_mo>$1 GROUP BY tasks_points.task1 HAVING COUNT(tasks_points.task1) > $2 AND  (MIN(join_day)=$3 OR MAX(join_yr=$4)) ORDER BY tasks_points.task1 ;','execute temp;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name ,'')
             print o
 def test_named_params(self):
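     # \SET -$name supplies named parameters ($join_day, $project) to a prepared statement.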
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = ['\SET -$join_day 2;','\SET -$project "AB";','prepare temp from select name, tasks_ids,join_day from bucketname where join_day>=$join_day and tasks_ids[0] IN (select ARRAY_AGG(DISTINCT task_name) as names from bucketname d use keys ["test_task-1", "test_task-2"] where project!=$project)[0].names;','execute temp;']
             o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'','',bucket.name,'' )
             print o
 def check_multiple_saslbuckets_auth(self):
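     # Run index DDL across multiple SASL buckets with valid, wrong and partially empty -creds values.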
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         queries = ['\set -creds user1:pass1,user2:pass2;','create primary index on bucket1;','create primary index on bucket2;']
         o = shell.execute_commands_inside('%s/go_cbq --quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'bucket0','password','bucket1','' )
         print o
         o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'bucket0','password123','bucket1',''  )
         print o
         queries = ['\set -creds wrong:pass1,user2:pass2;','drop primary index on bucket1;','drop primary index on bucket2;']
         o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'bucket0' ,'password','bucket1','' )
         print o
         queries = ['\set -creds user1:pass1,'':pass2;','create primary index on bucket1;','create primary index on bucket2;']
         o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'bucket0','password','bucket1','')
         print o
         queries = ['\set -creds '':pass1,'':pass2;','drop primary index on bucket1;','drop primary index on bucket2;']
         o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH),'',queries,'bucket0','password','bucket1','' )
         print o
    def check_multiple_saslbuckets_auth(self):
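        # cbq variant: valid credentials should return a requestID; bad or incomplete ones should raise AuthorizationFailed or Usernamemissingin.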
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            queries = ['\set -creds bucket0:pass,bucket1:pass;','create primary index on bucket0;','create primary index on bucket1;','select count(*) from bucket0  union all select count(*) from bucket1;']
            o = shell.execute_commands_inside('%s/cbq --quiet' % (self.path),'',queries,'bucket1','password','bucket0','' )
            self.assertTrue("requestID" in o)
            queries = ['SELECT buck.email FROM  bucketname buck LEFT JOIN default on keys "query-testemployee10153.1877827-0";']
            o = shell.execute_commands_inside('%s/cbq --quiet' % (self.path),'',queries,'bucket1','password','bucket0','' )
            self.assertTrue("AuthorizationFailed" in o)
            queries = ['\set -creds bucket0:pass,bucket1:pass;','SELECT buck.email FROM  bucketname buck LEFT JOIN default on keys "query-testemployee10153.1877827-0" limit 10;']
            o = shell.execute_commands_inside('%s/cbq --quiet' % (self.path),'',queries,'bucket0','password','bucket1','' )

            self.assertTrue("requestID" in o)

            queries = ['\set -creds Administrator:pass;','select * from bucket1 union all select * from bucket2 limit 10;']
            o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'bucket0','password','bucket1','' )
            self.assertTrue("requestID" in o)

            queries = ['\set -creds user:pass;','SELECT buck.email FROM  bucket1 buck LEFT JOIN bucket2 on keys "query-testemployee10153.1877827-0";']
            o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'bucket0','password123','bucket1',''  )

            self.assertTrue("AuthorizationFailed" in o)


            queries = ['\set -creds Administrator:pass;','select * from bucketname union all select * from default limit 10;']
            o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'bucket0','password','bucket1','' )
            self.assertTrue("requestID" in o)

            queries = ['\set -creds user:pass;','select * from bucketname union all select * from default limit 10;']
            o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'bucket0','password','bucket1','' )
            self.assertTrue("requestID" in o)

            queries = ['\set -creds wrong:pass1,user:pass;','drop primary index on bucket1;','drop primary index on bucket2;']
            o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'bucket0' ,'password','bucket1','' )

            self.assertTrue("AuthorizationFailed" in o)

            queries = ['\set -creds user1:pass1,'':pass2;','create primary index on bucket1;','create primary index on bucket2;']
            o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'bucket0','password','bucket1','')

            self.assertTrue("Usernamemissingin" in o)
            queries = ['\set -creds '':pass1,'':pass2;','drop primary index on bucket1;','drop primary index on bucket2;']
            o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'bucket0','password','bucket1','' )

            self.assertTrue("Usernamemissingin" in o)
 def test_engine_postive(self):
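     # With a working engine, \quit should exit the shell cleanly with empty output.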
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside('%s/cbq -q' % (self.path),'\quit','','','','','','')
                 self.assertTrue(o == '')
             finally:
                 shell.disconnect()
 def test_alias_and_echo(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = ["\ALIAS tempcommand create primary index on bucketname;","\\\\tempcommand;",'\ALIAS tempcommand2 select *,email from bucketname limit 10;',"\\\\tempcommand2;",'\ALIAS;','\echo \\\\tempcommand1;','\echo \\\\tempcommand2;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\ALIAS tempcommand drop primary index on bucketname;','\\\\tempcommand;','\ALIAS tempcommand create primary index on bucketname;','\ALIAS tempcommand2 drop primary index on bucketname;','\\\\tempcommand;','\\\\tempcommand2;','\ALIAS;','\echo \\\\tempcommand;','\echo \\\\tempcommand2;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\UNALIAS tempcommand drop primary index on bucketname;','\\\\tempcommand;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             self.assertTrue("Aliasdoesnotexist" in o)
             print o
             queries = ['\UNALIAS tempcommand;','\\\\tempcommand;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             self.assertTrue("Aliasdoesnotexist" in o)
             print o
    def test_push_pop_set(self):
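        # Verify the exact value left on the parameter stack after each \push/\pop, and that popping or unsetting everything makes $project unresolvable.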
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            for bucket in self.buckets:
                i=1
                pushqueries=['\set -$project "AB";','\push -$project "CD";','\push -$project "EF";','\push -$project "GH";','select $project;']
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',pushqueries,'','',bucket.name,'' )

                self.assertTrue('{"$1":"GH"}' in o)
                pushqueries.append('\pop;')
                pushqueries.append('select $project;')
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',pushqueries,'','',bucket.name,'' )
                self.assertTrue('{"$1":"EF"}' in o)

                popqueries=['\pop;','select $project;']
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',popqueries,'','',bucket.name,'' )
                self.assertTrue('Errorevaluatingprojection' in o)

                popqueries.extend(['\push -$project "CD";','\push -$project "EF";','\push -$project "GH";','\pop -$project;','\pop;','select $project;'])
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',popqueries,'','',bucket.name,'' )
                self.assertTrue('{"$1":"CD"}' in o)
                popqueries.append('\pop -$project;')
                popqueries.append('select $project;')
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',popqueries,'','',bucket.name,'' )
                self.assertTrue('Errorevaluatingprojection' in o)
                popqueries.extend(['\set -$project "AB";','\push -$project "CD";','\push -$project "EF";','\pop;','\unset -$project;','select $project;'])
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',popqueries,'','',bucket.name,'' )
                self.assertTrue('Errorevaluatingprojection' in o)


                while(i<15):
                    pushqueries.append('\SET -args [7, 0,1,2011];')
                    pushqueries.append('\push;')
                    pushqueries.append('\SET -$join_day %s;' %i)
                    pushqueries.append('\push -$join_day %s;' %i)
                    pushqueries.append('\push -args [8,1,2,2011];')
                    pushqueries.append('select $join_day;')
                    pushqueries.append('\SET -$project "AB";')
                    pushqueries.append('\push;')
                    pushqueries.append('\push  -$project "CD";')
                    pushqueries.append('select  $project;')
                    pushqueries.append('prepare temp from select  tasks_points.task1 AS task from bucketname where join_day>=$join_day and  join_mo>$1 GROUP BY tasks_points.task1 HAVING COUNT(tasks_points.task1) > $2 AND  (MIN(join_day)=$3 OR MAX(join_yr=$4));')
                    pushqueries.append('execute temp;')
                    pushqueries.append('\set;')
                    i=i+1
                    o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',pushqueries,'','',bucket.name,'' )
                i=1
                popqueries =[]
                while(i<10):
                    popqueries.append('\SET;')
                    popqueries.append('\pop;')
                    popqueries.append('\pop -args;')
                    popqueries.append('\pop -$join_day;')
                    popqueries.append('select $join_day;')
                    popqueries.append('\pop -$project;')
                    popqueries.append('\SET;')
                    popqueries.append('prepare temp from select tasks_points.task1 AS task from bucketname where join_day>=$join_day and  join_mo>$1 GROUP BY tasks_points.task1 HAVING COUNT(tasks_points.task1) > $2 AND  (MIN(join_day)=$3 OR MAX(join_yr=$4));')
                    popqueries.append('execute temp;')
                    i=i+1
                    o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',popqueries,'','',bucket.name,'' )
 def test_shell_error(self):
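     # Unknown shell commands such as \quit1 should report Commanddoesnotexist.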
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside('%s/go_cbq  -q -engine=http://%s:8093/' % (self.path,server.ip),'\quit1','','','','','')
                 print o
                 self.assertTrue("Commanddoesnotexist" in o)
             finally:
                 shell.disconnect()
 def test_shell_error(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside('%s/cbq  -q ' % (self.path),'\quit1','','','','','')
                 print o
                 self.assertTrue("FAIL" in o)
             finally:
                 shell.disconnect()
 def test_engine_postive(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside('%s/go_cbq -q -engine=http://%s:8093/' % (self.path,server.ip),'\quit','','','','','','')
                 print o
                 self.assertTrue("Exitingtheshell" in o)
             finally:
                 shell.disconnect()
 def test_positional_params(self):
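     # Positional-parameter scenario driven through the prebuilt shell command in self.cbqpath.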
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = [
                 '\SET -args [7, 0,1,2011];',
                 'prepare temp from SELECT tasks_points.task1 AS task from bucketname WHERE join_mo>$1 GROUP BY tasks_points.task1 HAVING COUNT(tasks_points.task1) > $2 AND  (MIN(join_day)=$3 OR MAX(join_yr=$4)) ORDER BY tasks_points.task1 ;',
                 'execute temp;'
             ]
             o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                               '', '', bucket.name, '')
 def test_engine_ne(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside(
                     '%s/cbq  -q -ne' % (self.path),
                     'select * from %s' % bucket.name, '', '', '', '', '')
                 self.assertTrue("Notconnectedtoanycluster" in o)
                 o = shell.execute_commands_inside(
                     '%s/cbq -q -ne' % (self.path), '\SET', '', '', '', '',
                     '')
                 print o
                 if self.analytics:
                     self.query = '\SET'
                     o = self.run_cbq_query()
                     print o
                 self.assertTrue("histfileValue" in o)
             finally:
                 shell.disconnect()
 def test_timeout(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 queries = ['\set -timeout "10ms";',"create primary index on bucketname;","select * from bucketname;"]
                 o = shell.execute_commands_inside('%s/cbq -q ' % (self.path),'',queries,bucket.name,'',bucket.name,'')
                 print o
                 self.assertTrue('timeout' in o)
             finally:
                 shell.disconnect()
 def test_named_params(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = [
                 '\SET -$join_day 2;', '\SET -$project "AB";',
                 'prepare temp from select name, tasks_ids,join_day from bucketname where join_day>=$join_day and tasks_ids[0] IN (select ARRAY_AGG(DISTINCT task_name) as names from bucketname d use keys ["test_task-1", "test_task-2"] where project!=$project)[0].names;',
                 'execute temp;'
             ]
             o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                               '', '', bucket.name, '')
    def test_history(self):
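        # Exercise the histfile setting: plain paths, alias and $variable indirection, and \push, on both Windows and Linux.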
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            type2 = shell.extract_remote_info().distribution_type
            queries = []
            queries2 = []
            queries3 = []
            queries5 = []
            queries6 = []
            for bucket in self.buckets:
                if type2.lower() == 'windows':
                    queries = ["\set histfile c:\\tmp\\history.txt;"]
                    queries2 = ["\Alias p c:\\tmp\\history2.txt;"]
                    queries3 = ["\set $a c:\\tmp\\history3.txt;"]
                    queries5 = ['\set $a "\\abcde";']
                    queries6 = ["\set $a '\\abcde';"]
                elif type2.lower() == "linux":
                    queries = ["\set histfile /tmp/history;"]
                    queries2 = ["\Alias p /tmp/history2;"]
                    queries3 = ["\set $a /tmp/history3;"]
                    queries5 = ['\set $a "/abcde";']
                    queries6 = ["\set $a /abcde;"]

                #import pdb;pdb.set_trace()
                queries.extend(['\ALIAS tempcommand create primary index on bucketname;','\\\\tempcommand;','\ALIAS tempcommand2 select * from bucketname limit 1;','\\\\tempcommand2;','\ALIAS;','\echo \\\\tempcommand;','\echo \\\\tempcommand2;','\echo histfile;'])
                print queries
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )

                if type2.lower() == "linux":
                    self.assertTrue('/tmp/history' in o)
                #import pdb;pdb.set_trace()

                #open and check the file


                queries2.extend(["\set histfile \\\\p;","\echo histfile;","\set histfile '\\\\p';","\echo histfile;"])
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries2,'','',bucket.name,'' )

                if type2.lower() == "linux":
                    self.assertTrue('/tmp/history2' in o)
                    self.assertTrue('\\p' in o)

                queries3.extend(["\set histfile $a;","\echo histfile;"])
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries3,'','',bucket.name,'' )

                #import pdb;pdb.set_trace()

                queries4 = ["\push histfile newhistory.txt;","\echo histfile;",'\ALIAS tempcommand create primary index on bucketname;','\\\\tempcommand;','\ALIAS tempcommand2 select * from bucketname limit 1;','\\\\tempcommand2;','\ALIAS;','\echo \\\\tempcommand;','\echo \\\\tempcommand2;','\echo histfile;']
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries4,'','',bucket.name,'' )

                #import pdb;pdb.set_trace()

                queries5.append("\echo $a;")
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries5,'','',bucket.name,'' )


                queries6.append("\echo $a;")
                o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries6,'','',bucket.name,'' )
 def test_connect_disconnect(self):
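     # cbq variant of connect/disconnect that asserts the exact error strings for bad usage.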
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             queries = ['\connect http://localhost:8091;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             print o
             queries = ['\connect http://localhost:8091;','drop primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             print o
             # wrong disconnect
             queries = ['\disconnect http://localhost:8091;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             self.assertTrue("Toomanyinputargumentstocommand" in o)
             print o
             #wrong port
             queries = ['\connect http://localhost:8097;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             self.assertTrue("Unabletoconnectto" in o)
             print o
             #wrong url including http
             queries = ['\connect http://localhost345:8097;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             print o
             self.assertTrue("Unabletoconnectto" in o)
             #wrong url not including http
             queries = ['\connect localhost3458097;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             print o
             self.assertTrue("Unabletoconnectto" in o)
             queries = ['\disconnect','drop primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             print o
             self.assertTrue("Toomanyinputargumentstocommand" in o)
             queries = ['\disconnect','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             self.assertTrue("Toomanyinputargumentstocommand" in o)
             print o
             queries = ['\connect http://localhost:8091;','create primary index on bucketname;']
             o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'','',bucket.name,'' )
             print o
 def test_shell_error(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside(
                     '%s/cbq  -q ' % (self.path), '\quit1', '', '', '', '',
                     '')
                 if self.analytics:
                     self.query = '\quit1'
                     o = self.run_cbq_query()
                     print o
                 self.assertTrue("Commanddoesnotexist" in o)
             finally:
                 shell.disconnect()
 def test_engine_postive(self):
     for server in self.servers:
         shell = RemoteMachineShellConnection(server)
         for bucket in self.buckets:
             try:
                 o = shell.execute_commands_inside(
                     '%s/cbq -q' % (self.path), '\quit', '', '', '', '', '',
                     '')
                 if self.analytics:
                     self.query = '\quit'
                     o = self.run_cbq_query()
                     print o
                 self.assertTrue(o == '')
             finally:
                 shell.disconnect()
class QueryTests(BaseTestCase):
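    # Shared scaffolding for the N1QL query tests: start cbq-engine, generate test documents, and clean up the tuq engine afterwards.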
    def setUp(self):
        if not self._testMethodName == 'suite_setUp':
            self.skip_buckets_handle = True
        super(QueryTests, self).setUp()
        self.version = self.input.param("cbq_version", "git_repo")
        if self.input.tuq_client and "client" in self.input.tuq_client:
            self.shell = RemoteMachineShellConnection(self.input.tuq_client["client"])
        else:
            self.shell = RemoteMachineShellConnection(self.master)
        if not self._testMethodName == 'suite_setUp':
            self._start_command_line_query(self.master)
        self.use_rest = self.input.param("use_rest", True)
        self.max_verify = self.input.param("max_verify", None)
        self.buckets = RestConnection(self.master).get_buckets()
        docs_per_day = self.input.param("doc-per-day", 49)
        self.item_flag = self.input.param("item_flag", 4042322160)
        self.dataset = self.input.param("dataset", "default")
        self.gens_load = self.generate_docs(docs_per_day)
        if self.input.param("gomaxprocs", None):
            self.configure_gomaxprocs()
        self.gen_results = TuqGenerators(self.log, self._generate_full_docs_list(self.gens_load))

    def suite_setUp(self):
        try:
            self.load(self.gens_load, flag=self.item_flag)
            if not self.input.param("skip_build_tuq", False):
                self._build_tuq(self.master)
            self.skip_buckets_handle = True
        except:
            self.tearDown()

    def tearDown(self):
        if self._testMethodName == 'suite_tearDown':
            self.skip_buckets_handle = False
        super(QueryTests, self).tearDown()

    def suite_tearDown(self):
        if not self.input.param("skip_build_tuq", False):
            if hasattr(self, 'shell'):
                self.shell.execute_command("killall /tmp/tuq/cbq-engine")
                self.shell.execute_command("killall tuqtng")
                self.shell.disconnect()


##############################################################################################
#
#   SIMPLE CHECKS
##############################################################################################
    def test_simple_check(self):
        for bucket in self.buckets:
            query_template = 'FROM %s select $str0, $str1 ORDER BY $str0,$str1 ASC' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_simple_negative_check(self):
        queries_errors = {'SELECT $str0 FROM {0} WHERE COUNT({0}.$str0)>3' :
                          'Aggregate function not allowed here',
                          'SELECT *.$str0 FROM {0}' : 'Parse Error - syntax error',
                          'SELECT *.* FROM {0} ... ERROR' : 'Parse Error - syntax error',
                          'FROM %s SELECT $str0 WHERE id=null' : 'Parse Error - syntax error',}
        self.negative_common_body(queries_errors)

    def test_consistent_simple_check(self):
        queries = [self.gen_results.generate_query('SELECT $str0, $int0, $int1 FROM %s ' +\
                    'WHERE $str0 IS NOT NULL AND $int0<10 ' +\
                    'OR $int1 = 6 ORDER BY $int0, $int1'), 
                   self.gen_results.generate_query('SELECT $str0, $int0, $int1 FROM %s ' +\
                    'WHERE $int1 = 6 OR $str0 IS NOT NULL AND ' +\
                    '$int0<10 ORDER BY $int0, $int1')]
        for bucket in self.buckets:
            actual_result1 = self.run_cbq_query(queries[0] % bucket.name)
            actual_result2 = self.run_cbq_query(queries[1] % bucket.name)
            self.assertTrue(actual_result1['resultset'] == actual_result2['resultset'],
                              "Results are inconsistent.Difference: %s %s %s %s" %(
                                    len(actual_result1['resultset']), len(actual_result2['resultset']),
                                    actual_result1['resultset'][100], actual_result2['resultset'][100]))

    def test_simple_nulls(self):
        queries = ['SELECT id FROM %s WHERE id=NULL or id="null"']
        for bucket in self.buckets:
            for query in queries:
                actual_result = self.run_cbq_query(query % (bucket.name))
                self._verify_results(actual_result['resultset'], [])

##############################################################################################
#
#   LIMIT OFFSET CHECKS
##############################################################################################

    def test_limit_offset(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10 OFFSET 10' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_limit_offset_zero(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 0' % (bucket.name)
            self.query = self.gen_results.generate_query(query_template)
            actual_result = self.run_cbq_query()
            self.assertEquals(actual_result['resultset'], [],
                              "Results are incorrect.Actual %s.\n Expected: %s.\n" % (
                                        actual_result['resultset'], []))
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10 OFFSET 0' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self.assertEquals(actual_result['resultset'], expected_result,
                              "Results are incorrect.Actual %s.\n Expected: %s.\n" % (
                                        actual_result['resultset'], expected_result))

    def test_limit_offset_negative_check(self):
        queries_errors = {'SELECT DISTINCT $str0 FROM {0} LIMIT -1' :
                          'Parse Error - syntax error',
                          'SELECT DISTINCT $str0 FROM {0} LIMIT 1.1' :
                          'Parse Error - syntax error',
                          'SELECT DISTINCT $str0 FROM {0} OFFSET -1' :
                          'Parse Error - syntax error',
                          'SELECT DISTINCT $str0 FROM {0} OFFSET 1.1' :
                          'Parse Error - syntax error'}
        self.negative_common_body(queries_errors)

##############################################################################################
#
#   ALIAS CHECKS
##############################################################################################

    def test_simple_alias(self):
        for bucket in self.buckets:
            query_template = 'SELECT COUNT($str0) AS COUNT_EMPLOYEE FROM %s' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self.assertEquals(actual_result['resultset'], expected_result,
                              "Results are incorrect.Actual %s.\n Expected: %s.\n" % (
                                        actual_result['resultset'], expected_result))

            query_template = 'SELECT COUNT(*) + 1 AS COUNT_EMPLOYEE FROM %s' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            expected_result = [ { "COUNT_EMPLOYEE": expected_result[0]['COUNT_EMPLOYEE'] + 1 } ]
            self.assertEquals(actual_result['resultset'], expected_result,
                              "Results are incorrect.Actual %s.\n Expected: %s.\n" % (
                                        actual_result['resultset'], expected_result))

    def test_simple_negative_alias(self):
        queries_errors = {'SELECT $str0._last_name as *' : 'Parse Error - syntax error',
                          'SELECT $str0._last_name as DATABASE ?' : 'Parse Error - syntax error',
                          'SELECT $str0 AS NULL FROM {0}' : 'Parse Error - syntax error',
                          'SELECT $str1 as $str0, $str0 FROM {0}' :
                                'alias name is defined more than once',
                          'SELECT $obj0 AS points, points.task1 FROM {0}' :
                                'Alias points cannot be referenced',
                          'SELECT $obj0.task1 AS points_new FROM {0} AS test ' +
                           'WHERE points_new >0' : "Alias points_new cannot be referenced",
                          'SELECT DISTINCT $obj0 AS points_new FROM {0} AS test ' +
                           'ORDER BY points_new':
                                'Expression points_new is not in the list',
                          'SELECT $obj0 AS points FROM {0} AS test GROUP BY points':
                                'Alias points cannot be referenced',
                          'SELECT test.$obj0 as points FROM {0} AS TEST ' +
                           'GROUP BY TEST.points' :
                                'The expression TEST is not satisfied by these dependencies',
                          'SELECT test.$obj0 as points FROM {0} AS TEST ' +
                           'GROUP BY $obj0 AS GROUP_POINT' :
                                'parse_error',
                          'SELECT COUNT($obj0) as COUNT_NEW_POINT, COUNT($str0) ' +
                           'as COUNT_EMP  FROM {0} AS TEST GROUP BY $str0 ' +
                           'HAVING COUNT_NEW_POINT >0' :
                                'Alias COUNT_NEW_POINT cannot be referenced',
                          'SELECT * FROM {0} emp UNNEST {0}.$list_obj0' : 'Invalid Bucket in UNNEST clause'}
        self.negative_common_body(queries_errors)

    def test_alias_from_clause(self):
        for bucket in self.buckets:
            queries_templates = ['SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ORDER BY points' % (bucket.name),
                       'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test WHERE test.$int0 >0' % (bucket.name) +\
                       ' ORDER BY points',
                       'SELECT tasks_points.task1 AS points FROM %s AS test ' % (bucket.name) +\
                           'WHERE FLOOR(test.test_rate) >0 ORDER BY points',
                       'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ' % (bucket.name) +\
                       'GROUP BY test.$obj0.$_obj0_int0 ORDER BY points']
            for query_template in queries_templates:
                actual_result, expected_result = self.run_query_from_template(query_template)
                self._verify_results(actual_result['resultset'], expected_result)

    def test_alias_from_clause_group(self):
        for bucket in self.buckets:
            query_template = 'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ' %(bucket.name) +\
                         'GROUP BY $obj0.$_obj0_int0 ORDER BY points'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_alias_order_desc(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0 AS name_new FROM %s AS test ORDER BY name_new DESC' %(
                                                                                bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_alias_order_asc(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0 AS name_new FROM %s AS test ORDER BY name_new ASC' %(
                                                                                bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_alias_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT COUNT(TEST.$str0) from %s AS TEST' %(bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_alias_unnest(self):
        for bucket in self.buckets:
            query_template = 'SELECT count(skill) FROM %s AS emp UNNEST emp.$list_str0 AS skill' %(
                                                                            bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

            query_template = 'SELECT count(skill) FROM %s AS emp UNNEST emp.$list_str0 skill' %(
                                                                            bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

##############################################################################################
#
#   ORDER BY CHECKS
##############################################################################################

    def test_order_by_check(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $str1, $obj0.$_obj0_int0 points FROM %s'  % (bucket.name) +\
            ' ORDER BY $str1, $str0, $obj0.$_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)
            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $obj0.$_obj0_int0, $str0, $str1'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_order_by_alias(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1, $obj0 AS points FROM %s'  % (bucket.name) +\
            ' AS test ORDER BY $str1 DESC, points DESC'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_order_by_alias_arrays(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1, $obj0, $list_str0[0] AS SKILL FROM %s'  % (
                                                                            bucket.name) +\
            ' AS TEST ORDER BY SKILL, $str1, TEST.$obj0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_order_by_alias_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT $int0, $int1, count(*) AS emp_per_month from %s'% (
                                                                            bucket.name) +\
            ' WHERE $int1 >7 GROUP BY $int0, $int1 ORDER BY emp_per_month, $int1, $int0'  
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_order_by_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1 AS TITLE FROM %s GROUP'  % (bucket.name) +\
            ' BY $str1 ORDER BY MIN($int1), $str1'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_order_by_precedence(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $str0, $str1'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $str1, $str0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_order_by_satisfy(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $list_obj0 FROM %s AS employee ' % (bucket.name) +\
                        'WHERE ANY vm IN employee.$list_obj0 SATISFIES vm.$_list_obj0_int0 > 5 AND' +\
                        ' vm.$_list_obj0_str0 = "ubuntu" END ORDER BY $str0, $list_obj0[0].$_list_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

##############################################################################################
#
#   DISTINCT
##############################################################################################

    def test_distinct(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str1 FROM %s ORDER BY $str1'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_distinct_nested(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $obj0.$_obj0_int0 as VAR FROM %s '  % (bucket.name) +\
                         'ORDER BY $obj0.$_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

            query_template = 'SELECT DISTINCT $list_str0[0] as skill' +\
                         ' FROM %s ORDER BY $list_str0[0]'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

            query_template = 'SELECT DISTINCT $obj0.* FROM %s'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

##############################################################################################
#
#   COMPLEX PATHS
##############################################################################################

    def test_simple_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 FROM %s.$obj0'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_alias_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 as new_attribute FROM %s.$obj0'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

    def test_where_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 FROM %s.$obj0 WHERE $_obj0_int0 = 1'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['resultset'], expected_result)

##############################################################################################
#
#   COMMON FUNCTIONS
##############################################################################################

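    # Generates a concrete N1QL query from a template, computes the expected
    # result set from the document generators, and runs the query so callers
    # can compare actual against expected.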
    def run_query_from_template(self, query_template):
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

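    # Runs each templated query that is expected to fail and asserts that the
    # raised CBQError message contains the expected error text.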
    def negative_common_body(self, queries_errors=None):
        if not queries_errors:
            self.fail("No queries to run!")
        for bucket in self.buckets:
            for query_template, error in queries_errors.iteritems():
                try:
                    query = self.gen_results.generate_query(query_template)
                    actual_result = self.run_cbq_query(query.format(bucket.name))
                except CBQError as ex:
                    self.log.error(ex)
                    self.assertTrue(str(ex).find(error) != -1,
                                    "Error is incorrect. Actual: %s.\nExpected: %s.\n" % (
                                                                str(ex).split(':')[-1], error))
                else:
                    self.fail("There were no errors. Error expected: %s" % error)

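    # Executes a query either through the REST API or through the command-line
    # client (git build or packaged cbq), parsing the shell output into JSON.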
    def run_cbq_query(self, query=None, min_output_size=10, server=None):
        if query is None:
            query = self.query
        if server is None:
            server = self.master
            if self.input.tuq_client and "client" in self.input.tuq_client:
                server = self.tuq_client
        if self.use_rest:
            result = RestConnection(server).query_tool(query)
        else:
            if self.version == "git_repo":
                output = self.shell.execute_commands_inside("$GOPATH/src/github.com/couchbaselabs/tuqtng/" +\
                                                            "tuq_client/tuq_client " +\
                                                            "-engine=http://%s:8093/" % server.ip,
                                                       subcommands=[query,],
                                                       min_output_size=20,
                                                       end_msg='tuq_client>')
            else:
                output = self.shell.execute_commands_inside("/tmp/tuq/cbq -engine=http://%s:8093/" % server.ip,
                                                           subcommands=[query,],
                                                           min_output_size=20,
                                                           end_msg='cbq>')
            result = self._parse_query_output(output)
        if 'error' in result:
            raise CBQError(result["error"], server.ip)
        self.log.info("TOTAL ELAPSED TIME: %s" % [param["message"]
                        for param in result["info"] if param["key"] == "total_elapsed_time"])
        return result

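    # Builds the download URL for the couchbase-query developer-preview
    # package matching the remote machine's distribution and architecture.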
    def build_url(self, version):
        info = self.shell.extract_remote_info()
        os_type = info.distribution_type.lower()
        if os_type in ["ubuntu", "centos", "red hat"]:
            url = "https://s3.amazonaws.com/packages.couchbase.com/releases/couchbase-query/dp1/"
            url += "couchbase-query_%s_%s_linux.tar.gz" %(
                                version, info.architecture_type)
        #TODO for windows
        return url

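    # Builds the query client: either from the tuqtng git repo using the Go
    # toolchain on the remote machine, or by downloading a prebuilt tarball.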
    def _build_tuq(self, server):
        if self.version == "git_repo":
            os_type = self.shell.extract_remote_info().type.lower()
            if os_type != 'windows':
                goroot = testconstants.LINUX_GOROOT
                gopath = testconstants.LINUX_GOPATH
            else:
                goroot = testconstants.WINDOWS_GOROOT
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if self.input.tuq_client and "goroot" in self.input.tuq_client:
                goroot = self.input.tuq_client["goroot"]
            cmd = "rm -rf {0}/src/github.com".format(gopath)
            self.shell.execute_command(cmd)
            cmd= 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'go get github.com/couchbaselabs/tuqtng;' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; ' +\
                'go get -d -v ./...; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; go build; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng/tuq_client; go build; cd .'
            self.shell.execute_command(cmd)
        else:
            cbq_url = self.build_url(self.version)
            #TODO for windows
            cmd = "cd /tmp; mkdir tuq;cd tuq; wget {0} -O tuq.tar.gz;".format(cbq_url)
            cmd += "tar -xvf tuq.tar.gz;rm -rf tuq.tar.gz"
            self.shell.execute_command(cmd)

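    # Starts the query engine on the remote server in the background, choosing
    # the binary and working directory based on build type and remote OS.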
    def _start_command_line_query(self, server):
        if self.version == "git_repo":
            os_type = self.shell.extract_remote_info().type.lower()
            if os_type != 'windows':
                gopath = testconstants.LINUX_GOPATH
            else:
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if os_type == 'windows':
                cmd = "cd %s/src/github.com/couchbaselabs/tuqtng/; " % (gopath) +\
                "./tuqtng.exe -couchbase http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s/src/github.com/couchbaselabs/tuqtng/; " % (gopath) +\
                "./tuqtng -couchbase http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            self.shell.execute_command(cmd)
        else:
            os_type = self.shell.extract_remote_info().type.lower()
            if os_type != 'windows':
                cmd = "cd /tmp/tuq;./cbq-engine -couchbase http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd /cygdrive/c/tuq;./cbq-engine.exe -couchbase http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            self.shell.execute_command(cmd)

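    # Strips the interactive prompt ("cbq>" or "tuq_client>") from the shell
    # output and parses the remaining text as JSON.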
    def _parse_query_output(self, output):
        if output.find("cbq>") == 0:
            output = output[output.find("cbq>") + 4:].strip()
        if output.find("tuq_client>") == 0:
            output = output[output.find("tuq_client>") + 11:].strip()
        if output.find("cbq>") != -1:
            output = output[:output.find("cbq>")].strip()
        if output.find("tuq_client>") != -1:
            output = output[:output.find("tuq_client>")].strip()
        return json.loads(output)

    def generate_docs(self, num_items, start=0):
        try:
            return getattr(self, 'generate_docs_' + self.dataset)(num_items, start)
        except AttributeError:
            self.fail("There is no dataset %s, please enter a valid one" % self.dataset)

    def generate_docs_default(self, docs_per_day, start=0):
        generators = []
        types = ['Engineer', 'Sales', 'Support']
        join_yr = [2010, 2011]
        join_mo = xrange(1, 12 + 1)
        join_day = xrange(1, 28 + 1)
        template = '{{ "name":"{0}", "join_yr":{1}, "join_mo":{2}, "join_day":{3},'
        template += ' "email":"{4}", "job_title":"{5}", "test_rate":{8}, "skills":{9},'
        template += '"VMs": {10},'
        template += ' "tasks_points" : {{"task1" : {6}, "task2" : {7}}}}}'
        for info in types:
            for year in join_yr:
                for month in join_mo:
                    for day in join_day:
                        prefix = str(uuid.uuid4())[:7]
                        name = ["employee-%s" % (str(day))]
                        email = ["*****@*****.**" % (str(day))]
                        vms = [{"RAM": month, "os": "ubuntu",
                                "name": "vm_%s" % month, "memory": month},
                               {"RAM": month, "os": "windows",
                                "name": "vm_%s"% (month + 1), "memory": month}]
                        generators.append(DocumentGenerator("query-test" + prefix,
                                               template,
                                               name, [year], [month], [day],
                                               email, [info], range(1,10), range(1,10),
                                               [float("%s.%s" % (month, month))],
                                               [["skill%s" % y for y in join_yr]],
                                               [vms],
                                               start=start, end=docs_per_day))
        return generators

    def generate_docs_sabre(self, docs_per_day, start=0):
        generators = []
        dests = ['BOS', 'MIA', 'SFO']
        join_yr = [2010, 2011]
        join_mo = xrange(1, 12 + 1)
        join_day = xrange(1, 28 + 1)
        template = '{{ "Amount":{0}, "CurrencyCode":"{1}",'
        template += ' "TotalTax":{{"DecimalPlaces" : {2}, "Amount" : {3}, "CurrencyCode" : "{4}",}}, ,'
        template += ' "Tax":{5}, "FareBasisCode":{6}, "PassengerTypeQuantity":{7}, "TicketType":"{8}",'
        template += '"SequenceNumber": {9},'
        template += ' "DirectionInd" : "{10}",  "Itinerary" : {11}, "Destination" : "{12}",'
        template += '"join_yr":{13}, "join_mo":{14}, "join_day":{15}, "Codes" :{16}}}'
        for dest in dests:
            for year in join_yr:
                for month in join_mo:
                    for day in join_day:
                        prefix = '%s_%s-%s-%s' % (dest, year, month, day)
                        amount = [float("%s.%s" % (month, month))]
                        currency = [("USD", "EUR")[month in [1,3,5]]]
                        decimal_tax = [1,2]
                        amount_tax = [day]
                        currency_tax = currency
                        taxes = [{"DecimalPlaces": 2, "Amount": float(amount_tax)/3,
                                  "TaxCode": "US1", "CurrencyCode": currency},
                                 {"DecimalPlaces": 2, "Amount": float(amount_tax)/4,
                                  "TaxCode": "US2", "CurrencyCode": currency},
                                 {"DecimalPlaces": 2, "Amount": amount_tax - float(amount_tax)/4-\
                                  float(amount_tax)/3,
                                  "TaxCode": "US2", "CurrencyCode": currency}]

                        fare_basis = [{"content": "XA21A0NY", "DepartureAirportCode": dest,
                                       "BookingCode": "X", "ArrivalAirportCode": "MSP"},
                                      {"content": "XA21A0NY", "DepartureAirportCode": "MSP",
                                       "AvailabilityBreak": True, "BookingCode": "X",
                                       "ArrivalAirportCode": "BOS"}]
                        pass_amount = [day]
                        ticket_type = [("eTicket", "testType")[month in [1,3,5]]]
                        sequence = [year]
                        direction = [("oneWay", "return")[month in [2,6,10]]]
                        itinerary = {"OriginDestinationOptions":
                                     {"OriginDestinationOption": [
                                       {"FlightSegment": [
                                         {"TPA_Extensions":
                                           {"eTicket": {"Ind": True}},
                                           "MarketingAirline": {"Code": dest},
                                           "StopQuantity": month,
                                           "DepartureTimeZone": {"GMTOffset": -7},
                                           "OperatingAirline": {"Code": "DL",
                                                                "FlightNumber": year + month},
                                           "DepartureAirport": {"LocationCode": "SFO"},
                                           "ArrivalTimeZone": {"GMTOffset": -5},
                                           "ResBookDesigCode": "X",
                                           "FlightNumber": year + day,
                                           "ArrivalDateTime": "2014-07-12T06:07:00",
                                           "ElapsedTime": 212,
                                           "Equipment": {"AirEquipType": 763},
                                           "DepartureDateTime": "2014-07-12T00:35:00",
                                           "MarriageGrp": "O",
                                           "ArrivalAirport": {"LocationCode": "MSP"}},
                                        {"TPA_Extensions":
                                           {"eTicket": {"Ind": False}},
                                           "MarketingAirline": {"Code": dest},
                                           "StopQuantity": month,
                                           "DepartureTimeZone": {"GMTOffset": -7},
                                           "OperatingAirline": {"Code": "DL",
                                                                "FlightNumber": year + month + 1},
                                           "DepartureAirport": {"LocationCode": "SFO"},
                                           "ArrivalTimeZone": {"GMTOffset": -3},
                                           "ResBookDesigCode": "X",
                                           "FlightNumber": year + day,
                                           "ArrivalDateTime": "2014-07-12T06:07:00",
                                           "ElapsedTime": 212,
                                           "Equipment": {"AirEquipType": 764},
                                           "DepartureDateTime": "2014-07-12T00:35:00",
                                           "MarriageGrp": "1",
                                           "ArrivalAirport": {"LocationCode": "MSP"}}],
                                    "ElapsedTime": 619},
                                   {"FlightSegment": [
                                         {"TPA_Extensions":
                                           {"eTicket": {"Ind": True}},
                                           "MarketingAirline": {"Code": dest},
                                           "StopQuantity": month,
                                           "DepartureTimeZone": {"GMTOffset": -7},
                                           "OperatingAirline": {"Code": "DL",
                                                                "FlightNumber": year + month},
                                           "DepartureAirport": {"LocationCode": "SFO"},
                                           "ArrivalTimeZone": {"GMTOffset": -5},
                                           "ResBookDesigCode": "X",
                                           "FlightNumber": year + day,
                                           "ArrivalDateTime": "2014-07-12T06:07:00",
                                           "ElapsedTime": 212,
                                           "Equipment": {"AirEquipType": 763},
                                           "DepartureDateTime": "2014-07-12T00:35:00",
                                           "MarriageGrp": "O",
                                           "ArrivalAirport": {"LocationCode": "MSP"}},
                                        {"TPA_Extensions":
                                           {"eTicket": {"Ind": False}},
                                           "MarketingAirline": {"Code": dest},
                                           "StopQuantity": month,
                                           "DepartureTimeZone": {"GMTOffset": -7},
                                           "OperatingAirline": {"Code": "DL",
                                                                "FlightNumber": year + month + 1},
                                           "DepartureAirport": {"LocationCode": "SFO"},
                                           "ArrivalTimeZone": {"GMTOffset": -3},
                                           "ResBookDesigCode": "X",
                                           "FlightNumber": year + day,
                                           "ArrivalDateTime": "2014-07-12T06:07:00",
                                           "ElapsedTime": 212,
                                           "Equipment": {"AirEquipType": 764},
                                           "DepartureDateTime": "2014-07-12T00:35:00",
                                           "MarriageGrp": "1",
                                           "ArrivalAirport": {"LocationCode": "MSP"}}]}]},
                                     "DirectionInd": "Return"}
                        generators.append(DocumentGenerator(prefix, template,
                                               amount, currency, decimal_tax, amount_tax, currency_tax,
                                               [taxes], [fare_basis], pass_amount, ticket_type, sequence,
                                               direction, itinerary, [dest], [year], [month], [day],
                                               [[dest, dest]], start=start, end=docs_per_day))
        return generators

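    # Deep-copies the generators per bucket, loads documents asynchronously
    # into every bucket, then verifies cluster stats once all tasks complete.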
    def load(self, generators_load, exp=0, flag=0,
             kv_store=1, only_store_hash=True, batch_size=1, pause_secs=1,
             timeout_secs=30, op_type='create', start_items=0):
        gens_load = {}
        for bucket in self.buckets:
            tmp_gen = []
            for generator_load in generators_load:
                tmp_gen.append(copy.deepcopy(generator_load))
            gens_load[bucket] = copy.deepcopy(tmp_gen)
        tasks = []
        items = 0
        for gen_load in gens_load[self.buckets[0]]:
                items += (gen_load.end - gen_load.start)

        for bucket in self.buckets:
            self.log.info("%s %s to %s documents..." % (op_type, items, bucket.name))
            tasks.append(self.cluster.async_load_gen_docs(self.master, bucket.name,
                                             gens_load[bucket],
                                             bucket.kvs[kv_store], op_type, exp, flag,
                                             only_store_hash, batch_size, pause_secs,
                                             timeout_secs))
        for task in tasks:
            task.result()
        self.num_items = items + start_items
        self.verify_cluster_stats(self.servers[:self.nodes_init])
        self.log.info("LOAD IS FINISHED")

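    # Materializes every document a set of generators would produce, optionally
    # filtered by key, so expected results can be computed client-side.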
    def _generate_full_docs_list(self, gens_load, keys=[]):
        all_docs_list = []
        for gen_load in gens_load:
            doc_gen = copy.deepcopy(gen_load)
            while doc_gen.has_next():
                key, val = doc_gen.next()
                try:
                    val = json.loads(val)
                    val['mutated'] = 0
                except TypeError:
                    pass
                if keys:
                    if not (key in keys):
                        continue
                all_docs_list.append(val)
        return all_docs_list

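    # Compares actual and expected result sets, reporting missing/extra items
    # on a length mismatch and truncating both sides to max_verify when set.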
    def _verify_results(self, actual_result, expected_result):
        if len(actual_result) != len(expected_result):
            missing, extra = self.check_missing_and_extra(actual_result, expected_result)
            self.log.error("Missing items: %s.\n Extra items: %s" % (missing[:100], extra[:100]))
            self.fail("Results are incorrect.Actual num %s. Expected num: %s.\n" % (
                                            len(actual_result), len(expected_result)))
        if self.max_verify is not None:
            actual_result = actual_result[:self.max_verify]
            expected_result = expected_result[:self.max_verify]

        msg = "Results are incorrect.\n Actual first and last 100:  %s.\n ... \n %s" +\
        "Expected first and last 100: %s.\n  ... \n %s"
        self.assertTrue(actual_result == expected_result,
                          msg % (actual_result[:100],actual_result[-100:],
                                 expected_result[:100],expected_result[-100:]))

    def check_missing_and_extra(self, actual, expected):
        missing = []
        extra = []
        for item in actual:
            if not (item in expected):
                 extra.append(item)
        for item in expected:
            if not (item in actual):
                missing.append(item)
        return missing, extra

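    # Sorts any list/set values inside each result row so that comparisons of
    # nested collections are order-insensitive.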
    def sort_nested_list(self, result):
        actual_result = []
        for item in result:
            curr_item = {}
            for key, value in item.iteritems():
                if isinstance(value, list) or isinstance(value, set):
                    curr_item[key] = sorted(value)
                else:
                    curr_item[key] = value
            actual_result.append(curr_item)
        return actual_result

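    # Exports GOMAXPROCS on each server; note the export only affects the
    # shell session it runs in.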
    def configure_gomaxprocs(self):
        max_proc = self.input.param("gomaxprocs", None)
        cmd = "export GOMAXPROCS=%s" % max_proc
        for server in self.servers:
            shell_connection = RemoteMachineShellConnection(server)
            shell_connection.execute_command(cmd)
            shell_connection.disconnect()

    def check_onesaslbucket_auth(self):
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            for bucket in self.buckets:
                try:
                    if (bucket.saslPassword != ''):
                        print('sasl')
                        o = shell.execute_commands_inside('%s/cbq -c %s:%s -q' % (self.path,bucket.name,bucket.saslPassword),'CREATE PRIMARY INDEX ON %s USING GSI' %bucket.name,'','','','','')
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside('%s/cbq -c %s:%s -q' % (self.path,bucket.name,bucket.saslPassword),'select *,join_day from %s limit 10'%bucket.name,'','','','','')
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside('%s/cbq -c %s:%s -q' % (self.path,bucket.name,'wrong'),'select * from %s limit 10'%bucket.name,'','','','','')
                        print o
                        self.assertTrue("AuthorizationFailed"  in o)

                        o = shell.execute_commands_inside('%s/cbq -c %s:%s -q' % (self.path,'','wrong'),'select * from %s limit 10'%bucket.name,'','','','','')
                        self.assertEqual('FAIL',o[7:])
                        o = shell.execute_commands_inside('%s/cbq -c %s:%s -q' % (self.path,'wrong',bucket.saslPassword),'select * from %s limit 10'%bucket.name,'','','','','')
                        self.assertTrue("AuthorizationFailed"  in o)

                        queries = ['\set -creds user:pass;','select *,join_day from bucketname limit 10;']
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,bucket.name,bucket.saslPassword,bucket.name,'' )
                        self.assertTrue("requestID" in o)
                        queries = ['\set -creds user:pass;','select * from bucketname union all select * from default limit 10;']
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'Administrator','password',bucket.name,'' )
                        self.assertTrue("requestID" in o)
                        queries = ['\set -creds user:pass;','SELECT buck.email FROM  bucketname buck LEFT JOIN default on keys "query-testemployee10153.1877827-0";']
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'Administrator','password',bucket.name,'' )
                        self.assertTrue("requestID" in o)
                        queries = ['\set -creds user:pass;','SELECT buck.email FROM  bucketname buck LEFT JOIN default on keys "query-testemployee10153.1877827-0";']
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,bucket.name,bucket.saslPassword,bucket.name,'' )
                        self.assertTrue("requestID" in o)

                        queries = ['select count(*) from bucketname  union all select count(*) from default;']
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'',bucket.saslPassword,bucket.name,''  )
                        self.assertTrue("AuthorizationFailed"  in o)

                        queries = ['\set -creds user:pass;','select *,email,join_day from bucketname;']
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'Administrator','password',bucket.name,'' )
                        self.assertTrue("requestID" in o)
                        queries = ['\set -creds user:pass;','create primary index on default;','select email,join_day from bucketname union all select email,join_day from default;']
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,bucket.name,bucket.saslPassword,bucket.name,'' )
                        self.assertTrue("requestID" in o)

                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'wrong','wrong',bucket.name,'' )
                        self.assertTrue("AuthorizationFailed"  in o)
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,'wrong',bucket.saslPassword,bucket.name,'' )
                        self.assertTrue("AuthorizationFailed"  in o)
                        o = shell.execute_commands_inside('%s/cbq -quiet' % (self.path),'',queries,bucket.name,'wrong',bucket.name,'' )
                        self.assertTrue("AuthorizationFailed"  in o)
                        o = shell.execute_commands_inside('%s/cbq -q -u=%s -p=%s' % (self.path,'Administrator','password'),'select * from %s limit 10;' %bucket.name,'','','','','' )
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside('%s/cbq -q -u=%s -p=%s' % (self.path,bucket.name,bucket.saslPassword),'select * from %s limit 10;' %bucket.name,'','','','','' )
                        self.assertTrue("requestID" in o)
                        print('nonsasl')
                        o = shell.execute_commands_inside('%s/cbq -q -u %s -p %s' % (self.path,'Administrator','password'),'select * from default limit 10;','','','','','' )
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside('%s/cbq -q -u %s -p %s' % (self.path,bucket.name,bucket.saslPassword),'select * from default limit 10;' ,'','','','','' )
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside('%s/cbq -q ' % (self.path),'select * from default limit 10;','','','','','' )
                        self.assertTrue("requestID" in o)
                        break

                finally:
                    shell.disconnect()

    def test_push_pop_set(self):
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            for bucket in self.buckets:
                i = 1
                pushqueries = [
                    '\set -$project "AB";', '\push -$project "CD";',
                    '\push -$project "EF";', '\push -$project "GH";',
                    'select $project;'
                ]
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', pushqueries, '', '',
                    bucket.name, '')
                self.assertTrue('{"$1":"GH"}' in o)
                pushqueries.append('\pop;')
                pushqueries.append('select $project;')
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', pushqueries, '', '',
                    bucket.name, '')
                self.assertTrue('{"$1":"EF"}' in o)

                popqueries = ['\pop;', 'select $project;']
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', popqueries, '', '',
                    bucket.name, '')
                self.assertTrue('Errorevaluatingprojection' in o)

                popqueries.extend([
                    '\push -$project "CD";', '\push -$project "EF";',
                    '\push -$project "GH";', '\pop -$project;', '\pop;',
                    'select $project;'
                ])
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', popqueries, '', '',
                    bucket.name, '')
                self.assertTrue('{"$1":"CD"}' in o)
                popqueries.append('\pop -$project;')
                popqueries.append('select $project;')
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', popqueries, '', '',
                    bucket.name, '')
                self.assertTrue('Errorevaluatingprojection' in o)
                popqueries.extend([
                    '\set -$project "AB";', '\push -$project "CD";',
                    '\push -$project "EF";', '\pop;', '\unset -$project;',
                    'select $project;'
                ])
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', popqueries, '', '',
                    bucket.name, '')
                self.assertTrue('Errorevaluatingprojection' in o)

                while (i < 15):
                    pushqueries.append('\SET -args [7, 0,1,2011];')
                    pushqueries.append('\push;')
                    pushqueries.append('\SET -$join_day %s;' % i)
                    pushqueries.append('\push -$join_day %s;' % i)
                    pushqueries.append('\push -args [8,1,2,2011];')
                    pushqueries.append('select $join_day;')
                    pushqueries.append('\SET -$project "AB";')
                    pushqueries.append('\push;')
                    pushqueries.append('\push  -$project "CD";')
                    pushqueries.append('select  $project;')
                    pushqueries.append(
                        'prepare temp from select  tasks_points.task1 AS task from bucketname where join_day>=$join_day and  join_mo>$1 GROUP BY tasks_points.task1 HAVING COUNT(tasks_points.task1) > $2 AND  (MIN(join_day)=$3 OR MAX(join_yr=$4));'
                    )
                    pushqueries.append('execute temp;')
                    pushqueries.append('\set;')
                    i = i + 1
                    o = shell.execute_commands_inside(
                        '%s/cbq -quiet' % (self.path), '', pushqueries, '', '',
                        bucket.name, '')
                i = 1
                popqueries = []
                while (i < 10):
                    popqueries.append('\SET;')
                    popqueries.append('\pop;')
                    popqueries.append('\pop -args;')
                    popqueries.append('\pop -$join_day;')
                    popqueries.append('select $join_day;')
                    popqueries.append('\pop -$project;')
                    popqueries.append('\SET;')
                    popqueries.append(
                        'prepare temp from select tasks_points.task1 AS task from bucketname where join_day>=$join_day and  join_mo>$1 GROUP BY tasks_points.task1 HAVING COUNT(tasks_points.task1) > $2 AND  (MIN(join_day)=$3 OR MAX(join_yr=$4));'
                    )
                    popqueries.append('execute temp;')
                    i = i + 1
                    o = shell.execute_commands_inside(
                        '%s/cbq -quiet' % (self.path), '', popqueries, '', '',
                        bucket.name, '')

class QueriesUpgradeTests(QueryTests, NewUpgradeBaseTest):

    def setUp(self):
        super(QueriesUpgradeTests, self).setUp()
        if self._testMethodName == 'suite_setUp':
            return
        self.log.info("==============  QueriesUpgradeTests setup has started ==============")

        # general setup
        self.feature = self.input.param('feature', None)
        self.upgrade_type = self.input.param('upgrade_type', None)
        self._all_buckets_flush()
        self.load(self.gens_load, flag=self.item_flag)
        self.bucket_doc_map = {"default": 2016, "standard_bucket0": 2016}
        self.bucket_status_map = {"default": "healthy", "standard_bucket0": "healthy"}

        # feature specific setup
        if self.feature == "ansi-joins":
            self.rest.load_sample("travel-sample")
            self.bucket_doc_map["travel-sample"] = 31591
            self.bucket_status_map["travel-sample"] = "healthy"
        if self.feature == "backfill":
            self.directory_path = self.input.param("directory_path", "/opt/couchbase/var/lib/couchbase/tmp")
            self.create_directory = self.input.param("create_directory", True)
            self.tmp_size = self.input.param("tmp_size", 5120)
            self.nonint_size = self.input.param("nonint_size", False)
            self.out_of_range_size = self.input.param("out_of_range_size", False)
            self.set_backfill_directory = self.input.param("set_backfill_directory", True)
            self.change_directory = self.input.param("change_directory", False)
            self.reset_settings = self.input.param("reset_settings", False)
            self.curl_url = "http://%s:%s/settings/querySettings" % (self.master.ip, self.master.port)
        if self.feature == "xattrs":
            self.system_xattr_data = []
            self.user_xattr_data = []
            self.meta_ids = []
        if self.feature == "curl-whitelist":
            self.google_error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                                    "https://maps.googleapis.com/maps/api/geocode/json."
            self.jira_error_msg ="Errorevaluatingprojection.-cause:URLendpointisn'twhitelistedhttps://jira.atlassian." \
                                 "com/rest/api/latest/issue/JRA-9.PleasemakesuretowhitelisttheURLontheUI."
            self.cbqpath = '%scbq' % self.path + " -e %s:%s -q -u %s -p %s" \
                                                 % (self.master.ip, self.n1ql_port, self.rest.username, self.rest.password)
        if self.feature == "auditing":
            self.audit_codes = [28672, 28673, 28674, 28675, 28676, 28677, 28678, 28679, 28680, 28681,
                                28682, 28683, 28684, 28685, 28686, 28687, 28688]
            self.unauditedID = self.input.param("unauditedID", "")
            self.audit_url = "http://%s:%s/settings/audit" % (self.master.ip, self.master.port)
            self.filter = self.input.param("filter", False)
        self.log.info("==============  QueriesUpgradeTests setup has completed ==============")

    def suite_setUp(self):
        super(QueriesUpgradeTests, self).suite_setUp()
        self.log.info("==============  QueriesUpgradeTests suite_setup has started ==============")
        self.log.info("==============  QueriesUpgradeTests suite_setup has completed ==============")

    def tearDown(self):
        self.log.info("==============  QueriesUpgradeTests tearDown has started ==============")
        self.upgrade_servers = self.servers
        self.log.info("==============  QueriesUpgradeTests tearDown has completed ==============")
        super(QueriesUpgradeTests, self).tearDown()

    def suite_tearDown(self):
        self.log.info("==============  QueriesUpgradeTests suite_tearDown has started ==============")
        self.log.info("==============  QueriesUpgradeTests suite_tearDown has completed ==============")
        super(QueriesUpgradeTests, self).suite_tearDown()

    # old test
    def test_mixed_cluster(self):
        self._kill_all_processes_cbq()
        self.assertTrue(len(self.servers) > 1, 'Test needs more than 1 server')
        method_name = self.input.param('to_run', 'test_all_negative')
        self._install(self.servers[:2])
        self.bucket_size = 100
        self._bucket_creation()
        self.load(self.gens_load, flag=self.item_flag)
        upgrade_threads = self._async_update(self.upgrade_versions[0], [self.servers[1]], None, True)
        for upgrade_thread in upgrade_threads:
            upgrade_thread.join()
        self.cluster.rebalance(self.servers[:1], self.servers[1:2], [])
        self.shell = RemoteMachineShellConnection(self.servers[1])
        self._kill_all_processes_cbq()
        self._start_command_line_query(self.servers[1])
        self.shell.execute_command("ps -aef| grep cbq-engine")
        self.master = self.servers[1]
        getattr(self, method_name)()
        for th in threading.enumerate():
            if th != threading.current_thread():
                th._Thread__stop()

    # old test
    def test_upgrade_old(self):
        self._kill_all_processes_cbq()
        method_name = self.input.param('to_run', 'test_any')
        self._install(self.servers[:2])
        self.bucket_size = 100
        self._bucket_creation()
        self.load(self.gens_load, flag=self.item_flag)
        self.cluster.rebalance(self.servers[:1], self.servers[1:2], [])
        upgrade_threads = self._async_update(self.upgrade_versions[0], self.servers[:2])
        for upgrade_thread in upgrade_threads:
            upgrade_thread.join()
        self._kill_all_processes_cbq()
        self._start_command_line_query(self.master)
        self.create_primary_index_for_3_0_and_greater()
        getattr(self, method_name)()
        for th in threading.enumerate():
            if th != threading.current_thread():
                th._Thread__stop()

    def test_upgrade(self):
        """
        Upgrade Test.
        1) Run pre-upgrade feature test
        2) Upgrade a single node
        3) Run mixed-mode feature test on upgraded node
        4) Upgrade the remaining nodes
        5) Run post-upgrade feature test
        """

        # Perform pre_upgrade operations on cluster
        # install version defined in self.initial_version (newupgradebasetest)
        self.log.info("Begin n1ql upgrade test: {0}".format(self.upgrade_type))
        self.log_config_info()
        self.wait_for_buckets_status(self.bucket_status_map, 5, 120)
        self.wait_for_bucket_docs(self.bucket_doc_map, 5, 120)
        self.log_config_info()
        self.wait_for_all_indexes_online()
        self.ensure_primary_indexes_exist()
        self.wait_for_all_indexes_online()

        self.log.info("UPGRADE_VERSIONS = " + str(self.upgrade_versions))
        # run pre upgrade test
        self.log.info("running pre upgrade test")
        self.run_upgrade_test_for_feature(self.feature, "pre-upgrade")
        self.log.info("completed pre upgrade test")

        # take 1 server to upgrade to test mixed mode scenarios
        mixed_servers = [server for server in self.servers[:1]]  # first server to upgrade for mixed mode tests
        remaining_servers = [server for server in self.servers[1:]]  # the rest of the servers

        # set this to have initial version reinstalled in tearDown
        self.upgrade_servers = [server for server in self.servers]

        # upgrade mixed mode servers
        self.log.info("upgrading servers for mixed mode")

        if self.upgrade_type == "offline":
            # stop server, upgrade, rebalance
            self.offline_upgrade(mixed_servers)

        if self.upgrade_type == "online":
            # rebalance out, upgrade, rebalance in
            self.online_upgrade(mixed_servers)

        if self.upgrade_type == "online_with_swap_rebalance":
            # 4 servers, only 3 servers in cluster, 1 will be used to do the swap rebalance
            # rebalance out initial node, upgrade swap rebalance in
            rebalance = self.cluster.async_rebalance(remaining_servers, [], mixed_servers)
            rebalance.result()

            self.online_upgrade_with_swap_rebalance(mixed_servers[0], [remaining_servers[0]])

        if self.upgrade_type == "online_with_failover":
            # graceful failover, upgrade, full recovery
            self.master = remaining_servers[0]
            self.online_upgrade_with_failover(mixed_servers)

        # set master to the upgraded server for mixed mode tests, run_cbq_query executes against master
        self.master = mixed_servers[0]
        self.log.info("upgraded {0} servers: {1}".format(str(len(mixed_servers)), str(mixed_servers)))
        self.log.info("cluster is now in mixed mode")

        self.log_config_info()
        self.wait_for_buckets_status(self.bucket_status_map, 5, 120)
        self.wait_for_bucket_docs(self.bucket_doc_map, 5, 120)
        self.wait_for_all_indexes_online()
        self.log_config_info()

        # run mixed mode test
        self.log.info("running mixed mode test")
        self.run_upgrade_test_for_feature(self.feature, "mixed-mode")
        self.log.info("completed mixed mode test")

        # upgrade remaining servers
        self.log.info("upgrading remaining servers")

        if self.upgrade_type == "offline":
            # stop server, upgrade, rebalance in
            self.offline_upgrade(remaining_servers)

        if self.upgrade_type == "online":
            # rebalance out, upgrade, rebalance in
            self.online_upgrade(remaining_servers)

        if self.upgrade_type == "online_with_swap_rebalance":
            # rebalance out initial node, upgrade swap rebalance in
            # mixed server is upgraded and remaining_server[0] is out of cluster
            self.online_upgrade_with_swap_rebalance(remaining_servers[0], remaining_servers[1:])

        if self.upgrade_type == "online_with_failover":
            # graceful failover, upgrade, full recovery
            self.online_upgrade_with_failover(remaining_servers)

        self.log.info("successfully upgraded {0} remaining servers: {1}".format(str(len(remaining_servers)), str(remaining_servers)))

        self.log_config_info()
        self.wait_for_buckets_status(self.bucket_status_map, 5, 120)
        self.wait_for_bucket_docs(self.bucket_doc_map, 5, 120)
        self.wait_for_all_indexes_online()
        self.log_config_info()

        # run post upgrade test
        self.ensure_primary_indexes_exist()
        self.log.info("running post upgrade test")
        self.run_upgrade_test_for_feature(self.feature, "post-upgrade")
        self.log.info("completed post upgrade test")

    def stop_cb_servers(self, server_list):
        for server in server_list:
            remote = RemoteMachineShellConnection(server)
            remote.stop_server()
            remote.disconnect()

    def offline_upgrade(self, servers=[]):
        # stop server, upgrade, rebalance in
        self.stop_cb_servers(servers)
        upgrade_threads = self._async_update(self.upgrade_versions[0], servers)
        for upgrade_thread in upgrade_threads:
            upgrade_thread.join()

    def online_upgrade(self, upgrade_servers=[]):
        self.log.info("online upgrade servers: {0}".format(str(upgrade_servers)))
        for server in upgrade_servers:
            self.log.info("upgrading: {0}".format(str(server)))
            participating_servers = [s for s in self.servers]
            participating_servers.remove(server)
            self.log.info("participating servers: {0}".format(str(participating_servers)))
            rebalance = self.cluster.async_rebalance(participating_servers, [], [server])
            rebalance.result()
            upgrade_th = self._async_update(self.upgrade_versions[0], [server])
            for th in upgrade_th:
                th.join()
            rebalance = self.cluster.async_rebalance(participating_servers,
                                                     [server], [],
                                                     services=['kv,n1ql,index'])
            rebalance.result()

    def online_upgrade_with_failover(self, upgrade_servers):
        self.log.info("online upgrade servers: {0}".format(str(upgrade_servers)))
        for server in upgrade_servers:
            self.log.info("upgrading: {0}".format(str(server)))
            participating_servers = [s for s in self.servers]
            failover_task = self.cluster.async_failover([self.master], failover_nodes=[server], graceful=False)
            failover_task.result()
            upgrade_th = self._async_update(self.upgrade_versions[0], [server])
            for th in upgrade_th:
                th.join()
            rest = RestConnection(self.master)
            nodes_all = rest.node_statuses()
            for cluster_node in nodes_all:
                if cluster_node.ip == server.ip:
                    rest.add_back_node(cluster_node.id)
                    rest.set_recovery_type(otpNode=cluster_node.id, recoveryType="full")
            participating_servers.remove(server)
            self.log.info("participating servers: {0}".format(str(participating_servers)))
            rebalance = self.cluster.async_rebalance(participating_servers, [], [])
            rebalance.result()

    def online_upgrade_with_swap_rebalance(self, out_server, upgrade_servers):
        self.log.info("online upgrade servers: {0}".format(str(upgrade_servers)))
        for server in upgrade_servers:
            self.log.info("upgrading: {0}".format(str(out_server)))
            participating_servers = [s for s in self.servers]
            participating_servers.remove(out_server)
            participating_servers.remove(server)
            self.log.info("participating servers: {0}".format(str(participating_servers)))
            upgrade_th = self._async_update(self.upgrade_versions[0], [out_server])
            for th in upgrade_th:
                th.join()
            rebalance = self.cluster.async_rebalance(participating_servers,
                                                     [out_server], [server],
                                                     services=["kv,index,n1ql"])
            rebalance.result()
            out_server = server

    def run_upgrade_test_for_feature(self, feature, phase):
        if feature == "ansi-joins":
            self.run_ansi_join_upgrade_test(phase)
        elif feature == "xattrs":
            self.run_xattrs_upgrade_test(phase)
        elif feature == "auditing":
            self.run_auditing_upgrade_test(phase)
        elif feature == "backfill":
            self.run_backfill_upgrade_test(phase)
        elif feature == "curl-whitelist":
            self.run_curl_whitelist_upgrade_test(phase)
        else:
            self.fail("FAIL: feature {0} not found".format(feature))

    def run_ansi_join_upgrade_test(self, phase):
        if phase == "pre-upgrade":
            self.log.info("running pre-upgrade test for ansi joins")
        elif phase == "mixed-mode":
            self.log.info("running mixed-mode test for ansi joins")
        elif phase == "post-upgrade":
            self.run_test_basic_join()
        else:
            self.fail("FAIL: (ansi-join) invalid phase: {0}".format(phase))

    def run_xattrs_upgrade_test(self, phase):
        if phase == "pre-upgrade":
            self.log.info("running pre-upgrade test for xattrs")
        elif phase == "mixed-mode":
            self.log.info("running mixed-mode test for xattrs")
        elif phase == "post-upgrade":
            self.log.info("running post-upgrade test for xattrs")
            self.run_test_system_xattr_composite_secondary_index()
        else:
            self.fail("FAIL: (xattrs) invalid phase: {0}".format(phase))

    def run_auditing_upgrade_test(self, phase):
        if phase == "pre-upgrade":
            self.log.info("running pre-upgrade test for auditing")
        elif phase == "mixed-mode":
            self.log.info("running mixed-mode test for auditing")
        elif phase == "post-upgrade":
            self.log.info("running post-upgrade test for auditing")
            self.set_audit()
            self.eventID = 28676
            self.op_type = "insert"
            self.run_test_queryEvents()
            self.op_type = "select"
            self.unauditedID = 28678
            self.eventID = 28672
            self.filter = True
            self.run_test_queryEvents()
        else:
            self.fail("FAIL: (auditing) invalid phase: {0}".format(phase))

    def run_backfill_upgrade_test(self, phase):
        if phase == "pre-upgrade":
            self.log.info("running pre-upgrade test for backfill")
        elif phase == "mixed-mode":
            self.log.info("running mixed-mode test for backfill")
        elif phase == "post-upgrade":
            self.log.info("running post-upgrade test for backfill")
            self.reset_settings = True
            self.run_test_backfill()
            self.reset_settings = False
            self.directory_path = "/opt/couchbase/var/lib/couchbase/testing"
            self.change_directory = True
            self.run_test_backfill()
        else:
            self.fail("FAIL: (backfill) invalid phase: {0}".format(phase))

    def run_curl_whitelist_upgrade_test(self, phase):
        if phase == "pre-upgrade":
            self.log.info("running pre-upgrade test for curl whitelist")
        elif phase == "mixed-mode":
            self.log.info("running mixed-mode test for curl whitelist")
        elif phase == "post-upgrade":
            self.log.info("running post-upgrade test for curl whitelist")
            self.run_test_all_access_true()
            self.run_test_allowed_url()
            self.run_test_disallowed_url()
        else:
            self.fail("FAIL: (curl whitelist) invalid phase: {0}".format(phase))

    ###############################
    #
    # ANSI Joins Tests
    #
    ###############################

    # test_basic_join in tuq_ansi_joins.py
    def run_test_basic_join(self):
        idx_list = []
        queries_to_run = []
        index = "CREATE INDEX idx1 on `travel-sample`(id)"
        idx_list.append((index, ("`travel-sample`", "idx1")))
        query = "select * from default d1 INNER JOIN `travel-sample` t on (d1.join_day == t.id)"
        queries_to_run.append((query, 288)) # 288 for doc-per-day=1
        self.run_common_body(index_list=idx_list, queries_to_run=queries_to_run)

    ###############################
    #
    # Curl Whitelist Tests
    #
    ###############################

    # test_all_access_true from tuq_curl_whitelist.py
    def run_test_all_access_true(self):
        self.rest.create_whitelist(self.master, {"all_access": True,
                                                 "allowed_urls": ["blahahahahaha"], "disallowed_urls": ["fake"]})
        curl_output = self.shell.execute_command("%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9"
                                                 % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl(" + url + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        self.rest.create_whitelist(self.master, {"all_access": True,
                                                 "allowed_urls": None,
                                                 "disallowed_urls": None})
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    # test_allowed_url from tuq_curl_whitelist.py
    def run_test_allowed_url(self):
        self.rest.create_whitelist(self.master, {"all_access": False, "allowed_urls": ["https://maps.googleapis.com"]})

        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(self.jira_error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],self.jira_error_msg))

        curl_output = self.shell.execute_command("%s --get https://maps.googleapis.com/maps/api/geocode/json "
                                                 "-d 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'"
                                                 % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options= "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query="select curl("+ url +", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    # test_disallowed_url from tuq_curl_whitelist.py
    def run_test_disallowed_url(self):
        self.rest.create_whitelist(self.master, {"all_access": False, "disallowed_urls": ["https://maps.googleapis.com"]})

        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(self.jira_error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'], self.jira_error_msg))

        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options= "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query="select curl("+ url +", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(self.google_error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'], self.google_error_msg))
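
    # For reference, the three whitelist shapes exercised above; all_access is
    # the master switch, and the URL lists only take effect when it is False
    # (this summarizes the payloads used in these tests, not the full API):
    #
    #   {"all_access": True}                                        # curl() may reach any URL
    #   {"all_access": False, "allowed_urls": ["https://host"]}     # explicit allow list
    #   {"all_access": False, "disallowed_urls": ["https://host"]}  # explicit deny list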

    ###############################
    #
    # Backfill Tests
    #
    ###############################

    # test_backfill from tuq_n1ql_backfill.py
    def run_test_backfill(self):
        if self.reset_settings:
            self.set_directory()
            self.set_tmpspace()
        try:
            if self.change_directory:
                self.set_directory()

            self.run_cbq_query(query="CREATE INDEX join_day on standard_bucket0(join_day)")
            for bucket in self.buckets:
                if bucket.name == 'standard_bucket0':
                    self._wait_for_index_online(bucket, 'join_day')

            thread1 = threading.Thread(name='monitor_backfill', target=self.monitor_backfill)
            thread1.setDaemon(True)
            thread2 = threading.Thread(name='execute_query', target=self.execute_query)
            thread1.start()
            thread2.start()
            thread2.join()

            if thread1.isAlive():
                self.fail("The backfill thread never registered any files")
            else:
                self.log.info("The backfill directory was being used during the query")
        finally:
            self.run_cbq_query(query="DROP INDEX standard_bucket0.join_day")

    def set_directory(self):
        # Try to create directory if it doesn't exist because backfill directories have to be created manually
        if self.create_directory:
            self.shell.create_directory(self.directory_path)
            self.shell.execute_command("chmod 777 %s" % self.directory_path)

        curl_output = self.shell.execute_command("%s -u Administrator:password -X POST -d 'queryTmpSpaceDir=%s' %s"
                                                 % (self.curl_path, self.directory_path, self.curl_url))
        expected_curl = self.convert_list_to_json(curl_output[0])
        self.log.info(expected_curl)
        return expected_curl

    def set_tmpspace(self):
        curl_output = self.shell.execute_command("%s -u Administrator:password -X POST -d 'queryTmpSpaceSize=%s' %s"
                                                 % (self.curl_path,  self.tmp_size, self.curl_url))
        expected_curl = self.convert_list_to_json(curl_output[0])
        self.log.info(expected_curl)
        return expected_curl
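
    # Both setters POST form fields to self.curl_url; assuming that URL points
    # at the query settings REST endpoint, the current values should be
    # readable back with a plain GET, e.g. (a sketch, not used by the tests):
    #
    #   curl_output = self.shell.execute_command("%s -u Administrator:password %s"
    #                                            % (self.curl_path, self.curl_url))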

    def monitor_backfill(self):
        sftp = self.shell._ssh_client.open_sftp()
        no_backfill = True
        while no_backfill:
            if sftp.listdir(self.directory_path):
                no_backfill = False
        self.log.info("backfill is being used")
        return
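
    # monitor_backfill busy-polls the backfill directory over SFTP and returns
    # as soon as any file appears; it runs as a daemon thread, so if no
    # backfill file is ever created it stays alive and the isAlive() check in
    # run_test_backfill() turns that into a failure.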

    def execute_query(self):
        actual_results = self.run_cbq_query(query="select * from default d JOIN standard_bucket0 s on "
                                                  "(d.join_day == s.join_day)")
        return actual_results
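
    # The join above is expected to scan enough index entries to spill into
    # the backfill directory configured by set_directory()/set_tmpspace(); the
    # monitor thread only needs to observe a file appearing while it runs.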

    ###############################
    #
    # Auditing Tests
    #
    ###############################

    def run_test_queryEvents(self):
        # For local testing, swap the two assignments below and use
        # getLocalIPAddress() instead of the master's address.
        self.ipAddress = self.master.ip
        # self.ipAddress = self.getLocalIPAddress()
        # self.eventID = self.input.param('id', None)
        auditTemp = audit(host=self.master)
        currentState = auditTemp.getAuditStatus()
        self.log.info("Current status of audit on ip - {0} is {1}".format(self.master.ip, currentState))
        if not currentState:
            self.log.info("Enabling Audit ")
            auditTemp.setAuditEnable('true')
            self.sleep(30)
        rest = RestConnection(self.master)
        self.setupLDAPSettings(rest)
        query_type = self.op_type
        user = self.master.rest_username
        source = 'ns_server'
        if query_type == 'select':
            if self.filter:
                self.execute_filtered_query()
            self.run_cbq_query(server=self.master, query="SELECT * FROM default LIMIT 100")
            expectedResults = {'node':'%s:%s' % (self.master.ip, self.master.port), 'status': 'success', 'isAdHoc': True,
                               'name': 'SELECT statement', 'real_userid': {'source': source, 'user': user},
                               'statement': 'SELECT * FROM default LIMIT 100',
                               'userAgent': 'Python-httplib2/$Rev: 259 $', 'id': self.eventID,
                               'description': 'A N1QL SELECT statement was executed'}
        elif query_type == 'insert':
            if self.filter:
                self.execute_filtered_query()
            self.run_cbq_query(server=self.master, query='INSERT INTO default ( KEY, VALUE ) VALUES ("1",{ "order_id": "1", "type": '
                                     '"order", "customer_id":"24601", "total_price": 30.3, "lineitems": '
                                     '[ "11", "12", "13" ] })')
            expectedResults = {'node': '%s:%s' % (self.master.ip, self.master.port), 'status': 'success', 'isAdHoc': True,
                               'name': 'INSERT statement', 'real_userid': {'source': source, 'user': user},
                               'statement': 'INSERT INTO default ( KEY, VALUE ) VALUES ("1",{ "order_id": "1", "type": '
                                            '"order", "customer_id":"24601", "total_price": 30.3, "lineitems": '
                                            '[ "11", "12", "13" ] })',
                               'userAgent': 'Python-httplib2/$Rev: 259 $', 'id': self.eventID,
                               'description': 'A N1QL INSERT statement was executed'}

        if query_type == 'delete':
            self.checkConfig(self.eventID, self.servers[1], expectedResults, n1ql_audit=True)
            if self.filter:
                self.checkFilter(self.unauditedID, self.servers[1])
        else:
            self.checkConfig(self.eventID, self.master, expectedResults, n1ql_audit=True)
            if self.filter:
                self.checkFilter(self.unauditedID, self.master)

    def getLocalIPAddress(self):
        # Connecting a UDP socket sends no traffic; it just makes the OS pick
        # the outbound interface, whose address we then read back.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(('couchbase.com', 80))
        return s.getsockname()[0]

    def setupLDAPSettings (self, rest):
        api = rest.baseUrl + 'settings/saslauthdAuth'
        params = urllib.urlencode({"enabled":'true',"admins":[],"roAdmins":[]})
        status, content, header = rest._http_request(api, 'POST', params)
        return status, content, header
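
    # settings/saslauthdAuth is the ns_server endpoint for external
    # (saslauthd/LDAP) authentication; presumably enabled here so audited
    # events carry a meaningful real_userid source.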

    def set_audit(self, set_disabled=False, disable_user=False):
        if set_disabled:
            curl_output = self.shell.execute_command("%s -u Administrator:password -X POST -d 'auditdEnabled=%s;disabled=%s' %s"
                                                     % (self.curl_path, 'true', ','.join(map(str, self.audit_codes)), self.audit_url))
        elif disable_user:
            curl_output = self.shell.execute_command("%s -u Administrator:password -X POST -d 'auditdEnabled=%s;disabledUsers=%s' %s"
                                                     % (self.curl_path, 'true', 'no_select/local', self.audit_url))
        else:
            curl_output = self.shell.execute_command("%s -u Administrator:password -X POST -d 'auditdEnabled=%s;disabled=' %s"
                                                     % (self.curl_path, 'true', self.audit_url))
        if "errors" in str(curl_output):
            self.log.error("Auditing settings were not set correctly")
        self.sleep(10)
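
    # For reference, the three -d payload shapes set_audit() sends (the whole
    # body is one quoted curl argument, so the semicolons travel verbatim):
    #
    #   auditdEnabled=true;disabled=<comma-separated event ids>
    #   auditdEnabled=true;disabledUsers=no_select/local
    #   auditdEnabled=true;disabled=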

    def execute_filtered_query(self):
        self.audit_codes.remove(self.eventID)
        self.set_audit(set_disabled=True)
        self.run_cbq_query(query="delete from default limit 1")

    def checkConfig(self, eventID, host, expectedResults, n1ql_audit=False):
        Audit = audit(eventID=eventID, host=host)
        fieldVerification, valueVerification = Audit.validateEvents(expectedResults, n1ql_audit)
        self.assertTrue(fieldVerification, "One of the fields is not matching")
        self.assertTrue(valueVerification, "Values for one of the fields is not matching")

    def checkFilter(self, eventID, host):
        Audit = audit(eventID=eventID, host=host)
        exists, entry = Audit.validateEmpty()
        self.assertTrue(exists, "There was an audit entry found. Audits for the code %s should not be logged. Here is the entry: %s" % (eventID, entry))

    ###############################
    #
    # XAttrs Tests
    #
    ###############################

    def run_test_system_xattr_composite_secondary_index(self):
        #self.reload_data()
        self.fail_if_no_buckets()
        self.system_xattr_data = self.create_xattr_data(type='system')
        # full path query non-leading
        index_statement = "CREATE INDEX idx1 ON default(meta().id, meta().xattrs._system1, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system1 FROM default where meta().id is not missing"
        self.run_xattrs_query(query, index_statement, '_system1', 'idx1', 'default', xattr_data=self.system_xattr_data)

        # full path query non-leading
        index_statement = "CREATE INDEX idx2 ON default(meta().id, meta().xattrs._system2, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system2 FROM default where meta().id is not missing"
        self.run_xattrs_query(query, index_statement, '_system2', 'idx2', 'default', xattr_data=self.system_xattr_data)

        # full path query non-leading
        index_statement = "CREATE INDEX idx3 ON default(meta().id, meta().xattrs._system3, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3 FROM default where meta().id is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx3', 'default', xattr_data=self.system_xattr_data)

        # partial path query non-leading
        index_statement = "CREATE INDEX idx4 ON default(meta().id, meta().xattrs._system3.field1, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3.field1 FROM default where meta().id is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx4', 'default', xattr_data=self.system_xattr_data)

        # nested partial path query non-leading
        index_statement = "CREATE INDEX idx5 ON default(meta().id, meta().xattrs._system3.field1.sub_field1a, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3.field1.sub_field1a FROM default where meta().id is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx5', 'default', xattr_data=self.system_xattr_data)

        # multiple paths single xattr query non-leading
        index_statement = "CREATE INDEX idx6 ON default(meta().id, meta().xattrs._system3, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3, meta().xattrs._system3.field1, meta().xattrs._system3.field1.sub_field1a FROM default where meta().id is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx6', 'default', xattr_data=self.system_xattr_data)

        # deleted doc xattr query non-leading
        index_statement = "CREATE INDEX idx7 ON default(meta().id, meta().xattrs._system3, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3 FROM default where meta().id is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx7', 'default', xattr_data=self.system_xattr_data, deleted_compare=True)

        # partial index non-leading
        index_statement = "CREATE INDEX idx8 ON default(meta().id, meta().xattrs._system3, join_day) where meta().xattrs._system3.field1.sub_field1a > 0 USING " + self.index_type
        query = "SELECT meta().xattrs._system3 FROM default where meta().id is not missing and meta().xattrs._system3.field1.sub_field1a > 0"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx8', 'default', xattr_data=self.system_xattr_data)

        # functional index non-leading
        index_statement = "CREATE INDEX idx9 ON default(meta().id, ABS(meta().xattrs._system3.field1.sub_field1a) + 2, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3 FROM default where meta().id is not missing and ABS(meta().xattrs._system3.field1.sub_field1a) + 2 > 0"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx9', 'default', xattr_data=self.system_xattr_data)

        # full path query leading
        index_statement = "CREATE INDEX idx10 ON default(meta().xattrs._system1, meta().id, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system1 FROM default where meta().xattrs._system1 is not missing"
        self.run_xattrs_query(query, index_statement, '_system1', 'idx10', 'default', xattr_data=self.system_xattr_data)

        # full path query leading
        index_statement = "CREATE INDEX idx11 ON default(meta().xattrs._system2, meta().id, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system2 FROM default where meta().xattrs._system2 is not missing"
        self.run_xattrs_query(query, index_statement, '_system2', 'idx11', 'default', xattr_data=self.system_xattr_data)

        # full path query leading
        index_statement = "CREATE INDEX idx12 ON default(meta().xattrs._system3, meta().id, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3 FROM default where meta().xattrs._system3 is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx12', 'default', xattr_data=self.system_xattr_data)

        # partial path query leading
        index_statement = "CREATE INDEX idx13 ON default(meta().xattrs._system3.field1, meta().id, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3.field1 FROM default where meta().xattrs._system3.field1 is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx13', 'default', xattr_data=self.system_xattr_data)

        # nested partial path query leading
        index_statement = "CREATE INDEX idx14 ON default(meta().xattrs._system3.field1.sub_field1a, meta().id, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3.field1.sub_field1a FROM default where meta().xattrs._system3.field1.sub_field1a is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx14', 'default', xattr_data=self.system_xattr_data)

        # multiple paths single xattr query leading
        index_statement = "CREATE INDEX idx15 ON default(meta().xattrs._system3.field1.sub_field1a, meta().id, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3, meta().xattrs._system3.field1, meta().xattrs._system3.field1.sub_field1a FROM default where meta().xattrs._system3.field1.sub_field1a is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx15', 'default', xattr_data=self.system_xattr_data)

        # deleted doc xattr query leading
        index_statement = "CREATE INDEX idx16 ON default(meta().xattrs._system3, meta().id, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3 FROM default where meta().xattrs._system3 is not missing"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx16', 'default', xattr_data=self.system_xattr_data, deleted_compare=True)

        # partial index leading
        index_statement = "CREATE INDEX idx17 ON default(meta().xattrs._system3, meta().id, join_day) where meta().xattrs._system3.field1.sub_field1a > 0 USING " + self.index_type
        query = "SELECT meta().xattrs._system3 FROM default where meta().xattrs._system3 is not missing and meta().xattrs._system3.field1.sub_field1a > 0"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx17', 'default', xattr_data=self.system_xattr_data)

        # functional index leading
        index_statement = "CREATE INDEX idx18 ON default(ABS(meta().xattrs._system3.field1.sub_field1a) + 2, meta().id, join_day) USING " + self.index_type
        query = "SELECT meta().xattrs._system3 FROM default where ABS(meta().xattrs._system3.field1.sub_field1a) + 2 > 0"
        self.run_xattrs_query(query, index_statement, '_system3', 'idx18', 'default', xattr_data=self.system_xattr_data)
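
    # Summary of the eighteen cases above: idx1-idx9 place the xattr key in a
    # non-leading position and idx10-idx18 repeat the same shapes (full path,
    # partial path, nested path, multi-path, deleted docs, partial index,
    # functional index) with the xattr key leading.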

    def reload_data(self):
        self._all_buckets_delete(self.master)
        self._bucket_creation()
        self.gens_load = self.gen_docs(self.docs_per_day)
        self.load(self.gens_load, batch_size=1000, flag=self.item_flag)
        self.create_primary_index_for_3_0_and_greater()

    def create_xattr_data(self, type="system"):
        cluster = Cluster('couchbase://'+str(self.master.ip))
        authenticator = PasswordAuthenticator(self.username, self.password)
        cluster.authenticate(authenticator)
        cb = cluster.open_bucket('default')
        docs = self.get_meta_ids()
        self.log.info("Docs: " + str(docs[0:5]))
        xattr_data = []
        self.log.info("Adding xattrs to data")
        val = 0
        for doc in docs:
            if type == "system":
                rv = cb.mutate_in(doc["id"], SD.upsert('_system1', val, xattr=True, create_parents=True))
                xattr_data.append({'_system1': val})
                rv = cb.mutate_in(doc["id"], SD.upsert('_system2', {'field1': val, 'field2': val*val}, xattr=True, create_parents=True))
                xattr_data.append({'_system2': {'field1': val, 'field2': val*val}})
                rv = cb.mutate_in(doc["id"], SD.upsert('_system3', {'field1': {'sub_field1a': val, 'sub_field1b': val*val}, 'field2': {'sub_field2a': 2*val, 'sub_field2b': 2*val*val}}, xattr=True, create_parents=True))
                xattr_data.append({'_system3': {'field1': {'sub_field1a': val, 'sub_field1b': val*val}, 'field2': {'sub_field2a': 2*val, 'sub_field2b': 2*val*val}}})
            if type == "user":
                rv = cb.mutate_in(doc["id"], SD.upsert('user1', val, xattr=True, create_parents=True))
                xattr_data.append({'user1': val})
                rv = cb.mutate_in(doc["id"], SD.upsert('user2', {'field1': val, 'field2': val*val}, xattr=True, create_parents=True))
                xattr_data.append({'user2': {'field1': val, 'field2': val*val}})
                rv = cb.mutate_in(doc["id"], SD.upsert('user3', {'field1': {'sub_field1a': val, 'sub_field1b': val*val}, 'field2': {'sub_field2a': 2*val, 'sub_field2b': 2*val*val}}, xattr=True, create_parents=True))
                xattr_data.append({'user3': {'field1': {'sub_field1a': val, 'sub_field1b': val*val}, 'field2': {'sub_field2a': 2*val, 'sub_field2b': 2*val*val}}})
            val = val + 1

        self.log.info("Completed adding " + type + "xattrs to data to " + str(val) + " docs")
        return xattr_data
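
    # A minimal read-back sketch for spot-checking a written xattr (assuming
    # the same Python SDK 2.x bucket object as above; doc_id is hypothetical):
    #
    #   rv = cb.lookup_in(doc_id, SD.get('_system3', xattr=True))
    #   print rv[0]  # the _system3 dict written by create_xattr_data()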

    def get_meta_ids(self):
        return self.get_values_for_compare('meta().id')

    def get_values_for_compare(self, field):
        query_response = self.run_cbq_query("SELECT " + field + " FROM default")
        docs = sorted(query_response['results'])
        return docs

    def run_xattrs_query(self, query, index_statement, xattr_name, index_name, bucket_name, xattr_data=[], compare_fields=[], primary_compare=True, deleted_compare=False, with_retain=False, xattr_type="system", with_aggs=False, delete_leading=False):
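        # Flow: create the index (if given) and confirm EXPLAIN picks it, run
        # the query, then compare against either a use index(`#primary`) run
        # (primary_compare) or the recorded xattr_data; deleted_compare then
        # deletes ten docs and re-checks, and with_retain re-creates the index
        # afterwards to check whether xattrs of deleted docs are retained.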
        if index_statement != "":
            self.run_cbq_query(index_statement)
            self._wait_for_index_online(bucket_name, index_name)
            query_response = self.run_cbq_query("EXPLAIN " + query)
            self.assertTrue(index_name in str(query_response['results'][0]))
            if with_aggs:
                self.assertTrue("index_group_aggs" in str(query_response['results'][0]))

        query_response = self.run_cbq_query(query)
        docs_1 = query_response['results']
        self.log.info("XAttrs: " + str(docs_1[0:5]))
        compare = []

        if primary_compare:
            temp_query = query.split("FROM " + bucket_name)
            compare_query = temp_query[0] + "FROM " + bucket_name + ' use index(`#primary`)' + temp_query[1]
            compare_response = self.run_cbq_query(compare_query)
            compare = compare_response['results']
        else:
            if len(compare_fields) == 0:
                compare = [xattr for xattr in xattr_data if xattr_name in xattr]
            elif len(compare_fields) == 1:
                compare = [{compare_fields[0]: xattr[xattr_name][compare_fields[0]]} for xattr in xattr_data if xattr_name in xattr]
            elif len(compare_fields) == 2:
                compare = [{compare_fields[1]: xattr[xattr_name][compare_fields[0]][compare_fields[1]]} for xattr in xattr_data if xattr_name in xattr]

        # Virtual xattrs can't be compared the way the other results are,
        # because they are autogenerated by Couchbase; we therefore run
        # different checks and return the results of the query itself instead.
        if xattr_type == "virtual":
            for docs in docs_1:
                if not compare_fields:
                    self.assertTrue('$document' in str(docs) and 'CAS' in str(docs) and 'datatype' in str(docs) and
                                    'deleted' in str(docs) and 'exptime' in str(docs) and 'flags' in str(docs) and
                                    'last_modified' in str(docs) and 'seqno' in str(docs) and
                                    'value_bytes' in str(docs) and 'vbucket_uuid' in str(docs))
                else:
                    self.assertTrue(docs == {"deleted": False})
        else:
            self.log.info("Compare: " + str(compare[0:5]))
            self.assertTrue(sorted(docs_1) == sorted(compare))

        if deleted_compare:
            meta_ids = self.get_meta_ids()
            delete_ids = meta_ids[0:10]
            for id in delete_ids:
                query_response = self.run_cbq_query('DELETE FROM default USE KEYS[\"' + id['id'] + '\"] returning meta().id')
                self.log.info(query_response['results'])
                self.assertTrue(query_response['results'][0]["id"] == id["id"])

            new_meta_ids = self.get_meta_ids()
            for new_id in new_meta_ids:
                self.assertTrue(new_id not in delete_ids)

            query_response = self.run_cbq_query("EXPLAIN " + query)
            self.assertTrue(index_name in str(query_response['results'][0]))

            query_response_2 = self.run_cbq_query(query)
            docs_2 = query_response_2['results']
            self.log.info("XAttrs: " + str(docs_2[0:5]))

            temp_query = query.split("FROM " + bucket_name)
            compare_query = temp_query[0] + "FROM " + bucket_name + ' use index(`#primary`)' + temp_query[1]

            compare_response = self.run_cbq_query(compare_query)
            compare_docs = compare_response['results']

            self.log.info("Compare: " + str(compare_docs[0:5]))

            if with_retain and not xattr_type == 'user':
                self.assertTrue(len(docs_1)-10 == len(compare_docs))
                if delete_leading:
                    self.assertTrue(len(docs_2) == len(compare_docs))
                    self.assertTrue(sorted(docs_2) == sorted(compare_docs))
                else:
                    self.assertTrue(len(docs_2)-10 == len(compare_docs))
                    self.assertTrue(sorted(docs_1) == sorted(docs_2))
            else:
                self.assertTrue(sorted(docs_2) == sorted(compare_docs))
                if not with_aggs:
                    self.assertTrue(len(docs_1)-10 == len(docs_2))

        if index_statement != "":
            self.run_cbq_query("DROP INDEX default." + index_name)

        if deleted_compare and with_retain:
            self.run_cbq_query(index_statement)
            self._wait_for_index_online(bucket_name, index_name)

            query_response = self.run_cbq_query("EXPLAIN " + query)
            self.assertTrue(index_name in str(query_response['results'][0]))

            query_response_1 = self.run_cbq_query(query)
            docs_3 = query_response_1['results']

            if delete_leading or xattr_type == 'user':
                self.assertTrue(len(docs_3) == len(compare_docs))
                self.assertTrue(sorted(docs_3) == sorted(compare_docs))
            else:
                self.assertTrue(len(docs_3)-10 == len(compare_docs))
                self.assertTrue(sorted(docs_2) == sorted(docs_3))

            self.run_cbq_query("DROP INDEX default." + index_name)

            self.reload_data()
            self.system_xattr_data = self.create_xattr_data(type=xattr_type)
        # Virtual xattrs can't be compared the way the other results are,
        # because they are autogenerated by Couchbase; we therefore run
        # different checks and return the results of the query itself instead.
        if xattr_type == 'virtual':
            return docs_1
        else:
            return compare
Example #37
    def check_onesaslbucket_auth(self):
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            for bucket in self.buckets:
                try:
                    if (bucket.saslPassword != ''):
                        #sasl
                        o = shell.execute_commands_inside(
                            '%s/cbq -c %s:%s -q' %
                            (self.path, bucket.name, bucket.saslPassword),
                            'CREATE PRIMARY INDEX ON %s USING GSI' %
                            bucket.name, '', '', '', '', '')
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside(
                            '%s/cbq -c %s:%s -q' %
                            (self.path, bucket.name, bucket.saslPassword),
                            'select *,join_day from %s limit 10' % bucket.name,
                            '', '', '', '', '')
                        if self.analytics:
                            self.query = 'select join_day from %s limit 10' % bucket.name
                            o = self.run_cbq_query()
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside(
                            '%s/cbq -c %s:%s -q' %
                            (self.path, bucket.name, 'wrong'),
                            'select * from %s limit 10' % bucket.name, '', '',
                            '', '', '')
                        self.assertTrue("AuthorizationFailed" in o)

                        o = shell.execute_commands_inside(
                            '%s/cbq -c %s:%s -q' % (self.path, '', 'wrong'),
                            'select * from %s limit 10' % bucket.name, '', '',
                            '', '', '')
                        self.assertEqual('FAIL', o[7:])
                        o = shell.execute_commands_inside(
                            '%s/cbq -c %s:%s -q' %
                            (self.path, 'wrong', bucket.saslPassword),
                            'select * from %s limit 10' % bucket.name, '', '',
                            '', '', '')
                        self.assertTrue("AuthorizationFailed" in o)

                        queries = [
                            '\set -creds user:pass;',
                            'select *,join_day from bucketname limit 10;'
                        ]
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            bucket.name, bucket.saslPassword, bucket.name, '')
                        self.assertTrue("requestID" in o)
                        queries = [
                            '\set -creds user:pass;',
                            'select * from bucketname union all select * from default limit 10;'
                        ]
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            'Administrator', 'password', bucket.name, '')
                        self.assertTrue("requestID" in o)
                        queries = [
                            '\set -creds user:pass;',
                            'SELECT buck.email FROM  bucketname buck LEFT JOIN default on keys "query-testemployee10153.1877827-0";'
                        ]
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            'Administrator', 'password', bucket.name, '')
                        self.assertTrue("requestID" in o)
                        queries = [
                            '\set -creds user:pass;',
                            'SELECT buck.email FROM  bucketname buck LEFT JOIN default on keys "query-testemployee10153.1877827-0";'
                        ]
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            bucket.name, bucket.saslPassword, bucket.name, '')
                        self.assertTrue("requestID" in o)

                        queries = [
                            'select count(*) from bucketname  union all select count(*) from default;'
                        ]
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries, '',
                            bucket.saslPassword, bucket.name, '')
                        self.assertTrue("AuthorizationFailed" in o)

                        queries = [
                            '\set -creds user:pass;',
                            'select *,email,join_day from bucketname;'
                        ]
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            'Administrator', 'password', bucket.name, '')
                        self.assertTrue("requestID" in o)
                        queries = [
                            '\set -creds user:pass;',
                            'create primary index on default;',
                            'select email,join_day from bucketname union all select email,join_day from default;'
                        ]
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            bucket.name, bucket.saslPassword, bucket.name, '')
                        self.assertTrue("requestID" in o)

                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            'wrong', 'wrong', bucket.name, '')
                        self.assertTrue("AuthorizationFailed" in o)
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            'wrong', bucket.saslPassword, bucket.name, '')
                        self.assertTrue("AuthorizationFailed" in o)
                        o = shell.execute_commands_inside(
                            '%s/cbq -quiet' % (self.path), '', queries,
                            bucket.name, 'wrong', bucket.name, '')
                        self.assertTrue("AuthorizationFailed" in o)
                        o = shell.execute_commands_inside(
                            '%s/cbq -q -u=%s -p=%s' %
                            (self.path, 'Administrator', 'password'),
                            'select * from %s limit 10;' % bucket.name, '', '',
                            '', '', '')
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside(
                            '%s/cbq -q -u=%s -p=%s' %
                            (self.path, bucket.name, bucket.saslPassword),
                            'select * from %s limit 10;' % bucket.name, '', '',
                            '', '', '')
                        self.assertTrue("requestID" in o)
                        #nonsasl
                        o = shell.execute_commands_inside(
                            '%s/cbq -q -u %s -p %s' %
                            (self.path, 'Administrator', 'password'),
                            'select * from default limit 10;', '', '', '', '',
                            '')
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside(
                            '%s/cbq -q -u %s -p %s' %
                            (self.path, bucket.name, bucket.saslPassword),
                            'select * from default limit 10;', '', '', '', '',
                            '')
                        self.assertTrue("requestID" in o)
                        o = shell.execute_commands_inside(
                            '%s/cbq -q ' % (self.path),
                            'select * from default limit 10;', '', '', '', '',
                            '')
                        self.assertTrue("requestID" in o)
                        break

                finally:
                    shell.disconnect()

    def check_onesaslbucket_auth(self):
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            for bucket in self.buckets:
                try:
                    if bucket.saslPassword:
                        print('sasl')
                        o = shell.execute_commands_inside('%s/go_cbq -q -u %s -p %s' % (testconstants.LINUX_COUCHBASE_BIN_PATH, bucket.name, bucket.saslPassword), 'select * from %s limit 10' % bucket.name, '', '', '', '', '')
                        print o
                        o = shell.execute_commands_inside('%s/go_cbq -q -u %s -p %s' % (testconstants.LINUX_COUCHBASE_BIN_PATH, bucket.name, 'wrong'), 'select * from %s limit 10' % bucket.name, '', '', '', '', '')
                        print o
                        self.assertTrue("AuthorizationFailed" in o)
                        o = shell.execute_commands_inside('%s/go_cbq -q -u %s -p %s' % (testconstants.LINUX_COUCHBASE_BIN_PATH, '', 'wrong'), 'select * from %s limit 10' % bucket.name, '', '', '', '', '')
                        self.assertEqual('FAIL', o[7:])
                        o = shell.execute_commands_inside('%s/go_cbq -q -u %s -p %s' % (testconstants.LINUX_COUCHBASE_BIN_PATH, 'wrong', bucket.saslPassword), 'select * from %s limit 10' % bucket.name, '', '', '', '', '')
                        print o
                        self.assertTrue("AuthorizationFailed" in o)
                        queries = ['\set -creds user:pass;', 'select * from bucketname limit 10;']
                        o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH), '', queries, bucket.name, bucket.saslPassword, bucket.name, '')
                        print o
                        o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH), '', queries, '', '', bucket.name, '')
                        print o
                        o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH), '', queries, '', bucket.saslPassword, bucket.name, '')
                        print o
                        o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH), '', queries, bucket.name, '', bucket.name, '')
                        print o
                        o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH), '', queries, 'wrong', 'wrong', bucket.name, '')
                        print o
                        o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH), '', queries, 'wrong', bucket.saslPassword, bucket.name, '')
                        print o
                        o = shell.execute_commands_inside('%s/go_cbq -quiet' % (testconstants.LINUX_COUCHBASE_BIN_PATH), '', queries, bucket.name, 'wrong', bucket.name, '')
                        print o
                    else:
                        o = shell.execute_commands_inside('%s/go_cbq -q -u=%s -p=%s' % (testconstants.LINUX_COUCHBASE_BIN_PATH, 'Admin', ''), 'select * from %s limit 10;' % bucket.name, '', '', '', '', '')
                        print o
                        self.assertTrue("InvalidPassword" in o)
                finally:
                    shell.disconnect()

    def test_history(self):
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            type2 = shell.extract_remote_info().distribution_type
            queries = []
            queries2 = []
            queries3 = []
            queries5 = []
            queries6 = []
            for bucket in self.buckets:
                if type2.lower() == 'windows':
                    queries = ["\set histfile c:\\tmp\\history.txt;"]
                    queries2 = ["\Alias p c:\\tmp\\history2.txt;"]
                    queries3 = ["\set $a c:\\tmp\\history3.txt;"]
                    queries5 = ['\set $a "\\abcde";']
                    queries6 = ["\set $a '\\abcde';"]
                elif type2.lower() == "linux":
                    queries = ["\set histfile /tmp/history;"]
                    queries2 = ["\Alias p /tmp/history2;"]
                    queries3 = ["\set $a /tmp/history3;"]
                    queries5 = ['\set $a "/abcde";']
                    queries6 = ["\set $a /abcde;"]

                queries.extend([
                    '\ALIAS tempcommand create primary index on bucketname;',
                    '\\\\tempcommand;',
                    '\ALIAS tempcommand2 select * from bucketname limit 1;',
                    '\\\\tempcommand2;', '\ALIAS;', '\echo \\\\tempcommand;',
                    '\echo \\\\tempcommand2;', '\echo histfile;'
                ])
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', queries, '', '',
                    bucket.name, '')
                if type2.lower() == "linux":
                    self.assertTrue('/tmp/history' in o)

                queries2.extend([
                    "\set histfile \\\\p;", "\echo histfile;",
                    "\set histfile '\\\\p';", "\echo histfile;"
                ])
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', queries2, '', '',
                    bucket.name, '')

                if type2.lower() == "linux":
                    self.assertTrue('/tmp/history2' in o)
                    self.assertTrue('\\p' in o)

                queries3.extend(["\set histfile $a;", "\echo histfile;"])
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', queries3, '', '',
                    bucket.name, '')

                queries4 = [
                    "\push histfile newhistory.txt;", "\echo histfile;",
                    '\ALIAS tempcommand create primary index on bucketname;',
                    '\\\\tempcommand;',
                    '\ALIAS tempcommand2 select * from bucketname limit 1;',
                    '\\\\tempcommand2;', '\ALIAS;', '\echo \\\\tempcommand;',
                    '\echo \\\\tempcommand2;', '\echo histfile;'
                ]
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', queries4, '', '',
                    bucket.name, '')

                queries5.append("\echo $a;")
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', queries5, '', '',
                    bucket.name, '')

                queries6.append("\echo $a;")
                o = shell.execute_commands_inside(
                    '%s/cbq -quiet' % (self.path), '', queries6, '', '',
                    bucket.name, '')

    def test_connect_disconnect(self):
        for server in self.servers:
            shell = RemoteMachineShellConnection(server)
            for bucket in self.buckets:
                queries = [
                    '\connect http://localhost:8091;',
                    'create primary index on bucketname;'
                ]
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                queries = [
                    '\connect http://localhost:8091;',
                    'drop primary index on bucketname;'
                ]
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                # wrong disconnect
                queries = [
                    '\disconnect http://localhost:8091;',
                    'create primary index on bucketname;'
                ]
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                # o currently comes back as status:FAIL
                self.assertTrue("Toomanyinputargumentstocommand" in o)
                # wrong port
                queries = [
                    '\connect http://localhost:8097;',
                    'create primary index on bucketname;'
                ]
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                # o currently comes back as status:FAIL
                self.assertTrue("Connectionfailed" in o)
                # wrong url including http
                queries = [
                    '\connect http://localhost345:8097;',
                    'create primary index on bucketname;'
                ]
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                # o currently comes back as status:FAIL
                self.assertTrue("Connectionfailed" in o)
                # wrong url not including http
                queries = [
                    '\connect localhost3458097;',
                    'create primary index on bucketname;'
                ]
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                # o currently comes back as status:FAIL
                self.assertTrue(
                    "InvalidinputURLmissingportinaddresslocalhost" in o)
                queries = ['\disconnect', 'drop primary index on bucketname;']
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                # o currently comes back as status:FAIL
                self.assertTrue("Toomanyinputargumentstocommand" in o)
                queries = [
                    '\disconnect', 'create primary index on bucketname;'
                ]
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                # o currently comes back as status:FAIL
                self.assertTrue("Toomanyinputargumentstocommand" in o)
                queries = [
                    '\connect http://localhost:8091;',
                    'create primary index on bucketname;',
                    'drop primary index on bucketname;'
                ]
                o = shell.execute_commands_inside(self.cbqpath, '', queries,
                                                  '', '', bucket.name, '')
                self.assertTrue(
                    "GSICreatePrimaryIndex()-cause:Index#primaryalreadyexists."
                    in o)
Example #41
class QueryTests(BaseTestCase):
    def setUp(self):
        if not self._testMethodName == 'suite_setUp':
            self.skip_buckets_handle = True
        super(QueryTests, self).setUp()
        self.version = self.input.param("cbq_version", "git_repo")
        if self.input.tuq_client and "client" in self.input.tuq_client:
            self.shell = RemoteMachineShellConnection(
                self.input.tuq_client["client"])
        else:
            self.shell = RemoteMachineShellConnection(self.master)
        if not self._testMethodName == 'suite_setUp':
            self._start_command_line_query(self.master)
        self.use_rest = self.input.param("use_rest", True)
        self.max_verify = self.input.param("max_verify", None)
        self.buckets = RestConnection(self.master).get_buckets()
        self.docs_per_day = self.input.param("doc-per-day", 49)
        self.item_flag = self.input.param("item_flag", 4042322160)
        self.n1ql_port = self.input.param("n1ql_port", 8093)
        self.dataset = self.input.param("dataset", "default")
        self.gens_load = self.generate_docs(self.docs_per_day)
        if self.input.param("gomaxprocs", None):
            self.configure_gomaxprocs()
        self.gen_results = TuqGenerators(
            self.log, self.generate_full_docs_list(self.gens_load))
        # temporary for MB-12848
        self.create_primary_index_for_3_0_and_greater()

    def suite_setUp(self):
        try:
            self.load(self.gens_load, flag=self.item_flag)
            self.create_primary_index_for_3_0_and_greater()
            if not self.input.param("skip_build_tuq", True):
                self._build_tuq(self.master)
            self.skip_buckets_handle = True
        except:
            self.log.error('SUITE SETUP FAILED')
            self.tearDown()

    def tearDown(self):
        if self._testMethodName == 'suite_tearDown':
            self.skip_buckets_handle = False
        super(QueryTests, self).tearDown()

    def suite_tearDown(self):
        if not self.input.param("skip_build_tuq", False):
            if hasattr(self, 'shell'):
                self.shell.execute_command("killall /tmp/tuq/cbq-engine")
                self.shell.execute_command("killall tuqtng")
                self.shell.disconnect()

##############################################################################################
#
#   SIMPLE CHECKS
##############################################################################################

    def test_simple_check(self):
        for bucket in self.buckets:
            query_template = 'FROM %s select $str0, $str1 ORDER BY $str0,$str1 ASC' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_simple_negative_check(self):
        queries_errors = {
            'SELECT $str0 FROM {0} WHERE COUNT({0}.$str0)>3':
            'Aggregates not allowed in WHERE',
            'SELECT *.$str0 FROM {0}': 'syntax error',
            'SELECT *.* FROM {0} ... ERROR': 'syntax error',
            'FROM %s SELECT $str0 WHERE id=null': 'syntax error',
        }
        self.negative_common_body(queries_errors)

    def test_consistent_simple_check(self):
        queries = [self.gen_results.generate_query('SELECT $str0, $int0, $int1 FROM %s ' +\
                    'WHERE $str0 IS NOT NULL AND $int0<10 ' +\
                    'OR $int1 = 6 ORDER BY $int0, $int1'),
                   self.gen_results.generate_query('SELECT $str0, $int0, $int1 FROM %s ' +\
                    'WHERE $int1 = 6 OR $str0 IS NOT NULL AND ' +\
                    '$int0<10 ORDER BY $int0, $int1')]
        for bucket in self.buckets:
            actual_result1 = self.run_cbq_query(queries[0] % bucket.name)
            actual_result2 = self.run_cbq_query(queries[1] % bucket.name)
            self.assertTrue(
                actual_result1['results'] == actual_result2['results'],
                "Results are inconsistent.Difference: %s %s %s %s" %
                (len(actual_result1['results']), len(
                    actual_result2['results']),
                 actual_result1['results'][:100],
                 actual_result2['results'][:100]))
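
    # The two templates differ only in the order of the OR'd predicates; since
    # AND binds tighter than OR, both parse to the same filter, so the two
    # result sets must match.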

    def test_simple_nulls(self):
        queries = ['SELECT id FROM %s WHERE id=NULL or id="null"']
        for bucket in self.buckets:
            for query in queries:
                actual_result = self.run_cbq_query(query % (bucket.name))
                self._verify_results(actual_result['results'], [])

##############################################################################################
#
#   LIMIT OFFSET CHECKS
##############################################################################################

    def test_limit_offset(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10 OFFSET 10' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)

    def test_limit_offset_zero(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 0' % (
                bucket.name)
            self.query = self.gen_results.generate_query(query_template)
            actual_result = self.run_cbq_query()
            self.assertEquals(
                actual_result['results'], [],
                "Results are incorrect.Actual %s.\n Expected: %s.\n" %
                (actual_result['results'], []))
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10 OFFSET 0' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self.assertEquals(
                actual_result['results'], expected_result,
                "Results are incorrect.Actual %s.\n Expected: %s.\n" %
                (actual_result['results'], expected_result))

    def test_limit_offset_negative_check(self):
        queries_errors = {
            'SELECT DISTINCT $str0 FROM {0} LIMIT -1':
            'Parse Error - syntax error',
            'SELECT DISTINCT $str0 FROM {0} LIMIT 1.1':
            'Parse Error - syntax error',
            'SELECT DISTINCT $str0 FROM {0} OFFSET -1':
            'Parse Error - syntax error',
            'SELECT DISTINCT $str0 FROM {0} OFFSET 1.1':
            'Parse Error - syntax error'
        }
        self.negative_common_body(queries_errors)

##############################################################################################
#
#   ALIAS CHECKS
##############################################################################################

    def test_simple_alias(self):
        for bucket in self.buckets:
            query_template = 'SELECT COUNT($str0) AS COUNT_EMPLOYEE FROM %s' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self.assertEquals(
                actual_result['results'], expected_result,
                "Results are incorrect.Actual %s.\n Expected: %s.\n" %
                (actual_result['results'], expected_result))

            query_template = 'SELECT COUNT(*) + 1 AS COUNT_EMPLOYEE FROM %s' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            expected_result = [{
                "COUNT_EMPLOYEE":
                expected_result[0]['COUNT_EMPLOYEE'] + 1
            }]
            self.assertEquals(
                actual_result['results'], expected_result,
                "Results are incorrect.Actual %s.\n Expected: %s.\n" %
                (actual_result['results'], expected_result))

    def test_simple_negative_alias(self):
        queries_errors = {
            'SELECT $str0._last_name as *':
            'syntax error',
            'SELECT $str0._last_name as DATABASE ?':
            'syntax error',
            'SELECT $str0 AS NULL FROM {0}':
            'syntax error',
            'SELECT $str1 as $str0, $str0 FROM {0}':
            'Duplicate result alias name',
            'SELECT test.$obj0 as points FROM {0} AS TEST ' + 'GROUP BY $obj0 AS GROUP_POINT':
            'syntax error'
        }
        self.negative_common_body(queries_errors)

    def test_alias_from_clause(self):
        queries_templates = ['SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ORDER BY points',
                   'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test WHERE test.$int0 >0'  +\
                   ' ORDER BY points',
                   'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ' +\
                   'GROUP BY test.$obj0.$_obj0_int0 ORDER BY points']
        for bucket in self.buckets:
            for query_template in queries_templates:
                actual_result, expected_result = self.run_query_from_template(
                    query_template % (bucket.name))
                self._verify_results(actual_result['results'], expected_result)

    def test_alias_from_clause_group(self):
        for bucket in self.buckets:
            query_template = 'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ' %(bucket.name) +\
                         'GROUP BY $obj0.$_obj0_int0 ORDER BY points'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_order_desc(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0 AS name_new FROM %s AS test ORDER BY name_new DESC' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_order_asc(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0 AS name_new FROM %s AS test ORDER BY name_new ASC' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT COUNT(TEST.$str0) from %s AS TEST' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_unnest(self):
        for bucket in self.buckets:
            query_template = 'SELECT count(skill) FROM %s AS emp UNNEST emp.$list_str0 AS skill' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

            query_template = 'SELECT count(skill) FROM %s AS emp UNNEST emp.$list_str0 skill' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)
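            # Note: both templates above are equivalent; in N1QL the AS
            # keyword is optional in UNNEST (and FROM) aliases.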

##############################################################################################
#
#   ORDER BY CHECKS
##############################################################################################

    def test_order_by_check(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $str1, $obj0.$_obj0_int0 points FROM %s'  % (bucket.name) +\
            ' ORDER BY $str1, $str0, $obj0.$_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)
            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $obj0.$_obj0_int0, $str0, $str1'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_alias(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1, $obj0 AS points FROM %s'  % (bucket.name) +\
            ' AS test ORDER BY $str1 DESC, points DESC'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_alias_arrays(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1, $obj0, $list_str0[0] AS SKILL FROM %s'  % (
                                                                            bucket.name) +\
            ' AS TEST ORDER BY SKILL, $str1, TEST.$obj0'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_alias_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT $int0, $int1, count(*) AS emp_per_month from %s'% (
                                                                            bucket.name) +\
            ' WHERE $int1 >7 GROUP BY $int0, $int1 ORDER BY emp_per_month, $int1, $int0'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1 AS TITLE FROM %s GROUP'  % (bucket.name) +\
            ' BY $str1 ORDER BY MIN($int1), $str1'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_precedence(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $str0, $str1'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $str1, $str0'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_satisfy(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $list_obj0 FROM %s AS employee ' % (bucket.name) +\
                        'WHERE ANY vm IN employee.$list_obj0 SATISFIES vm.$_list_obj0_int0 > 5 AND' +\
                        ' vm.$_list_obj0_str0 = "ubuntu" END ORDER BY $str0, $list_obj0[0].$_list_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)
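            # With placeholders expanded, the template resembles (hypothetical
            # field names):
            #   SELECT name, VMs FROM default AS employee
            #   WHERE ANY vm IN employee.VMs SATISFIES vm.RAM > 5
            #         AND vm.os = "ubuntu" END
            #   ORDER BY name, VMs[0].RAM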

##############################################################################################
#
#   DISTINCT
##############################################################################################

    def test_distinct(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str1 FROM %s ORDER BY $str1' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_distinct_nested(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $obj0.$_obj0_int0 as VAR FROM %s '  % (bucket.name) +\
                         'ORDER BY $obj0.$_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

            query_template = 'SELECT DISTINCT $list_str0[0] as skill' +\
                         ' FROM %s ORDER BY $list_str0[0]'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

            query_template = 'SELECT DISTINCT $obj0.* FROM %s' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

##############################################################################################
#
#   COMPLEX PATHS
##############################################################################################

    def test_simple_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 FROM %s.$obj0' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 as new_attribute FROM %s.$obj0' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_where_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 FROM %s.$obj0 WHERE $_obj0_int0 = 1' % (
                bucket.name)
            actual_result, expected_result = self.run_query_from_template(
                query_template)
            self._verify_results(actual_result['results'], expected_result)

##############################################################################################
#
#   COMMON FUNCTIONS
##############################################################################################

    def run_query_from_template(self, query_template):
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result
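
    # Illustration (not part of the original suite): run_query_from_template
    # relies on TuqGenerators to expand $-placeholders into concrete fields,
    # e.g. a template such as
    #   'SELECT $str0 AS name_new FROM default'
    # is assumed to become something like
    #   'SELECT name AS name_new FROM default'
    # while the generator also computes the expected result from the loaded
    # documents on the Python side.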

    def negative_common_body(self, queries_errors={}):
        if not queries_errors:
            self.fail("No queries to run!")
        for bucket in self.buckets:
            for query_template, error in queries_errors.iteritems():
                try:
                    query = self.gen_results.generate_query(query_template)
                    actual_result = self.run_cbq_query(
                        query.format(bucket.name))
                except CBQError as ex:
                    self.log.error(ex)
                    self.assertTrue(
                        str(ex).find(error) != -1,
                        "Error is incorrect. Actual: %s.\n Expected: %s.\n" %
                        (str(ex).split(':')[-1], error))
                else:
                    self.fail("There were no errors. Error expected: %s" %
                              error)

    def run_cbq_query(self, query=None, min_output_size=10, server=None):
        if query is None:
            query = self.query
        if server is None:
            server = self.master
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
        else:
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
            if self.input.tuq_client and "client" in self.input.tuq_client:
                server = self.tuq_client
        if self.n1ql_port is None or self.n1ql_port == '':
            self.n1ql_port = self.input.param("n1ql_port", 8093)
            if not self.n1ql_port:
                self.log.info(
                    " n1ql_port is not defined, processing will not proceed further"
                )
                raise Exception(
                    "n1ql_port is not defined, processing will not proceed further"
                )
        if self.use_rest:
            result = RestConnection(server).query_tool(query, self.n1ql_port)
        else:
            if self.version == "git_repo":
                output = self.shell.execute_commands_inside("$GOPATH/src/github.com/couchbaselabs/tuqtng/" +\
                                                            "tuq_client/tuq_client " +\
                                                            "-engine=http://%s:8093/" % server.ip,
                                                       subcommands=[query,],
                                                       min_output_size=20,
                                                       end_msg='tuq_client>')
            else:
                output = self.shell.execute_commands_inside(
                    "/tmp/tuq/cbq -engine=http://%s:8093/" % server.ip,
                    subcommands=[
                        query,
                    ],
                    min_output_size=20,
                    end_msg='cbq>')
            result = self._parse_query_output(output)
        if isinstance(result, str) or 'errors' in result:
            raise CBQError(result, server.ip)
        self.log.info("TOTAL ELAPSED TIME: %s" %
                      result["metrics"]["elapsedTime"])
        return result

    def build_url(self, version):
        info = self.shell.extract_remote_info()
        type = info.distribution_type.lower()
        if type in ["ubuntu", "centos", "red hat"]:
            url = "https://s3.amazonaws.com/packages.couchbase.com/releases/couchbase-query/dp1/"
            url += "couchbase-query_%s_%s_linux.tar.gz" % (
                version, info.architecture_type)
        #TODO for windows
        return url

    def _build_tuq(self, server):
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                goroot = testconstants.LINUX_GOROOT
                gopath = testconstants.LINUX_GOPATH
            else:
                goroot = testconstants.WINDOWS_GOROOT
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if self.input.tuq_client and "goroot" in self.input.tuq_client:
                goroot = self.input.tuq_client["goroot"]
            cmd = "rm -rf {0}/src/github.com".format(gopath)
            self.shell.execute_command(cmd)
            cmd= 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'go get github.com/couchbaselabs/tuqtng;' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; ' +\
                'go get -d -v ./...; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; go build; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng/tuq_client; go build; cd .'
            self.shell.execute_command(cmd)
        else:
            cbq_url = self.build_url(self.version)
            #TODO for windows
            cmd = "cd /tmp; mkdir tuq;cd tuq; wget {0} -O tuq.tar.gz;".format(
                cbq_url)
            cmd += "tar -xvf tuq.tar.gz;rm -rf tuq.tar.gz"
            self.shell.execute_command(cmd)

    def _start_command_line_query(self, server):
        self.shell.execute_command(
            "export NS_SERVER_CBAUTH_URL=\"http://{0}:{1}/_cbauth\"".format(
                server.ip, server.port))
        self.shell.execute_command(
            "export NS_SERVER_CBAUTH_USER=\"{0}\"".format(
                server.rest_username))
        self.shell.execute_command(
            "export NS_SERVER_CBAUTH_PWD=\"{0}\"".format(server.rest_password))
        self.shell.execute_command(
            "export NS_SERVER_CBAUTH_RPC_URL=\"http://{0}:{1}/cbauth-demo\"".
            format(server.ip, server.port))
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                gopath = testconstants.LINUX_GOPATH
            else:
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if os == 'windows':
                cmd = "cd %s/src/github.com/couchbaselabs/query/server/main; " % (gopath) +\
                "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s/src/github.com/couchbaselabs/query//server/main; " % (gopath) +\
                "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" %(
                                                                server.ip, server.port)
            self.shell.execute_command(cmd)
        elif self.version == "sherlock":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                couchbase_path = testconstants.LINUX_COUCHBASE_BIN_PATH
            else:
                couchbase_path = testconstants.WIN_COUCHBASE_BIN_PATH
            if self.input.tuq_client and "sherlock_path" in self.input.tuq_client:
                couchbase_path = "%s/bin" % self.input.tuq_client[
                    "sherlock_path"]
                print "PATH TO SHERLOCK: %s" % couchbase_path
            if os == 'windows':
                cmd = "cd %s; " % (couchbase_path) +\
                "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s; " % (couchbase_path) +\
                "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" %(
                                                                server.ip, server.port)
                n1ql_port = self.input.param("n1ql_port", None)
                if server.ip == "127.0.0.1" and server.n1ql_port:
                    n1ql_port = server.n1ql_port
                if n1ql_port:
                    cmd = "cd %s; " % (couchbase_path) +\
                './cbq-engine -datastore http://%s:%s/ -http=":%s">n1ql.log 2>&1 &' %(
                                                                server.ip, server.port, n1ql_port)
            self.shell.execute_command(cmd)
        else:
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                cmd = "cd /tmp/tuq;./cbq-engine -couchbase http://%s:%s/ >/dev/null 2>&1 &" % (
                    server.ip, server.port)
            else:
                cmd = "cd /cygdrive/c/tuq;./cbq-engine.exe -couchbase http://%s:%s/ >/dev/null 2>&1 &" % (
                    server.ip, server.port)
            self.shell.execute_command(cmd)
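
    # Example of a command assembled above (hypothetical host/ports):
    #   cd /opt/couchbase/bin; ./cbq-engine -datastore http://10.1.2.3:8091/ \
    #       -http=":8093" >n1ql.log 2>&1 &
    # The trailing '&' backgrounds cbq-engine so the test shell is not blocked.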

    def _parse_query_output(self, output):
        if output.find("cbq>") == 0:
            output = output[output.find("cbq>") + 4:].strip()
        if output.find("tuq_client>") == 0:
            output = output[output.find("tuq_client>") + 11:].strip()
        if output.find("cbq>") != -1:
            output = output[:output.find("cbq>")].strip()
        if output.find("tuq_client>") != -1:
            output = output[:output.find("tuq_client>")].strip()
        return json.loads(output)
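
    # _parse_query_output trims interactive prompts so that only the JSON
    # payload reaches json.loads; hypothetical raw output:
    #   'cbq> {"results": [...], "metrics": {...}}\ncbq>'
    # becomes
    #   '{"results": [...], "metrics": {...}}'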

    def generate_docs(self, num_items, start=0):
        try:
            return getattr(self, 'generate_docs_' + self.dataset)(num_items,
                                                                  start)
        except:
            self.fail("There is no dataset %s, please enter a valid one" %
                      self.dataset)

    def generate_docs_default(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee(docs_per_day, start)

    def generate_docs_sabre(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_sabre(docs_per_day, start)

    def generate_docs_employee(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_data(
            docs_per_day=docs_per_day, start=start)

    def generate_docs_simple(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_simple_data(
            docs_per_day=docs_per_day, start=start)

    def generate_docs_sales(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_sales_data(
            docs_per_day=docs_per_day, start=start)

    def generate_docs_bigdata(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_bigdata(docs_per_day=docs_per_day *
                                                    1000,
                                                    start=start,
                                                    value_size=self.value_size)

    def _verify_results(self,
                        actual_result,
                        expected_result,
                        missing_count=1,
                        extra_count=1):
        if len(actual_result) != len(expected_result):
            missing, extra = self.check_missing_and_extra(
                actual_result, expected_result)
            self.log.error("Missing items: %s.\n Extra items: %s" %
                           (missing[:missing_count], extra[:extra_count]))
            self.fail(
                "Results are incorrect. Actual num: %s. Expected num: %s.\n" %
                (len(actual_result), len(expected_result)))
        if self.max_verify is not None:
            actual_result = actual_result[:self.max_verify]
            expected_result = expected_result[:self.max_verify]

        msg = "Results are incorrect.\n Actual first and last 100:  %s.\n ... \n %s" +\
        "Expected first and last 100: %s.\n  ... \n %s"
        self.assertTrue(
            actual_result == expected_result,
            msg % (actual_result[:100], actual_result[-100:],
                   expected_result[:100], expected_result[-100:]))

    def check_missing_and_extra(self, actual, expected):
        missing = []
        extra = []
        for item in actual:
            if not (item in expected):
                extra.append(item)
        for item in expected:
            if not (item in actual):
                missing.append(item)
        return missing, extra
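
    # The items compared above are dicts (unhashable), hence the quadratic
    # membership scans. A faster variant -- a sketch, assuming the results are
    # JSON-serializable -- could hash a canonical form instead:
    #   expected_keys = set(json.dumps(i, sort_keys=True) for i in expected)
    #   extra = [i for i in actual
    #            if json.dumps(i, sort_keys=True) not in expected_keys]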

    def sort_nested_list(self, result):
        actual_result = []
        for item in result:
            curr_item = {}
            for key, value in item.iteritems():
                if isinstance(value, list) or isinstance(value, set):
                    curr_item[key] = sorted(value)
                else:
                    curr_item[key] = value
            actual_result.append(curr_item)
        return actual_result
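
    # sort_nested_list normalizes list/set-valued fields so result comparison
    # is order-insensitive for nested collections, e.g. (hypothetical doc)
    #   {'skills': ['skill2', 'skill1']}  ->  {'skills': ['skill1', 'skill2']}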

    def configure_gomaxprocs(self):
        max_proc = self.input.param("gomaxprocs", None)
        cmd = "export GOMAXPROCS=%s" % max_proc
        for server in self.servers:
            shell_connection = RemoteMachineShellConnection(server)
            shell_connection.execute_command(cmd)

    def create_primary_index_for_3_0_and_greater(self):
        self.log.info("CHECK FOR PRIMARY INDEXES")
        rest = RestConnection(self.master)
        versions = rest.get_nodes_versions()
        ddoc_name = 'ddl_#primary'
        if versions[0].startswith("3"):
            try:
                rest.get_ddoc(self.buckets[0], ddoc_name)
            except ReadDocumentException:
                for bucket in self.buckets:
                    self.log.info("Creating primary index for %s ..." %
                                  bucket.name)
                    self.query = "CREATE PRIMARY INDEX ON %s " % (bucket.name)
                    try:
                        self.run_cbq_query()
                    except Exception, ex:
                        self.log.error('ERROR during index creation %s' %
                                       str(ex))
    def run_cbq_query(self,
                      query=None,
                      min_output_size=10,
                      server=None,
                      query_params={},
                      is_prepared=False,
                      scan_consistency=None,
                      scan_vector=None,
                      verbose=True):
        if query is None:
            query = self.query
        if server is None:
            server = self.master
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
        else:
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
            if self.input.tuq_client and "client" in self.input.tuq_client:
                server = self.tuq_client
        if self.n1ql_port is None or self.n1ql_port == '':
            self.n1ql_port = self.input.param("n1ql_port", 8093)
            if not self.n1ql_port:
                self.log.info(
                    "n1ql_port is not defined, processing will not proceed further"
                )
                raise Exception(
                    "n1ql_port is not defined, processing will not proceed further"
                )
        if self.use_rest:
            query_params = {}
            if scan_consistency:
                query_params['scan_consistency'] = scan_consistency
            if scan_vector:
                query_params['scan_vector'] = str(scan_vector).replace("'", '"')
            if verbose:
                self.log.info('RUN QUERY %s' % query)
            result = RestConnection(server).query_tool(
                query,
                self.n1ql_port,
                query_params=query_params,
                is_prepared=is_prepared,
                verbose=verbose)
        else:
            shell = RemoteMachineShellConnection(server)
            cmd = "%s/cbq  -engine=http://%s:8093/" % (
                testconstants.LINUX_COUCHBASE_BIN_PATH, server.ip)
            output = shell.execute_commands_inside(cmd, query, "", "", "", "",
                                                   "")
            print(output)
            result = self._parse_query_output(output)
        if isinstance(result, str) or 'errors' in result:
            error_result = str(result)
            if len(error_result) > 500:
                error_result = error_result[:500]
            raise CBQError(error_result, server.ip)
        self.log.info("TOTAL ELAPSED TIME: %s" %
                      result["metrics"]["elapsedTime"])
        return result
Example #43
class QueryTests(BaseTestCase):
    def setUp(self):
        if not self._testMethodName == 'suite_setUp':
            self.skip_buckets_handle = True
        super(QueryTests, self).setUp()
        self.version = self.input.param("cbq_version", "sherlock")
        if self.input.tuq_client and "client" in self.input.tuq_client:
            self.shell = RemoteMachineShellConnection(self.input.tuq_client["client"])
        else:
            self.shell = RemoteMachineShellConnection(self.master)
        if not self._testMethodName == 'suite_setUp' and self.input.param("cbq_version", "sherlock") != 'sherlock':
            self._start_command_line_query(self.master)
        self.use_rest = self.input.param("use_rest", True)
        self.max_verify = self.input.param("max_verify", None)
        self.buckets = RestConnection(self.master).get_buckets()
        self.docs_per_day = self.input.param("doc-per-day", 49)
        self.item_flag = self.input.param("item_flag", 4042322160)
        self.n1ql_port = self.input.param("n1ql_port", 8093)
        self.analytics = self.input.param("analytics",False)
        self.dataset = self.input.param("dataset", "default")
        self.primary_indx_type = self.input.param("primary_indx_type", 'GSI')
        self.index_type = self.input.param("index_type", 'GSI')
        self.primary_indx_drop = self.input.param("primary_indx_drop", False)
        self.monitoring = self.input.param("monitoring",False)
        self.isprepared = False
        self.named_prepare = self.input.param("named_prepare", None)
        self.skip_primary_index = self.input.param("skip_primary_index",False)
        self.scan_consistency = self.input.param("scan_consistency", 'REQUEST_PLUS')
        shell = RemoteMachineShellConnection(self.master)
        type = shell.extract_remote_info().distribution_type
        self.path = testconstants.LINUX_COUCHBASE_BIN_PATH
        if type.lower() == 'windows':
            self.path = testconstants.WIN_COUCHBASE_BIN_PATH
        elif type.lower() == "mac":
            self.path = testconstants.MAC_COUCHBASE_BIN_PATH
        self.threadFailure = False
        if self.primary_indx_type.lower() == "gsi":
            self.gsi_type = self.input.param("gsi_type", 'plasma')
        else:
            self.gsi_type = None
        if self.input.param("reload_data", False):
            if self.analytics:
                self.cluster.rebalance([self.master, self.cbas_node], [], [self.cbas_node], services=['cbas'])
            for bucket in self.buckets:
                self.cluster.bucket_flush(self.master, bucket=bucket,
                                          timeout=self.wait_timeout * 5)
            self.gens_load = self.generate_docs(self.docs_per_day)
            self.load(self.gens_load, flag=self.item_flag)
            if self.analytics:
                self.cluster.rebalance([self.master, self.cbas_node], [self.cbas_node], [], services=['cbas'])
        self.gens_load = self.generate_docs(self.docs_per_day)
        if self.input.param("gomaxprocs", None):
            self.configure_gomaxprocs()
        self.gen_results = TuqGenerators(self.log, self.generate_full_docs_list(self.gens_load))
        if not self.analytics:
            self.create_primary_index_for_3_0_and_greater()
        if self.analytics:
            self.setup_analytics()
            self.sleep(30, 'wait for analytics setup')

    def suite_setUp(self):
        try:
            self.load(self.gens_load, flag=self.item_flag)
            if not self.input.param("skip_build_tuq", True):
                self._build_tuq(self.master)
            self.skip_buckets_handle = True
            if (self.analytics):
                self.cluster.rebalance([self.master, self.cbas_node], [self.cbas_node], [], services=['cbas'])
                self.setup_analytics()
                self.sleep(30,'wait for analytics setup')
        except:
            self.log.error('SUITE SETUP FAILED')
            self.tearDown()

    def tearDown(self):
        if self._testMethodName == 'suite_tearDown':
            self.skip_buckets_handle = False
        if self.analytics:
            bucket_username = "******"
            bucket_password = "******"
            data = 'use Default ;'
            for bucket in self.buckets:
                data += 'disconnect bucket {0} if connected;'.format(bucket.name)
                data += 'drop dataset {0} if exists;'.format(bucket.name+ "_shadow")
                data += 'drop bucket {0} if exists;'.format(bucket.name)
            filename = "file.txt"
            f = open(filename,'w')
            f.write(data)
            f.close()
            url = 'http://{0}:8095/analytics/service'.format(self.cbas_node.ip)
            cmd = 'curl -s --data pretty=true --data-urlencode "*****@*****.**" ' + url + " -u " + bucket_username + ":" + bucket_password
            os.system(cmd)
            os.remove(filename)
        super(QueryTests, self).tearDown()

    def suite_tearDown(self):
        if not self.input.param("skip_build_tuq", False):
            if hasattr(self, 'shell'):
                self.shell.execute_command("killall /tmp/tuq/cbq-engine")
                self.shell.execute_command("killall tuqtng")
                self.shell.disconnect()

##############################################################################################
#
#  Setup Helpers
##############################################################################################

    def setup_analytics(self):
        data = 'use Default;'
        bucket_username = "******"
        bucket_password = "******"
        for bucket in self.buckets:
            data += 'create bucket {0} with {{"bucket":"{0}","nodes":"{1}"}} ;'.format(
                bucket.name, self.master.ip)
            data += 'create shadow dataset {1} on {0}; '.format(bucket.name,
                                                                bucket.name + "_shadow")
            data += 'connect bucket {0} with {{"username":"******","password":"******"}};'.format(
                bucket.name, bucket_username, bucket_password)
        filename = "file.txt"
        f = open(filename,'w')
        f.write(data)
        f.close()
        url = 'http://{0}:8095/analytics/service'.format(self.cbas_node.ip)
        cmd = 'curl -s --data pretty=true --data-urlencode "*****@*****.**" ' + url + " -u " + bucket_username + ":" + bucket_password
        os.system(cmd)
        os.remove(filename)

    def run_active_requests(self, e, t):
        while not e.isSet():
            logging.debug('wait_for_event_timeout starting')
            event_is_set = e.wait(t)
            logging.debug('event set: %s', event_is_set)
            if event_is_set:
                result = self.run_cbq_query("select * from system:active_requests")
                self.assertTrue(result['metrics']['resultCount'] == 1)
                requestId = result['requestID']
                result = self.run_cbq_query(
                    'delete from system:active_requests where requestId  =  "%s"' % requestId)
                time.sleep(20)
                result = self.run_cbq_query(
                    'select * from system:active_requests  where requestId  =  "%s"' % requestId)
                self.assertTrue(result['metrics']['resultCount'] == 0)
                result = self.run_cbq_query("select * from system:completed_requests")
                requestId = result['requestID']
                result = self.run_cbq_query(
                    'delete from system:completed_requests where requestId  =  "%s"' % requestId)
                time.sleep(10)
                result = self.run_cbq_query(
                    'select * from system:completed_requests where requestId  =  "%s"' % requestId)
                self.assertTrue(result['metrics']['resultCount'] == 0)
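
    # Typical (hypothetical) wiring for run_active_requests: drive it from a
    # worker thread and signal it with a threading.Event, e.g.
    #   e = threading.Event()
    #   t = threading.Thread(target=self.run_active_requests, args=(e, 2))
    #   t.start()
    #   ...            # start the long-running query under test
    #   e.set()        # one verification pass runs, then the loop exits
    #   t.join()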

##############################################################################################
#
#   COMMON FUNCTIONS
##############################################################################################

    def run_query_from_template(self, query_template):
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def run_query_with_subquery_select_from_template(self, query_template):
        subquery_template = re.sub(r'.*\$subquery\(', '', query_template)
        subquery_template = subquery_template[:subquery_template.rfind(')')]
        keys_num = int(re.sub(r'.*KEYS \$', '', subquery_template).replace('KEYS $', ''))
        subquery_full_list = self.generate_full_docs_list(gens_load=self.gens_load,keys=self._get_keys(keys_num))
        subquery_template = re.sub(r'USE KEYS.*', '', subquery_template)
        sub_results = TuqGenerators(self.log, subquery_full_list)
        self.query = sub_results.generate_query(subquery_template)
        expected_sub = sub_results.generate_expected_result()
        alias = re.sub(r',.*', '', re.sub(r'.*\$subquery\(.*\)', '', query_template))
        alias = re.sub(r'.*as','', re.sub(r'FROM.*', '', alias)).strip()
        if not alias:
            alias = '$1'
        for item in self.gen_results.full_set:
            item[alias] = expected_sub[0]
        query_template = re.sub(r',.*\$subquery\(.*\).*%s' % alias, ',%s' % alias, query_template)
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def run_query_with_subquery_from_template(self, query_template):
        subquery_template = re.sub(r'.*\$subquery\(', '', query_template)
        subquery_template = subquery_template[:subquery_template.rfind(')')]
        subquery_full_list = self.generate_full_docs_list(gens_load=self.gens_load)
        sub_results = TuqGenerators(self.log, subquery_full_list)
        self.query = sub_results.generate_query(subquery_template)
        expected_sub = sub_results.generate_expected_result()
        alias = re.sub(r',.*', '', re.sub(r'.*\$subquery\(.*\)', '', query_template))
        alias = re.sub(r'.*as ', '', alias).strip()
        self.gen_results = TuqGenerators(self.log, expected_sub)
        query_template = re.sub(r'\$subquery\(.*\).*%s' % alias, ' %s' % alias, query_template)
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def negative_common_body(self, queries_errors={}):
        if not queries_errors:
            self.fail("No queries to run!")
        for bucket in self.buckets:
            for query_template, error in queries_errors.iteritems():
                try:
                    query = self.gen_results.generate_query(query_template)
                    actual_result = self.run_cbq_query(query.format(bucket.name))
                except CBQError as ex:
                    self.log.error(ex)
                    self.assertTrue(str(ex).find(error) != -1,
                                    "Error is incorrect. Actual: %s.\n Expected: %s.\n" % (
                                        str(ex).split(':')[-1], error))
                else:
                    self.fail("There were no errors. Error expected: %s" % error)

    def run_cbq_query(self, query=None, min_output_size=10, server=None):
        if query is None:
            query = self.query
        if server is None:
            server = self.master
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
        else:
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
            if self.input.tuq_client and "client" in self.input.tuq_client:
                server = self.tuq_client
        query_params = {}
        cred_params = {'creds': []}
        rest = RestConnection(server)
        username = rest.username
        password = rest.password
        cred_params['creds'].append({'user': username, 'pass': password})
        for bucket in self.buckets:
            if bucket.saslPassword:
                cred_params['creds'].append({'user': '******' % bucket.name, 'pass': bucket.saslPassword})
        query_params.update(cred_params)
        if self.use_rest:
            query_params.update({'scan_consistency': self.scan_consistency})
            self.log.info('RUN QUERY %s' % query)

            if self.analytics:
                query = query + ";"
                for bucket in self.buckets:
                    query = query.replace(bucket.name,bucket.name+"_shadow")
                result = RestConnection(self.cbas_node).execute_statement_on_cbas(query, "immediate")
                result = json.loads(result)

            else :
                result = rest.query_tool(query, self.n1ql_port, query_params=query_params)


        else:
            if self.version == "git_repo":
                output = self.shell.execute_commands_inside("$GOPATH/src/github.com/couchbase/query/" +\
                                                            "shell/cbq/cbq ","","","","","","")
            else:
                os = self.shell.extract_remote_info().type.lower()
                if not(self.isprepared):
                    query = query.replace('"', '\\"')
                    query = query.replace('`', '\\`')

                cmd =  "%s/cbq  -engine=http://%s:%s/ -q -u %s -p %s" % (self.path, server.ip, server.port, username, password)

                output = self.shell.execute_commands_inside(cmd,query,"","","","","")
                if not output.startswith('{'):
                    output1 = '{' + str(output)
                else:
                    output1 = output
                result = json.loads(output1)
        if isinstance(result, str) or 'errors' in result:
            raise CBQError(result, server.ip)
        self.log.info("TOTAL ELAPSED TIME: %s" % result["metrics"]["elapsedTime"])
        return result

    def build_url(self, version):
        info = self.shell.extract_remote_info()
        type = info.distribution_type.lower()
        if type in ["ubuntu", "centos", "red hat"]:
            url = "https://s3.amazonaws.com/packages.couchbase.com/releases/couchbase-query/dp1/"
            url += "couchbase-query_%s_%s_linux.tar.gz" %(
                                version, info.architecture_type)
        #TODO for windows
        return url

    def _build_tuq(self, server):
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                goroot = testconstants.LINUX_GOROOT
                gopath = testconstants.LINUX_GOPATH
            else:
                goroot = testconstants.WINDOWS_GOROOT
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if self.input.tuq_client and "goroot" in self.input.tuq_client:
                goroot = self.input.tuq_client["goroot"]
            cmd = "rm -rf {0}/src/github.com".format(gopath)
            self.shell.execute_command(cmd)
            cmd= 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'go get github.com/couchbaselabs/tuqtng;' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; ' +\
                'go get -d -v ./...; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; go build; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng/tuq_client; go build; cd .'
            self.shell.execute_command(cmd)
        else:
            cbq_url = self.build_url(self.version)
            #TODO for windows
            cmd = "cd /tmp; mkdir tuq;cd tuq; wget {0} -O tuq.tar.gz;".format(cbq_url)
            cmd += "tar -xvf tuq.tar.gz;rm -rf tuq.tar.gz"
            self.shell.execute_command(cmd)

    def _start_command_line_query(self, server):
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                gopath = testconstants.LINUX_GOPATH
            else:
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if os == 'windows':
                cmd = "cd %s/src/github.com/couchbase/query/server/main; " % (gopath) +\
                "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s/src/github.com/couchbase/query//server/main; " % (gopath) +\
                "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" %(
                                                                server.ip, server.port)
            self.shell.execute_command(cmd)
        elif self.version == "sherlock":
            if self.services_init.find('n1ql') != -1:
                return
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                couchbase_path = testconstants.LINUX_COUCHBASE_BIN_PATH
            else:
                couchbase_path = testconstants.WIN_COUCHBASE_BIN_PATH
            if self.input.tuq_client and "sherlock_path" in self.input.tuq_client:
                couchbase_path = "%s/bin" % self.input.tuq_client["sherlock_path"]
            if os == 'windows':
                cmd = "cd %s; " % (couchbase_path) +\
                "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s; " % (couchbase_path) +\
                "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" %(
                                                                server.ip, server.port)
                n1ql_port = self.input.param("n1ql_port", None)
                if server.ip == "127.0.0.1" and server.n1ql_port:
                    n1ql_port = server.n1ql_port
                if n1ql_port:
                    cmd = "cd %s; " % (couchbase_path) +\
                './cbq-engine -datastore http://%s:%s/ -http=":%s">n1ql.log 2>&1 &' %(
                                                                server.ip, server.port, n1ql_port)
            self.shell.execute_command(cmd)
        else:
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                cmd = "cd /tmp/tuq;./cbq-engine -couchbase http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd /cygdrive/c/tuq;./cbq-engine.exe -couchbase http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            self.shell.execute_command(cmd)

    def _parse_query_output(self, output):
        if output.find("cbq>") == 0:
            output = output[output.find("cbq>") + 4:].strip()
        if output.find("tuq_client>") == 0:
            output = output[output.find("tuq_client>") + 11:].strip()
        if output.find("cbq>") != -1:
            output = output[:output.find("cbq>")].strip()
        if output.find("tuq_client>") != -1:
            output = output[:output.find("tuq_client>")].strip()
        return json.loads(output)

    def generate_docs(self, num_items, start=0):
        try:
            return getattr(self, 'generate_docs_' + self.dataset)(num_items, start)
        except:
            self.fail("There is no dataset %s, please enter a valid one" % self.dataset)

    def generate_docs_default(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee(docs_per_day, start)

    def generate_docs_sabre(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_sabre(docs_per_day, start)

    def generate_docs_employee(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_data(docs_per_day = docs_per_day, start = start)

    def generate_docs_simple(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_simple_data(docs_per_day = docs_per_day, start = start)

    def generate_docs_sales(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_sales_data(docs_per_day = docs_per_day, start = start)

    def generate_docs_bigdata(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_bigdata(end=(1000*docs_per_day), start=start, value_size=self.value_size)


    def _verify_results(self, actual_result, expected_result, missing_count = 1, extra_count = 1):
        if len(actual_result) != len(expected_result):
            missing, extra = self.check_missing_and_extra(actual_result, expected_result)
            self.log.error("Missing items: %s.\n Extra items: %s" % (missing[:missing_count], extra[:extra_count]))
            self.fail("Results are incorrect.Actual num %s. Expected num: %s.\n" % (
                                            len(actual_result), len(expected_result)))
        if self.max_verify is not None:
            actual_result = actual_result[:self.max_verify]
            expected_result = expected_result[:self.max_verify]

        msg = "Results are incorrect.\n Actual first and last 100:  %s.\n ... \n %s" +\
        "Expected first and last 100: %s.\n  ... \n %s"
        self.assertTrue(actual_result == expected_result,
                          msg % (actual_result[:100],actual_result[-100:],
                                 expected_result[:100],expected_result[-100:]))

    def check_missing_and_extra(self, actual, expected):
        missing = []
        extra = []
        for item in actual:
            if not (item in expected):
                extra.append(item)
        for item in expected:
            if not (item in actual):
                missing.append(item)
        return missing, extra

    def sort_nested_list(self, result):
        actual_result = []
        for item in result:
            curr_item = {}
            for key, value in item.iteritems():
                if isinstance(value, list) or isinstance(value, set):
                    curr_item[key] = sorted(value)
                else:
                    curr_item[key] = value
            actual_result.append(curr_item)
        return actual_result

    def configure_gomaxprocs(self):
        max_proc = self.input.param("gomaxprocs", None)
        cmd = "export GOMAXPROCS=%s" % max_proc
        for server in self.servers:
            shell_connection = RemoteMachineShellConnection(server)
            shell_connection.execute_command(cmd)

    def create_primary_index_for_3_0_and_greater(self):
        self.log.info("CREATE PRIMARY INDEX using %s" % self.primary_indx_type)
        rest = RestConnection(self.master)
        versions = rest.get_nodes_versions()
        if versions[0].startswith("4") or versions[0].startswith("3") or versions[0].startswith("5"):
            for bucket in self.buckets:
                if self.primary_indx_drop:
                    self.log.info("Dropping primary index for %s using %s ..." % (bucket.name,self.primary_indx_type))
                    self.query = "DROP PRIMARY INDEX ON %s USING %s" % (bucket.name,self.primary_indx_type)
                    #self.run_cbq_query()
                    self.sleep(3, 'Sleep for some time after index drop')
                self.query = 'select * from system:indexes where name="#primary" and keyspace_id = "%s"' % bucket.name
                res = self.run_cbq_query()
                self.sleep(10)
                if self.monitoring:
                    self.query = "delete from system:completed_requests"
                    self.run_cbq_query()
                if not self.skip_primary_index:
                    if (res['metrics']['resultCount'] == 0):
                        self.query = "CREATE PRIMARY INDEX ON %s USING %s" % (bucket.name, self.primary_indx_type)
                        self.log.info("Creating primary index for %s ..." % bucket.name)
                        try:
                            self.run_cbq_query()
                            self.primary_index_created = True
                            if self.primary_indx_type.lower() == 'gsi':
                                self._wait_for_index_online(bucket, '#primary')
                        except Exception, ex:
                            self.log.info(str(ex))
Example #44
class QueryWhitelistTests(QueryTests):
    def setUp(self):
        super(QueryWhitelistTests, self).setUp()
        self.shell = RemoteMachineShellConnection(self.master)
        self.info = self.shell.extract_remote_info()
        if self.info.type.lower() == 'windows':
            self.curl_path = "%scurl" % self.path
            self.file_path = "Filec:\\ProgramFiles\\Couchbase\\Server\\bin\\..\\var\\lib\\couchbase\\n1qlcerts\\curl_whitelist"
            self.lowercase_file_path = "filec:\\ProgramFiles\\Couchbase\\Server\\bin\\..\\var\\lib\\couchbase\\n1qlcerts\\curl_whitelist"
        else:
            self.curl_path = "curl"
            self.file_path = "File/opt/couchbase/bin/../var/lib/couchbase/n1qlcerts/curl_whitelist"
            self.lowercase_file_path = "file/opt/couchbase/bin/../var/lib/couchbase/n1qlcerts/curl_whitelist"
        self.rest = RestConnection(self.master)
        self.cbqpath = '%scbq' % self.path + " -e %s:%s -q -u %s -p %s"\
                                             % (self.master.ip, self.n1ql_port, self.rest.username, self.rest.password)
        #Whitelist error messages
        self.query_error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelistedhttp://%s:%s/query/service." \
                "PleasemakesuretowhitelisttheURLontheUI." % (self.master.ip, self.n1ql_port)
        self.jira_error_msg ="Errorevaluatingprojection.-cause:URLendpointisn'twhitelistedhttps://jira.atlassian." \
                             "com/rest/api/latest/issue/JRA-9.PleasemakesuretowhitelisttheURLontheUI."
        self.google_error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                                "https://maps.googleapis.com/maps/api/geocode/json."
        #End of whitelist error messages
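        # The expected messages above contain no spaces on purpose: the shell
        # helper used by these tests is assumed to strip whitespace from the
        # raw cbq output before comparison.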
        self.query_service_url = "'http://%s:%s/query/service'" % (
            self.master.ip, self.n1ql_port)
        self.api_port = self.input.param("api_port", 8094)
        self.load_sample = self.input.param("load_sample", False)
        self.create_users = self.input.param("create_users", False)
        self.full_access = self.input.param("full_access", True)
        self.run_cbq_query('delete from system:prepareds')

    def suite_setUp(self):
        super(QueryWhitelistTests, self).suite_setUp()
        # Create the users necessary for the RBAC tests in curl
        if self.create_users:
            testuser = [{
                'id': 'no_curl',
                'name': 'no_curl',
                'password': '******'
            }, {
                'id': 'curl',
                'name': 'curl',
                'password': '******'
            }, {
                'id': 'curl_no_insert',
                'name': 'curl_no_insert',
                'password': '******'
            }]
            RbacBase().create_user_source(testuser, 'builtin', self.master)

            noncurl_permissions = 'bucket_full_access[*]:query_select[*]:query_update[*]:' \
                                  'query_insert[*]:query_delete[*]:query_manage_index[*]:' \
                                  'query_system_catalog'
            curl_permissions = 'bucket_full_access[*]:query_select[*]:query_update[*]:' \
                               'query_insert[*]:query_delete[*]:query_manage_index[*]:' \
                               'query_system_catalog:query_external_access'
            # Assign user to role
            role_list = [{
                'id': 'no_curl',
                'name': 'no_curl',
                'roles': '%s' % noncurl_permissions
            }, {
                'id': 'curl',
                'name': 'curl',
                'roles': '%s' % curl_permissions
            }]
            temp = RbacBase().add_user_role(role_list, self.rest, 'builtin')
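            # Note: 'curl' differs from 'no_curl' only by query_external_access,
            # the role that gates use of the CURL() function in N1QL.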

    def tearDown(self):
        super(QueryWhitelistTests, self).tearDown()

    def suite_tearDown(self):
        super(QueryWhitelistTests, self).suite_tearDown()

    '''Test running a curl command without a whitelist present'''

    def test_no_whitelist(self):
        # The query that curl will send to couchbase
        n1ql_query = 'select * from default limit 5'
        # This is the query that the cbq-engine will execute
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.query_error_msg in json_curl['errors'][0]['msg'],
            "Error message is %s; it should be %s" %
            (json_curl['errors'][0]['msg'], self.query_error_msg))

    '''Test running a curl command with an empty whitelist'''

    def test_empty_whitelist(self):
        response, content = self.rest.create_whitelist(self.master, {})
        result = json.loads(content)
        self.assertEqual(result['errors']['all_access'],
                         'The value must be supplied')
        n1ql_query = 'select * from default limit 5'

        # This is the query that the cbq-engine will execute
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.query_error_msg in json_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (json_curl['errors'][0]['msg'], self.query_error_msg))

        self.rest.create_whitelist(self.master, {
            "all_access": None,
            "allowed_urls": None,
            "disallowed_urls": None
        })
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.query_error_msg in json_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (json_curl['errors'][0]['msg'], self.query_error_msg))

    '''Test running a curl command with whitelists that are invalid'''

    def test_invalid_whitelist(self):
        response, content = self.rest.create_whitelist(self.master,
                                                       "thisisnotvalid")
        result = json.loads(content)
        self.assertEqual(result['errors']['_'], 'Unexpected Json')
        n1ql_query = 'select * from default limit 5'
        # This is the query that the cbq-engine will execute
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.query_error_msg in json_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (json_curl['errors'][0]['msg'], self.query_error_msg))

        self.rest.create_whitelist(
            self.master, {
                "all_access": "hello",
                "allowed_urls": ["goodbye"],
                "disallowed_urls": ["invalid"]
            })
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.query_error_msg in json_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (json_curl['errors'][0]['msg'], self.query_error_msg))

    '''Test running a curl command with a whitelist that contains the field all_access: True and also
       invalid/fake fields'''

    def test_basic_all_access_true(self):
        n1ql_query = 'select * from default limit 5'
        self.rest.create_whitelist(self.master, {"all_access": True})
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        json_curl = self.convert_to_json(curl)
        expected_result = self.run_cbq_query('select * from default limit 5')
        self.assertEqual(json_curl['results'][0]['$1']['results'],
                         expected_result['results'])

        curl_output = self.shell.execute_command(
            "%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9" %
            self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query = "select curl(" + url + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        self.rest.create_whitelist(self.master, {
            "all_access": True,
            "fake_field": "blahahahahaha",
            "fake_url": "fake"
        })

        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        self.rest.create_whitelist(self.master, {
            "fake_field": "blahahahahaha",
            "all_access": True,
            "fake_url": "fake"
        })

        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test all_access: True with nonsense in the allowed/disallowed fields as well as nothing
       in the allowed/disallowed fields'''

    def test_all_access_true(self):
        self.rest.create_whitelist(
            self.master, {
                "all_access": True,
                "allowed_urls": ["blahahahahaha"],
                "disallowed_urls": ["fake"]
            })
        curl_output = self.shell.execute_command(
            "%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9" %
            self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query = "select curl(" + url + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        self.rest.create_whitelist(self.master, {
            "all_access": True,
            "allowed_urls": None,
            "disallowed_urls": None
        })
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test what happens if you give a disallowed_urls field as well as an all_access field; all_access
       should take precedence over the disallowed_urls field'''

    def test_all_access_true_disallowed_url(self):
        self.rest.create_whitelist(
            self.master, {
                "all_access": True,
                "disallowed_urls": ["https://maps.googleapis.com"]
            })
        curl_output = self.shell.execute_command(
            "%s --get https://maps.googleapis.com/maps/api/geocode/json "
            "-d 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'"
            % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options = "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query = "select curl(" + url + ", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)
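
    # A minimal pure-Python model of the decision order the precedence tests
    # in this class assume (illustrative only: not the cbq-engine
    # implementation, and not called by the tests). all_access: True allows
    # everything; otherwise disallowed_urls is consulted before allowed_urls,
    # and unmatched URLs are rejected.
    def _is_url_allowed_model(self, url, whitelist):
        if whitelist.get("all_access"):
            return True
        for prefix in whitelist.get("disallowed_urls") or []:
            if url.startswith(prefix):
                return False
        for prefix in whitelist.get("allowed_urls") or []:
            if url.startswith(prefix):
                return True
        return False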

    '''Test what happens if you give an allowed_urls field as well as an all_access field; all_access
       should take precedence over the allowed_urls field'''

    def test_all_access_true_allowed_url(self):
        self.rest.create_whitelist(
            self.master, {
                "all_access": True,
                "allowed_urls": ["https://maps.googleapis.com"]
            })
        curl_output = self.shell.execute_command(
            "%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9" %
            self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query = "select curl(" + url + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test what happens when you set the all_access field multiple times, or try to give it
       multiple values'''

    def test_multiple_all_access(self):
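        # Note: a Python dict literal keeps only the last duplicate key, so
        # this call actually sends {"all_access": False}; the list form
        # below is the real "multiple values" case.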
        self.rest.create_whitelist(self.master, {
            "all_access": True,
            "all_access": False
        })

        curl_output = self.shell.execute_command(
            "%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9" %
            self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query = "select curl(" + url + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.jira_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.jira_error_msg))

        self.rest.create_whitelist(self.master, {
            "all_access": False,
            "all_access": True
        })
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        self.rest.create_whitelist(self.master, {"all_access": [True, False]})
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test to make sure that the whitelist enforces that the allowed_urls field must be given as a list'''

    def test_invalid_allowed_url(self):
        self.rest.create_whitelist(self.master, {"all_access": False})
        # Whitelist should not accept this setting and thus leave the above setting of all_access = False intact
        response, content = self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "allowed_urls": "blahblahblah"
            })
        result = json.loads(content)
        self.assertEqual(result['errors']['allowed_urls'],
                         "Invalid type: Must be a list of non-empty strings")
        n1ql_query = 'select * from default limit 5'
        # This is the query that the cbq-engine will execute
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.query_error_msg in json_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (json_curl['errors'][0]['msg'], self.query_error_msg))

    '''Test the allowed_urls field, try to run curl against an endpoint not in allowed_urls and then
       try to run curl against an endpoint in allowed_urls'''

    def test_allowed_url(self):
        self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "allowed_urls": ["https://maps.googleapis.com"]
            })

        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query = "select curl(" + url + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.jira_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.jira_error_msg))

        curl_output = self.shell.execute_command(
            "%s --get https://maps.googleapis.com/maps/api/geocode/json "
            "-d 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'"
            % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options = "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query = "select curl(" + url + ", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test the disallowed_urls field: run curl against an endpoint not in disallowed_urls and then
       against an endpoint that is in disallowed_urls; both should fail'''

    def test_disallowed_url(self):
        self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "disallowed_urls": ["https://maps.googleapis.com"]
            })

        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query = "select curl(" + url + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.jira_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.jira_error_msg))

        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options = "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query = "select curl(" + url + ", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.google_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.google_error_msg))

    '''Test that disallowed_urls field has precedence over allowed_urls'''

    def test_disallowed_precedence(self):
        self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "allowed_urls": ["https://maps.googleapis.com/maps/api/geocode/json"],
                "disallowed_urls": ["https://maps.googleapis.com"]
            })

        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options = "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query = "select curl(" + url + ", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.google_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.google_error_msg))

        self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "allowed_urls": ["https://maps.googleapis.com/maps/api/geocode/json"],
                "disallowed_urls": ["https://maps.googleapis.com/maps/api/geocode/json"]
            })
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.google_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.google_error_msg))

    '''Test valid allowed with an invalid disallowed'''

    def test_allowed_invalid_disallowed(self):
        self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "allowed_urls": ["https://maps.googleapis.com"],
                "disallowed_urls": ["blahblahblah"]
            })

        curl_output = self.shell.execute_command(
            "%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9" %
            self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query = "select curl(" + url + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.jira_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.jira_error_msg))

        curl_output = self.shell.execute_command(
            "%s --get https://maps.googleapis.com/maps/api/geocode/json "
            "-d 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'"
            % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options = "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query = "select curl(" + url + ", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test a valid disallowed with an invalid allowed'''

    def test_disallowed_invalid_allowed(self):
        self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "allowed_urls": ["blahblahblah"],
                "disallowed_urls": ["https://maps.googleapis.com"]
            })
        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options = "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query = "select curl(" + url + ", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.google_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.google_error_msg))

        response, content = self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "allowed_urls": "blahblahblah",
                "disallowed_urls": ["https://maps.googleapis.com"]
            })
        result = json.loads(content)
        self.assertEqual(result['errors']['allowed_urls'],
                         "Invalid type: Must be a list of non-empty strings")
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(
            self.google_error_msg in actual_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (actual_curl['errors'][0]['msg'], self.google_error_msg))

    def test_invalid_disallowed_url_validation(self):
        response, content = self.rest.create_whitelist(
            self.master, {
                "all_access": False,
                "disallowed_urls": "blahblahblahblahblah"
            })
        result = json.loads(content)
        self.assertEqual(result['errors']['disallowed_urls'],
                         "Invalid type: Must be a list of non-empty strings")

    '''Should not be able to curl localhost, even from localhost itself, unless it is whitelisted'''

    def test_localhost(self):
        self.rest.create_whitelist(self.master, {"all_access": False})
        error_msg ="Errorevaluatingprojection.-cause:URLendpointisn'twhitelistedhttp://localhost:8093/query/service." \
                   "PleasemakesuretowhitelisttheURLontheUI."

        n1ql_query = 'select * from default limit 5'
        query = "select curl('http://localhost:8093/query/service', {'data' : 'statement=%s'," \
                "'user':'******'})" % (n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '',
                                                  '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(
            error_msg in json_curl['errors'][0]['msg'],
            "Error message is %s this is incorrect it should be %s" %
            (json_curl['errors'][0]['msg'], error_msg))
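
# The variant of QueryWhitelistTests that follows configures the whitelist by
# writing a curl_whitelist.json file on the node via shell.create_whitelist(),
# rather than through the REST settings endpoint used above; the expected
# error messages change accordingly.
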
class QueryWhitelistTests(QueryTests):
    def setUp(self):
        super(QueryWhitelistTests, self).setUp()
        self.shell = RemoteMachineShellConnection(self.master)
        self.info = self.shell.extract_remote_info()
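        # The expected error strings used throughout this class are written
        # without spaces, apparently to match the whitespace-stripped output
        # of execute_commands_inside(); the "File"/"file" prefix below is
        # part of the reported message, not of the path itself.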
        if self.info.type.lower() == 'windows':
            self.curl_path = "%scurl" % self.path
            self.file_path = "Filec:\\ProgramFiles\\Couchbase\\Server\\bin\\..\\var\\lib\\couchbase\\n1qlcerts\\curl_whitelist"
            self.lowercase_file_path ="filec:\\ProgramFiles\\Couchbase\\Server\\bin\\..\\var\\lib\\couchbase\\n1qlcerts\\curl_whitelist"
        else:
            self.curl_path = "curl"
            self.file_path = "File/opt/couchbase/bin/../var/lib/couchbase/n1qlcerts/curl_whitelist"
            self.lowercase_file_path = "file/opt/couchbase/bin/../var/lib/couchbase/n1qlcerts/curl_whitelist"
        self.rest = RestConnection(self.master)
        self.cbqpath = '%scbq' % self.path + " -e %s:%s -q -u %s -p %s"\
                                             % (self.master.ip, self.n1ql_port, self.rest.username, self.rest.password)
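        # With hypothetical values, cbqpath expands to something like:
        #   /opt/couchbase/bin/cbq -e 10.0.0.1:8093 -q -u Administrator -p password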
        self.query_service_url = "'http://%s:%s/query/service'" % (self.master.ip,self.n1ql_port)
        self.api_port = self.input.param("api_port", 8094)
        self.load_sample = self.input.param("load_sample", False)
        self.create_users = self.input.param("create_users", False)
        self.full_access = self.input.param("full_access", True)
        self.run_cbq_query('delete from system:prepareds')
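
    # shell.create_whitelist(path, whitelist) is assumed here to serialize
    # the dict as JSON into <path>/curl_whitelist.json on the node (the file
    # the expected error messages point at). A minimal local sketch of that
    # assumed behaviour, unused by the tests themselves:
    @staticmethod
    def _create_whitelist_sketch(certs_path, whitelist):
        import json
        import os

        # Hypothetical re-implementation: write the whitelist dict as JSON
        # where the query engine is assumed to look for it.
        with open(os.path.join(certs_path, "curl_whitelist.json"), "w") as fp:
            json.dump(whitelist, fp)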

    def suite_setUp(self):
        super(QueryWhitelistTests, self).suite_setUp()
        # Create the users necessary for the RBAC tests in curl
        if self.create_users:
            testuser = [{'id': 'no_curl', 'name': 'no_curl', 'password': '******'},
                        {'id': 'curl', 'name': 'curl', 'password': '******'},
                        {'id': 'curl_no_insert', 'name': 'curl_no_insert', 'password': '******'}]
            RbacBase().create_user_source(testuser, 'builtin', self.master)

            noncurl_permissions = 'bucket_full_access[*]:query_select[*]:query_update[*]:' \
                                  'query_insert[*]:query_delete[*]:query_manage_index[*]:' \
                                  'query_system_catalog'
            curl_permissions = 'bucket_full_access[*]:query_select[*]:query_update[*]:' \
                               'query_insert[*]:query_delete[*]:query_manage_index[*]:' \
                               'query_system_catalog:query_external_access'
            # Assign user to role
            role_list = [{'id': 'no_curl', 'name': 'no_curl','roles': '%s' % noncurl_permissions},
                         {'id': 'curl', 'name': 'curl', 'roles': '%s' % curl_permissions}]
            temp = RbacBase().add_user_role(role_list, self.rest, 'builtin')

    def tearDown(self):
        super(QueryWhitelistTests, self).tearDown()

    def suite_tearDown(self):
        super(QueryWhitelistTests, self).suite_tearDown()

    '''Test running a curl command without a whitelist present'''
    def test_no_whitelist(self):
        # The query that curl will send to couchbase
        n1ql_query = 'select * from default limit 5'
        error_msg= "Errorevaluatingprojection.-cause:%s.jsondoesnotexistonnode" % (self.file_path)
        # This is the query that the cbq-engine will execute
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in json_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (json_curl['errors'][0]['msg'],error_msg))

    '''Test running a curl command with an empty whitelist'''
    def test_empty_whitelist(self):
        self.shell.create_whitelist(self.n1ql_certs_path,{})
        n1ql_query = 'select * from default limit 5'
        error_msg= "Errorevaluatingprojection.-cause:%s.jsoncontainsemptyJSONobjectonnode" % (self.file_path)

        # This is the query that the cbq-engine will execute
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in json_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (json_curl['errors'][0]['msg'],error_msg))

        error_msg = "Errorevaluatingprojection.-cause:all_accessshouldbebooleanvaluein%s.jsononnode"\
                    % (self.lowercase_file_path)

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": None, "allowed_urls": None,
                                                           "disallowed_urls": None})
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in json_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (json_curl['errors'][0]['msg'],error_msg))

    '''Test running a curl command with whitelists that are invalid'''
    def test_invalid_whitelist(self):
        self.shell.create_whitelist(self.n1ql_certs_path,"thisisnotvalid")
        n1ql_query = 'select * from default limit 5'
        error_msg= "Errorevaluatingprojection.-cause:%s.jsoncontainsinvalidJSONonnode" % \
                   (self.file_path)
        # This is the query that the cbq-engine will execute
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in json_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (json_curl['errors'][0]['msg'],error_msg))

        error_msg = "Errorevaluatingprojection.-cause:all_accessshouldbebooleanvaluein%s.jsononnode" \
                    % (self.lowercase_file_path)

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": "hello",
                                                           "allowed_urls": ["goodbye"],
                                                           "disallowed_urls": ["invalid"]})
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in json_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (json_curl['errors'][0]['msg'],error_msg))

    '''Test running a curl command with a whitelist that contains the field all_access: True and also
       invalid/fake fields'''
    def test_basic_all_access_true(self):
        n1ql_query = 'select * from default limit 5'
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access":True})
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        json_curl = self.convert_to_json(curl)
        expected_result = self.run_cbq_query('select * from default limit 5')
        self.assertEqual(json_curl['results'][0]['$1']['results'],expected_result['results'])

        curl_output = self.shell.execute_command("%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9"
                                                 %self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": True,
                                                           "fake_field":"blahahahahaha",
                                                           "fake_url": "fake"})

        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'],expected_curl)

        self.shell.create_whitelist(self.n1ql_certs_path, {"fake_field":"blahahahahaha",
                                                           "all_access": True,
                                                           "fake_url": "fake"})

        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test all_access: True with nonsense in the allowed/disallowed fields as well as nothing
       in the allowed/disallowed fields'''
    def test_all_access_true(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": True,
                                                           "allowed_urls":["blahahahahaha"],
                                                           "disallowed_urls": ["fake"]})
        curl_output = self.shell.execute_command("%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9"
                                                 % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": True,
                                                           "allowed_urls": None,
                                                           "disallowed_urls": None})
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test what happens if you give a disallowed_urls field as well as an all_access field; all_access
       should take precedence over the disallowed_urls field'''
    def test_all_access_true_disallowed_url(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": True,
                                                           "disallowed_urls":
                                                               ["https://maps.googleapis.com"]})
        curl_output = self.shell.execute_command("%s --get https://maps.googleapis.com/maps/api/geocode/json "
                                                 "-d 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'"
                                                 % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options= "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query="select curl("+ url +", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test what happens if you give an allowed_urls field as well as an all_access field; all_access
       should take precedence over the allowed_urls field'''
    def test_all_access_true_allowed_url(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": True,
                                                           "allowed_urls":
                                                               ["https://maps.googleapis.com"]})
        curl_output = self.shell.execute_command("%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9"
                                                 %self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test what happens when you set the all_access field multiple times, or try to give it
       multiple values'''
    def test_multiple_all_access(self):
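        # As in the REST-based variant above, the dict literal collapses
        # duplicate keys, so this call actually writes {"all_access": False}.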
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": True,
                                                           "all_access": False})
        error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                    "https://jira.atlassian.com/rest/api/latest/issue/JRA-9onnode" 

        curl_output = self.shell.execute_command("%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9"
                                                 %self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "all_access": True})
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        error_msg = "Errorevaluatingprojection.-cause:all_accessshouldbebooleanvaluein%s.jsononnode" \
                    % (self.lowercase_file_path)

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": [True,False]})
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

    '''Test to make sure that the whitelist enforces that the allowed_urls field must be given as a list'''
    def test_invalid_allowed_url(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "allowed_urls": "blahblahblah"})
        error_msg = "Errorevaluatingprojection.-cause:allowed_urlsshouldbelistofurlsin%s.jsononnode" \
                    % (self.lowercase_file_path)

        n1ql_query = 'select * from default limit 5'
        # This is the query that the cbq-engine will execute
        query = "select curl(" + self.query_service_url + \
                ", {'data' : 'statement=%s','user':'******'})" % (
                n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in json_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (json_curl['errors'][0]['msg'],error_msg))

    '''Test the allowed_urls field, try to run curl against an endpoint not in allowed_urls and then
       try to run curl against an endpoint in allowed_urls'''
    def test_allowed_url(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "allowed_urls":
                                                               ["https://maps.googleapis.com"]})
        error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                    "https://jira.atlassian.com/rest/api/latest/issue/JRA-9onnode"

        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

        curl_output = self.shell.execute_command("%s --get https://maps.googleapis.com/maps/api/geocode/json "
                                                 "-d 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'"
                                                 % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options= "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query="select curl("+ url +", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

    '''Test the disallowed_urls field: run curl against an endpoint not in disallowed_urls and then
       against an endpoint that is in disallowed_urls; both should fail'''
    def test_disallowed_url(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "disallowed_urls":
                                                               ["https://maps.googleapis.com"]})
        error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                    "https://jira.atlassian.com/rest/api/latest/issue/JRA-9onnode"

        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

        error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                    "https://maps.googleapis.com/maps/api/geocode/jsononnode"

        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options= "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query="select curl("+ url +", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

    '''Test that disallowed_urls field has precedence over allowed_urls'''
    def test_disallowed_precedence(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "allowed_urls":
                                                               ["https://maps.googleapis.com/maps/api/geocode/json"],
                                                           "disallowed_urls":
                                                               ["https://maps.googleapis.com"]})
        error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                    "https://maps.googleapis.com/maps/api/geocode/jsononnode"

        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options= "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query="select curl("+ url +", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "allowed_urls":
                                                               ["https://maps.googleapis.com/maps/api/geocode/json"],
                                                           "disallowed_urls":
                                                               ["https://maps.googleapis.com/maps/api/geocode/json"]})
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

    '''Test valid allowed with an invalid disallowed'''
    def test_allowed_invalid_disallowed(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "allowed_urls":
                                                               ["https://maps.googleapis.com"],
                                                           "disallowed_urls":["blahblahblah"]})
        error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                    "https://jira.atlassian.com/rest/api/latest/issue/JRA-9onnode"

        curl_output = self.shell.execute_command("%s https://jira.atlassian.com/rest/api/latest/issue/JRA-9"
                                                 %self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://jira.atlassian.com/rest/api/latest/issue/JRA-9'"
        query="select curl("+ url +")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

        curl_output = self.shell.execute_command("%s --get https://maps.googleapis.com/maps/api/geocode/json "
                                                 "-d 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'"
                                                 % self.curl_path)
        expected_curl = self.convert_list_to_json(curl_output[0])
        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options= "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query="select curl("+ url +", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertEqual(actual_curl['results'][0]['$1'], expected_curl)

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "allowed_urls":
                                                               ["https://maps.googleapis.com"],
                                                           "disallowed_urls":"blahblahblah"})
        error_msg = "Errorevaluatingprojection.-cause:disallowed_urlsshouldbelistofurlsin%s.jsononnode" \
                    % (self.lowercase_file_path)

        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

    '''Test a valid disallowed with an invalid allowed'''
    def test_disallowed_invalid_allowed(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "allowed_urls":
                                                               ["blahblahblah"],
                                                           "disallowed_urls":["https://maps.googleapis.com"]})

        error_msg = "Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                    "https://maps.googleapis.com/maps/api/geocode/jsononnode"

        url = "'https://maps.googleapis.com/maps/api/geocode/json'"
        options= "{'get':True,'data': 'address=santa+cruz&components=country:ES&key=AIzaSyCT6niGCMsgegJkQSYSqpoLZ4_rSO59XQQ'}"
        query="select curl("+ url +", %s" % options + ")"
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False,
                                                           "allowed_urls": "blahblahblah",
                                                           "disallowed_urls":["https://maps.googleapis.com"]})
        curl = self.shell.execute_commands_inside(self.cbqpath,query,'', '', '', '', '')
        actual_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in actual_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (actual_curl['errors'][0]['msg'],error_msg))

    '''Should not be able to curl localhost, even from localhost itself, unless it is whitelisted'''
    def test_localhost(self):
        self.shell.create_whitelist(self.n1ql_certs_path, {"all_access": False})
        error_msg ="Errorevaluatingprojection.-cause:URLendpointisn'twhitelisted" \
                   "http://localhost:8093/query/serviceonnode"

        n1ql_query = 'select * from default limit 5'
        query = "select curl('http://localhost:8093/query/service', {'data' : 'statement=%s'," \
                "'user':'******'})" % (n1ql_query, self.username, self.password)
        curl = self.shell.execute_commands_inside(self.cbqpath, query, '', '', '', '', '')
        json_curl = self.convert_to_json(curl)
        self.assertTrue(error_msg in json_curl['errors'][0]['msg'],
                        "Error message is %s this is incorrect it should be %s"
                        % (json_curl['errors'][0]['msg'],error_msg))
Example #46
class QueryTests(BaseTestCase):
    def setUp(self):
        if self._testMethodName != 'suite_setUp':
            self.skip_buckets_handle = True
        super(QueryTests, self).setUp()
        self.version = self.input.param("cbq_version", "sherlock")
        if self.input.tuq_client and "client" in self.input.tuq_client:
            self.shell = RemoteMachineShellConnection(
                self.input.tuq_client["client"])
        else:
            self.shell = RemoteMachineShellConnection(self.master)
        if self._testMethodName != 'suite_setUp' and self.input.param(
                "cbq_version", "sherlock") != 'sherlock':
            self._start_command_line_query(self.master)
        self.use_rest = self.input.param("use_rest", True)
        self.max_verify = self.input.param("max_verify", None)
        self.buckets = RestConnection(self.master).get_buckets()
        self.docs_per_day = self.input.param("doc-per-day", 49)
        self.item_flag = self.input.param("item_flag", 4042322160)
        self.n1ql_port = self.input.param("n1ql_port", 8093)
        self.analytics = self.input.param("analytics", False)
        self.dataset = self.input.param("dataset", "default")
        self.primary_indx_type = self.input.param("primary_indx_type", 'GSI')
        self.index_type = self.input.param("index_type", 'GSI')
        self.primary_indx_drop = self.input.param("primary_indx_drop", False)
        self.monitoring = self.input.param("monitoring", False)
        self.isprepared = False
        self.named_prepare = self.input.param("named_prepare", None)
        self.skip_primary_index = self.input.param("skip_primary_index", False)
        self.scan_consistency = self.input.param("scan_consistency",
                                                 'REQUEST_PLUS')
        shell = RemoteMachineShellConnection(self.master)
        os_type = shell.extract_remote_info().distribution_type
        self.path = testconstants.LINUX_COUCHBASE_BIN_PATH
        if os_type.lower() == 'windows':
            self.path = testconstants.WIN_COUCHBASE_BIN_PATH
        elif os_type.lower() == "mac":
            self.path = testconstants.MAC_COUCHBASE_BIN_PATH
        self.threadFailure = False
        if self.primary_indx_type.lower() == "gsi":
            self.gsi_type = self.input.param("gsi_type", 'plasma')
        else:
            self.gsi_type = None
        if self.input.param("reload_data", False):
            if self.analytics:
                self.cluster.rebalance([self.master, self.cbas_node], [],
                                       [self.cbas_node],
                                       services=['cbas'])
            for bucket in self.buckets:
                self.cluster.bucket_flush(self.master,
                                          bucket=bucket,
                                          timeout=self.wait_timeout * 5)
            # Adding sleep after flushing buckets (see CBQE-5838)
            self.sleep(210)
            self.gens_load = self.generate_docs(self.docs_per_day)
            self.load(self.gens_load, flag=self.item_flag)
            if self.analytics:
                self.cluster.rebalance([self.master, self.cbas_node],
                                       [self.cbas_node], [],
                                       services=['cbas'])
        self.gens_load = self.generate_docs(self.docs_per_day)
        if self.input.param("gomaxprocs", None):
            self.configure_gomaxprocs()
        self.gen_results = TuqGenerators(
            self.log, self.generate_full_docs_list(self.gens_load))
        if self.analytics:
            self.setup_analytics()
            self.sleep(30, 'wait for analytics setup')
        else:
            self.create_primary_index_for_3_0_and_greater()

    def suite_setUp(self):
        try:
            self.load(self.gens_load, flag=self.item_flag)
            if not self.input.param("skip_build_tuq", True):
                self._build_tuq(self.master)
            self.skip_buckets_handle = True
            if self.analytics:
                self.cluster.rebalance([self.master, self.cbas_node],
                                       [self.cbas_node], [],
                                       services=['cbas'])
                self.setup_analytics()
                self.sleep(30, 'wait for analytics setup')
        except:
            self.log.error('SUITE SETUP FAILED')
            self.tearDown()

    def tearDown(self):
        if self._testMethodName == 'suite_tearDown':
            self.skip_buckets_handle = False
        if self.analytics:
            bucket_username = "******"
            bucket_password = "******"
            data = 'use Default ;'
            for bucket in self.buckets:
                data += 'disconnect bucket {0} if connected;'.format(
                    bucket.name)
                data += 'drop dataset {0} if exists;'.format(bucket.name +
                                                             "_shadow")
                data += 'drop bucket {0} if exists;'.format(bucket.name)
            filename = "file.txt"
            f = open(filename, 'w')
            f.write(data)
            f.close()
            url = 'http://{0}:8095/analytics/service'.format(self.cbas_node.ip)
            cmd = 'curl -s --data pretty=true --data-urlencode "*****@*****.**" ' + url + " -u " + bucket_username + ":" + bucket_password
            os.system(cmd)
            os.remove(filename)
        super(QueryTests, self).tearDown()

    def suite_tearDown(self):
        if not self.input.param("skip_build_tuq", False):
            if hasattr(self, 'shell'):
                self.shell.execute_command("killall /tmp/tuq/cbq-engine")
                self.shell.execute_command("killall tuqtng")
                self.shell.disconnect()

##############################################################################################
#
#  Setup Helpers
##############################################################################################

    def setup_analytics(self):
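        """Build the Analytics DDL (create bucket, create shadow dataset,
        connect bucket) for every KV bucket, write it to a local file, and
        POST it to the CBAS /analytics/service endpoint via curl."""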
        data = 'use Default;'
        bucket_username = "******"
        bucket_password = "******"
        for bucket in self.buckets:
            data += 'create bucket {0} with {{"bucket":"{0}","nodes":"{1}"}} ;'.format(
                bucket.name, self.master.ip)
            data += 'create shadow dataset {1} on {0}; '.format(
                bucket.name, bucket.name + "_shadow")
            data += 'connect bucket {0} with {{"username":"{1}","password":"{2}"}};'.format(
                bucket.name, bucket_username, bucket_password)
        filename = "file.txt"
        f = open(filename, 'w')
        f.write(data)
        f.close()
        url = 'http://{0}:8095/analytics/service'.format(self.cbas_node.ip)
        # "statement@file.txt" assumed (masked in source)
        cmd = 'curl -s --data pretty=true --data-urlencode "statement@file.txt" ' + url + " -u " + bucket_username + ":" + bucket_password
        os.system(cmd)
        os.remove(filename)

    def run_active_requests(self, e, t):
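        """Helper thread body: wait on event `e` in `t`-second intervals and,
        once it is set, verify that entries in system:active_requests and
        system:completed_requests can be selected and deleted."""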
        while not e.isSet():
            logging.debug('wait_for_event_timeout starting')
            event_is_set = e.wait(t)
            logging.debug('event set: %s', event_is_set)
            if event_is_set:
                result = self.run_cbq_query(
                    "select * from system:active_requests")
                self.assertTrue(result['metrics']['resultCount'] == 1)
                requestId = result['requestID']
                result = self.run_cbq_query(
                    'delete from system:active_requests where requestId  =  "%s"'
                    % requestId)
                time.sleep(20)
                result = self.run_cbq_query(
                    'select * from system:active_requests  where requestId  =  "%s"'
                    % requestId)
                self.assertTrue(result['metrics']['resultCount'] == 0)
                result = self.run_cbq_query(
                    "select * from system:completed_requests")
                requestId = result['requestID']
                result = self.run_cbq_query(
                    'delete from system:completed_requests where requestId  =  "%s"'
                    % requestId)
                time.sleep(10)
                result = self.run_cbq_query(
                    'select * from system:completed_requests where requestId  =  "%s"'
                    % requestId)
                self.assertTrue(result['metrics']['resultCount'] == 0)

##############################################################################################
#
#   COMMON FUNCTIONS
##############################################################################################

    def run_query_from_template(self, query_template):
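        """Expand `query_template` with TuqGenerators, compute the expected
        result set in Python, run the query, and return (actual, expected)."""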
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def run_query_with_subquery_select_from_template(self, query_template):
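        """Handle templates with a $subquery(...) in the SELECT list: evaluate
        the subquery separately, fold its expected value into the generator's
        doc set under the subquery alias, then run the rewritten outer query."""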
        subquery_template = re.sub(r'.*\$subquery\(', '', query_template)
        subquery_template = subquery_template[:subquery_template.rfind(')')]
        keys_num = int(
            re.sub(r'.*KEYS \$', '', subquery_template).replace('KEYS $', ''))
        subquery_full_list = self.generate_full_docs_list(
            gens_load=self.gens_load, keys=self._get_keys(keys_num))
        subquery_template = re.sub(r'USE KEYS.*', '', subquery_template)
        sub_results = TuqGenerators(self.log, subquery_full_list)
        self.query = sub_results.generate_query(subquery_template)
        expected_sub = sub_results.generate_expected_result()
        alias = re.sub(r',.*', '',
                       re.sub(r'.*\$subquery\(.*\)', '', query_template))
        alias = re.sub(r'.*as', '', re.sub(r'FROM.*', '', alias)).strip()
        if not alias:
            alias = '$1'
        for item in self.gen_results.full_set:
            item[alias] = expected_sub[0]
        query_template = re.sub(r',.*\$subquery\(.*\).*%s' % alias,
                                ',%s' % alias, query_template)
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def run_query_with_subquery_from_template(self, query_template):
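        """Handle templates with a $subquery(...) in the FROM clause: compute
        the subquery's expected rows, use them as the new generator input, and
        run the rewritten outer query against that."""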
        subquery_template = re.sub(r'.*\$subquery\(', '', query_template)
        subquery_template = subquery_template[:subquery_template.rfind(')')]
        subquery_full_list = self.generate_full_docs_list(
            gens_load=self.gens_load)
        sub_results = TuqGenerators(self.log, subquery_full_list)
        self.query = sub_results.generate_query(subquery_template)
        expected_sub = sub_results.generate_expected_result()
        alias = re.sub(r',.*', '',
                       re.sub(r'.*\$subquery\(.*\)', '', query_template))
        alias = re.sub(r'.*as ', '', alias).strip()
        self.gen_results = TuqGenerators(self.log, expected_sub)
        query_template = re.sub(r'\$subquery\(.*\).*%s' % alias, ' %s' % alias,
                                query_template)
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def negative_common_body(self, queries_errors={}):
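        """Run each query template and assert it raises a CBQError whose text
        contains the expected error substring; fail if the query succeeds."""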
        if not queries_errors:
            self.fail("No queries to run!")
        for bucket in self.buckets:
            for query_template, error in queries_errors.items():
                try:
                    query = self.gen_results.generate_query(query_template)
                    actual_result = self.run_cbq_query(
                        query.format(bucket.name))
                except CBQError as ex:
                    self.log.error(ex)
                    self.assertTrue(
                        str(ex).find(error) != -1,
                        "Error is incorrect.Actual %s.\n Expected: %s.\n" %
                        (str(ex).split(':')[-1], error))
                else:
                    self.fail("There were no errors. Error expected: %s" %
                              error)

    def run_cbq_query(self, query=None, min_output_size=10, server=None):
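        """Execute a N1QL statement (or its CBAS rewrite when analytics is
        enabled) via REST or the cbq shell, raise CBQError on error output,
        and return the parsed JSON response."""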
        if query is None:
            query = self.query
        if server is None:
            server = self.master
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
        else:
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
            if self.input.tuq_client and "client" in self.input.tuq_client:
                server = self.tuq_client
        query_params = {}
        cred_params = {'creds': []}
        rest = RestConnection(server)
        username = rest.username
        password = rest.password
        cred_params['creds'].append({'user': username, 'pass': password})
        query_params.update(cred_params)
        if self.use_rest:
            query_params.update({'scan_consistency': self.scan_consistency})
            self.log.info('RUN QUERY %s' % query)

            if self.analytics:
                query = query + ";"
                for bucket in self.buckets:
                    query = query.replace(bucket.name, bucket.name + "_shadow")
                result = RestConnection(
                    self.cbas_node).execute_statement_on_cbas(
                        query, "immediate")
                result = json.loads(result)

            else:
                result = rest.query_tool(query,
                                         self.n1ql_port,
                                         query_params=query_params)

        else:
            if self.version == "git_repo":
                output = self.shell.execute_commands_inside("$GOPATH/src/github.com/couchbase/query/" +\
                                                            "shell/cbq/cbq ", "", "", "", "", "", "")
            else:
                os = self.shell.extract_remote_info().type.lower()
                if not (self.isprepared):
                    query = query.replace('"', '\\"')
                    query = query.replace('`', '\\`')

                cmd = "%s/cbq  -engine=http://%s:%s/ -q -u %s -p %s" % (
                    self.path, server.ip, server.port, username, password)

                output = self.shell.execute_commands_inside(
                    cmd, query, "", "", "", "", "")
                if not (output[0] == '{'):
                    output1 = '{' + str(output)
                else:
                    output1 = output
                result = json.loads(output1)
        if isinstance(result, str) or 'errors' in result:
            raise CBQError(result, server.ip)
        self.log.info("TOTAL ELAPSED TIME: %s" %
                      result["metrics"]["elapsedTime"])
        return result

    def build_url(self, version):
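        """Return the S3 download URL of the couchbase-query dp1 package for
        the remote machine's Linux distribution and architecture."""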
        info = self.shell.extract_remote_info()
        type = info.distribution_type.lower()
        if type in ["ubuntu", "centos", "red hat"]:
            url = "https://s3.amazonaws.com/packages.couchbase.com/releases/couchbase-query/dp1/"
            url += "couchbase-query_%s_%s_linux.tar.gz" % (
                version, info.architecture_type)
        #TODO for windows
        return url

    def _build_tuq(self, server):
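        """Build cbq-engine on the remote machine: from the tuqtng git repo
        (go get / go build) when version is "git_repo", otherwise download
        and unpack a prebuilt tarball into /tmp/tuq."""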
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                goroot = testconstants.LINUX_GOROOT
                gopath = testconstants.LINUX_GOPATH
            else:
                goroot = testconstants.WINDOWS_GOROOT
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if self.input.tuq_client and "goroot" in self.input.tuq_client:
                goroot = self.input.tuq_client["goroot"]
            cmd = "rm -rf {0}/src/github.com".format(gopath)
            self.shell.execute_command(cmd)
            cmd= 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'go get github.com/couchbaselabs/tuqtng;' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; ' +\
                'go get -d -v ./...; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; go build; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng/tuq_client; go build; cd .'
            self.shell.execute_command(cmd)
        else:
            cbq_url = self.build_url(self.version)
            #TODO for windows
            cmd = "cd /tmp; mkdir tuq;cd tuq; wget {0} -O tuq.tar.gz;".format(
                cbq_url)
            cmd += "tar -xvf tuq.tar.gz;rm -rf tuq.tar.gz"
            self.shell.execute_command(cmd)

    def _start_command_line_query(self, server):
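        """Start cbq-engine in the background on the remote machine, picking
        the binary location and flags according to self.version (git_repo,
        sherlock, or a downloaded tarball) and the remote OS."""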
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                gopath = testconstants.LINUX_GOPATH
            else:
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if os == 'windows':
                cmd = "cd %s/src/github.com/couchbase/query/server/main; " % (gopath) +\
                "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s/src/github.com/couchbase/query//server/main; " % (gopath) +\
                "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" %(
                                                                server.ip, server.port)
            self.shell.execute_command(cmd)
        elif self.version == "sherlock":
            if self.services_init.find('n1ql') != -1:
                return
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                couchbase_path = testconstants.LINUX_COUCHBASE_BIN_PATH
            else:
                couchbase_path = testconstants.WIN_COUCHBASE_BIN_PATH
            if self.input.tuq_client and "sherlock_path" in self.input.tuq_client:
                couchbase_path = "%s/bin" % self.input.tuq_client[
                    "sherlock_path"]
            if os == 'windows':
                cmd = "cd %s; " % (couchbase_path) +\
                "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s; " % (couchbase_path) +\
                "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" %(
                                                                server.ip, server.port)
                n1ql_port = self.input.param("n1ql_port", None)
                if server.ip == "127.0.0.1" and server.n1ql_port:
                    n1ql_port = server.n1ql_port
                if n1ql_port:
                    cmd = "cd %s; " % (couchbase_path) +\
                './cbq-engine -datastore http://%s:%s/ -http=":%s">n1ql.log 2>&1 &' %(
                                                                server.ip, server.port, n1ql_port)
            self.shell.execute_command(cmd)
        else:
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                cmd = "cd /tmp/tuq;./cbq-engine -couchbase http://%s:%s/ >/dev/null 2>&1 &" % (
                    server.ip, server.port)
            else:
                cmd = "cd /cygdrive/c/tuq;./cbq-engine.exe -couchbase http://%s:%s/ >/dev/null 2>&1 &" % (
                    server.ip, server.port)
            self.shell.execute_command(cmd)

    def _parse_query_output(self, output):
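        """Strip leading/trailing "cbq>" and "tuq_client>" shell prompts from
        interactive output and JSON-decode the remainder."""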
        if output.find("cbq>") == 0:
            output = output[output.find("cbq>") + 4:].strip()
        if output.find("tuq_client>") == 0:
            output = output[output.find("tuq_client>") + 11:].strip()
        if output.find("cbq>") != -1:
            output = output[:output.find("cbq>")].strip()
        if output.find("tuq_client>") != -1:
            output = output[:output.find("tuq_client>")].strip()
        return json.loads(output)

    def generate_docs(self, num_items, start=0):
        try:
            return getattr(self, 'generate_docs_' + self.dataset)(num_items,
                                                                  start)
        except:
            self.fail("There is no dataset %s, please enter a valid one" %
                      self.dataset)

    def generate_docs_default(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee(docs_per_day, start)

    def generate_docs_sabre(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_sabre(docs_per_day, start)

    def generate_docs_employee(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_data(
            docs_per_day=docs_per_day, start=start)

    def generate_docs_simple(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_simple_data(
            docs_per_day=docs_per_day, start=start)

    def generate_docs_sales(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_sales_data(
            docs_per_day=docs_per_day, start=start)

    def generate_docs_bigdata(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_bigdata(end=(1000 * docs_per_day),
                                                    start=start,
                                                    value_size=self.value_size)

    def _verify_results(self,
                        actual_result,
                        expected_result,
                        missing_count=1,
                        extra_count=1):
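        """Fail with a missing/extra diff when result sizes differ; otherwise
        assert element-wise equality, truncating both sides to max_verify."""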
        if len(actual_result) != len(expected_result):
            missing, extra = self.check_missing_and_extra(
                actual_result, expected_result)
            self.log.error("Missing items: %s.\n Extra items: %s" %
                           (missing[:missing_count], extra[:extra_count]))
            self.fail(
                "Results are incorrect.Actual num %s. Expected num: %s.\n" %
                (len(actual_result), len(expected_result)))
        if self.max_verify is not None:
            actual_result = actual_result[:self.max_verify]
            expected_result = expected_result[:self.max_verify]

        msg = "Results are incorrect.\n Actual first and last 100:  %s.\n ... \n %s" +\
        "Expected first and last 100: %s.\n  ... \n %s"
        self.assertTrue(
            actual_result == expected_result,
            msg % (actual_result[:100], actual_result[-100:],
                   expected_result[:100], expected_result[-100:]))

    def check_missing_and_extra(self, actual, expected):
        missing = []
        extra = []
        for item in actual:
            if not (item in expected):
                extra.append(item)
        for item in expected:
            if not (item in actual):
                missing.append(item)
        return missing, extra

    def sort_nested_list(self, result):
        actual_result = []
        for item in result:
            curr_item = {}
            for key, value in item.items():
                if isinstance(value, list) or isinstance(value, set):
                    curr_item[key] = sorted(value)
                else:
                    curr_item[key] = value
            actual_result.append(curr_item)
        return actual_result

    def configure_gomaxprocs(self):
        max_proc = self.input.param("gomaxprocs", None)
        cmd = "export GOMAXPROCS=%s" % max_proc
        for server in self.servers:
            shell_connection = RemoteMachineShellConnection(server)
            shell_connection.execute_command(cmd)

    def create_primary_index_for_3_0_and_greater(self):
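        """On server versions 3.x-5.x, optionally drop and then (re)create the
        primary index on each bucket with the configured index type, waiting
        for GSI indexes to come online."""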
        self.log.info("CREATE PRIMARY INDEX using %s" % self.primary_indx_type)
        rest = RestConnection(self.master)
        versions = rest.get_nodes_versions()
        if versions[0].startswith("4") or versions[0].startswith(
                "3") or versions[0].startswith("5"):
            for bucket in self.buckets:
                if self.primary_indx_drop:
                    self.log.info(
                        "Dropping primary index for %s using %s ..." %
                        (bucket.name, self.primary_indx_type))
                    self.query = "DROP PRIMARY INDEX ON %s USING %s" % (
                        bucket.name, self.primary_indx_type)
                    #self.run_cbq_query()
                    self.sleep(3, 'Sleep for some time after index drop')
                self.query = 'select * from system:indexes where name="#primary" and keyspace_id = "%s"' % bucket.name
                res = self.run_cbq_query()
                self.sleep(10)
                if self.monitoring:
                    self.query = "delete from system:completed_requests"
                    self.run_cbq_query()
                if not self.skip_primary_index:
                    if (res['metrics']['resultCount'] == 0):
                        self.query = "CREATE PRIMARY INDEX ON %s USING %s" % (
                            bucket.name, self.primary_indx_type)
                        self.log.info("Creating primary index for %s ..." %
                                      bucket.name)
                        try:
                            self.run_cbq_query()
                            self.primary_index_created = True
                            if self.primary_indx_type.lower() == 'gsi':
                                self._wait_for_index_online(bucket, '#primary')
                        except Exception as ex:
                            self.log.info(str(ex))

    def _wait_for_index_online(self, bucket, index_name, timeout=6000):
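        """Poll system:indexes until `index_name` on `bucket` reports state
        "online"; raise if that does not happen within `timeout` seconds."""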
        end_time = time.time() + timeout
        while time.time() < end_time:
            query = "SELECT * FROM system:indexes where name='%s'" % index_name
            res = self.run_cbq_query(query)
            for item in res['results']:
                if 'keyspace_id' not in item['indexes']:
                    self.log.error(item)
                    continue
                if item['indexes']['keyspace_id'] == bucket.name:
                    if item['indexes']['state'] == "online":
                        return
            self.sleep(
                5, 'index is pending or not in the list. sleeping... (%s)' %
                [item['indexes'] for item in res['results']])
        raise Exception('index %s is not online. last response is %s' %
                        (index_name, res))

    def _get_keys(self, key_num):
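        """Return the first `key_num` document keys produced by (deep copies
        of) the load generators."""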
        keys = []
        for gen in self.gens_load:
            gen_copy = copy.deepcopy(gen)
            for i in range(gen_copy.end):
                key, _ = next(gen_copy)
                keys.append(key)
                if len(keys) == key_num:
                    return keys
        return keys
Example #47
class QueryTests(BaseTestCase):
    def setUp(self):
        if not self._testMethodName == "suite_setUp":
            self.skip_buckets_handle = True
        super(QueryTests, self).setUp()
        self.version = self.input.param("cbq_version", "sherlock")
        if self.input.tuq_client and "client" in self.input.tuq_client:
            self.shell = RemoteMachineShellConnection(self.input.tuq_client["client"])
        else:
            self.shell = RemoteMachineShellConnection(self.master)
        if not self._testMethodName == "suite_setUp" and self.input.param("cbq_version", "sherlock") != "sherlock":
            self._start_command_line_query(self.master)
        self.use_rest = self.input.param("use_rest", True)
        self.max_verify = self.input.param("max_verify", None)
        self.buckets = RestConnection(self.master).get_buckets()
        self.docs_per_day = self.input.param("doc-per-day", 49)
        self.item_flag = self.input.param("item_flag", 4042322160)
        self.n1ql_port = self.input.param("n1ql_port", 8093)
        self.analytics = self.input.param("analytics", False)
        self.dataset = self.input.param("dataset", "default")
        self.primary_indx_type = self.input.param("primary_indx_type", "GSI")
        self.index_type = self.input.param("index_type", "GSI")
        self.primary_indx_drop = self.input.param("primary_indx_drop", False)
        self.monitoring = self.input.param("monitoring", False)
        self.isprepared = False
        self.skip_primary_index = self.input.param("skip_primary_index", False)
        self.scan_consistency = self.input.param("scan_consistency", "REQUEST_PLUS")
        if self.primary_indx_type.lower() == "gsi":
            self.gsi_type = self.input.param("gsi_type", None)
        else:
            self.gsi_type = None
        if self.input.param("reload_data", False):
            for bucket in self.buckets:
                self.cluster.bucket_flush(self.master, bucket=bucket, timeout=self.wait_timeout * 5)
            self.gens_load = self.generate_docs(self.docs_per_day)
            self.load(self.gens_load, flag=self.item_flag)
        self.gens_load = self.generate_docs(self.docs_per_day)
        if self.input.param("gomaxprocs", None):
            self.configure_gomaxprocs()
        self.gen_results = TuqGenerators(self.log, self.generate_full_docs_list(self.gens_load))
        if not self.analytics:
            self.create_primary_index_for_3_0_and_greater()
        if self.analytics:
            self.setup_analytics()
            self.sleep(30, "wait for analytics setup")

    def suite_setUp(self):
        try:
            self.load(self.gens_load, flag=self.item_flag)
            if not self.input.param("skip_build_tuq", True):
                self._build_tuq(self.master)
            self.skip_buckets_handle = True
        except:
            self.log.error("SUITE SETUP FAILED")
            self.tearDown()

    def tearDown(self):
        if self._testMethodName == "suite_tearDown":
            self.skip_buckets_handle = False
        if self.analytics:
            data = "use Default ;" + "\n"
            for bucket in self.buckets:
                data += "disconnect bucket {0} if connected;".format(bucket.name) + "\n"
                data += "drop dataset {0} if exists;".format(bucket.name + "_shadow") + "\n"
                data += "drop bucket {0} if exists;".format(bucket.name) + "\n"
            filename = "file.txt"
            f = open(filename, "w")
            f.write(data)
            f.close()
            url = "http://{0}:8095/analytics/service".format(self.master.ip)
            # "statement@file.txt" assumed (masked in source)
            cmd = 'curl -s --data pretty=true --data-urlencode "statement@file.txt" ' + url
            os.system(cmd)
            os.remove(filename)
        super(QueryTests, self).tearDown()

    def suite_tearDown(self):
        if not self.input.param("skip_build_tuq", False):
            if hasattr(self, "shell"):
                self.shell.execute_command("killall /tmp/tuq/cbq-engine")
                self.shell.execute_command("killall tuqtng")
                self.shell.disconnect()

    def setup_analytics(self):
        # data = ""
        # for bucket in self.buckets:
        #         data += 'disconnect bucket {0} ;'.format(bucket.name) + "\n"
        #         data += 'connect bucket {0};'.format(bucket.name) + "\n"
        # filename = "file.txt"
        # f = open(filename,'w')
        # f.write(data)
        # f.close()
        # url = 'http://{0}:8095/analytics/service'.format(self.master.ip)
        # cmd = 'curl -s --data pretty=true --data-urlencode "statement@file.txt" ' + url
        # os.system(cmd)
        # os.remove(filename)
        data = "use Default;" + "\n"
        for bucket in self.buckets:
            data += (
                'create bucket {0} with {{"bucket":"{0}","nodes":"{1}"}} ;'.format(bucket.name, self.master.ip) + "\n"
            )
            data += "create shadow dataset {1} on {0}; ".format(bucket.name, bucket.name + "_shadow") + "\n"
            data += "connect bucket {0} ;".format(bucket.name) + "\n"
        filename = "file.txt"
        f = open(filename, "w")
        f.write(data)
        f.close()
        url = "http://{0}:8095/analytics/service".format(self.master.ip)
        # "statement@file.txt" assumed (masked in source)
        cmd = 'curl -s --data pretty=true --data-urlencode "statement@file.txt" ' + url
        os.system(cmd)
        os.remove(filename)

    ##############################################################################################
    #
    #   SIMPLE CHECKS
    ##############################################################################################
    def test_simple_check(self):
        for bucket in self.buckets:
            if self.monitoring:
                e = threading.Event()
                t1 = threading.Thread(name="run_simple", target=self.run_active_requests, args=(e, 2))
                t1.start()

            query = "select * from %s" % (bucket.name)
            self.run_cbq_query(query)
            logging.debug("event is set")
            if self.monitoring:
                e.set()
                t1.join(100)
            query_template = "FROM %s select $str0, $str1 ORDER BY $str0,$str1 ASC" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_joins_monitoring(self):
        for bucket in self.buckets:
            e = threading.Event()
            if self.monitoring:
                e = threading.Event()
                t2 = threading.Thread(name="run_joins", target=self.run_active_requests, args=(e, 2))
                t2.start()
            query = (
                "select * from %s b1 inner join %s b2 on keys b1.CurrencyCode inner join %s b3 on keys b1.CurrencyCode left outer join %s b4 on keys b1.CurrencyCode"
                % (bucket.name, bucket.name, bucket.name, bucket.name)
            )
            actual_result = self.run_cbq_query(query)
            logging.debug("event is set")
            if self.monitoring:
                e.set()
                t2.join(100)

    def run_active_requests(self, e, t):
        while not e.isSet():
            logging.debug("wait_for_event_timeout starting")
            event_is_set = e.wait(t)
            logging.debug("event set: %s", event_is_set)
            if event_is_set:
                result = self.run_cbq_query("select * from system:active_requests")
                print result
                self.assertTrue(result["metrics"]["resultCount"] == 1)
                requestId = result["requestID"]
                result = self.run_cbq_query('delete from system:active_requests where requestId  =  "%s"' % requestId)
                time.sleep(20)
                result = self.run_cbq_query(
                    'select * from system:active_requests  where requestId  =  "%s"' % requestId
                )
                self.assertTrue(result["metrics"]["resultCount"] == 0)
                result = self.run_cbq_query("select * from system:completed_requests")
                print result
                requestId = result["requestID"]
                result = self.run_cbq_query(
                    'delete from system:completed_requests where requestId  =  "%s"' % requestId
                )
                time.sleep(10)
                result = self.run_cbq_query(
                    'select * from system:completed_requests where requestId  =  "%s"' % requestId
                )
                print result
                self.assertTrue(result["metrics"]["resultCount"] == 0)

    def test_simple_negative_check(self):
        queries_errors = {
            "SELECT $str0 FROM {0} WHERE COUNT({0}.$str0)>3": "Aggregates not allowed in WHERE",
            "SELECT *.$str0 FROM {0}": "syntax error",
            "SELECT *.* FROM {0} ... ERROR": "syntax error",
            "FROM %s SELECT $str0 WHERE id=null": "syntax error",
        }
        self.negative_common_body(queries_errors)

    def test_unnest(self):
        for bucket in self.buckets:
            query_template = "SELECT emp.$int0, task FROM %s emp UNNEST emp.$nested_list_3l0 task" % bucket.name
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(sorted(actual_result["results"]), sorted(expected_result))

    def test_subquery_select(self):
        for bucket in self.buckets:
            self.query = "SELECT $str0, $subquery(SELECT COUNT($str0) cn FROM %s d USE KEYS $5) as names FROM %s" % (
                bucket.name,
                bucket.name,
            )
            actual_result, expected_result = self.run_query_with_subquery_select_from_template(self.query)
            self._verify_results(actual_result["results"], expected_result)

    def test_subquery_from(self):
        for bucket in self.buckets:
            self.query = "SELECT tasks.$str0 FROM $subquery(SELECT $str0, $int0 FROM %s) as tasks" % (bucket.name)
            actual_result, expected_result = self.run_query_with_subquery_from_template(self.query)
            self._verify_results(actual_result["results"], expected_result)

    def test_consistent_simple_check(self):
        queries = [
            self.gen_results.generate_query(
                "SELECT $str0, $int0, $int1 FROM %s "
                + "WHERE $str0 IS NOT NULL AND $int0<10 "
                + "OR $int1 = 6 ORDER BY $int0, $int1"
            ),
            self.gen_results.generate_query(
                "SELECT $str0, $int0, $int1 FROM %s "
                + "WHERE $int1 = 6 OR $str0 IS NOT NULL AND "
                + "$int0<10 ORDER BY $int0, $int1"
            ),
        ]
        for bucket in self.buckets:
            actual_result1 = self.run_cbq_query(queries[0] % bucket.name)
            actual_result2 = self.run_cbq_query(queries[1] % bucket.name)
            self.assertTrue(
                actual_result1["results"] == actual_result2["results"],
                "Results are inconsistent.Difference: %s %s %s %s"
                % (
                    len(actual_result1["results"]),
                    len(actual_result2["results"]),
                    actual_result1["results"][:100],
                    actual_result2["results"][:100],
                ),
            )

    def test_simple_nulls(self):
        queries = ['SELECT id FROM %s WHERE id=NULL or id="null"']
        for bucket in self.buckets:
            if self.monitoring:
                e = threading.Event()
                t3 = threading.Thread(name="run_simple_nulls", target=self.run_active_requests, args=(e, 2))
                t3.start()
            for query in queries:
                actual_result = self.run_cbq_query(query % (bucket.name))
                logging.debug("event is set")
                if self.monitoring:
                    e.set()
                    t3.join(100)
                self._verify_results(actual_result["results"], [])

    ##############################################################################################
    #
    #   LIMIT OFFSET CHECKS
    ##############################################################################################

    def test_limit_negative(self):
        # queries_errors = {'SELECT * FROM default LIMIT {0}' : ('Invalid LIMIT value 2.5', 5030)}
        queries_errors = {"SELECT ALL * FROM %s": ("syntax error", 3000)}
        self.negative_common_body(queries_errors)

    def test_limit_offset(self):
        for bucket in self.buckets:
            if self.monitoring:
                e = threading.Event()
                t4 = threading.Thread(name="run_limit_offset", target=self.run_active_requests, args=(e, 2))
                t4.start()
            query_template = "SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            if self.monitoring:
                e.set()
                t4.join(100)
            self._verify_results(actual_result["results"], expected_result)
            query_template = "SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10 OFFSET 10" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)

    def test_limit_offset_zero(self):
        for bucket in self.buckets:
            query_template = "SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 0" % (bucket.name)
            self.query = self.gen_results.generate_query(query_template)
            actual_result = self.run_cbq_query()
            self.assertEquals(
                actual_result["results"],
                [],
                "Results are incorrect.Actual %s.\n Expected: %s.\n" % (actual_result["results"], []),
            )
            query_template = "SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10 OFFSET 0" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self.assertEquals(
                actual_result["results"],
                expected_result,
                "Results are incorrect.Actual %s.\n Expected: %s.\n" % (actual_result["results"], expected_result),
            )

    def test_limit_offset_negative_check(self):
        queries_errors = {
            "SELECT DISTINCT $str0 FROM {0} LIMIT 1.1": "Invalid LIMIT value 1.1",
            "SELECT DISTINCT $str0 FROM {0} OFFSET 1.1": "Invalid OFFSET value 1.1",
        }
        self.negative_common_body(queries_errors)

    def test_limit_offset_sp_char_check(self):
        queries_errors = {
            "SELECT DISTINCT $str0 FROM {0} LIMIT ~": "syntax erro",
            "SELECT DISTINCT $str0 FROM {0} OFFSET ~": "syntax erro",
        }
        self.negative_common_body(queries_errors)

    ##############################################################################################
    #
    #   ALIAS CHECKS
    ##############################################################################################

    def test_simple_alias(self):
        for bucket in self.buckets:
            if self.monitoring:
                e = threading.Event()
                t5 = threading.Thread(name="run_limit_offset", target=self.run_active_requests, args=(e, 2))
                t5.start()
            query_template = "SELECT COUNT($str0) AS COUNT_EMPLOYEE FROM %s" % (bucket.name)
            if self.analytics:
                query_template = "SELECT COUNT(`$str0`) AS COUNT_EMPLOYEE FROM %s" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self.assertEquals(
                actual_result["results"],
                expected_result,
                "Results are incorrect.Actual %s.\n Expected: %s.\n" % (actual_result["results"], expected_result),
            )

            query_template = "SELECT COUNT(*) + 1 AS COUNT_EMPLOYEE FROM %s" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            if self.monitoring:
                e.set()
                t5.join(100)
            expected_result = [{"COUNT_EMPLOYEE": expected_result[0]["COUNT_EMPLOYEE"] + 1}]
            self.assertEquals(
                actual_result["results"],
                expected_result,
                "Results are incorrect.Actual %s.\n Expected: %s.\n" % (actual_result["results"], expected_result),
            )

    def test_simple_negative_alias(self):
        queries_errors = {
            "SELECT $str0._last_name as *": "syntax error",
            "SELECT $str0._last_name as DATABASE ?": "syntax error",
            "SELECT $str0 AS NULL FROM {0}": "syntax error",
            "SELECT $str1 as $str0, $str0 FROM {0}": "Duplicate result alias name",
            "SELECT test.$obj0 as points FROM {0} AS TEST " + "GROUP BY $obj0 AS GROUP_POINT": "syntax error",
        }
        self.negative_common_body(queries_errors)

    def test_alias_from_clause(self):
        queries_templates = [
            "SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ORDER BY points",
            "SELECT $obj0.$_obj0_int0 AS points FROM %s AS test WHERE test.$int0 >0" + " ORDER BY points",
            "SELECT $obj0.$_obj0_int0 AS points FROM %s AS test " + "GROUP BY test.$obj0.$_obj0_int0 ORDER BY points",
        ]
        # if self.analytics:
        #      queries_templates = ['SELECT test.$obj0.$_obj0_int0 AS points FROM %s AS test ORDER BY test.points',
        #            'SELECT test.$obj0.$_obj0_int0 AS points FROM %s AS test WHERE test.$int0 >0'  +\
        #            ' ORDER BY test.points',
        #            'SELECT test.$obj0.$_obj0_int0 AS points FROM %s AS test ' +\
        #            'GROUP BY test.$obj0.$_obj0_int0 ORDER BY test.points']
        for bucket in self.buckets:
            if self.monitoring:
                e = threading.Event()
                t6 = threading.Thread(name="run_limit_offset", target=self.run_active_requests, args=(e, 2))
                t6.start()
            for query_template in queries_templates:
                actual_result, expected_result = self.run_query_from_template(query_template % (bucket.name))
                if self.monitoring:
                    e.set()
                    t6.join(100)
                self._verify_results(actual_result["results"], expected_result)

    def test_alias_from_clause_group(self):
        for bucket in self.buckets:
            query_template = (
                "SELECT $obj0.$_obj0_int0 AS points FROM %s AS test " % (bucket.name)
                + "GROUP BY $obj0.$_obj0_int0 ORDER BY points"
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_alias_order_desc(self):
        for bucket in self.buckets:
            if self.monitoring:
                e = threading.Event()
                t7 = threading.Thread(name="run_limit_offset", target=self.run_active_requests, args=(e, 2))
                t7.start()
            query_template = "SELECT $str0 AS name_new FROM %s AS test ORDER BY name_new DESC" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            if self.monitoring:
                e.set()
                t7.join(100)
            self._verify_results(actual_result["results"], expected_result)

    def test_alias_order_asc(self):
        for bucket in self.buckets:
            query_template = "SELECT $str0 AS name_new FROM %s AS test ORDER BY name_new ASC" % (bucket.name)
            if self.analytics:
                query_template = "SELECT `$str0` AS name_new FROM %s AS test ORDER BY name_new ASC" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_alias_aggr_fn(self):
        for bucket in self.buckets:
            if self.monitoring:
                e = threading.Event()
                t8 = threading.Thread(name="run_limit_offset", target=self.run_active_requests, args=(e, 2))
                t8.start()
            query_template = "SELECT COUNT(TEST.$str0) from %s AS TEST" % (bucket.name)
            if self.analytics:
                query_template = "SELECT COUNT(TEST.`$str0`) from %s AS TEST" % (bucket.name)

            actual_result, expected_result = self.run_query_from_template(query_template)
            if self.monitoring:
                e.set()
                t8.join(100)
            self._verify_results(actual_result["results"], expected_result)

    def test_alias_unnest(self):
        for bucket in self.buckets:
            query_template = "SELECT count(skill) FROM %s AS emp UNNEST emp.$list_str0 AS skill" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

            query_template = "SELECT count(skill) FROM %s AS emp UNNEST emp.$list_str0 skill" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    ##############################################################################################
    #
    #   ORDER BY CHECKS
    ##############################################################################################

    def test_order_by_check(self):
        for bucket in self.buckets:
            query_template = (
                "SELECT $str0, $str1, $obj0.$_obj0_int0 points FROM %s" % (bucket.name)
                + " ORDER BY $str1, $str0, $obj0.$_obj0_int0"
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)
            query_template = "SELECT $str0, $str1 FROM %s" % (bucket.name) + " ORDER BY $obj0.$_obj0_int0, $str0, $str1"
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_order_by_alias(self):
        for bucket in self.buckets:
            query_template = (
                "SELECT $str1, $obj0 AS points FROM %s" % (bucket.name) + " AS test ORDER BY $str1 DESC, points DESC"
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_order_by_alias_arrays(self):
        for bucket in self.buckets:
            query_template = (
                "SELECT $str1, $obj0, $list_str0[0] AS SKILL FROM %s" % (bucket.name)
                + " AS TEST ORDER BY SKILL, $str1, TEST.$obj0"
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_order_by_alias_aggr_fn(self):
        for bucket in self.buckets:
            query_template = (
                "SELECT $int0, $int1, count(*) AS emp_per_month from %s" % (bucket.name)
                + " WHERE $int1 >7 GROUP BY $int0, $int1 ORDER BY emp_per_month, $int1, $int0"
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_order_by_aggr_fn(self):
        for bucket in self.buckets:
            query_template = (
                "SELECT $str1 AS TITLE, min($int1) day FROM %s GROUP" % (bucket.name)
                + " BY $str1 ORDER BY MIN($int1), $str1"
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

            if self.analytics:
                self.query = (
                    "SELECT d.email AS TITLE, min(d.join_day) day FROM %s d GROUP" % (bucket.name)
                    + " BY d.$str1 ORDER BY MIN(d.join_day), d.$str1"
                )
                actual_result1 = self.run_cbq_query()
                self._verify_results(actual_result1["results"], actual_result["results"])

    def test_order_by_precedence(self):
        for bucket in self.buckets:
            query_template = "SELECT $str0, $str1 FROM %s" % (bucket.name) + " ORDER BY $str0, $str1"
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

            query_template = "SELECT $str0, $str1 FROM %s" % (bucket.name) + " ORDER BY $str1, $str0"
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_order_by_satisfy(self):
        for bucket in self.buckets:
            query_template = (
                "SELECT $str0, $list_obj0 FROM %s AS employee " % (bucket.name)
                + "WHERE ANY vm IN employee.$list_obj0 SATISFIES vm.$_list_obj0_int0 > 5 AND"
                + ' vm.$_list_obj0_str0 = "ubuntu" END ORDER BY $str0, $list_obj0[0].$_list_obj0_int0'
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    ##############################################################################################
    #
    #   DISTINCT
    ##############################################################################################

    def test_distinct(self):
        for bucket in self.buckets:
            query_template = "SELECT DISTINCT $str1 FROM %s ORDER BY $str1" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_distinct_nested(self):
        for bucket in self.buckets:
            query_template = (
                "SELECT DISTINCT $obj0.$_obj0_int0 as VAR FROM %s " % (bucket.name) + "ORDER BY $obj0.$_obj0_int0"
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

            query_template = "SELECT DISTINCT $list_str0[0] as skill" + " FROM %s ORDER BY $list_str0[0]" % (
                bucket.name
            )
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

            self.query = "SELECT DISTINCT $obj0.* FROM %s" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    ##############################################################################################
    #
    #   COMPLEX PATHS
    ##############################################################################################

    def test_simple_complex_paths(self):
        for bucket in self.buckets:
            query_template = "SELECT $_obj0_int0 FROM %s.$obj0" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_alias_complex_paths(self):
        for bucket in self.buckets:
            query_template = "SELECT $_obj0_int0 as new_attribute FROM %s.$obj0" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    def test_where_complex_paths(self):
        for bucket in self.buckets:
            query_template = "SELECT $_obj0_int0 FROM %s.$obj0 WHERE $_obj0_int0 = 1" % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result["results"], expected_result)

    ##############################################################################################
    #
    #   COMMON FUNCTIONS
    ##############################################################################################

    def run_query_from_template(self, query_template):
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def run_query_with_subquery_select_from_template(self, query_template):
        subquery_template = re.sub(r".*\$subquery\(", "", query_template)
        subquery_template = subquery_template[: subquery_template.rfind(")")]
        keys_num = int(re.sub(r".*KEYS \$", "", subquery_template).replace("KEYS $", ""))
        subquery_full_list = self.generate_full_docs_list(gens_load=self.gens_load, keys=self._get_keys(keys_num))
        subquery_template = re.sub(r"USE KEYS.*", "", subquery_template)
        sub_results = TuqGenerators(self.log, subquery_full_list)
        self.query = sub_results.generate_query(subquery_template)
        expected_sub = sub_results.generate_expected_result()
        alias = re.sub(r",.*", "", re.sub(r".*\$subquery\(.*\)", "", query_template))
        alias = re.sub(r".*as", "", re.sub(r"FROM.*", "", alias)).strip()
        if not alias:
            alias = "$1"
        for item in self.gen_results.full_set:
            item[alias] = expected_sub[0]
        query_template = re.sub(r",.*\$subquery\(.*\).*%s" % alias, ",%s" % alias, query_template)
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def run_query_with_subquery_from_template(self, query_template):
        subquery_template = re.sub(r".*\$subquery\(", "", query_template)
        subquery_template = subquery_template[: subquery_template.rfind(")")]
        subquery_full_list = self.generate_full_docs_list(gens_load=self.gens_load)
        sub_results = TuqGenerators(self.log, subquery_full_list)
        self.query = sub_results.generate_query(subquery_template)
        expected_sub = sub_results.generate_expected_result()
        alias = re.sub(r",.*", "", re.sub(r".*\$subquery\(.*\)", "", query_template))
        alias = re.sub(r".*as ", "", alias).strip()
        self.gen_results = TuqGenerators(self.log, expected_sub)
        query_template = re.sub(r"\$subquery\(.*\).*%s" % alias, " %s" % alias, query_template)
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def negative_common_body(self, queries_errors={}):
        if not queries_errors:
            self.fail("No queries to run!")
        for bucket in self.buckets:
            for query_template, error in queries_errors.iteritems():
                try:
                    query = self.gen_results.generate_query(query_template)
                    actual_result = self.run_cbq_query(query.format(bucket.name))
                except CBQError as ex:
                    self.log.error(ex)
                    self.assertTrue(
                        str(ex).find(error) != -1,
                        "Error is incorrect.Actual %s.\n Expected: %s.\n" % (str(ex).split(":")[-1], error),
                    )
                else:
                    self.fail("There were no errors. Error expected: %s" % error)

    def run_cbq_query(self, query=None, min_output_size=10, server=None):
        if query is None:
            query = self.query
        if server is None:
            server = self.master
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
        else:
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
            if self.input.tuq_client and "client" in self.input.tuq_client:
                server = self.tuq_client
        if self.n1ql_port == None or self.n1ql_port == "":
            self.n1ql_port = self.input.param("n1ql_port", 8093)
            if not self.n1ql_port:
                self.log.info(" n1ql_port is not defined, processing will not proceed further")
                raise Exception("n1ql_port is not defined, processing will not proceed further")
        query_params = {}
        cred_params = {"creds": []}
        for bucket in self.buckets:
            if bucket.saslPassword:
                cred_params["creds"].append({"user": "******" % bucket.name, "pass": bucket.saslPassword})
        query_params.update(cred_params)
        if self.use_rest:
            query_params.update({"scan_consistency": self.scan_consistency})
            self.log.info("RUN QUERY %s" % query)

            if self.analytics:
                query = query + ";"
                for bucket in self.buckets:
                    query = query.replace(bucket.name, bucket.name + "_shadow")
                result = RestConnection(server).analytics_tool(query, 8095, query_params=query_params)

            else:
                result = RestConnection(server).query_tool(query, self.n1ql_port, query_params=query_params)

        else:
            if self.version == "git_repo":
                output = self.shell.execute_commands_inside(
                    "$GOPATH/src/github.com/couchbase/query/" + "shell/cbq/cbq ", "", "", "", "", "", ""
                )
            else:
                os = self.shell.extract_remote_info().type.lower()
                # if (query.find("VALUES") > 0):
                if not (self.isprepared):
                    query = query.replace('"', '\\"')
                    query = query.replace("`", "\\`")
                if os == "linux":
                    cmd = "%s/cbq  -engine=http://%s:8091/ -q" % (testconstants.LINUX_COUCHBASE_BIN_PATH, server.ip)
                elif os == "windows":
                    cmd = "%s/cbq  -q" % (testconstants.WIN_COUCHBASE_BIN_PATH)
                output = self.shell.execute_commands_inside(cmd, query, "", "", "", "", "")
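                # The shell may return the JSON with its leading '{' stripped;
                # re-add it before parsing (assumption inferred from the check below).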
                if not output.startswith("{"):
                    output1 = "{" + str(output)
                else:
                    output1 = output
                result = json.loads(output1)
        if isinstance(result, str) or "errors" in result:
            raise CBQError(result, server.ip)
        self.log.info("TOTAL ELAPSED TIME: %s" % result["metrics"]["elapsedTime"])
        return result

    def build_url(self, version):
        info = self.shell.extract_remote_info()
        dist_type = info.distribution_type.lower()
        if dist_type in ["ubuntu", "centos", "red hat"]:
            url = "https://s3.amazonaws.com/packages.couchbase.com/releases/couchbase-query/dp1/"
            url += "couchbase-query_%s_%s_linux.tar.gz" % (version, info.architecture_type)
        # TODO for windows
        return url

    def _build_tuq(self, server):
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != "windows":
                goroot = testconstants.LINUX_GOROOT
                gopath = testconstants.LINUX_GOPATH
            else:
                goroot = testconstants.WINDOWS_GOROOT
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if self.input.tuq_client and "goroot" in self.input.tuq_client:
                goroot = self.input.tuq_client["goroot"]
            cmd = "rm -rf {0}/src/github.com".format(gopath)
            self.shell.execute_command(cmd)
            cmd = (
                "export GOROOT={0} && export GOPATH={1} &&".format(goroot, gopath)
                + " export PATH=$PATH:$GOROOT/bin && "
                + "go get github.com/couchbaselabs/tuqtng;"
                + "cd $GOPATH/src/github.com/couchbaselabs/tuqtng; "
                + "go get -d -v ./...; cd ."
            )
            self.shell.execute_command(cmd)
            cmd = (
                "export GOROOT={0} && export GOPATH={1} &&".format(goroot, gopath)
                + " export PATH=$PATH:$GOROOT/bin && "
                + "cd $GOPATH/src/github.com/couchbaselabs/tuqtng; go build; cd ."
            )
            self.shell.execute_command(cmd)
            cmd = (
                "export GOROOT={0} && export GOPATH={1} &&".format(goroot, gopath)
                + " export PATH=$PATH:$GOROOT/bin && "
                + "cd $GOPATH/src/github.com/couchbaselabs/tuqtng/tuq_client; go build; cd ."
            )
            self.shell.execute_command(cmd)
        else:
            cbq_url = self.build_url(self.version)
            # TODO for windows
            cmd = "cd /tmp; mkdir tuq;cd tuq; wget {0} -O tuq.tar.gz;".format(cbq_url)
            cmd += "tar -xvf tuq.tar.gz;rm -rf tuq.tar.gz"
            self.shell.execute_command(cmd)

    def _start_command_line_query(self, server):
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != "windows":
                gopath = testconstants.LINUX_GOPATH
            else:
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if os == "windows":
                cmd = "cd %s/src/github.com/couchbase/query/server/main; " % (
                    gopath
                ) + "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" % (server.ip, server.port)
            else:
                cmd = "cd %s/src/github.com/couchbase/query//server/main; " % (
                    gopath
                ) + "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" % (server.ip, server.port)
            self.shell.execute_command(cmd)
        elif self.version == "sherlock":
            if self.services_init.find("n1ql") != -1:
                return
            os = self.shell.extract_remote_info().type.lower()
            if os != "windows":
                couchbase_path = testconstants.LINUX_COUCHBASE_BIN_PATH
            else:
                couchbase_path = testconstants.WIN_COUCHBASE_BIN_PATH
            if self.input.tuq_client and "sherlock_path" in self.input.tuq_client:
                couchbase_path = "%s/bin" % self.input.tuq_client["sherlock_path"]
                print "PATH TO SHERLOCK: %s" % couchbase_path
            if os == "windows":
                cmd = "cd %s; " % (couchbase_path) + "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" % (
                    server.ip,
                    server.port,
                )
            else:
                cmd = "cd %s; " % (couchbase_path) + "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" % (
                    server.ip,
                    server.port,
                )
                n1ql_port = self.input.param("n1ql_port", None)
                if server.ip == "127.0.0.1" and server.n1ql_port:
                    n1ql_port = server.n1ql_port
                if n1ql_port:
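                    # A custom port is exposed via cbq-engine's -http flag (as used below).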
                    cmd = "cd %s; " % (
                        couchbase_path
                    ) + './cbq-engine -datastore http://%s:%s/ -http=":%s">n1ql.log 2>&1 &' % (
                        server.ip,
                        server.port,
                        n1ql_port,
                    )
            self.shell.execute_command(cmd)
        else:
            os = self.shell.extract_remote_info().type.lower()
            if os != "windows":
                cmd = "cd /tmp/tuq;./cbq-engine -couchbase http://%s:%s/ >/dev/null 2>&1 &" % (server.ip, server.port)
            else:
                cmd = "cd /cygdrive/c/tuq;./cbq-engine.exe -couchbase http://%s:%s/ >/dev/null 2>&1 &" % (
                    server.ip,
                    server.port,
                )
            self.shell.execute_command(cmd)

    def _parse_query_output(self, output):
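        # Strips the interactive "cbq>" / "tuq_client>" prompts that wrap the JSON
        # payload, e.g. (illustrative): 'cbq> {"results": []}\ncbq>' -> {"results": []}.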
        if output.find("cbq>") == 0:
            output = output[output.find("cbq>") + 4 :].strip()
        if output.find("tuq_client>") == 0:
            output = output[output.find("tuq_client>") + 11 :].strip()
        if output.find("cbq>") != -1:
            output = output[: output.find("cbq>")].strip()
        if output.find("tuq_client>") != -1:
            output = output[: output.find("tuq_client>")].strip()
        return json.loads(output)

    def generate_docs(self, num_items, start=0):
        try:
            return getattr(self, "generate_docs_" + self.dataset)(num_items, start)
        except AttributeError:
            self.fail("There is no dataset %s, please enter a valid one" % self.dataset)

    def generate_docs_default(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee(docs_per_day, start)

    def generate_docs_sabre(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_sabre(docs_per_day, start)

    def generate_docs_employee(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_data(docs_per_day=docs_per_day, start=start)

    def generate_docs_simple(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_simple_data(docs_per_day=docs_per_day, start=start)

    def generate_docs_sales(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_sales_data(docs_per_day=docs_per_day, start=start)

    def generate_docs_bigdata(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_bigdata(end=(1000 * docs_per_day), start=start, value_size=self.value_size)

    def _verify_results(self, actual_result, expected_result, missing_count=1, extra_count=1):
        if len(actual_result) != len(expected_result):
            missing, extra = self.check_missing_and_extra(actual_result, expected_result)
            self.log.error("Missing items: %s.\n Extra items: %s" % (missing[:missing_count], extra[:extra_count]))
            self.fail(
                "Results are incorrect.Actual num %s. Expected num: %s.\n" % (len(actual_result), len(expected_result))
            )
        if self.max_verify is not None:
            actual_result = actual_result[: self.max_verify]
            expected_result = expected_result[: self.max_verify]

        msg = (
            "Results are incorrect.\n Actual first and last 100: %s.\n ... \n %s\n"
            + "Expected first and last 100: %s.\n ... \n %s"
        )
        self.assertTrue(
            actual_result == expected_result,
            msg % (actual_result[:100], actual_result[-100:], expected_result[:100], expected_result[-100:]),
        )

    def check_missing_and_extra(self, actual, expected):
        missing = []
        extra = []
        for item in actual:
            if not (item in expected):
                extra.append(item)
        for item in expected:
            if not (item in actual):
                missing.append(item)
        return missing, extra

    def sort_nested_list(self, result):
        actual_result = []
        for item in result:
            curr_item = {}
            for key, value in item.iteritems():
                if isinstance(value, list) or isinstance(value, set):
                    curr_item[key] = sorted(value)
                else:
                    curr_item[key] = value
            actual_result.append(curr_item)
        return actual_result

    def configure_gomaxprocs(self):
        max_proc = self.input.param("gomaxprocs", None)
        cmd = "export GOMAXPROCS=%s" % max_proc
        for server in self.servers:
            shell_connection = RemoteMachineShellConnection(server)
            shell_connection.execute_command(cmd)

    def create_primary_index_for_3_0_and_greater(self):
        self.log.info("CREATE PRIMARY INDEX using %s" % self.primary_indx_type)
        rest = RestConnection(self.master)
        versions = rest.get_nodes_versions()
        if versions[0].startswith("4") or versions[0].startswith("3"):
            for bucket in self.buckets:
                if self.primary_indx_drop:
                    self.log.info("Dropping primary index for %s using %s ..." % (bucket.name, self.primary_indx_type))
                    self.query = "DROP PRIMARY INDEX ON %s USING %s" % (bucket.name, self.primary_indx_type)
                    # self.run_cbq_query()
                    self.sleep(3, "Sleep for some time after index drop")
                self.query = 'select * from system:indexes where name="#primary" and keyspace_id = "%s"' % bucket.name
                res = self.run_cbq_query()
                self.sleep(10)
                print res
                if self.monitoring:
                    self.query = "delete from system:completed_requests"
                    self.run_cbq_query()
                if not self.skip_primary_index:
                    if res["metrics"]["resultCount"] == 0:
                        self.query = "CREATE PRIMARY INDEX ON %s USING %s" % (bucket.name, self.primary_indx_type)
                        self.log.info("Creating primary index for %s ..." % bucket.name)
                        # if self.gsi_type:
                        #     self.query += " WITH {'index_type': 'memdb'}"
                        try:
                            self.run_cbq_query()
                            self.primary_index_created = True
                            if self.primary_indx_type.lower() == "gsi":
                                self._wait_for_index_online(bucket, "#primary")
                        except Exception as ex:
                            self.log.info(str(ex))
                if self.monitoring:
                    self.query = "select * from system:active_requests"
                    result = self.run_cbq_query()
                    print result
                    self.assertTrue(result["metrics"]["resultCount"] == 1)
                    self.query = "select * from system:completed_requests"
                    time.sleep(20)
                    result = self.run_cbq_query()
                    print result

    def test_url(self):
        '''
        Description: This test ensures that the command-line cbq tool can and will connect to valid URLs
        and will throw an error if the URL is invalid.

        Steps:
        1. Create a list of URLs that should work and a list of URLs that should not work
        2. Send the cbq command to a remote shell on each server. The command passes the URL via the -e parameter

        Author: Korrigan Clark
        Date Modified: 26/07/2017
        '''
        prefixes = ['http://', 'https://', 'couchbase://', 'couchbases://']
        ips = ['localhost', '127.0.0.1'] + [str(server.ip) for server in self.servers]
        ports = [':8091', ':8093', ':18091', ':18093']

        pass_urls = []

        # creates (url, port) tuples that should be valid;
        # the port is used to verify cbq connected to the proper endpoint
        for prefix in prefixes:
            for ip in ips:
                pass_urls.append((ip, '8091'))
                if prefix == 'couchbase://':
                    pass_urls.append((prefix+ip, '8091'))
                if prefix == 'couchbases://':
                    pass_urls.append((prefix+ip, '18091'))
                for port in ports:
                    if prefix == 'http://' and port in [':8091', ':8093']:
                        pass_urls.append((prefix+ip+port, port))
                    if prefix == 'https://' and port in [':18091', ':18093']:
                        pass_urls.append((prefix+ip+port, port))

        fail_urls = []

        # creates urls that should not work, either a wrong prefix/port combo or an invalid url
        for prefix in prefixes:
            for ip in ips:
                for port in ports:
                    if prefix == 'http://' and port in [':18091', ':18093']:
                        fail_urls.append(prefix+ip+port)
                        fail_urls.append(prefix+ip+port+'!')
                        fail_urls.append(prefix+ip+'!'+port)
                    if prefix == 'https://' and port in [':8091', ':8093']:
                        fail_urls.append(prefix+ip+port)
                        fail_urls.append(prefix+ip+port+'!')
                        fail_urls.append(prefix+ip+'!'+port)
                    if prefix == 'couchbase://':
                        fail_urls.append(prefix+ip+port)
                    if prefix == 'couchbases://':
                        fail_urls.append(prefix+ip+port)

        # run through all servers and try to connect cbq to the given url
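        # e.g. (illustrative): 'http://localhost:8091' is expected to connect and echo ':8091',
        # while 'couchbase://localhost:8093' is expected to fail with status:FAIL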
        for server in self.servers:
            for bucket in self.buckets:
                shell = RemoteMachineShellConnection(server)
                try:
                    for url in pass_urls:
                        cmd = self.path+'cbq  -u=Administrator -p=password -e='+url[0]+' -no-ssl-verify=true'
                        o = shell.execute_commands_inside(cmd, '', ['select * from system:nodes;', '\quit;'], '', '', '', '')
                        self.assertTrue(url[1] in o)

                    for url in fail_urls:
                        cmd = self.path+'cbq  -u=Administrator -p=password -e='+url+' -no-ssl-verify=true'
                        o = shell.execute_commands_inside(cmd, '', ['select * from system:nodes;', '\quit;'], '', '', '', '')
                        self.assertTrue('status:FAIL' in o)
                finally:
                    shell.disconnect()
Exemple #50
0
class QueryTests(BaseTestCase):
    def setUp(self):
        if not self._testMethodName == 'suite_setUp':
            self.skip_buckets_handle = True
        super(QueryTests, self).setUp()
        self.version = self.input.param("cbq_version", "sherlock")
        if self.input.tuq_client and "client" in self.input.tuq_client:
            self.shell = RemoteMachineShellConnection(self.input.tuq_client["client"])
        else:
            self.shell = RemoteMachineShellConnection(self.master)
        if not self._testMethodName == 'suite_setUp' and self.input.param("cbq_version", "sherlock") != 'sherlock':
            self._start_command_line_query(self.master)
        self.use_rest = self.input.param("use_rest", True)
        self.max_verify = self.input.param("max_verify", None)
        self.buckets = RestConnection(self.master).get_buckets()
        self.docs_per_day = self.input.param("doc-per-day", 49)
        self.item_flag = self.input.param("item_flag", 4042322160)
        self.n1ql_port = self.input.param("n1ql_port", 8093)
        self.dataset = self.input.param("dataset", "default")
        self.primary_indx_type = self.input.param("primary_indx_type", 'VIEW')
        self.primary_indx_drop = self.input.param("primary_indx_drop", False)
        self.scan_consistency = self.input.param("scan_consistency", 'REQUEST_PLUS')
        if self.input.param("reload_data", False):
            for bucket in self.buckets:
                self.cluster.bucket_flush(self.master, bucket=bucket,
                                          timeout=self.wait_timeout * 5)
            self.gens_load = self.generate_docs(self.docs_per_day)
            self.load(self.gens_load, flag=self.item_flag)
        self.gens_load = self.generate_docs(self.docs_per_day)
        if self.input.param("gomaxprocs", None):
            self.configure_gomaxprocs()
        self.gen_results = TuqGenerators(self.log, self.generate_full_docs_list(self.gens_load))
        # temporary for MB-12848
        self.create_primary_index_for_3_0_and_greater()

    def suite_setUp(self):
        try:
            self.load(self.gens_load, flag=self.item_flag)
#            self.create_primary_index_for_3_0_and_greater()
            if not self.input.param("skip_build_tuq", True):
                self._build_tuq(self.master)
            self.skip_buckets_handle = True
        except:
            self.log.error('SUITE SETUP FAILED')
            self.tearDown()

    def tearDown(self):
        if self._testMethodName == 'suite_tearDown':
            self.skip_buckets_handle = False
        super(QueryTests, self).tearDown()

    def suite_tearDown(self):
        if not self.input.param("skip_build_tuq", False):
            if hasattr(self, 'shell'):
                self.shell.execute_command("killall /tmp/tuq/cbq-engine")
                self.shell.execute_command("killall tuqtng")
                self.shell.disconnect()


##############################################################################################
#
#   SIMPLE CHECKS
##############################################################################################
    def test_simple_check(self):
        for bucket in self.buckets:
            query_template = 'FROM %s select $str0, $str1 ORDER BY $str0,$str1 ASC' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_simple_negative_check(self):
        queries_errors = {'SELECT $str0 FROM {0} WHERE COUNT({0}.$str0)>3' :
                          'Aggregates not allowed in WHERE',
                          'SELECT *.$str0 FROM {0}' : 'syntax error',
                          'SELECT *.* FROM {0} ... ERROR' : 'syntax error',
                          'FROM %s SELECT $str0 WHERE id=null' : 'syntax error',}
        self.negative_common_body(queries_errors)

    def test_unnest(self):
        for bucket in self.buckets:
            query_template = 'SELECT emp.$int0, task FROM %s emp UNNEST emp.$nested_list_3l0 task' % bucket.name
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(sorted(actual_result['results']), sorted(expected_result))

    def test_subquery_select(self):
        for bucket in self.buckets:
            self.query = 'SELECT $str0, $subquery(SELECT COUNT($str0) cn FROM %s d USE KEYS $5) as names FROM %s' % (bucket.name,
                                                                                                                     bucket.name)
            actual_result, expected_result = self.run_query_with_subquery_select_from_template(self.query)
            self._verify_results(actual_result['results'], expected_result)

    def test_subquery_from(self):
        for bucket in self.buckets:
            self.query = 'SELECT tasks.$str0 FROM $subquery(SELECT $str0, $int0 FROM %s) as tasks' % (bucket.name)
            actual_result, expected_result = self.run_query_with_subquery_from_template(self.query)
            self._verify_results(actual_result['results'], expected_result)

    def test_consistent_simple_check(self):
        queries = [self.gen_results.generate_query('SELECT $str0, $int0, $int1 FROM %s ' +\
                    'WHERE $str0 IS NOT NULL AND $int0<10 ' +\
                    'OR $int1 = 6 ORDER BY $int0, $int1'),
                   self.gen_results.generate_query('SELECT $str0, $int0, $int1 FROM %s ' +\
                    'WHERE $int1 = 6 OR $str0 IS NOT NULL AND ' +\
                    '$int0<10 ORDER BY $int0, $int1')]
        for bucket in self.buckets:
            actual_result1 = self.run_cbq_query(queries[0] % bucket.name)
            actual_result2 = self.run_cbq_query(queries[1] % bucket.name)
            self.assertTrue(actual_result1['results'] == actual_result2['results'],
                              "Results are inconsistent.Difference: %s %s %s %s" %(
                                    len(actual_result1['results']), len(actual_result2['results']),
                                    actual_result1['results'][:100], actual_result2['results'][:100]))

    def test_simple_nulls(self):
        queries = ['SELECT id FROM %s WHERE id=NULL or id="null"']
        for bucket in self.buckets:
            for query in queries:
                actual_result = self.run_cbq_query(query % (bucket.name))
                self._verify_results(actual_result['results'], [])

##############################################################################################
#
#   LIMIT OFFSET CHECKS
##############################################################################################

    def test_limit_negative(self):
        #queries_errors = {'SELECT * FROM default LIMIT {0}' : ('Invalid LIMIT value 2.5', 5030)}
        queries_errors = {'SELECT ALL * FROM %s' : ('syntax error', 3000)}
        self.negative_common_body(queries_errors)

    def test_limit_offset(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10 OFFSET 10' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_limit_offset_zero(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 0' % (bucket.name)
            self.query = self.gen_results.generate_query(query_template)
            actual_result = self.run_cbq_query()
            self.assertEquals(actual_result['results'], [],
                              "Results are incorrect. Actual: %s.\n Expected: %s.\n" % (
                                        actual_result['results'], []))
            query_template = 'SELECT DISTINCT $str0 FROM %s ORDER BY $str0 LIMIT 10 OFFSET 0' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self.assertEquals(actual_result['results'], expected_result,
                              "Results are incorrect. Actual: %s.\n Expected: %s.\n" % (
                                        actual_result['results'], expected_result))

    def test_limit_offset_negative_check(self):
        queries_errors = {'SELECT DISTINCT $str0 FROM {0} LIMIT 1.1' :
                          'Invalid LIMIT value 1.1',
                          'SELECT DISTINCT $str0 FROM {0} OFFSET 1.1' :
                          'Invalid OFFSET value 1.1'}
        self.negative_common_body(queries_errors)

    def test_limit_offset_sp_char_check(self):
        queries_errors = {'SELECT DISTINCT $str0 FROM {0} LIMIT ~' :
                          'syntax erro',
                          'SELECT DISTINCT $str0 FROM {0} OFFSET ~' :
                          'syntax erro'}
        self.negative_common_body(queries_errors)

##############################################################################################
#
#   ALIAS CHECKS
##############################################################################################

    def test_simple_alias(self):
        for bucket in self.buckets:
            query_template = 'SELECT COUNT($str0) AS COUNT_EMPLOYEE FROM %s' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self.assertEquals(actual_result['results'], expected_result,
                              "Results are incorrect. Actual: %s.\n Expected: %s.\n" % (
                                        actual_result['results'], expected_result))

            query_template = 'SELECT COUNT(*) + 1 AS COUNT_EMPLOYEE FROM %s' % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            expected_result = [ { "COUNT_EMPLOYEE": expected_result[0]['COUNT_EMPLOYEE'] + 1 } ]
            self.assertEquals(actual_result['results'], expected_result,
                              "Results are incorrect. Actual: %s.\n Expected: %s.\n" % (
                                        actual_result['results'], expected_result))

    def test_simple_negative_alias(self):
        queries_errors = {'SELECT $str0._last_name as *' : 'syntax error',
                          'SELECT $str0._last_name as DATABASE ?' : 'syntax error',
                          'SELECT $str0 AS NULL FROM {0}' : 'syntax error',
                          'SELECT $str1 as $str0, $str0 FROM {0}' :
                                'Duplicate result alias name',
                          'SELECT test.$obj0 as points FROM {0} AS TEST ' +
                           'GROUP BY $obj0 AS GROUP_POINT' :
                                'syntax error'}
        self.negative_common_body(queries_errors)

    def test_alias_from_clause(self):
        queries_templates = ['SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ORDER BY points',
                   'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test WHERE test.$int0 >0'  +\
                   ' ORDER BY points',
                   'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ' +\
                   'GROUP BY test.$obj0.$_obj0_int0 ORDER BY points']
        for bucket in self.buckets:
            for query_template in queries_templates:
                actual_result, expected_result = self.run_query_from_template(query_template  % (bucket.name))
                self._verify_results(actual_result['results'], expected_result)

    def test_alias_from_clause_group(self):
        for bucket in self.buckets:
            query_template = 'SELECT $obj0.$_obj0_int0 AS points FROM %s AS test ' %(bucket.name) +\
                         'GROUP BY $obj0.$_obj0_int0 ORDER BY points'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_order_desc(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0 AS name_new FROM %s AS test ORDER BY name_new DESC' %(
                                                                                bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_order_asc(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0 AS name_new FROM %s AS test ORDER BY name_new ASC' %(
                                                                                bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT COUNT(TEST.$str0) from %s AS TEST' %(bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_unnest(self):
        for bucket in self.buckets:
            query_template = 'SELECT count(skill) FROM %s AS emp UNNEST emp.$list_str0 AS skill' %(
                                                                            bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

            query_template = 'SELECT count(skill) FROM %s AS emp UNNEST emp.$list_str0 skill' %(
                                                                            bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

##############################################################################################
#
#   ORDER BY CHECKS
##############################################################################################

    def test_order_by_check(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $str1, $obj0.$_obj0_int0 points FROM %s'  % (bucket.name) +\
            ' ORDER BY $str1, $str0, $obj0.$_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)
            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $obj0.$_obj0_int0, $str0, $str1'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_alias(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1, $obj0 AS points FROM %s'  % (bucket.name) +\
            ' AS test ORDER BY $str1 DESC, points DESC'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_alias_arrays(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1, $obj0, $list_str0[0] AS SKILL FROM %s'  % (
                                                                            bucket.name) +\
            ' AS TEST ORDER BY SKILL, $str1, TEST.$obj0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_alias_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT $int0, $int1, count(*) AS emp_per_month from %s'% (
                                                                            bucket.name) +\
            ' WHERE $int1 >7 GROUP BY $int0, $int1 ORDER BY emp_per_month, $int1, $int0'  
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_aggr_fn(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str1 AS TITLE, min($int1) day FROM %s GROUP'  % (bucket.name) +\
            ' BY $str1 ORDER BY MIN($int1), $str1'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_precedence(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $str0, $str1'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

            query_template = 'SELECT $str0, $str1 FROM %s'  % (bucket.name) +\
            ' ORDER BY $str1, $str0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_order_by_satisfy(self):
        for bucket in self.buckets:
            query_template = 'SELECT $str0, $list_obj0 FROM %s AS employee ' % (bucket.name) +\
                        'WHERE ANY vm IN employee.$list_obj0 SATISFIES vm.$_list_obj0_int0 > 5 AND' +\
                        ' vm.$_list_obj0_str0 = "ubuntu" END ORDER BY $str0, $list_obj0[0].$_list_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

##############################################################################################
#
#   DISTINCT
##############################################################################################

    def test_distinct(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $str1 FROM %s ORDER BY $str1'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_distinct_nested(self):
        for bucket in self.buckets:
            query_template = 'SELECT DISTINCT $obj0.$_obj0_int0 as VAR FROM %s '  % (bucket.name) +\
                         'ORDER BY $obj0.$_obj0_int0'
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

            query_template = 'SELECT DISTINCT $list_str0[0] as skill' +\
                         ' FROM %s ORDER BY $list_str0[0]'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

            query_template = 'SELECT DISTINCT $obj0.* FROM %s'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

##############################################################################################
#
#   COMPLEX PATHS
##############################################################################################

    def test_simple_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 FROM %s.$obj0'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_alias_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 as new_attribute FROM %s.$obj0'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

    def test_where_complex_paths(self):
        for bucket in self.buckets:
            query_template = 'SELECT $_obj0_int0 FROM %s.$obj0 WHERE $_obj0_int0 = 1'  % (bucket.name)
            actual_result, expected_result = self.run_query_from_template(query_template)
            self._verify_results(actual_result['results'], expected_result)

##############################################################################################
#
#   COMMON FUNCTIONS
##############################################################################################

    def run_query_from_template(self, query_template):
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def run_query_with_subquery_select_from_template(self, query_template):
        subquery_template = re.sub(r'.*\$subquery\(', '', query_template)
        subquery_template = subquery_template[:subquery_template.rfind(')')]
        keys_num = int(re.sub(r'.*KEYS \$', '', subquery_template).replace('KEYS $', ''))
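        # e.g. (illustrative): for a template containing 'USE KEYS $5',
        # this recovers keys_num == 5.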
        subquery_full_list = self.generate_full_docs_list(gens_load=self.gens_load,keys=self._get_keys(keys_num))
        subquery_template = re.sub(r'USE KEYS.*', '', subquery_template)
        sub_results = TuqGenerators(self.log, subquery_full_list)
        self.query = sub_results.generate_query(subquery_template)
        expected_sub = sub_results.generate_expected_result()
        alias = re.sub(r',.*', '', re.sub(r'.*\$subquery\(.*\)', '', query_template))
        alias = re.sub(r'.*as','', re.sub(r'FROM.*', '', alias)).strip()
        if not alias:
            alias = '$1'
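        # Inject the subquery's expected value into every generated doc under the
        # alias key, so the outer expected result can be computed without the subquery.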
        for item in self.gen_results.full_set:
            item[alias] = expected_sub[0]
        query_template = re.sub(r',.*\$subquery\(.*\).*%s' % alias, ',%s' % alias, query_template)
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def run_query_with_subquery_from_template(self, query_template):
        subquery_template = re.sub(r'.*\$subquery\(', '', query_template)
        subquery_template = subquery_template[:subquery_template.rfind(')')]
        subquery_full_list = self.generate_full_docs_list(gens_load=self.gens_load)
        sub_results = TuqGenerators(self.log, subquery_full_list)
        self.query = sub_results.generate_query(subquery_template)
        expected_sub = sub_results.generate_expected_result()
        alias = re.sub(r',.*', '', re.sub(r'.*\$subquery\(.*\)', '', query_template))
        alias = re.sub(r'.*as ', '', alias).strip()
        self.gen_results = TuqGenerators(self.log, expected_sub)
        query_template = re.sub(r'\$subquery\(.*\).*%s' % alias, ' %s' % alias, query_template)
        self.query = self.gen_results.generate_query(query_template)
        expected_result = self.gen_results.generate_expected_result()
        actual_result = self.run_cbq_query()
        return actual_result, expected_result

    def negative_common_body(self, queries_errors=None):
        if not queries_errors:
            self.fail("No queries to run!")
        for bucket in self.buckets:
            for query_template, error in queries_errors.iteritems():
                try:
                    query = self.gen_results.generate_query(query_template)
                    actual_result = self.run_cbq_query(query.format(bucket.name))
                except CBQError as ex:
                    self.log.error(ex)
                    self.assertTrue(str(ex).find(error) != -1,
                                    "Error is incorrect. Actual: %s.\n Expected: %s.\n" %(
                                                                str(ex).split(':')[-1], error))
                else:
                    self.fail("There were no errors. Error expected: %s" % error)

    def run_cbq_query(self, query=None, min_output_size=10, server=None):
        if query is None:
            query = self.query
        if server is None:
            server = self.master
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
        else:
            if server.ip == "127.0.0.1":
                self.n1ql_port = server.n1ql_port
            if self.input.tuq_client and "client" in self.input.tuq_client:
                server = self.tuq_client
        if self.n1ql_port is None or self.n1ql_port == '':
            self.n1ql_port = self.input.param("n1ql_port", 8093)
            if not self.n1ql_port:
                self.log.info(" n1ql_port is not defined, processing will not proceed further")
                raise Exception("n1ql_port is not defined, processing will not proceed further")
        query_params = {}
        cred_params = {'creds': []}
        for bucket in self.buckets:
            if bucket.saslPassword:
                cred_params['creds'].append({'user': '******' % bucket.name, 'pass': bucket.saslPassword})
        query_params.update(cred_params)
        if self.use_rest:
            query_params.update({'scan_consistency': self.scan_consistency})
            self.log.info('RUN QUERY %s' % query)
            result = RestConnection(server).query_tool(query, self.n1ql_port, query_params=query_params)
        else:
            if self.version == "git_repo":
                output = self.shell.execute_commands_inside("$GOPATH/src/github.com/couchbaselabs/tuqtng/" +\
                                                            "tuq_client/tuq_client " +\
                                                            "-engine=http://%s:8093/" % server.ip,
                                                       subcommands=[query,],
                                                       min_output_size=20,
                                                       end_msg='tuq_client>')
            else:
                output = self.shell.execute_commands_inside("/tmp/tuq/cbq -engine=http://%s:8093/" % server.ip,
                                                           subcommands=[query,],
                                                           min_output_size=20,
                                                           end_msg='cbq>')
            result = self._parse_query_output(output)
        if isinstance(result, str) or 'errors' in result:
            raise CBQError(result, server.ip)
        self.log.info("TOTAL ELAPSED TIME: %s" % result["metrics"]["elapsedTime"])
        return result

    def build_url(self, version):
        info = self.shell.extract_remote_info()
        dist_type = info.distribution_type.lower()
        if dist_type in ["ubuntu", "centos", "red hat"]:
            url = "https://s3.amazonaws.com/packages.couchbase.com/releases/couchbase-query/dp1/"
            url += "couchbase-query_%s_%s_linux.tar.gz" %(
                                version, info.architecture_type)
        #TODO for windows
        return url

    def _build_tuq(self, server):
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                goroot = testconstants.LINUX_GOROOT
                gopath = testconstants.LINUX_GOPATH
            else:
                goroot = testconstants.WINDOWS_GOROOT
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if self.input.tuq_client and "goroot" in self.input.tuq_client:
                goroot = self.input.tuq_client["goroot"]
            cmd = "rm -rf {0}/src/github.com".format(gopath)
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'go get github.com/couchbaselabs/tuqtng;' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; ' +\
                'go get -d -v ./...; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng; go build; cd .'
            self.shell.execute_command(cmd)
            cmd = 'export GOROOT={0} && export GOPATH={1} &&'.format(goroot, gopath) +\
                ' export PATH=$PATH:$GOROOT/bin && ' +\
                'cd $GOPATH/src/github.com/couchbaselabs/tuqtng/tuq_client; go build; cd .'
            self.shell.execute_command(cmd)
        else:
            cbq_url = self.build_url(self.version)
            #TODO for windows
            cmd = "cd /tmp; mkdir tuq;cd tuq; wget {0} -O tuq.tar.gz;".format(cbq_url)
            cmd += "tar -xvf tuq.tar.gz;rm -rf tuq.tar.gz"
            self.shell.execute_command(cmd)

    def _start_command_line_query(self, server):
        if self.version == "git_repo":
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                gopath = testconstants.LINUX_GOPATH
            else:
                gopath = testconstants.WINDOWS_GOPATH
            if self.input.tuq_client and "gopath" in self.input.tuq_client:
                gopath = self.input.tuq_client["gopath"]
            if os == 'windows':
                cmd = "cd %s/src/github.com/couchbase/query/server/main; " % (gopath) +\
                "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s/src/github.com/couchbase/query//server/main; " % (gopath) +\
                "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" %(
                                                                server.ip, server.port)
            self.shell.execute_command(cmd)
        elif self.version == "sherlock":
            if self.services_init.find('n1ql') != -1:
                return
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                couchbase_path = testconstants.LINUX_COUCHBASE_BIN_PATH
            else:
                couchbase_path = testconstants.WIN_COUCHBASE_BIN_PATH
            if self.input.tuq_client and "sherlock_path" in self.input.tuq_client:
                couchbase_path = "%s/bin" % self.input.tuq_client["sherlock_path"]
                print "PATH TO SHERLOCK: %s" % couchbase_path
            if os == 'windows':
                cmd = "cd %s; " % (couchbase_path) +\
                "./cbq-engine.exe -datastore http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd %s; " % (couchbase_path) +\
                "./cbq-engine -datastore http://%s:%s/ >n1ql.log 2>&1 &" %(
                                                                server.ip, server.port)
                n1ql_port = self.input.param("n1ql_port", None)
                if server.ip == "127.0.0.1" and server.n1ql_port:
                    n1ql_port = server.n1ql_port
                if n1ql_port:
                    cmd = "cd %s; " % (couchbase_path) +\
                './cbq-engine -datastore http://%s:%s/ -http=":%s">n1ql.log 2>&1 &' %(
                                                                server.ip, server.port, n1ql_port)
            self.shell.execute_command(cmd)
        else:
            os = self.shell.extract_remote_info().type.lower()
            if os != 'windows':
                cmd = "cd /tmp/tuq;./cbq-engine -couchbase http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            else:
                cmd = "cd /cygdrive/c/tuq;./cbq-engine.exe -couchbase http://%s:%s/ >/dev/null 2>&1 &" %(
                                                                server.ip, server.port)
            self.shell.execute_command(cmd)

    def _parse_query_output(self, output):
        if output.find("cbq>") == 0:
            output = output[output.find("cbq>") + 4:].strip()
        if output.find("tuq_client>") == 0:
            output = output[output.find("tuq_client>") + 11:].strip()
        if output.find("cbq>") != -1:
            output = output[:output.find("cbq>")].strip()
        if output.find("tuq_client>") != -1:
            output = output[:output.find("tuq_client>")].strip()
        return json.loads(output)

    def generate_docs(self, num_items, start=0):
        try:
            return getattr(self, 'generate_docs_' + self.dataset)(num_items, start)
        except AttributeError:
            self.fail("There is no dataset %s, please enter a valid one" % self.dataset)

    def generate_docs_default(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee(docs_per_day, start)

    def generate_docs_sabre(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_sabre(docs_per_day, start)

    def generate_docs_employee(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_data(docs_per_day=docs_per_day, start=start)

    def generate_docs_simple(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_simple_data(docs_per_day=docs_per_day, start=start)

    def generate_docs_sales(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_employee_sales_data(docs_per_day=docs_per_day, start=start)

    def generate_docs_bigdata(self, docs_per_day, start=0):
        json_generator = JsonGenerator()
        return json_generator.generate_docs_bigdata(end=(1000*docs_per_day), start=start, value_size=self.value_size)


    def _verify_results(self, actual_result, expected_result, missing_count=1, extra_count=1):
        if len(actual_result) != len(expected_result):
            missing, extra = self.check_missing_and_extra(actual_result, expected_result)
            self.log.error("Missing items: %s.\n Extra items: %s" % (missing[:missing_count], extra[:extra_count]))
            self.fail("Results are incorrect.Actual num %s. Expected num: %s.\n" % (
                                            len(actual_result), len(expected_result)))
        if self.max_verify is not None:
            actual_result = actual_result[:self.max_verify]
            expected_result = expected_result[:self.max_verify]

        msg = "Results are incorrect.\n Actual first and last 100:  %s.\n ... \n %s" +\
        "Expected first and last 100: %s.\n  ... \n %s"
        self.assertTrue(actual_result == expected_result,
                          msg % (actual_result[:100],actual_result[-100:],
                                 expected_result[:100],expected_result[-100:]))

    def check_missing_and_extra(self, actual, expected):
        missing = []
        extra = []
        for item in actual:
            if not (item in expected):
                extra.append(item)
        for item in expected:
            if not (item in actual):
                missing.append(item)
        return missing, extra

    def sort_nested_list(self, result):
        actual_result = []
        for item in result:
            curr_item = {}
            for key, value in item.iteritems():
                if isinstance(value, list) or isinstance(value, set):
                    curr_item[key] = sorted(value)
                else:
                    curr_item[key] = value
            actual_result.append(curr_item)
        return actual_result

    def configure_gomaxprocs(self):
        max_proc = self.input.param("gomaxprocs", None)
        cmd = "export GOMAXPROCS=%s" % max_proc
        for server in self.servers:
            shell_connection = RemoteMachineShellConnection(server)
            shell_connection.execute_command(cmd)

    def create_primary_index_for_3_0_and_greater(self):
        self.log.info("CREATE PRIMARY INDEX")
        rest = RestConnection(self.master)
        versions = rest.get_nodes_versions()
        if versions[0].startswith("4") or versions[0].startswith("3"):
            for bucket in self.buckets:
                if self.primary_indx_drop:
                    self.log.info("Dropping primary index for %s ..." % bucket.name)
                    self.query = "DROP PRIMARY INDEX ON %s" % (bucket.name)
                    self.sleep(3, 'Sleep for some time after index drop')
                self.log.info("Creating primary index for %s ..." % bucket.name)
                self.query = "CREATE PRIMARY INDEX ON %s USING %s" % (bucket.name, self.primary_indx_type)
                try:
                    self.run_cbq_query()
                    if self.primary_indx_type.lower() == 'gsi':
                        self._wait_for_index_online(bucket, '#primary')
                except Exception as ex:
                    self.log.info(str(ex))