Example #1
 def test_import_function(self):
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size)
     # read the exported function
     script_dir = os.path.dirname(__file__)
     abs_file_path = os.path.join(
         script_dir,
         EXPORTED_FUNCTION.N1QL_INSERT_ON_UPDATE_WITH_CRON_TIMER)
     with open(abs_file_path, "r") as fh:
         body = json.loads(fh.read())
     # import the previously exported function; there is no dedicated
     # import API, so we reuse the save and deploy APIs
     self.function_name = "test_import_function"
     log.info("Saving the function for UI")
     self.rest.save_function(
         "test_import_function",
         body)  # the name is hardcoded because it comes from the exported definition
     log.info("Deploy the function")
     self.rest.deploy_function("test_import_function", body)
     self.wait_for_bootstrap_to_complete("test_import_function")
     # Wait for eventing to catch up with all the create mutations and verify results
     self.verify_eventing_results("test_import_function",
                                  self.docs_per_day * 2016)
     self.undeploy_delete_all_functions()
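The read/save/deploy sequence above repeats the hardcoded name for every call and could be folded into one helper. A minimal sketch, assuming only the rest client methods already used above (import_exported_function is a hypothetical name):

 def import_exported_function(rest, abs_file_path, function_name):
     # there is no dedicated import API, so reading the exported JSON
     # and calling save + deploy stands in for one
     with open(abs_file_path, "r") as fh:
         body = json.loads(fh.read())
     rest.save_function(function_name, body)
     rest.deploy_function(function_name, body)
     return body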
Example #2
 def test_eventing_debugger(self):
     count = 0
     match = False
     body = self.create_save_function_body(self.function_name, HANDLER_CODE.BUCKET_OPS_ON_UPDATE)
     self.deploy_function(body)
     # Start eventing debugger
     out1 = self.rest.start_eventing_debugger(self.function_name)
     log.info(" Started eventing debugger : {0}".format(out1))
     # do some mutations
     self.load(self.gens_load, buckets=self.src_bucket, flag=self.item_flag, verify_data=False,
               batch_size=self.batch_size)
     # get debugger url
     pattern = re.compile(r'chrome-devtools://devtools/bundled/inspector.html(.*)')
     while count < 10:
         out2 = self.rest.get_eventing_debugger_url(self.function_name)
         matched = re.match(pattern, out2)
         if matched:
             log.info("Got debugger url : {0}{1}".format(matched.group(0), matched.group(1)))
             match = True
             break
         count += 1
         self.sleep(30)
     if not match:
         self.fail("Debugger url was not generated even after waiting for 300 secs...    ")
     # stop debugger
     self.rest.stop_eventing_debugger(self.function_name)
     # undeploy and delete the function
     self.undeploy_and_delete_function(body)
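The same poll-until-match loop reappears in the other debugger tests below. A generic sketch of the pattern, reusing only the helpers already called in this example (poll_debugger_url is a hypothetical name):

 def poll_debugger_url(self, pattern, retries=10, interval=30):
     # poll the debugger endpoint until the URL matches, or fail after
     # retries * interval seconds (300 secs with the defaults)
     for _ in range(retries):
         out = self.rest.get_eventing_debugger_url(self.function_name)
         matched = re.match(pattern, out)
         if matched:
             return matched
         self.sleep(interval)
     self.fail("Debugger url was not generated even after waiting for "
               "{0} secs".format(retries * interval))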
Example #3
 def delete_temp_handler_code(self, path=HANDLER_CODE.N1QL_TEMP_PATH):
     log.info("deleting all the handler codes")
     script_dir = os.path.dirname(__file__)
     dirPath = os.path.join(script_dir, path)
     fileList = os.listdir(dirPath)
     for fileName in fileList:
         os.remove(os.path.join(dirPath, fileName))
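A pathlib-based variant sketch of the same cleanup, assuming import pathlib and the same HANDLER_CODE.N1QL_TEMP_PATH default; it skips subdirectories instead of crashing on them:

 def delete_temp_handler_code(self, path=HANDLER_CODE.N1QL_TEMP_PATH):
     # remove every generated handler file under the temp directory
     dir_path = pathlib.Path(__file__).parent / path
     for handler_file in dir_path.iterdir():
         if handler_file.is_file():
             handler_file.unlink()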
Example #4
 def setUp(self):
     super(EventingN1QL, self).setUp()
     if self.create_functions_buckets:
         self.bucket_size = 100
         log.info(self.bucket_size)
         bucket_params = self._create_bucket_params(
             server=self.server,
             size=self.bucket_size,
             replicas=self.num_replicas)
         self.cluster.create_standard_bucket(name=self.src_bucket_name,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.src_bucket = RestConnection(self.master).get_buckets()
         self.cluster.create_standard_bucket(name=self.dst_bucket_name,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.cluster.create_standard_bucket(name=self.metadata_bucket_name,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.buckets = RestConnection(self.master).get_buckets()
     self.gens_load = self.generate_docs(self.docs_per_day)
     self.expiry = 3
     self.n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
     self.n1ql_helper = N1QLHelper(shell=self.shell,
                                   max_verify=self.max_verify,
                                   buckets=self.buckets,
                                   item_flag=self.item_flag,
                                   n1ql_port=self.n1ql_port,
                                   full_docs_list=self.full_docs_list,
                                   log=self.log,
                                   input=self.input,
                                   master=self.master,
                                   use_rest=True)
Example #5
 def test_n1ql_timeout(self):
     self.n1ql_helper.create_primary_index(using_gsi=True,
                                           server=self.n1ql_node)
     self.load_sample_buckets(self.server, "travel-sample")
     body = self.create_save_function_body(
         self.function_name,
         "handler_code/n1ql_op_timeout.js",
         dcp_stream_boundary="from_now",
         execution_timeout=10)
     self.deploy_function(body)
     key = datetime.datetime.now().time()
     query = "insert into src_bucket (KEY, VALUE) VALUES (\"" + str(
         key) + "\",\"doc created\")"
     self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
     self.sleep(30)
     stats = self.rest.get_all_eventing_stats()
     log.info("Stats {0}".format(json.dumps(stats, sort_keys=True,
                                            indent=4)))
     # the handler must either hit the execution timeout once or
     # surface an lcb timeout exception (error code 201)
     if stats[0]["failure_stats"]["timeout_count"] != 1 \
             and stats[0]["lcb_exception_stats"].get("201") != 1:
         raise Exception(
             "Timeout did not happen for the long running query")
     self.undeploy_and_delete_function(body)
Example #6
 def setUp(self):
     super(EventingRQG, self).setUp()
     if self.create_functions_buckets:
         self.bucket_size = 100
         log.info(self.bucket_size)
         bucket_params = self._create_bucket_params(server=self.server, size=self.bucket_size,
                                                    replicas=self.num_replicas)
         self.cluster.create_standard_bucket(name=self.src_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.src_bucket = RestConnection(self.master).get_buckets()
         self.cluster.create_standard_bucket(name=self.dst_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.cluster.create_standard_bucket(name=self.metadata_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.buckets = RestConnection(self.master).get_buckets()
     self.gens_load = self.generate_docs(self.docs_per_day)
     self.expiry = 3
     self.n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
     self.n1ql_helper = N1QLHelper(shell=self.shell,
                                   max_verify=self.max_verify,
                                   buckets=self.buckets,
                                   item_flag=self.item_flag,
                                   n1ql_port=self.n1ql_port,
                                   full_docs_list=self.full_docs_list,
                                   log=self.log, input=self.input,
                                   master=self.master,
                                   use_rest=True
                                   )
     self.number_of_handler = self.input.param('number_of_handler', 5)
     self.number_of_queries = self.input.param('number_of_queries', None)
     self.template_file = self.input.param(
         'template_file',
         'b/resources/rqg/simple_table_db/query_tests_using_templates/query_10000_fields.txt.zip')
Example #7
 def test_queries(self):
     test_file_path = self.template_file
     with open(test_file_path) as f:
         query_list = f.readlines()
     self.n1ql_helper.create_primary_index(using_gsi=True, server=self.n1ql_node)
     k = self.number_of_handler
     if self.number_of_queries is None:
         s = len(query_list)
     else:
         s = self.number_of_queries
     log.info(s)
     for j in range(0, s, k):
         try:
             threads = []
             for i in range(j, j + k):
                 if i >= s:
                     break
                 threads.append(Thread(target=self.create_function_and_deploy,
                                       args=(query_list[i], False)))
             for thread in threads:
                 thread.start()
             for thread in threads:
                 thread.join()
             key = datetime.datetime.now().time()
             query = "insert into src_bucket (KEY, VALUE) VALUES (\"" + str(key) + "\",\"doc created\")"
             self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
             self.sleep(10)
             self.eventing_stats()
         except Exception as e:
             log.error(e)
         finally:
             self.undeploy_delete_all_functions()
     self.delete_temp_handler_code()
     self.verify_n1ql_stats(s)
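The j/k loop above runs the handlers in fixed-size batches; a condensed sketch of that batching pattern, assuming from threading import Thread and the create_function_and_deploy helper shown in a later example (deploy_in_batches is a hypothetical name):

 def deploy_in_batches(self, query_list, batch_size):
     # deploy one handler per query, batch_size threads at a time,
     # joining each batch before the next one starts
     for start in range(0, len(query_list), batch_size):
         batch = query_list[start:start + batch_size]
         threads = [Thread(target=self.create_function_and_deploy,
                           args=(query, False)) for query in batch]
         for thread in threads:
             thread.start()
         for thread in threads:
             thread.join()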
Example #8
 def setUp(self):
     super(EventingCollections, self).setUp()
     self.rest.set_service_memoryQuota(service='memoryQuota', memoryQuota=700)
     if self.create_functions_buckets:
         self.bucket_size = 100
         log.info(self.bucket_size)
         bucket_params = self._create_bucket_params(server=self.server, size=self.bucket_size,
                                                    replicas=0)
         self.cluster.create_standard_bucket(name=self.src_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.src_bucket = RestConnection(self.master).get_buckets()
         self.cluster.create_standard_bucket(name=self.dst_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.cluster.create_standard_bucket(name=self.metadata_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.buckets = RestConnection(self.master).get_buckets()
     query = "create primary index on {}".format(self.src_bucket_name)
     self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
     query = "create primary index on {}".format(self.dst_bucket_name)
     self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
     query = "create primary index on {}".format(self.metadata_bucket_name)
     self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
     self.create_scope_collection(bucket=self.src_bucket_name,
                                  scope=self.src_bucket_name,
                                  collection=self.src_bucket_name)
     self.create_scope_collection(bucket=self.metadata_bucket_name,
                                  scope=self.metadata_bucket_name,
                                  collection=self.metadata_bucket_name)
     self.create_scope_collection(bucket=self.dst_bucket_name,
                                  scope=self.dst_bucket_name,
                                  collection=self.dst_bucket_name)
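The three CREATE PRIMARY INDEX statements above (repeated in several setUps here) differ only in the bucket name; a loop sketch over the same helper that could replace the repeated block:

     for bucket_name in (self.src_bucket_name, self.dst_bucket_name,
                         self.metadata_bucket_name):
         # one primary index per test bucket
         query = "create primary index on {}".format(bucket_name)
         self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)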
Example #9
 def setUp(self):
     super(AdvanceBucketOp, self).setUp()
     self.rest.set_service_memoryQuota(service='memoryQuota',
                                       memoryQuota=900)
     if self.create_functions_buckets:
         self.bucket_size = 200
         log.info(self.bucket_size)
         bucket_params = self._create_bucket_params(server=self.server,
                                                    size=self.bucket_size,
                                                    replicas=0)
         self.cluster.create_standard_bucket(name=self.src_bucket_name,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.src_bucket = RestConnection(self.master).get_buckets()
         self.cluster.create_standard_bucket(name=self.dst_bucket_name,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.cluster.create_standard_bucket(name=self.dst_bucket_name1,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.cluster.create_standard_bucket(name=self.metadata_bucket_name,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.buckets = RestConnection(self.master).get_buckets()
     self.gens_load = self.generate_docs(self.docs_per_day)
     self.handler_code = self.input.param('handler_code', None)
     self.expiry = 3
     query = "create primary index on {}".format(self.src_bucket_name)
     self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
     query = "create primary index on {}".format(self.dst_bucket_name)
     self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
     query = "create primary index on {}".format(self.metadata_bucket_name)
     self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
Example #10
 def test_eventing_debugger_curl(self):
     count = 0
     match = False
     body = self.create_save_function_body(
         self.function_name, HANDLER_CODE_CURL.BUCKET_OP_WITH_CURL_GET)
     body['depcfg']['curl'] = []
     body['depcfg']['curl'].append({
         "hostname": self.hostname,
         "value": "server",
         "auth_type": self.auth_type,
         "username": self.curl_username,
         "password": self.curl_password,
         "cookies": self.cookies
     })
     self.deploy_function(body)
     # enable debugger
     self.rest.enable_eventing_debugger()
     # Start eventing debugger
     out1 = self.rest.start_eventing_debugger(self.function_name)
     log.info("Started eventing debugger: {0}".format(out1))
     # do some mutations
     self.load(self.gens_load, buckets=self.src_bucket, flag=self.item_flag, verify_data=False,
               batch_size=self.batch_size)
     # get debugger url
     pattern = re.compile(r'chrome-devtools://devtools/bundled/js_app.html(.*)')
     while count < 10:
         out2 = self.rest.get_eventing_debugger_url(self.function_name)
         matched = re.match(pattern, out2)
         if matched:
             log.info("Got debugger url : {0}{1}".format(matched.group(0), matched.group(1)))
             match = True
             break
         count += 1
         self.sleep(30)
     if not match:
         self.fail("Debugger url was not generated even after waiting for 300 secs...    ")
     # stop debugger
     self.rest.stop_eventing_debugger(self.function_name)
     # undeploy and delete the function
     self.undeploy_and_delete_function(body)
Example #11
 def setUp(self):
     super(EventingLifeCycle, self).setUp()
     self.rest.set_service_memoryQuota(service='memoryQuota',
                                       memoryQuota=700)
     if self.create_functions_buckets:
         self.bucket_size = 100
         self.metadata_bucket_size = 400
         log.info(self.bucket_size)
         bucket_params = self._create_bucket_params(
             server=self.server,
             size=self.bucket_size,
             replicas=self.num_replicas)
         bucket_params_meta = self._create_bucket_params(
             server=self.server,
             size=self.metadata_bucket_size,
             replicas=self.num_replicas)
         self.cluster.create_standard_bucket(name=self.src_bucket_name,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.src_bucket = RestConnection(self.master).get_buckets()
         self.cluster.create_standard_bucket(name=self.dst_bucket_name,
                                             port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.cluster.create_standard_bucket(
             name=self.metadata_bucket_name,
             port=STANDARD_BUCKET_PORT + 1,
             bucket_params=bucket_params_meta)
         self.buckets = RestConnection(self.master).get_buckets()
     self.gens_load = self.generate_docs(self.docs_per_day)
     self.expiry = 3
Example #12
 def setUp(self):
     super(EventingExpired, self).setUp()
     self.rest.set_service_memoryQuota(service='memoryQuota', memoryQuota=700)
     if self.create_functions_buckets:
         self.bucket_size = 200
         log.info(self.bucket_size)
         bucket_params = self._create_bucket_params(server=self.server, size=self.bucket_size,
                                                    replicas=0)
         self.cluster.create_standard_bucket(name=self.src_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.src_bucket = RestConnection(self.master).get_buckets()
         self.cluster.create_standard_bucket(name=self.dst_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.cluster.create_standard_bucket(name=self.metadata_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.buckets = RestConnection(self.master).get_buckets()
     self.gens_load = self.generate_docs(self.docs_per_day)
     self.expiry = 3
     query = "create primary index on {}".format(self.src_bucket_name)
     self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node)
     if self.non_default_collection:
         self.collection_rest.create_scope_collection(
             bucket=self.src_bucket_name, scope=self.src_bucket_name,
             collection=self.src_bucket_name, params={"maxTTL": 20})
         self.collection_rest.create_scope_collection(
             bucket=self.metadata_bucket_name,
             scope=self.metadata_bucket_name,
             collection=self.metadata_bucket_name)
         self.collection_rest.create_scope_collection(
             bucket=self.dst_bucket_name, scope=self.dst_bucket_name,
             collection=self.dst_bucket_name)
Example #13
 def verify_n1ql_stats(self, total_query):
     n1ql_query = "select failed_query.query from dst_bucket where failed_query is not null"
     failed = self.n1ql_helper.run_cbq_query(query=n1ql_query, server=self.n1ql_node)
     n1ql_query = "select passed_query.query from dst_bucket where passed_query is not null"
     passed = self.n1ql_helper.run_cbq_query(query=n1ql_query, server=self.n1ql_node)
     log.info("passed: {}".format(len(passed["results"])))
     log.info("failed: {}".format(len(failed["results"])))
     assert len(passed["results"]) + len(failed["results"]) == total_query
     assert len(failed["results"]) == 0, "failed queries are {0}".format(failed["results"])
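Bare assert statements are skipped when Python runs with -O; a unittest-style sketch of the same checks, assuming the test class derives from unittest.TestCase (the assertTrue calls in other examples suggest it does):

     self.assertEqual(len(passed["results"]) + len(failed["results"]),
                      total_query, msg="not all queries were verified")
     self.assertEqual(len(failed["results"]), 0,
                      msg="failed queries are {0}".format(failed["results"]))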
Example #14
 def test_eventing_debugger_default_settings(self):
     count = 0
     match = False
     body = self.create_save_function_body(
         self.function_name, HANDLER_CODE.BUCKET_OPS_ON_UPDATE)
     self.deploy_function(body)
     try:
         self.rest.start_eventing_debugger(self.function_name,
                                           self.function_scope)
         self.fail("Debugger started even though it is disabled")
     except Exception as e:
         assert "ERR_DEBUGGER_DISABLED" in str(e)
     # enable debugger
     self.rest.enable_eventing_debugger()
     # start debugger
     out1 = self.rest.start_eventing_debugger(self.function_name,
                                              self.function_scope)
     log.info("Started eventing debugger: {0}".format(out1))
     # do some mutations
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size)
     eventing_ip = self.get_nodes_from_services_map(service_type="eventing")
     # get debugger url
     pattern = re.compile(eventing_ip.ip + ':9140(.*)')
     while count < 10:
         out2 = self.rest.get_eventing_debugger_url(self.function_name,
                                                    self.function_scope)
         url = json.loads(out2)
         matched = re.match(pattern, url["websocket"])
         if matched:
             log.info("Got debugger url : {0}{1}".format(
                 matched.group(0), matched.group(1)))
             match = True
             break
         count += 1
         self.sleep(30)
     if not match:
         self.fail(
             "Debugger url was not generated even after waiting for 300 secs")
     # stop debugger
     self.rest.stop_eventing_debugger(self.function_name,
                                      self.function_scope)
     # disable debugger
     self.rest.disable_eventing_debugger()
     try:
         self.rest.start_eventing_debugger(self.function_name,
                                           self.function_scope)
         self.fail("Debugger started even though it was disabled again")
     except Exception as e:
         assert "ERR_DEBUGGER_DISABLED" in str(e)
     # undeploy and delete the function
     self.undeploy_and_delete_function(body)
Example #15
 def create_function_and_deploy(self, query, replace=True):
     log.info("creating handler code for :{}".format(query))
     if replace:
         file_path = self.generate_eventing_file(self._convert_template_n1ql(query))
     else:
         file_path = self.generate_eventing_file(query)
     self.sleep(10)
     ts = datetime.datetime.now().strftime('%m%d%y%H%M%S%f')
     body = self.create_save_function_body(self.function_name + str(ts), file_path,
                                           dcp_stream_boundary="from_now", worker_count=1, execution_timeout=60)
     self.deploy_function(body)
Example #16
 def test_export_credentials(self):
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size)
     body = self.create_save_function_body(
         self.function_name, HANDLER_CODE_CURL.BUCKET_OP_WITH_CURL_GET)
     body['depcfg']['curl'] = []
     body['depcfg']['curl'].append({
         "hostname": self.hostname,
         "value": "server",
         "auth_type": self.auth_type,
         "username": self.curl_username,
         "password": self.curl_password,
         "cookies": self.cookies
     })
     self.deploy_function(body)
     # export the function that we have created
     output = self.rest.export_function(self.function_name)
     # Wait for eventing to catch up with all the create mutations and verify results
     self.verify_eventing_results(self.function_name,
                                  self.docs_per_day * 2016)
     log.info("exported function")
     log.info(output)
     log.info("imported function")
     log.info(body)
     # Validate that exported function data matches with the function that we created
     self.assertTrue(output['depcfg']['curl'][0]['password'] == "",
                     msg="password is not empty")
     self.assertTrue(output['depcfg']['curl'][0]['username'] == "",
                     msg="username is not empty")
     self.undeploy_and_delete_function(body)
Example #17
 def test_export_function(self):
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size)
     body = self.create_save_function_body(
         self.function_name, HANDLER_CODE.BUCKET_OPS_WITH_CRON_TIMER)
     self.deploy_function(body)
     # export the function that we have created
     output = self.rest.export_function(self.function_name)
     # Wait for eventing to catch up with all the create mutations and verify results
     self.verify_eventing_results(self.function_name,
                                  self.docs_per_day * 2016)
     log.info("exported function")
     log.info(output["settings"])
     log.info("imported function")
     log.info(body["settings"])
     # Validate that exported function data matches with the function that we created
     self.assertTrue(
         output["appname"] == self.function_name,
         msg="Function name mismatch from the exported function")
     self.assertTrue(output["appcode"] == body["appcode"],
                     msg="Handler code mismatch from the exported function")
     # Exported functions add a few extra settings, so the exported
     # settings will no longer compare equal to the original body
     # self.assertTrue(cmp(output["settings"], body["settings"]) == 0,
     #                 msg="Settings mismatch from the exported function")
     self.undeploy_and_delete_function(body)
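Since the export adds extra settings keys, an exact comparison fails; a sketch that checks only the keys present in the original body, assuming the export only adds new keys without rewriting existing values:

     # compare only the settings that were part of the saved function;
     # keys added by the export are ignored
     for key, value in body["settings"].items():
         self.assertEqual(output["settings"].get(key), value,
                          msg="Setting {0} mismatch in export".format(key))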
Example #18
 def verify_user_noroles(self, username):
     status, content, header = rbacmain(self.master)._retrieve_user_roles()
     res = json.loads(content)
     userExist = False
     for ele in res:
         log.debug("user {0}".format(ele["name"]))
         log.debug(ele["name"] == username)
         if ele["name"] == username:
             log.debug("user roles {0}".format(ele["roles"]))
             if not ele["roles"]:
                 log.info("user {0} has no roles".format(username))
                 userExist = True
                 break
     if not userExist:
         raise Exception("user {0} roles are not empty".format(username))
Example #19
 def setUp(self):
     super(EventingLifeCycle, self).setUp()
     if self.create_functions_buckets:
         self.bucket_size = 100
         log.info(self.bucket_size)
         bucket_params = self._create_bucket_params(server=self.server, size=self.bucket_size,
                                                    replicas=self.num_replicas)
         self.cluster.create_standard_bucket(name=self.src_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.src_bucket = RestConnection(self.master).get_buckets()
         self.cluster.create_standard_bucket(name=self.dst_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.cluster.create_standard_bucket(name=self.metadata_bucket_name, port=STANDARD_BUCKET_PORT + 1,
                                             bucket_params=bucket_params)
         self.buckets = RestConnection(self.master).get_buckets()
     self.gens_load = self.generate_docs(self.docs_per_day)
     self.expiry = 3
Example #20
 def test_eventing_debugger_source_bucket_mutation(self):
     count = 0
     match = False
     body = self.create_save_function_body(
         self.function_name,
         HANDLER_CODE.BUCKET_OP_WITH_SOURCE_BUCKET_MUTATION)
     self.deploy_function(body)
     # enable debugger
     self.rest.enable_eventing_debugger()
     # Start eventing debugger
     out1 = self.rest.start_eventing_debugger(self.function_name,
                                              self.function_scope)
     log.info("Started eventing debugger: {0}".format(out1))
     # do some mutations
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size)
     eventing_ip = self.get_nodes_from_services_map(service_type="eventing")
     # get debugger url
     pattern = re.compile(eventing_ip.ip + ':9140(.*)')
     while count < 10:
         out2 = self.rest.get_eventing_debugger_url(self.function_name,
                                                    self.function_scope)
         url = json.loads(out2)
         matched = re.match(pattern, url["websocket"])
         if matched:
             log.info("Got debugger url : {0}{1}".format(
                 matched.group(0), matched.group(1)))
             match = True
             break
         count += 1
         self.sleep(30)
     if not match:
         self.fail(
             "Debugger url was not generated even after waiting for 300 secs")
     # stop debugger
     self.rest.stop_eventing_debugger(self.function_name,
                                      self.function_scope)
     # undeploy and delete the function
     self.undeploy_and_delete_function(body)