def get_backup_meta_json(self):
     """
     Gets the actual backup metadata json after backup create.

     Copies <backupset.directory>/<backupset.name>/backup-meta.json from the
     backup host to /tmp on the local machine and parses it.
     :return: backup meta map (parsed JSON)
     """
     remote_client = RemoteMachineShellConnection(self.backupset.backup_host)
     backup_meta_file_path = "{0}/{1}/backup-meta.json".format(self.backupset.directory, self.backupset.name)
     remote_client.copy_file_remote_to_local(backup_meta_file_path, "/tmp/backup-meta.json")
     # Use a context manager so the local file handle is always closed
     # (the original leaked the handle returned by open()).
     with open("/tmp/backup-meta.json") as meta_file:
         backup_meta = json.load(meta_file)
     return backup_meta
# Beispiel #2
# 0
    def _find_least_loaded_index_node(self, count=1):
        """
        Find the least-loaded indexer node(s) using cbindexplan.

        Runs `cbindexplan -command=retrieve -getUsage` on an index node,
        copies its output file locally, and ranks indexer nodes by their
        reported usageRatio.

        :param count: number of least-loaded nodes to return
        :return: list of `count` node IPs, sorted by ascending load
        """
        index_node = self.get_nodes_from_services_map(service_type="index",
                                                      get_all_nodes=True)[0]
        remote = RemoteMachineShellConnection(index_node)
        output_file = '/tmp/index_plan.log'
        dest_file = 'index_plan.log'
        del_cmd = f'rm -rf {output_file}'
        self.log.info("Deleting index_plan.log from Remote host")
        remote.execute_command(del_cmd)
        port = '18091' if self.use_https else '8091'
        # Hoist the remote-info lookup: extract_remote_info() shells out to
        # the node, so call it once instead of once per branch.
        os_type = self.shell.extract_remote_info().type.lower()
        bin_path = os.path.join(LINUX_COUCHBASE_BIN_PATH, 'cbindexplan')
        if os_type == 'linux':
            if self.nonroot:
                bin_path = os.path.join(LINUX_NONROOT_CB_BIN_PATH,
                                        'cbindexplan')
        elif os_type == 'windows':
            bin_path = os.path.join(WIN_COUCHBASE_BIN_PATH, 'cbindexplan')
        cmd = f'{bin_path}  -command=retrieve -cluster="127.0.0.1:{port}" ' \
              f'-username="******" -password="******" -getUsage -numNewReplica {count}' \
              f' -output {output_file}'
        remote.execute_command(cmd)
        if os.path.exists(dest_file):
            self.log.info("Deleting index_plan.log from slave")
            os.remove(dest_file)
        remote.copy_file_remote_to_local(rem_path=output_file,
                                         des_path=dest_file)

        # cleaning file on remote host
        self.log.info("Deleting index_plan.log from Remote host after copying")
        remote.execute_command(del_cmd)

        if os.path.exists(dest_file):
            # Context manager closes the file deterministically (the
            # original leaked the handle returned by open()).
            with open(dest_file) as plan_file:
                json_obj = json.load(plan_file)

            # nodeId looks like "ip:port"; keep only the ip part.
            index_loads = {}
            for placement in json_obj["placement"]:
                node_ip = placement['nodeId'].split(':')[0]
                index_loads[node_ip] = placement['usageRatio']

            sorted_indexer_nodes = sorted(index_loads.items(),
                                          key=lambda item: item[1])
            # Take the `count` nodes with the lowest usage ratio.
            return [node for node, _ in sorted_indexer_nodes[:count]]

        self.fail("Couldn't copy cbindexplan output to local directory")
# Beispiel #3
# 0
 def get_backup_meta_json(self):
     """
     Gets the actual backup metadata json after backup create.

     Copies <backupset.directory>/<backupset.name>/backup-meta.json from the
     backup host to /tmp on the local machine and parses it.
     :return: backup meta map (parsed JSON)
     """
     remote_client = RemoteMachineShellConnection(
         self.backupset.backup_host)
     backup_meta_file_path = "{0}/{1}/backup-meta.json".format(
         self.backupset.directory, self.backupset.name)
     remote_client.copy_file_remote_to_local(backup_meta_file_path,
                                             "/tmp/backup-meta.json")
     # Use a context manager so the local file handle is always closed
     # (the original leaked the handle returned by open()).
     with open("/tmp/backup-meta.json") as meta_file:
         backup_meta = json.load(meta_file)
     return backup_meta
 def test_export_cli_import_rest(self):
     """
     End-to-end eventing-function lifecycle: export a deployed function via
     couchbase-cli, delete it, re-import the exported JSON through REST,
     then deploy / pause / resume / undeploy / delete while verifying the
     handler processes document mutations and deletions.
     """
     shell = RemoteMachineShellConnection(self.servers[0])
     info = shell.extract_remote_info().type.lower()
     # Resolve the couchbase-cli location for the target OS.
     if info == 'linux':
         self.cli_command_location = testconstants.LINUX_COUCHBASE_BIN_PATH
     elif info == 'windows':
         self.cmd_ext = ".exe"
         self.cli_command_location = testconstants.WIN_COUCHBASE_BIN_PATH_RAW
     elif info == 'mac':
         self.cli_command_location = testconstants.MAC_COUCHBASE_BIN_PATH
     else:
         raise Exception("OS not supported.")
     # create the json file need on the node
     eventing_node = self.get_nodes_from_services_map(
         service_type="eventing", get_all_nodes=False)
     # create and save function
     body = self.create_save_function_body(
         self.function_name,
         HANDLER_CODE.DELETE_BUCKET_OP_ON_DELETE,
         worker_count=3)
     self.deploy_function(body,
                          wait_for_bootstrap=False,
                          deployment_status=False,
                          processing_status=False)
     # export via cli
     self._couchbase_cli_eventing(eventing_node,
                                  self.function_name,
                                  "export",
                                  "SUCCESS: Function exported to: " +
                                  self.function_name + ".json",
                                  file_name=self.function_name + ".json")
     # delete the function
     self._couchbase_cli_eventing(
         eventing_node, self.function_name, "delete",
         "SUCCESS: Request to delete the function was accepted")
     # read exported function back from the eventing node
     eventing_node = self.get_nodes_from_services_map(
         service_type="eventing", get_all_nodes=False)
     remote_client = RemoteMachineShellConnection(eventing_node)
     script_dir = os.path.dirname(__file__)
     abs_file_path = os.path.join(script_dir, "exported_functions/")
     output = remote_client.copy_file_remote_to_local(
         "/root/" + self.function_name + ".json",
         abs_file_path + "exported.json")
     self.log.info("exported function: {}".format(
         json.dumps(output, indent=4)))
     # Context manager closes the file deterministically (the original
     # leaked the handle returned by open()).
     with open(abs_file_path + "exported.json", "r") as fh:
         body = fh.read()
     self.log.info("body {}".format(body))
     self.rest.import_function(body)
     #deploy function
     self.deploy_handler_by_name(self.function_name)
     # load some data in the source bucket
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size)
     # verify result
     self.verify_eventing_results(self.function_name,
                                  self.docs_per_day * 2016,
                                  skip_stats_validation=True)
     # pause function
     self._couchbase_cli_eventing(eventing_node, self.function_name,
                                  "pause", "SUCCESS: Function was paused")
     self.wait_for_handler_state(self.function_name, "paused")
     # delete all documents
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size,
               op_type='delete')
     # resume function
     self._couchbase_cli_eventing(eventing_node, self.function_name,
                                  "resume", "SUCCESS: Function was resumed")
     self.wait_for_handler_state(self.function_name, "deployed")
     # verify result: all source documents were deleted
     self.verify_eventing_results(self.function_name,
                                  0,
                                  skip_stats_validation=True)
     # undeploy the function
     self._couchbase_cli_eventing(
         eventing_node, self.function_name, "undeploy",
         "SUCCESS: Request to undeploy the function was accepted")
     self.wait_for_handler_state(self.function_name, "undeployed")
     # delete the function
     self._couchbase_cli_eventing(
         eventing_node, self.function_name, "delete",
         "SUCCESS: Request to delete the function was accepted")