def main(resource, action=''):
    """OCF resource-agent entry point.

    Args:
        resource: Resource class to instantiate; must expose metadata(),
            monitor(), start() and stop().
        action (str): OCF action requested by the cluster manager
            ('meta-data', 'monitor', 'start' or 'stop').

    Returns:
        An OCF return code from the dispatched action, or
        const.OCF_ERR_GENERIC on any unexpected failure.
    """
    try:
        # meta-data must be answerable before any configuration is loaded.
        if action == 'meta-data':
            return resource.metadata()
        Conf.load(const.HA_GLOBAL_INDEX, Yaml(const.HA_CONFIG_FILE))
        log_path = Conf.get(const.HA_GLOBAL_INDEX, f"LOG{_DELIM}path")
        log_level = Conf.get(const.HA_GLOBAL_INDEX, f"LOG{_DELIM}level")
        Log.init(service_name='resource_agent', log_path=log_path,
                 level=log_level)
        with open(const.RESOURCE_SCHEMA, 'r') as f:
            resource_schema = json.load(f)
        os.makedirs(const.RA_LOG_DIR, exist_ok=True)
        resource_agent = resource(DecisionMonitor(), resource_schema)
        Log.debug(f"{resource_agent} initialized for action {action}")
        if action == 'monitor':
            return resource_agent.monitor()
        elif action == 'start':
            return resource_agent.start()
        elif action == 'stop':
            return resource_agent.stop()
        else:
            print('Usage %s [monitor] [start] [stop] [meta-data]' % sys.argv[0])
            # Fix: use sys.exit() instead of the exit() builtin, which is
            # injected by the site module for interactive use and may be
            # absent when Python runs with -S.
            sys.exit()
    except Exception:
        Log.error(f"{traceback.format_exc()}")
        return const.OCF_ERR_GENERIC
def __init__(self):
    """Initialize the HA CLI: load all configuration indexes and logging."""
    # TODO Check product env and load specific conf
    Conf.init()
    # Load every configuration index the CLI relies on, in declaration order.
    for index, backend in (
            (const.RESOURCE_GLOBAL_INDEX, Json(const.RESOURCE_SCHEMA)),
            (const.RULE_GLOBAL_INDEX, Json(const.RULE_ENGINE_SCHAMA)),
            (const.HA_GLOBAL_INDEX, Yaml(const.HA_CONFIG_FILE))):
        Conf.load(index, backend)
    Log.init(service_name='cortxha',
             log_path=Conf.get(const.HA_GLOBAL_INDEX, "LOG.path"),
             level=Conf.get(const.HA_GLOBAL_INDEX, "LOG.level"))
def main():
    """CLI entry point for the HA compiler/generator (hac).

    Parses command-line arguments and either compiles ha_spec files into a
    schema (default path) or generates target-specific scripts/rules when
    -g/--generate is supplied. On failure, writes the traceback with a
    timestamp to const.HAC_LOG and exits with status 1.
    """
    from cortx.utils.ha.hac.compile import Compiler
    from cortx.utils.ha.hac import generate
    from cortx.utils.ha.hac import const
    # Supported HA targets mapped to their generator classes.
    provider = {
        "pcs": generate.PCSGeneratorResource,
        "k8s": generate.KubernetesGenerator
    }
    try:
        Conf.init()
        argParser = argparse.ArgumentParser(
            usage = "%(prog)s\n\n" + usage(),
            formatter_class = argparse.RawDescriptionHelpFormatter)
        argParser.add_argument("-v", "--validate",
                    help="Check input files for syntax errors")
        argParser.add_argument("-t", "--target", default="pcs",
                    help="HA target to use. Example: pcs")
        argParser.add_argument("-c", "--compile",
                    help="Path of ha_spec files.")
        argParser.add_argument("-o", "--output",
                    help="Final spec/rule file for generator/compiler")
        # Fix: typos in user-facing help text ("Ganerate" -> "Generate",
        # "resorce" -> "resource").
        argParser.add_argument("-g", "--generate",
                    help="Generate script/rule for targeted HA tool. Eg: pcs")
        argParser.add_argument("-a", "--args_file",
                    help="Args file for generator for dynamic input values")
        argParser.add_argument("-r", "--resources",
                    help="Enter resource list")
        args = argParser.parse_args()
        if args.generate is None:
            c = Compiler(args.compile, args.output, args.validate)
            # Full compile pipeline runs only when not in validate-only mode.
            if args.validate is None:
                c.parse_files()
                c.compile_graph()
                c.verify_schema()
                c.create_schema()
                c.draw_graph()
        else:
            com = provider[args.target](args.generate, args.output,
                                        args.args_file, args.resources)
            com.create_script()
    except Exception as e:
        # TODO: print traceback error properly
        with open(const.HAC_LOG, "w") as log:
            current_time = str(datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
            log.writelines(current_time + ":" + str(traceback.format_exc()))
        print('Error: ' + str(e), file=sys.stderr)
        sys.exit(1)
def is_cleanup_required(self, node=None):
    """Report whether a failback/cleanup is needed for the given node.

    Args:
        node: Node name, or None to consider all nodes.

    Returns:
        bool: True when at least one resource alert is resolved and none
        is still failed; False otherwise.
    """
    node = "all" if node is None else node
    Log.debug(f"Performing failback on {node}")
    resource_list = Conf.get(const.RESOURCE_GLOBAL_INDEX, "resources")
    # Status of every resource, or only those whose name matches the node.
    status_list = {
        resource: self._decision_monitor.get_resource_status(resource)
        for resource in resource_list
        if node == "all" or node in resource
    }
    Log.info(f"Resource status for node {node} is {status_list}")
    statuses = status_list.values()
    if Action.FAILED in statuses:
        Log.debug("Some component are not yet recovered skipping failback")
    elif Action.RESOLVED in statuses:
        Log.info("Failback is required as some of alert are resolved.")
        return True
    else:
        Log.debug(
            f"{node} node already in good state no need for failback")
    return False
def cleanup_db(self, node, data_only):
    """Acknowledge alert data for a node and optionally reset failover.

    Args:
        node ([string]): Node name; None means all nodes.
        data_only ([boolean]): Remove data only.

    Action:
        consul data: {'entity': 'enclosure', 'entity_id': '0',
        'component': 'controller', 'component_id': 'node1'}
        if data_only is True then remove data else remove data and
        perform cleanup.
    """
    resources = Conf.get(const.RESOURCE_GLOBAL_INDEX, "resources")
    node = "all" if node is None else node
    Log.debug(f"Performing cleanup for {node} node")
    # Acknowledge every resource key matching the requested node scope.
    matching_keys = (
        key for key in resources.keys() if node == "all" or node in key)
    for key in matching_keys:
        self._decision_monitor.acknowledge_resource(key, data_only)
    if not data_only:
        Log.info(f"Reseting HA decision event for {node}")
        self.reset_failover(node)
def _init_kafka_conf(self, **kwargs):
    """Initialize Kafka message-bus configuration from Conf.

    Reads the Kafka cluster entries and retry counter from the config
    index and stores client/group/consumer identity from kwargs.

    Args:
        **kwargs: May contain const.CLIENT_ID, const.GROUP_ID and
            const.CONSUMER_NAME entries.
    """
    kafka_cluster = Conf.get(const.CONFIG_INDEX, \
        f"{const.KAFKA}.{const.CLUSTER}")
    # Fix: build the "host:port, host:port" list with str.join instead of
    # the manual counter-driven concatenation; output is identical.
    self._hosts = ", ".join(
        f"{entry[const.SERVER]}:{entry[const.PORT]}"
        for entry in kafka_cluster)
    self._client_id = kwargs.get(const.CLIENT_ID)
    self._group_id = kwargs.get(const.GROUP_ID)
    self._consumer_name = kwargs.get(const.CONSUMER_NAME)
    self._retry_counter = Conf.get(const.CONFIG_INDEX, \
        f"{const.KAFKA}.{const.RETRY_COUNTER}")
    Log.info(f"Message bus config initialized. Hosts: {self._hosts}, "\
        f"Client ID: {self._client_id}, Group ID: {self._group_id}")
def __init__(self, compiled_file, output_file, args_file):
    """
    compiled_file : Compiled file generate by hac compiler
    output_file : Output file for target ha tool
    args_file : Provision file for dynamic input
    """
    # Validate that every mandatory input was supplied, in the same order
    # as the original checks so the first missing one is reported.
    for value, label in ((compiled_file, "compiled_file"),
                         (output_file, "output_file"),
                         (args_file, "args_file")):
        if value is None:
            raise Exception(f"{label} is missing")
    self._is_file(compiled_file)
    self._is_file(args_file)
    Conf.load(const.PROV_CONF_INDEX, Yaml(args_file))
    self._script = output_file
    with open(compiled_file, "r") as f:
        self.compiled_json = json.load(f)
    self._modify_schema()
    self._provision_compiled_schema(self.compiled_json)
    self._resource_set = self.compiled_json["resources"]
def _provision_compiled_schema(self, compiled_schema):
    """
    Scan schema and replace ${var} in compiled schema to configuration
    provided by provision.

    Replaces each ${key} token with the provisioner config value for
    `key` (falling back to the token itself) and stores the re-parsed
    schema in self.compiled_json.
    """
    # Fix: the original pattern ended with the lookahead `(?=[^]*[^]*)`,
    # which always matches the empty string (a zero-or-more repetition of
    # the class [^]*[^]) and therefore had no effect; it is dropped.
    keys = re.findall(r"\${[^}]+}", str(compiled_schema))
    new_compiled_schema = str(compiled_schema)
    for element in keys:
        key = element.replace("${", "").replace("}", "")
        new_compiled_schema = new_compiled_schema.replace(
            element, str(Conf.get(const.PROV_CONF_INDEX, key, element)))
    self.compiled_json = ast.literal_eval(new_compiled_schema)
def _assign_var(self):
    """
    Assign value to runtime variable.

    Appends to the output script two `pcs` status captures plus one
    shell assignment `var=value` for every simple (dot-free) ${var}
    placeholder found in the compiled schema, where the config key is
    the variable name with '_' replaced by '.'.
    """
    # Fix: the original pattern ended with the lookahead `(?=[^]*[^]*)`,
    # which always matches the empty string and had no effect; dropped.
    # Also removed the unused `args = {}` local.
    keys = list(set(re.findall(r"\${[^}]+}", str(self.compiled_json))))
    with open(self._script, "a") as script_file:
        script_file.writelines("pcs_status=$(pcs constraint)\n")
        script_file.writelines("pcs_location=$(pcs constraint location)\n")
        for element in keys:
            if "." not in element:
                variable = element.replace("${", "").replace("}", "")
                key = variable.replace("_", ".")
                script_file.writelines(
                    variable + "=" + str(Conf.get(const.PROV_CONF_INDEX, key)) + "\n")
def _init_config(self, **kwargs):
    """Load the message-bus config and dispatch to the bus-specific init.

    Args:
        **kwargs: Forwarded to the Kafka/RMQ initializer.

    Raises:
        InvalidConfigError: when the configured bus type is unknown or
            any step of configuration loading fails.
    """
    try:
        #TODO: Write a script to fetch kafka bootstarp cluster and write in
        #the config. Provide the script to Provisioner to copy it to desired
        #location.
        ConfInit()
        bus_type = Conf.get(const.CONFIG_INDEX, const.TYPE)
        self._message_bus_type = bus_type
        if bus_type == const.KAFKA:
            self._init_kafka_conf(**kwargs)
        elif bus_type == const.RMQ:
            self._init_rmq_conf(**kwargs)
        else:
            raise InvalidConfigError("Invalid config")
    except Exception as ex:
        # Any failure is surfaced uniformly as a config error.
        Log.error(f"Invalid config error. {ex}")
        raise InvalidConfigError(f"Invalid config. {ex}")
def __init__(self):
    """Initialize configuration exactly once (singleton guard).

    Loads the message-bus JSON config into Conf on the first
    instantiation; subsequent instantiations are no-ops because the
    class-level __instance is already set.
    """
    # Fix: identity comparison with None (PEP 8) instead of `== None`.
    if ConfInit.__instance is None:
        ConfInit.__instance = self
        Conf.init()
        Conf.load(const.CONFIG_INDEX, Json(const.MESSAGE_BUS_CONF))