def insert_data(self, *args):
    """
    Insert collected data into per-database SQLite files.

    :param args: args[0] is agent data, args[1] is kafka data
    :return: None
    :raises Exception: re-raised after rollback when any database step fails.
    """
    data_list = [data for data in args if data]
    cursor = None
    conn = None
    try:
        self.logger.info("Start to insert data:%s." % data_list)
        database_path = Common.parser_config_file("database", "database_path")
        agent_collection_type = Common.parser_config_file("agent", "collection_type")
        if not os.path.isdir(os.path.realpath(database_path)):
            os.makedirs(os.path.realpath(database_path), mode=0o700)
        for data in data_list:
            # connect or create db
            conn = sqlite3.connect(os.path.realpath(
                os.path.join(database_path, data["database"] + ".db")))
            self.logger.info("Successfully connect: %s." % data["database"] + "db")
            # get cursor
            cursor = conn.cursor()
            for collection_type in self.collection_mapping[agent_collection_type]:
                if not data.get(collection_type, None):
                    self.logger.warn("The %s data does not exist." % collection_type)
                    continue
                columns = []
                values = []
                # get table columns and values
                for key, value in data[collection_type].items():
                    columns.append("%s text" % key)
                    values.append(str(value))
                # create table
                create_tb_sql = "CREATE TABLE IF NOT EXISTS %s (%s);" % (
                    collection_type, ", ".join(columns))
                self.logger.info("Command for create table: %s" % create_tb_sql)
                cursor.execute(create_tb_sql)
                self.logger.info("Successfully create table: %s." % collection_type)
                # insert data to table; use "?" placeholders so the values
                # cannot break the statement or inject SQL (the original
                # concatenated quoted values straight into the command)
                insert_data_sql = "insert into %s values (%s);" % (
                    collection_type, ", ".join(["?"] * len(values)))
                self.logger.info("Command for insert data: %s" % insert_data_sql)
                cursor.execute(insert_data_sql, values)
            conn.commit()
        self.logger.info("Successfully insert data.\n%s" % ("-" * 90))
    except Exception as err_msg:
        self.logger.error("Failed to insert data to sqlite, \nError: %s" % str(err_msg))
        # rollback only when a connection was actually established; the
        # original called conn.rollback() unconditionally and crashed with
        # AttributeError when the failure happened before connecting
        if conn:
            self.logger.info("Rolling back...")
            conn.rollback()
            self.logger.info("Successfully rollback.\n")
        raise Exception(str(err_msg))
    finally:
        # guard against cursor/conn still being None (e.g. empty data_list
        # or a failure before the cursor was created); also close the
        # connection, which the original leaked
        if cursor:
            cursor.close()
        if conn:
            conn.close()
def memory_usage(self):
    """
    Obtaining the memory Usage of the GaussDB

    :return: current memory usage of the GaussDb; "0.0" when the
        process cannot be found.
    """
    pid = Common.get_proc_pid(self.ip, self.port)
    # column 4 of "ps -ux" is %MEM for the matching pid
    command = "ps -ux | awk '{if ($2==\"%s\")print}' |awk '{print $4}'" % pid
    output, _ = Common.execute_cmd(command)
    if output:
        return output.decode("utf-8").strip()
    return "0.0"
def io_read(self):
    """
    Obtaining the io_read info of the GaussDB

    :return: io_read info; "0.0" when the process cannot be found.
    """
    pid = Common.get_proc_pid(self.ip, self.port)
    # pidstat -d: column 5 is kB_rd/s for the row whose PID (column 4) matches
    command = "pidstat -d | awk '{if ($4==\"%s\")print}' | awk '{print $5}'" % pid
    output, _ = Common.execute_cmd(command)
    if output:
        return output.decode("utf-8").strip()
    return "0.0"
def __init__(self, logger):
    """
    Read database connection settings from the config file and prepare
    the collection-type mapping used by insert_data.

    :param logger: logger instance used for error reporting.
    :raises Exception: when any of host/port/user cannot be read.
    """
    self.logger = logger
    try:
        for option in ("host", "port", "user"):
            setattr(self, option, Common.parser_config_file("database", option))
    except Exception as err_msg:
        logger.error(err_msg)
        raise Exception(err_msg)
    # password is supplied later (empty means unauthenticated access)
    self.pwd = ""
    self.collection_mapping = {
        "os": ["OSExporter"],
        "database": ["DatabaseExporter"],
        "all": ["OSExporter", "DatabaseExporter"],
    }
def run(self, agent_data="", kafka_data=""):
    """
    Dispatch collected data to the backend configured in [database] name.

    :param agent_data: data collected by the agent (may be empty).
    :param kafka_data: data collected from kafka (may be empty).
    :return: None
    """
    # normalize once so every backend name is matched case-insensitively;
    # the original lowered only the mongodb comparison, so e.g. "InfluxDB"
    # or "Sqlite" in the config silently dispatched nowhere
    database = Common.parser_config_file("database", "name").lower()
    if database == "mongodb":
        MongoDb(self.logger).insert_data(agent_data, kafka_data)
    elif database == "influxdb":
        InfluxDb(self.logger).insert_data(agent_data, kafka_data)
    elif database == "sqlite":
        Sqlite(self.logger).insert_data(agent_data, kafka_data)
def __init__(self, logger):
    """
    Resolve the collection port and open a cursor on the postgres database.

    :param logger: logger instance used for error reporting.
    :raises Exception: when the collection info cannot be acquired.
    """
    try:
        self.port = Common.acquire_collection_info()["port"]
    except Exception as err_msg:
        logger.error(str(err_msg))
        raise Exception(str(err_msg))
    self.logger = logger
    # DBAgent wraps the actual database connection/cursor
    self.cursor = DBAgent(port=self.port, database="postgres")
def __init__(self, logger):
    """
    Cache the collection target's ip and port for later command lookups.

    :param logger: logger instance used for error reporting.
    :raises Exception: when the collection info cannot be acquired.
    """
    try:
        info = Common.acquire_collection_info()
    except Exception as err_msg:
        logger.error(str(err_msg))
        raise Exception(str(err_msg))
    self.logger = logger
    self.ip, self.port = info["ip"], info["port"]
def disk_used_size(self):
    """
    Obtaining the system disk used size

    :return: current disk used size of the GaussDb; "0.0M" when the
        process or its data directory cannot be determined.
    """
    proc_pid = Common.get_proc_pid(self.ip, self.port)
    get_data_path = "ps -ux | awk '{if ($2==\"%s\")print}'" % proc_pid
    std, _ = Common.execute_cmd(get_data_path)
    if not std:
        self.logger.warn("There is no process of: %s." % proc_pid)
        return "0.0M"
    fields = std.decode().split()
    # the data directory is the argument following "-D" on the process
    # command line; the original called fields.index("-D") unguarded and
    # raised ValueError when the option was absent
    if "-D" not in fields or fields.index("-D") + 1 >= len(fields):
        self.logger.warn("Can not find -D option for process: %s." % proc_pid)
        return "0.0M"
    data_dir = fields[fields.index("-D") + 1]
    if not os.path.isdir(data_dir):
        self.logger.warn("The data dir does not exist: %s." % data_dir)
        return "0.0M"
    disk_info, _ = Common.execute_cmd("du -sh %s" % data_dir.strip())
    usage = Common.unify_byte_unit(disk_info.decode("utf-8").split()[0])
    return usage
def insert_data(self, *args):
    """
    insert data to database

    :param args: args[0] is agent data, args[1] is kafka data
    :return: None
    :raises Exception: when connecting or writing to MongoDB fails.
    """
    payloads = [item for item in args if item]
    client = None
    size_limit = Common.parser_config_file("database", "size")
    row_limit = Common.parser_config_file("database", "max_rows")
    try:
        size_limit = int(size_limit) if size_limit else None
        row_limit = int(row_limit) if row_limit else None
        self.logger.info("Start to insert data: %s." % payloads)
        agent_collection_type = Common.parser_config_file("agent", "collection_type")
        # connect MongoDB
        client = pymongo.MongoClient(host=self.host, port=int(self.port))
        for payload in payloads:
            # create and use db
            db = client[payload["database"]]
            if self.pwd:
                db.authenticate(self.user, self.pwd)
            existing = db.list_collection_names()
            for coll_name in self.collection_mapping[agent_collection_type]:
                if not payload.get(coll_name, None):
                    self.logger.warn("The %s data does not exist." % coll_name)
                    continue
                if coll_name in existing:
                    # reuse the collection when it already exists
                    collection = db.get_collection(coll_name)
                else:
                    # new collections are capped by the configured size/rows
                    collection = db.create_collection(coll_name, capped=True,
                                                      size=size_limit,
                                                      max=row_limit)
                collection.insert_one(payload[coll_name])
        self.logger.info("Successfully insert data.\n%s" % ("-" * 90))
    except Exception as err_msg:
        self.logger.error(err_msg)
        raise Exception(err_msg)
    finally:
        if client:
            client.close()
def io_wait():
    """
    Obtaining the system io_wait

    :return: io_wait info; "0.0" when iostat produced no output.
    """
    output, _ = Common.execute_cmd("iostat")
    if output:
        # %iowait is the 4th column on the avg-cpu line (4th line of output)
        return output.decode("utf-8").split("\n")[3].split()[3]
    return "0.0"
def check_agent_parameter(self, config):
    """
    Check if the agent parameter is valid, if the parameter is valid,
    then return parameters dict, otherwise exit process.

    :param config: config handler for config file.
    :return: agent parameters dict.
    """
    result = {}
    # host/port are mandatory — a missing option or section aborts startup
    try:
        host = config.get('server', 'host')
        listen_port = config.get('server', 'listen_port')
        Common.check_ip_and_port(host, listen_port)
    except (NoOptionError, NoSectionError) as e:
        self.logger.error(e)
        sys.exit(1)
    else:
        result['host'] = host
        result['listen_port'] = listen_port
    # optional parameters fall back to these defaults on any failure
    defaults = {
        'sink_timer_interval': '10S',
        'source_timer_interval': '10S',
        'channel_capacity': 1000
    }
    for name, fallback in defaults.items():
        try:
            if name == 'channel_capacity':
                result[name] = config.getint('agent', name)
            else:
                raw_value = config.get('agent', name)
                result[name] = Common.transform_time_string(
                    raw_value, mode='to_second')
        except Exception as e:
            self.logger.error(
                "error occur when acquire %s: %s, use default_value: %s"
                % (name, str(e), fallback))
            result[name] = defaults[name]
    return result
def __init__(self, interval, url, context, logger):
    """
    :param interval: int, time interval when send data.
    :param url: string, http/https url.
    :param context: certificate context for https method.
    :param logger: logger instance used for error reporting.
    :raises Exception: when collection info or cluster_name is unavailable.
    """
    Sink.__init__(self)
    self._interval = interval
    self.running = False
    self._url = url
    self.context = context
    self.logger = logger
    try:
        # acquire the collection info once instead of three separate
        # calls (the original re-parsed it for each of ip/port/data_type)
        collection_info = Common.acquire_collection_info()
        self.host = collection_info["ip"]
        self.port = collection_info["port"]
        self.data_type = collection_info["data_type"]
        self.cluster_name = Common.parser_config_file(
            "agent", "cluster_name")
    except Exception as err_msg:
        logger.error(str(err_msg))
        raise Exception(str(err_msg))
def create_log(self):
    """
    Build the logger, creating the configured log directory if needed.

    :return: configured logger instance.
    """
    config = ConfigParser()
    config.read(CONFIG_PATH)
    log_dir = os.path.realpath(config.get("log", "log_path"))
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    return Common.create_logger(level=self.level,
                                log_name=self.log_name,
                                log_path=os.path.join(log_dir, self.log_name))
def stop_service(self):
    """
    Stop the server process: via the pid file when present, otherwise by
    looking the process up; errors are logged and the pid file removed.
    """
    try:
        if os.path.isfile(self.pid_file):
            # normal path: terminate the recorded pid and clean up
            with open(self.pid_file, mode='r') as f:
                pid = int(f.read())
            os.kill(pid, signal.SIGTERM)
            os.remove(self.pid_file)
        else:
            # no pid file — fall back to a process lookup
            std = Common.check_proc_exist("role server")
            if not std:
                raise Exception("ERROR: Process not running.")
            Common.execute_cmd("kill -9 %s" % std)
        self.logger.info("Successfully stopped server.")
    except Exception as err_msg:
        self.logger.error("Failed to stop service, Error: %s" % str(err_msg))
        sys.stdout.write("Error: " + str(err_msg) + "\n")
        if os.path.isfile(self.pid_file):
            os.remove(self.pid_file)
def stop_agent(self):
    """
    Stop the agent process: via the pid file when present, otherwise by
    looking the process up; errors are logged and the pid file removed.
    """
    try:
        if os.path.exists(self.pid_file):
            # normal path: terminate the recorded pid and clean up
            with open(self.pid_file, mode='r') as f:
                pid = int(f.read())
            os.kill(pid, signal.SIGTERM)
            os.remove(self.pid_file)
        else:
            self.logger.warn("The pid file does not exists.")
            # no pid file — fall back to a process lookup
            std = Common.check_proc_exist("role agent")
            if not std:
                raise Exception("ERROR: Process not running.")
            Common.execute_cmd("kill -9 %s" % std)
        self.logger.info("Successfully stopped agent.")
    except Exception as e:
        self.logger.error("Failed to stop agent, Error: %s" % str(e))
        sys.stdout.write("Error: " + str(e) + "\n")
        if os.path.exists(self.pid_file):
            os.remove(self.pid_file)
def start_service(self, config_path):
    """
    Start the REST service; blocks until the app stops.

    :param config_path: string, config file path.
    :raises Exception: when already running, config is missing, or the
        app fails to run.
    """
    # refuse to start twice; a stale pid file is cleaned up instead
    if os.path.isfile(self.pid_file):
        if Common.check_proc_exist("role server"):
            raise Exception(
                "Error: Process already running, can't start again.")
        os.remove(self.pid_file)
    if not os.path.isfile(config_path):
        raise Exception("Config file: %s does not exists." % config_path)
    # read listen host and port from the config file
    config = ConfigParser()
    config.read(config_path)
    listen_host = config.get("server", "listen_host")
    port = config.get("server", "listen_port")
    # record our pid so stop_service can find us later
    pid_dir = os.path.dirname(self.pid_file)
    if not os.path.isdir(pid_dir):
        os.makedirs(pid_dir, 0o700)
    with open(self.pid_file, mode='w') as f:
        f.write(str(os.getpid()))
    # set up the application and its routes, then serve
    self.initialize_app()
    self.add_resource()
    try:
        context = Common.check_certificate_setting(self.logger,
                                                   config_path, "server")
        self.logger.info("Start service...")
        self.app.run(host=listen_host, port=int(port), ssl_context=context)
        self.logger.warn(
            "Service stopped, please check main.log for more information.")
    except (Exception, KeyboardInterrupt) as err_msg:
        self.logger.error(str(err_msg))
        raise Exception(err_msg)
    finally:
        if os.path.isfile(self.pid_file):
            os.remove(self.pid_file)
def insert_data(self, *args):
    """
    insert data to database

    :param args: args[0] is agent data, args[1] is kafka data
    :return: None
    :raises Exception: when connecting or writing to InfluxDB fails.
    """
    data_list = [data for data in args if data]
    client = None
    try:
        self.logger.info("Start to insert data:%s." % data_list)
        agent_collection_type = Common.parser_config_file("agent", "collection_type")
        # connect influxDb
        if self.pwd:
            client = InfluxDBClient(host=self.host, port=int(self.port),
                                    username=self.user, password=self.pwd)
        else:
            client = InfluxDBClient(host=self.host, port=int(self.port))
        # BUG FIX: get_list_database() returns dicts like {"name": <db>};
        # the original compared the plain db name against that list, so the
        # check was always True and the database plus its retention policy
        # were re-created on every insert.
        db_names = [db.get("name") for db in client.get_list_database()]
        for data in data_list:
            db_name = data["database"]
            if db_name not in db_names:
                # create database if database does not exist
                client.create_database(db_name)
                # Create Data Retention Policy (3 days); policy name kept
                # as "there_days" for compatibility with existing servers
                client.create_retention_policy("there_days", "3d", "1",
                                               db_name, True)
                db_names.append(db_name)
            for collection_type in self.collection_mapping[agent_collection_type]:
                if not data.get(collection_type, None):
                    self.logger.warn("The %s data does not exist." % collection_type)
                    continue
                json_body = [
                    {
                        "measurement": collection_type,
                        "fields": data[collection_type]
                    }
                ]
                client.write_points(json_body, database=db_name)
        self.logger.info("Successfully insert data.\n%s" % ("-" * 90))
    except Exception as err_msg:
        self.logger.error(err_msg)
        raise Exception(err_msg)
    finally:
        if client:
            client.close()
def manage_service(args):
    """
    Start or stop the server/agent role according to the CLI arguments.

    :param args: parsed CLI args with .role and .mode attributes.
    :return: -1 on an unrecognized role, otherwise None.
    """
    server_pid_file = os.path.join(current_dirname, './tmp/server.pid')
    agent_pid_file = os.path.join(current_dirname, './tmp/agent.pid')
    if args.role == 'server':
        from service.my_app import MyApp
        app = MyApp(server_pid_file, LOGGER)
        if args.mode == 'start':
            app.start_service(CONFIG_PATH)
        else:
            app.stop_service()
    elif args.role == 'agent':
        from agent.manage_agent import Agent
        agent = Agent(agent_pid_file, LOGGER)
        if args.mode == 'start':
            # the agent is pointless without a running GaussDb process
            std, _ = Common.execute_cmd(
                "ps -ux | grep -v grep | grep gaussdb")
            if not std:
                raise Exception(
                    "The GaussDb process does not exists, please check it.")
            agent.start_agent(CONFIG_PATH)
        else:
            agent.stop_agent()
    else:
        print('FATAL: incorrect parameter.')
        print(usage())
        return -1
def start_agent(self, config_path):
    """
    Start agent service.
    :param config_path: string, config path.
    :return: NA
    """
    if not os.path.isfile(config_path):
        raise Exception('Config file: %s does not exists.' % config_path)
    # check agent is running or not.
    if os.path.isfile(self.pid_file):
        pid = Common.check_proc_exist("role agent")
        if pid:
            self.logger.warn("Process already exist, pid:[%s]" % pid)
            raise Exception("Process already running, can't start again.")
        else:
            # stale pid file with no live process: clean it up and continue
            os.remove(self.pid_file)
    # write process pid to file
    if not os.path.isdir(os.path.dirname(self.pid_file)):
        os.makedirs(os.path.dirname(self.pid_file), 0o700)
    with open(self.pid_file, mode='w') as f:
        f.write(str(os.getpid()))
    try:
        config = ConfigParser()
        config.read(config_path)
        collection_type = config.get("agent", "collection_type")
        agent_parameters = self.check_agent_parameter(config)
        # context is an SSL context when certificates are configured,
        # None otherwise — this also decides the sink URL scheme below
        context = Common.check_certificate_setting(self.logger,
                                                   config_path, "agent")
        protocol = "http"
        if context is not None:
            protocol = "https"
        url = "%s://" % protocol + agent_parameters["host"] + ":" + \
              agent_parameters["listen_port"] + "/sink"
        # wire source -> channel -> sink: exporters fill the channel,
        # the HTTP sink drains it toward the server's /sink endpoint
        chan = ChannelManager(LOGGER)
        source = DBSource()
        http_sink = HttpSink(
            interval=agent_parameters['sink_timer_interval'],
            url=url, context=context, logger=LOGGER)
        source.channel_manager = chan
        http_sink.channel_manager = chan
        # choose exporter tasks by collection_type; anything other than
        # "all"/"os" falls through to database-only collection
        if collection_type == "all":
            tasks = [("OSExporter", OSExporter(LOGGER).__call__),
                     ("DatabaseExporter", DatabaseExporter(LOGGER).__call__)]
        elif collection_type == "os":
            tasks = [("OSExporter", OSExporter(LOGGER).__call__)]
        else:
            tasks = [("DatabaseExporter", DatabaseExporter(LOGGER).__call__)]
        for task_name, task_func in tasks:
            source.add_task(
                name=task_name,
                interval=agent_parameters['source_timer_interval'],
                task=task_func,
                maxsize=agent_parameters['channel_capacity'],
                logger=LOGGER)
        try:
            # start to collect data
            source.start()
        except Exception as e:
            self.logger.error("Failed to start agent task, Error: %s."
                              % e)
            raise Exception("Failed to start agent, Error: %s." % e)
        # push data to server
        http_sink.start()
    except Exception as err_msg:
        self.logger.error(str(err_msg))
        sys.stdout.write("Error: " + str(err_msg) + "\n")
    except KeyboardInterrupt:
        self.logger.warn(
            "Keyboard exception is received. The process ends.")
    finally:
        # always remove the pid file on the way out so the next start
        # is not blocked by a stale entry
        if os.path.isfile(self.pid_file):
            os.remove(self.pid_file)