Example #1
def init_oracle_exporter(server_ip, server_username, server_password):
    server_manage_result = json.loads(server_manage.get_decrypt())
    server_manage_data = server_manage_result[0]["server_manage"]
    oracle_result = json.loads(oracle.get_decrypt())[0]
    oracle_data = oracle_result["oracle"]
    oracle_data_env = oracle_data["dev"]  # only the "dev" environment is initialized here
    for item in oracle_data_env:
        oracle_item_data = oracle_data_env[item]
        oracle_conn_name = oracle_item_data["name"]
        oracle_conn_host_port = oracle_item_data["host_port"]
        oracle_conn_host_port_part = oracle_conn_host_port.split(":")
        oracle_conn_host = oracle_conn_host_port_part[0]
        oracle_conn_port = oracle_conn_host_port_part[1]

        oracle_conn_username = oracle_item_data["username"]
        oracle_conn_password = oracle_item_data["password"]
        # Ensure every Oracle instance has an exporter account (omitted for now)
        oracle_conn_host_str = oracle_conn_host.replace(".", "_")
        server_manage_item_data = server_manage_data[oracle_conn_host_str]
        server_manage_username = list(server_manage_item_data.keys())[0]
        server_manage_password = server_manage_item_data[
            server_manage_username]

        # Install oracle_exporter on every Oracle host
        init_oracle_install_oracle_exporter(oracle_conn_host,
                                            server_manage_username,
                                            server_manage_password,
                                            oracle_conn_host_port)
    # Make sure the Prometheus server is configured with the oracle_exporter targets
    init_prometheus_server_config_oracle_exporter(server_ip, server_username,
                                                  server_password,
                                                  oracle_data_env)
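# A minimal sketch (hypothetical values) of the decrypted oracle config that
# init_oracle_exporter above expects: one entry per instance under the "dev"
# environment, with "host_port" given as "<ip>:<port>".
_example_oracle_config = {
    "oracle": {
        "dev": {
            "orcl_01": {
                "name": "orcl_01",
                "host_port": "10.0.0.11:1521",
                "username": "exporter",
                "password": "***",
            }
        }
    }
}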
def get_crontab_file_content_list():
    try:
        request_data = common_service.check_request_dat_not_null(["env", "host"])
        env = request_data["env"]
        host = request_data["host"]
        host = host.replace(".", "_")
        # Validate the incoming request data
        schedule_server_config_datas = json.loads(schedule_server_config.get())
        schedule_server_config_data = schedule_server_config_datas[0]
        schedule_server_config_data_data = schedule_server_config_data["schedule_server_config"]
        level_env = schedule_server_config_data_data.get(env)
        level_host = level_env.get(host) if level_env else None
        if not level_host:
            raise common_service.MyServiceException(
                "Illegal access: this server is not present in the schedule server configuration data")
        # Fetch the server connection info
        server_manage_datas = json.loads(server_manage.get_decrypt())
        server_manage_data = server_manage_datas[0]
        server_manage_data_data = server_manage_data["server_manage"]
        server_manage_level_host = server_manage_data_data[host]
        conn_host = host.replace("_", ".")
        for key in server_manage_level_host:
            username = key
            password = server_manage_level_host[username]
            my_server = MyServer(conn_host, username, password)
            filepath = "/etc/crontab"
            file_content = my_server.get_file_content(filepath)
            # Return the crontab content as a line-index -> line mapping,
            # using only the first credential for this host
            result = {}
            for index, item in enumerate(file_content):
                result[index] = item
            return result
    except common_service.MyServiceException as e:
        return common_service.ResResult.return500(str(e))
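# Hedged illustration of what get_crontab_file_content_list returns on success:
# a dict mapping line index to the corresponding /etc/crontab line. The sample
# lines below are hypothetical.
_example_crontab_result = {
    0: "SHELL=/bin/bash\n",
    1: "PATH=/sbin:/bin:/usr/sbin:/usr/bin\n",
    2: "*/5 * * * * root /opt/scripts/backup.sh\n",
}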
def init_mysql_exporter(server_ip, server_username, server_password):
    server_manage_result = json.loads(server_manage.get_decrypt())
    server_manage_data = server_manage_result[0]["server_manage"]
    mysql_result = json.loads(mysql.get_decrypt())[0]
    mysql_data = mysql_result["mysql"]
    mysql_data_env = mysql_data["dev"]  # only the "dev" environment is initialized here
    for item in mysql_data_env:
        mysql_item_data = mysql_data_env[item]
        mysql_conn_name = mysql_item_data["name"]
        mysql_conn_host_port = mysql_item_data["host_port"]
        mysql_conn_host_port_part = mysql_conn_host_port.split(":")
        mysql_conn_host = mysql_conn_host_port_part[0]
        mysql_conn_port = mysql_conn_host_port_part[1]

        mysql_conn_username = mysql_item_data["username"]
        mysql_conn_password = mysql_item_data["password"]
        # Ensure every MySQL instance has an exporter account
        init_mysql_exporter_account(mysql_conn_name, mysql_conn_host,
                                    mysql_conn_port, mysql_conn_username,
                                    mysql_conn_password)
        mysql_conn_host_str = mysql_conn_host.replace(".", "_")
        server_manage_item_data = server_manage_data[mysql_conn_host_str]
        server_manage_username = list(server_manage_item_data.keys())[0]
        server_manage_password = server_manage_item_data[
            server_manage_username]

        # Install mysql_exporter on every MySQL host
        init_mysql_install_mysql_exporter(mysql_conn_host,
                                          server_manage_username,
                                          server_manage_password,
                                          mysql_conn_host_port)
    # Make sure the Prometheus server is configured with the mysql_exporter targets
    init_prometheus_server_config_mysql_exporter(server_ip, server_username,
                                                 server_password,
                                                 mysql_data_env)
Example #4
def timer_init():
    print("开始检测资产管理中服务器是否安装最新版本的prometheus监控组件")
    server_manage_orin = json.loads(server_manage.get_decrypt())
    if not server_manage_orin:
        return
    server_manage_orin = server_manage_orin[0]
    server_manage_data = server_manage_orin["server_manage"]

    for item_key in server_manage_data:
        server_ip = item_key.replace("_", ".")
        print("开始: 检测: 服务器(%s)" % server_ip)
        server_conn_info = server_manage_data[item_key]
        for server_username in server_conn_info:
            server_password = server_conn_info[server_username]
            do_init_prometheus_component(server_ip, server_username,
                                         server_password)
            break  # only the first credential is needed per server

    print("完成检测资产管理中服务器是否安装最新版本的prometheus监控组件")
    return {}
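# A minimal sketch (hypothetical values) of the decrypted server_manage payload
# that timer_init and the other functions here iterate over: keys are server IPs
# with "." replaced by "_", each mapping to a {username: password} dict.
_example_server_manage = {
    "server_manage": {
        "10_0_0_21": {"root": "***"},
        "10_0_0_22": {"deploy": "***"},
    }
}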
def add_all_server_to_node_exporter():
    # Load all servers from asset management
    server_manage_data = json.loads(server_manage.get_decrypt())[0]
    server_manage_data = server_manage_data["server_manage"]
    server_list = []
    for server_item_key in server_manage_data:
        server_ip = server_item_key.replace("_", ".")
        server_list.append(server_ip)
    # Fetch the current Prometheus server configuration file content
    prometheus_config_file_content = json.loads(config.get_config_file())
    config_content = prometheus_config_file_content[0]["config_file"]
    # Parse the YAML file into an object
    prometheus_server_config_content_obj = yaml.safe_load(config_content)
    # Add the servers from asset management to the Prometheus server config
    scrape_configs = prometheus_server_config_content_obj["scrape_configs"]

    old_server_ip_list = []
    for item in scrape_configs:
        static_configs = item["static_configs"]
        static_configs_one = static_configs[0]
        static_configs_targets = static_configs_one["targets"]
        static_configs_targets_one = static_configs_targets[0]
        old_server_ip_list.append(static_configs_targets_one)

    is_update = False
    for item in server_list:
        if item + ":9100" not in old_server_ip_list:
            scrape_configs.append({
                "job_name": item,
                "static_configs": [{
                    "targets": [item + ":9100"]
                }]
            })
            is_update = True

    if not is_update:
        return

    config.put_config_file(
        {"config_file": yaml.safe_dump(prometheus_server_config_content_obj)})
Example #6
def modify_server_file(service_type, local_dirs, local_file_path,
                       target_file_path, server_ip_tag, file_content,
                       load_config_file_command):
    local_temp_service_dir_path = project_root_path + "/" + "temp" + "/" + service_type
    # Create the local file
    mkdir_multi_level_dir(local_temp_service_dir_path, local_dirs)  # create the base directory tree
    local_file_path = local_temp_service_dir_path + "/" + local_file_path
    with open(local_file_path, "w", encoding="utf-8") as local_file:
        local_file.write(file_content)

    # Resolve the file paths
    filepath_new_backup = target_file_path + "-new"
    filepath_old_backup = target_file_path + "-old"
    filepath = target_file_path

    # Fetch the server connection info
    server_conn_info = json.loads(
        server_manage.get_decrypt())[0]["server_manage"][server_ip_tag]
    server_ip = server_ip_tag.replace("_", ".")
    server_username = list(server_conn_info.keys())[0]
    server_password = server_conn_info[server_username]
    my_server = MyServer(server_ip, server_username, server_password)

    # Check whether the remote file exists; create an empty one if not
    if not my_server.is_exists_path(filepath):
        my_server.exe_command("echo ''>%s" % filepath)
    # Back up the file on the remote server
    my_server.backup_file(filepath, filepath_old_backup)
    # Upload the file
    my_server.upload_local_file(local_file_path, filepath_new_backup)
    # Overwrite the original file
    my_server.move_file(filepath_new_backup, filepath)
    # Run the command that reloads the configuration
    my_server.exe_command(load_config_file_command)
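# A hypothetical call to modify_server_file, assuming a node_exporter systemd unit
# is being pushed to server 10_0_0_21; every argument value here is illustrative.
# modify_server_file(
#     service_type="node_exporter",
#     local_dirs=["systemd"],
#     local_file_path="systemd/node_exporter.service",
#     target_file_path="/usr/lib/systemd/system/node_exporter.service",
#     server_ip_tag="10_0_0_21",
#     file_content="[Unit]\nDescription=node_exporter\n",
#     load_config_file_command="systemctl daemon-reload && systemctl restart node_exporter",
# )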
Example #7
def put_config_file(request_data=None):
    # Validate input parameters
    try:
        if not request_data:
            request_data = common_service.check_request_dat_not_null(
                ["config_file"])
        config_file = request_data["config_file"]
        if not config_file or config_file.strip() == "":
            raise common_service.MyServiceException("config_file不能为空")
        # Load the Prometheus server configuration
        prometheus_server_config = json.loads(
            get())[0]["monitoring__config"]["prometheus_server"]
        server = prometheus_server_config["server"]
        config_filepath = prometheus_server_config["config_filepath"]
        load_config_file_command = prometheus_server_config[
            "load_config_file_command"]
        # Derive additional values
        config_dir_path = config_filepath[0:config_filepath.rfind("/")]

        # Create the local file
        local_monitoring_path = project_root_path + "/" + "temp" + "/" + "monitoring"
        if not os.path.exists(local_monitoring_path):
            os.mkdir(local_monitoring_path)
        local_monitoring_config_file_path = local_monitoring_path + "/" + "config_file"
        if not os.path.exists(local_monitoring_config_file_path):
            os.mkdir(local_monitoring_config_file_path)

        cur_file_name = "prometheus-%s.yml" % datetime.datetime.strftime(
            datetime.datetime.now(), '%Y%m%d_%H%M_%S')
        local_prometheus_yml = local_monitoring_config_file_path + "/" + cur_file_name
        with open(local_prometheus_yml, "w", encoding="utf-8") as local_file:
            local_file.write(config_file)
        # Modify the file content on the remote server
        filepath_new_backup = config_dir_path + "/" + cur_file_name
        filepath_old_backup = config_dir_path + "/" + "prometheus-old-backup.yml"
        filepath = config_filepath

        # Fetch the server connection info
        server_conn_info = json.loads(
            server_manage.get_decrypt())[0]["server_manage"][server]
        server_ip = server.replace("_", ".")
        server_username = list(server_conn_info.keys())[0]
        server_password = server_conn_info[server_username]

        my_server = MyServer(server_ip, server_username, server_password)
        # Back up the file
        my_server.backup_file(filepath, filepath_old_backup)
        # Upload the file
        my_server.upload_local_file(local_prometheus_yml, filepath_new_backup)
        # Move the file into place
        my_server.move_file(filepath_new_backup, filepath)
        # Run the command that reloads the configuration file
        my_server.exe_command(load_config_file_command)

        request_data = common_service.clear_id(request_data)
        monitoring__config_file_co.insert(request_data)
        return {}
    except common_service.MyServiceException as e:
        print(e)
        return common_service.ResResult.return500(str(e))
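# A minimal sketch (hypothetical values) of the monitoring config that
# put_config_file reads via get(): the "prometheus_server" block supplies the
# target server tag, the remote config path, and the reload command.
_example_monitoring_config = {
    "monitoring__config": {
        "prometheus_server": {
            "server": "10_0_0_21",
            "config_filepath": "/opt/prometheus/prometheus.yml",
            "load_config_file_command": "systemctl restart prometheus",
        }
    }
}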
def do_process(host, modify_schedule_line, process_instance_id):
    # Build the new file content
    # Look up the original file content
    server_manage_datas = json.loads(server_manage.get_decrypt())
    server_manage_data = server_manage_datas[0]
    server_manage_data_data = server_manage_data["server_manage"]
    server_manage_level_host = server_manage_data_data[host]
    conn_host = host.replace("_", ".")
    for key in server_manage_level_host:
        username = key
        password = server_manage_level_host[username]
        my_server = MyServer(conn_host, username, password)
        filepath = "/etc/crontab"
        file_content = my_server.get_file_content(filepath)
        break
    # Apply the requested line changes to the original content
    for key in modify_schedule_line:
        file_content[int(key)] = modify_schedule_line[key]
    project_manage__work_order_process__run_manage_result_log_co.insert_one({
        "process_id": process_instance_id,
        "log": "crontab文件内容为:" + str(file_content) + "<br/>",
        "is_exe_success": True,
    })
    # Save the data to a local file
    local_schedule_path = os.path.join(project_root_path, "temp", "schedule")
    if not os.path.exists(local_schedule_path):
        os.mkdir(local_schedule_path)
    local_schedule_host_path = os.path.join(local_schedule_path, host)
    with open(local_schedule_host_path, "w", encoding="utf-8") as local_file:
        local_file.writelines(file_content)
    project_manage__work_order_process__run_manage_result_log_co.insert_one({
        "process_id": process_instance_id,
        "log": "在本地创建文件",
        "is_exe_success": True,
    })
    # Modify the crontab file content on the remote server
    filepath_new_backup = "/etc/crontab_new_backup"
    filepath_old_backup = "/etc/crontab_old_backup"
    filepath = "/etc/crontab"
    my_server = MyServer(conn_host, username, password)
    # Back up the file
    my_server.backup_file(filepath, filepath_old_backup)
    project_manage__work_order_process__run_manage_result_log_co.insert_one({
        "process_id": process_instance_id,
        "log": "备份远程服务器的crontab文件(%s -> %s)" % (filepath, filepath_old_backup),
        "is_exe_success": True,
    })
    # Upload the file
    my_server.upload_local_file(local_schedule_host_path, filepath_new_backup)
    project_manage__work_order_process__run_manage_result_log_co.insert_one({
        "process_id": process_instance_id,
        "log": "上传本地文件到远程服务(%s -> %s)" % (local_schedule_host_path, filepath_new_backup),
        "is_exe_success": True,
    })
    # Move the new file over the original crontab
    my_server.move_file(filepath_new_backup, filepath)
    project_manage__work_order_process__run_manage_result_log_co.insert_one({
        "process_id": process_instance_id,
        "log": "移动覆盖原来的crontab文件(%s -> %s)" % (filepath_new_backup, filepath),
        "is_exe_success": True,
    })
    # Release the lock
    project_manage__schedule__schedule_work_order__lock_co.delete_one(
        {"host": host, "process_id": process_instance_id})