def xtrabackup_install(os_args, version):
    """Upload and unpack the Percona XtraBackup tarball on a remote host.

    Recreates /usr/local/xtrabackup from scratch, uploads the local media
    file matching `version`, extracts it in place, and verifies that the
    installed binary runs.

    Args:
        os_args: SSH connection arguments for the helper functions;
            os_args[0] is the host label used in log output.
        version: MySQL version string; '8.0' selects the 8.0 tarball,
            anything else falls back to the 2.4 tarball.

    Returns:
        'xb install s' when `xtrabackup -v` reports a version,
        'xb install f' otherwise.
    """
    # Raw strings: the local media path uses backslashes, and '\p' is an
    # invalid escape sequence in a normal string literal (SyntaxWarning on
    # modern Python). Bytes are identical to the original values.
    if version == '8.0':
        soft_name = r'xtrabackup\percona-xtrabackup-8.0.tar.gz'
    else:
        soft_name = r'xtrabackup\percona-xtrabackup-2.4.tar.gz'
    # Start from a clean install directory on the remote host.
    ssh_input_noprint(os_args, 'rm -rf /usr/local/xtrabackup')
    ssh_input_noprint(os_args, 'mkdir -p /usr/local/xtrabackup')
    print(f"\nINFO: {os_args[0]} 开始上传xtrabackup介质文件")
    ssh_ftp(os_args, '/usr/local/xtrabackup/percona-xtrabackup.tar.gz',
            soft_name, 'put')
    print(f"\nINFO: {os_args[0]} xtrabackup介质文件上传完成")
    print(f"\nINFO: {os_args[0]} 开始解压配置xtrabackup介质包")
    ssh_input_noprint(
        os_args,
        'cd /usr/local/xtrabackup/\ntar -zxf percona-xtrabackup.tar.gz')
    # Flatten the versioned top-level directory into /usr/local/xtrabackup.
    ssh_input_noprint(
        os_args,
        'cd /usr/local/xtrabackup/percona-xtrabackup*/\nmv * /usr/local/xtrabackup'
    )
    print(f"\nINFO: {os_args[0]} xtrabackup介质包解压配置完成")

    # Smoke test: `xtrabackup -v` prints a line containing 'version' when
    # the binary is usable on this host.
    check_res = ''.join(
        ssh_input_noprint(os_args, '/usr/local/xtrabackup/bin/xtrabackup -v'))
    if 'version' in check_res:
        print(f"\nINFO: {os_args[0]} xtrabackup软件可用")
        return 'xb install s'
    else:
        print(f"\nWARNING: {os_args[0]} xtrabackup软件不可用")
        return 'xb install f'
def err_log(mysql_args,os_args):
    """Report the MySQL error-log size and clean it when it exceeds 2 GB.

    Reads datadir/log_error from the server, measures the log file over
    SSH, prints a summary table, and — when the log exceeds 2048 MB —
    archives and removes it, flushes logs, then recurses once to show
    the post-clean state.

    Args:
        mysql_args: connection arguments for get_all()/run_noprint().
        os_args: SSH connection arguments for ssh_input_noprint().

    Returns:
        A one-row list [[errlog_path, size_mb, advice]] describing the
        state measured at the start of this call.
    """
    # Row order assumed: [0] datadir, [1] log_error (alphabetical from
    # SHOW GLOBAL VARIABLES) — TODO confirm against get_all's ordering.
    log_dir = get_all(mysql_args,"show global variables where variable_name in ('datadir','log_error');")
    if './' in log_dir[1][1]:
        # log_error is relative to datadir; join them and normalise
        # './' and '//' artefacts out of the combined path.
        errlog = (log_dir[0][1]+log_dir[1][1]).replace('./','/').replace('//','/')
    else:
        errlog = log_dir[1][1]
    # du -sk reports KB; convert to MB, keep 3 decimals.
    errlog_size = round(int(ssh_input_noprint(os_args,f"du -sk {errlog}|awk '{{print $1}}'")[0].replace('\n',''))/1024,3)
    if errlog_size > 2048:
        advice = "Need clean error log"
    else:
        advice = "Do not need clean error log"
    print("\nINFO:\n目前MySQL错误日志大小如下:")
    title = ['Error_log_path','Size (MB)','Advice']
    before_err_res = [[errlog,errlog_size,advice]]

    before_errlog_table = res_table(before_err_res,title)
    print(before_errlog_table)

    if advice == "Need clean error log":
        print("\nINFO:\n清理MySQL错误日志:")
        clean_log_file(os_args,errlog)
        # FLUSH LOGS makes the server recreate the (now removed) error log.
        run_noprint(mysql_args,"flush logs")
        print("\nINFO:\nMySQL错误日志清理完毕.")
        # Recurse once to display the size after cleaning; the fresh log
        # should be far below the threshold, so the recursion terminates.
        err_log(mysql_args,os_args)

    return before_err_res
# 예제 #3
# 0
def xtrabackup(src_args, src_os_args, tag_os_args, xtra_args, obj, tag_args):
    """Physical migration via xtrabackup: install, back up, ship, restore.

    Installs xtrabackup on both source and target, streams a backup on
    the source, copies the .xbstream file to the target, and hands off
    to target_xtra() for prepare/start.

    Args:
        src_args: source DB connection arguments (for get_version etc.).
        src_os_args: source host SSH arguments.
        tag_os_args: target host SSH arguments.
        xtra_args: (source_socket, source_dmp_dir, source_cnf_dir,
            target_data_dir, target_dmp_dir, target_base_dir).
        obj: backup object spec forwarded to get_xtrabackup_cmd().
        tag_args: target DB connection arguments.

    Returns:
        target_xtra()'s result on success, 'xtrabackup f' when the source
        backup fails, or the failing install result string otherwise.
    """
    source_socket, source_dmp_dir, source_cnf_dir, target_data_dir, target_dmp_dir, target_base_dir = xtra_args
    print("\nINFO:源端开始配置xtrabackup软件")
    version = get_version(src_args)
    install_res = xtrabackup_install(src_os_args, version)
    if install_res == "xb install s":
        print("\nINFO:目标端开始配置xtrabackup软件")
        t_install_res = xtrabackup_install(tag_os_args, version)
        if t_install_res == "xb install s":
            xtrabackup_cmd = get_xtrabackup_cmd(src_args, xtra_args, obj)
            xtrabackup_res = ''.join(
                ssh_input_noprint(src_os_args, xtrabackup_cmd))
            # xtrabackup prints 'completed OK!' on a successful backup.
            if 'completed OK' in xtrabackup_res:
                print("\nINFO:源端xtrabackup备份完成")
                print("\nINFO:开始往目标端传输物理备份文件")
                ssh_scp(src_os_args, tag_os_args,
                        f"{source_dmp_dir}/xtrabackup.xbstream",
                        f"{target_dmp_dir}/xtrabackup.xbstream")
                return target_xtra(tag_os_args, xtra_args, tag_args)
            else:
                print("\nINFO:源端xtrabackup备份失败,详情请查看ops_mysql.log")
                return 'xtrabackup f'
        else:
            return t_install_res
    else:
        # BUGFIX: this branch previously returned `t_install_res`, which is
        # unbound when the source-side install fails (NameError). Return
        # the source install result instead.
        return install_res
# 예제 #4
# 0
def mysqldump(src_args, tag_args, tag_os_args, obj):
    """Copy object(s) from source to target via a remote mysqldump pipeline.

    `obj` is either a single database name (no '.') or a comma-separated
    list of 'db.table' objects. For every object the target datadir free
    space is checked first: the object must fit within 80% of free space.

    Args:
        src_args: source DB connection arguments.
        tag_args: target DB connection arguments.
        tag_os_args: target host SSH arguments (the dump/import command
            runs on the target host).
        obj: object spec as described above.

    Returns:
        'mysqldump s' when every object imported cleanly,
        'mysqldump f' when any import reported ERROR,
        'no free'     when any object would not fit (and none failed).
    """
    run_res = ''
    if '.' not in obj:
        # Whole-database copy.
        size = int(get_size(src_args, obj))
        data_dir_free = int(get_datadir_free(tag_args, tag_os_args))
        if size < data_dir_free * 0.8:
            print(f"\nINFO:对象大小{obj}约为{size}MB,目标环境磁盘空间充足,开始执行mysqldump")
            mysqldump_cmd = get_mysqldump_cmd(src_args, tag_args, tag_os_args,
                                              obj)

            imp_res = ssh_input_noprint(tag_os_args, mysqldump_cmd)
            if 'ERROR' in ''.join(imp_res):
                print("\nERROR:数据导入出错!")
                print(''.join(imp_res))
                run_res = 'mysqldump f'
            else:
                print(f"\nINFO:{obj} 数据导入完成.")
                run_res = 'mysqldump s'
        else:
            print(f"\nINFO:对象大小{obj}约为{size}MB,目标环境磁盘空间不足")
            run_res = 'no free'
    else:
        # Table-level copy of one or more 'db.table' objects.
        per_obj_res = []
        # Use a distinct loop variable; the original reused `obj`, which
        # discarded the full list after the loop.
        for one_obj in obj.split(','):
            size = int(get_size(src_args, one_obj))
            data_dir_free = int(get_datadir_free(tag_args, tag_os_args))

            if size < data_dir_free * 0.8:
                check_table(tag_args, tag_os_args, one_obj)
                print(f"\nINFO:对象{one_obj} 大小约为{size}MB,目标环境磁盘空间充足,开始执行mysqldump")
                mysqldump_cmd = get_mysqldump_cmd(src_args, tag_args,
                                                  tag_os_args, one_obj)
                imp_res = ssh_input_noprint(tag_os_args, mysqldump_cmd)
                if 'ERROR' in ''.join(imp_res):
                    print("\nERROR:数据导入出错!")
                    print(''.join(imp_res))
                    per_obj_res.append('mysqldump f')
                else:
                    print(f"\nINFO:{one_obj} 数据导入完成.")
                    per_obj_res.append('mysqldump s')
            else:
                print(f"\nINFO:对象大小{one_obj}约为{size}MB,目标环境磁盘空间不足")
                per_obj_res.append('no free')

        # BUGFIX: previously each object's result overwrote `run_res`, so a
        # failed import followed by a successful one reported overall
        # success. Any failure now dominates the aggregate result.
        if 'mysqldump f' in per_obj_res:
            run_res = 'mysqldump f'
        elif 'no free' in per_obj_res:
            run_res = 'no free'
        else:
            run_res = 'mysqldump s'

    return run_res
def os_dir_use(os_args):
    """Collect and display filesystem usage on the remote host.

    Runs `df -hP` over SSH, parses each data row into its first six
    columns, prints them as a table, and returns the parsed rows.
    """
    raw_lines = ssh_input_noprint(os_args, "df -hP")
    usage_rows = []
    # Skip the df header line, then split each row on spaces and drop
    # the empty fields produced by column padding.
    for line in raw_lines[1:]:
        fields = [part.replace('\n', '') for part in line.split(' ') if part != '']
        fields = fields[0:6]
        if fields != []:
            usage_rows.append(fields)
    print("\nINFO:\n操作系统目录使用情况如下:\n")
    header = ["Filesystem", "Size", "Used", "Avail", "Use%", "Mounted on"]
    print(res_table(usage_rows, header))
    return usage_rows
def mysql_dir_use(mysql_args, os_args):
    """Show disk usage of the MySQL basedir and datadir.

    Queries both directory variables, measures each existing path with
    `du -sh` over SSH, prints a table, and returns the augmented rows
    as [variable_name, path, size].
    """
    dir_rows = get_all(mysql_args, "show global variables where variable_name in ('basedir','datadir')")

    measured = []
    for row in dir_rows:
        entry = list(row)
        if entry[1] != '':
            du_cmd = "du -sh %s|awk '{print $1}'" % entry[1]
            size = ssh_input_noprint(os_args, du_cmd)[0].replace('\n', '')
        else:
            # Empty path (variable unset): report zero usage.
            size = 0
        entry.append(size)
        measured.append(entry)
    print("\nINFO:\nMySQL软件及数据目录使用情况如下:\n")
    header = ["Directory_name", "Directory_path", "Size"]
    print(res_table(measured, header))
    return measured
# 예제 #7
# 0
def check_table(tag_args, tag_os_args, obj):
    """Check whether table `obj` ('db.table') exists on the target.

    When the table already exists, interactively offer to rename it with
    a timestamp suffix; otherwise warn that it will be overwritten.

    Returns:
        'tb no exist' when the table is absent, 'tb exist' otherwise.
    """
    dbname, tbname = obj.split('.')
    t_ip, t_db_user, t_db_port, t_db_pwd = tag_args
    # Escape '!' so the shell does not treat it as history expansion.
    t_db_pwd = t_db_pwd.replace('!', '\\!')
    bin_dir = get_bin_dir(tag_args)
    check_tb_cmd = f"{bin_dir}/mysql -e 'show tables' -u{t_db_user} -p{t_db_pwd} -h{t_ip} -P{t_db_port} -D{dbname} |grep -w {tbname}"
    # grep produces no output when the table is absent.
    if not ssh_input_noprint(tag_os_args, check_tb_cmd):
        return 'tb no exist'
    rename_yn = input(f'\nINFO:对象{obj} 在目标库已存在,是否重命名? Y/N ').upper()
    if rename_yn == 'Y':
        rename_time = time.strftime("%Y%m%d%H%M%S", time.localtime())
        rename_tnb = f"{obj}_old_{rename_time}"
        rename_sql = f"rename table {obj} to {rename_tnb}"
        run_noprint(tag_args, rename_sql)
        print(f"\nINFO:目标库对象{obj} 已重命名为{rename_tnb}")
    else:
        print(f"\nINFO:目标库已存在对象{obj} 将被覆盖.")
    return 'tb exist'
def bin_log(mysql_args,os_args):
    """Report MySQL binlog parameters and on-disk binlog file sizes.

    Queries binlog-related server variables, and (for 5.6+ with binlog
    enabled) lists each binlog file's path, mtime and size via SSH.

    Args:
        mysql_args: connection arguments for get_all()/get_version().
        os_args: SSH connection arguments for ssh_input_noprint().

    Returns:
        (binlog, bin_set, bin_size): the variable rows, the parsed
        per-file rows, and the raw `ls` output lines. Placeholder
        [('None', 'None')] values are used where not applicable.
    """
    logbin = get_all(mysql_args,"show global variables where Variable_name in ('log_bin')")[0][1]
    db_version = get_version(mysql_args)
    if logbin == 'ON':
        binlog = get_all(mysql_args,"show global variables where \
        variable_name in ('log_bin','LOG_BIN_BASENAME','expire_logs_days','binlog_format','sync_binlog',\
        'log_bin_index','binlog_cache_size','max_binlog_cache_size','max_binlog_size','Binlog_cache_disk_use','Binlog_cache_use')")
        if db_version != '5.5':
            # log_bin_basename exists from 5.6 on; 5.5 has no such variable.
            bin_dir = get_all(mysql_args,"show global variables where\
            variable_name in ('log_bin_basename')")
            # %% survives the old-style format as a literal % for ls's
            # --time-style strftime pattern.
            cmd = "ls -lhs --time-style '+%%Y/%%m/%%d %%H:%%M:%%S' %s*|awk '{print $9,$7,$8,$1}'"%bin_dir[0][1]
            bin_size = ssh_input_noprint(os_args,cmd)
            bin_set = []
            for line in bin_size:
                fields = [x for x in line.split(' ') if x != '']
                if fields != []:
                    bin_set.append(fields)
        else:
            bin_set = [('None', 'None')]
            bin_size = [('None', 'None')]
    else:
        binlog = [('log_bin', 'OFF')]
        bin_dir = [('None', 'None')]
        bin_size = [('None', 'None')]
        bin_set = [('None', 'None')]
    print("\nINFO:\nMySQL Binlog日志参数如下:")
    binlog_title = ["Variable_name","Value"]

    binlog_table = res_table(binlog,binlog_title)
    print(binlog_table)
    # BUGFIX: check logbin FIRST. The original evaluated binlog[2][1]
    # before the logbin guard, raising IndexError when log_bin is OFF
    # (binlog then holds a single row).
    # NOTE(review): binlog[2] is assumed to be the expire_logs_days row —
    # this relies on get_all's row ordering; confirm against its output.
    if logbin == 'ON' and binlog[2][1] == '0':
        print("\n小结:\nLOG_BIN参数值为ON,且expire_logs_days参数值为0,建议设置保留时间.")
    if bin_set!=[('None', 'None')]:
        print("\nINFO:\nMySQL Binlog空间使用情况如下")
        binlog_dir_title = ["Binlog_path","Use_date","Use_time","Size"]
        binlog_dir_table = res_table(bin_set,binlog_dir_title)
        print(binlog_dir_table)

    return binlog,bin_set,bin_size
def target_xtra(tag_os_args, xtra_args, tag_args):
    """Restore an xtrabackup stream on the target host and start MySQL.

    Unpacks the transferred xbstream into the target datadir (backing up
    any existing datadir first), runs `xtrabackup --prepare`, writes a
    minimal my.cnf, and boots the instance via mysqld_safe.

    Args:
        tag_os_args: target host SSH arguments; tag_os_args[0] is its label.
        xtra_args: (source_socket, source_dmp_dir, source_cnf_dir,
            target_data_dir, target_dmp_dir, target_base_dir).
        tag_args: (host, db_username, db_port, db_password).

    Returns:
        'start success' / 'start failed' after the startup attempt, or
        'apply f' when the --prepare step fails.
    """
    source_socket, source_dmp_dir, source_cnf_dir, target_data_dir, target_dmp_dir, target_base_dir = xtra_args
    host, db_username, db_port, db_password = tag_args
    # Probe the datadir: an `ls` error message means it does not exist yet.
    check_dir = ''.join(ssh_input_noprint(tag_os_args,
                                          f'ls {target_data_dir}'))
    if 'No such file or directory' in check_dir:
        ssh_input_noprint(tag_os_args, f'mkdir  -p {target_data_dir}')
    else:
        print(f"\nINFO:目标端{target_data_dir} 已存在文件,将其备份后重新创建")
        rename_time = time.strftime("%Y%m%d%H%M%S", time.localtime())
        ssh_input_noprint(
            tag_os_args,
            f'mv {target_data_dir} {target_data_dir}.bak_{rename_time}\nmkdir  -p {target_data_dir}'
        )

    print("\nINFO:目标端开始解压备份流文件")
    ssh_input_noprint(
        tag_os_args,
        f'cd {target_dmp_dir}\n/usr/local/xtrabackup/bin/xbstream -x < xtrabackup.xbstream  -C {target_data_dir}'
    )
    print("\nINFO:目标端解压备份流文件完成")

    print("\nINFO:目标端开始应用备份流文件日志")
    apply_res = ''.join(
        ssh_input_noprint(
            tag_os_args,
            f'cd {target_dmp_dir}\n/usr/local/xtrabackup/bin/xtrabackup --prepare --target-dir={target_data_dir}'
        ))
    # xtrabackup prints 'completed OK!' when the prepare phase succeeds.
    if 'completed OK' in apply_res:
        print("\nINFO:目标端应用备份流文件日志完成")
        ssh_input_noprint(tag_os_args,
                          f"chown -R mysql:mysql {target_data_dir}")
        cnf_str = '''
[mysqld]
basedir=%s
datadir=%s
port=%s
socket=%s/mysql_%s.socket
        ''' % (target_base_dir, target_data_dir, db_port, target_data_dir,
               db_port)
        ssh_input_noprint(tag_os_args,
                          f"echo '{cnf_str}'>/etc/my_xtrabak_{db_port}.cnf")
        print("\nINFO:目标端生成mysql配置文件完成")
        print("\nINFO:目标端开始启动数据库")
        ssh_input_noprint(
            tag_os_args,
            f"nohup {target_base_dir}/bin/mysqld_safe --defaults-file=/etc/my_xtrabak_{db_port}.cnf --user=mysql > /dev/null 2>&1 >>/tmp/startmysql{tag_os_args[0]}_xtra.log &"
        )
        # Give mysqld_safe time to write its startup log before checking it.
        time.sleep(10)
        start_res = ''.join(
            ssh_input_noprint(
                tag_os_args, f'cat /tmp/startmysql{tag_os_args[0]}_xtra.log '))
        # An '... ended' line appears only when mysqld_safe exited, i.e. the
        # server failed to stay up; its absence (or an empty log) is success.
        if ' ended ' not in start_res or start_res == '':
            # BUGFIX: this message previously read "\nNFO:" (missing the I).
            print("\nINFO:目标端数据库启动完成.")
            ssh_input_noprint(
                tag_os_args, f'rm -f /tmp/startmysql{tag_os_args[0]}_xtra.log')
            return 'start success'
        else:
            print("\nWARNING:目标端数据库启动失败,详情请查看ops_mysql.log")
            ssh_input_noprint(
                tag_os_args, f'rm -f /tmp/startmysql{tag_os_args[0]}_xtra.log')
            return 'start failed'

    else:
        print("\nWARNING:目标端应用备份流文件日志失败,详情请查看ops_mysql.log")
        return 'apply f'
def clean_log_file(os_args, log_file_path):
    """Archive the tail of a remote log file, then delete the original.

    Appends the last 10000 lines to a timestamped *_bak_* copy before
    removing the file, so recent history survives the cleanup.
    """
    stamp = time.strftime("%Y%m%d%H%M%S", time.localtime())
    archive_cmd = f"tail -10000 {log_file_path} >>{log_file_path}_bak_{stamp}"
    ssh_input_noprint(os_args, archive_cmd)
    ssh_input_noprint(os_args, f"rm -f {log_file_path}")
    # Keep the historical (misspelled) status string: callers may compare it.
    return 'cleanned'
def slow_log(mysql_args,os_args):
    """Report the MySQL slow-log size and clean it when it exceeds 1 GB.

    Handles every log_output setting: slow log disabled, NONE, FILE,
    TABLE, and FILE,TABLE combined. When a log exceeds 1024 MB it is
    archived and recreated (file) or rotated via rename/recreate
    (table), after which the post-clean state is re-reported.

    Args:
        mysql_args: connection arguments for get_all()/run_noprint()/more_sql().
        os_args: SSH connection arguments for ssh_input_noprint().

    Returns:
        The literal string "clean slow log ok".
    """
    slowlog = get_all(mysql_args,"show global variables where variable_name in ('slow_query_log','slow_query_log_file','log_output')")
    data_dir = get_all(mysql_args,"show global variables like 'datadir'")
    # Row order assumed alphabetical: [0] log_output, [1] slow_query_log,
    # [2] slow_query_log_file — TODO confirm against get_all's ordering.
    log_param,log_on,log_path = slowlog
    print("\nINFO:\nMySQL慢日志参数如下:")
    title = ['Variable_name','Value']

    if log_on[1] == 'OFF':
        # Slow log disabled: just print the parameters.
        slowlog_table = res_table(slowlog,title)
        print (slowlog_table)
        print("\nINFO:\n该MySQL数据库未开启慢日志,无需清理.")
    else:
        if 'NONE' in log_param[1]:
            # log_output contains NONE: nothing is written, nothing to clean.
            slowlog_table = res_table(slowlog,title)
            print (slowlog_table)
            print("\nINFO:\n该MySQL数据库输出存在NONE,无需清理.") 
        elif log_param[1] == 'FILE':
            # File-based slow log: du -sk reports KB, convert to MB.
            log_size = round(int(ssh_input_noprint(os_args,f"du -sk {log_path[1]}|awk '{{print $1}}'")[0].replace('\n',''))/1024,3)
            slowlog.append(('Size of file(MB)',log_size))
            slowlog_table = res_table(slowlog,title)
            print (slowlog_table)
            if log_size > 1024:
                print("\nINFO:\n清理MySQL慢日志:")
                clean_log_file(os_args,log_path[1])
                run_noprint(mysql_args,"flush slow logs")
                print("\nINFO:\nMySQL慢日志清理完毕.")
                # Recurse once to show the post-clean state; the fresh log
                # is far below the threshold, so the recursion terminates.
                slow_log(mysql_args,os_args)
        elif log_param[1] == 'TABLE':
            # Table-based slow log is backed by a CSV file under datadir;
            # ls -l column 5 is the size in bytes, convert to MB.
            table_file_path = f"{data_dir[0][1]}/mysql/slow_log.CSV".replace('//','/')
            table_size = round(int(ssh_input_noprint(os_args,f"ls -l {table_file_path}|awk '{{print $5}}'")[0].replace('\n',''))/1024/1024,3)
            slowlog.append(('Size of table(MB)',table_size))
            slowlog_table = res_table(slowlog,title)
            print (slowlog_table)
            if table_size > 1024:
                # Rotate via rename/recreate while the slow log is paused,
                # then drop the rotated copy.
                print("\nINFO:\n清理MySQL慢日志表:")
                more_sql(mysql_args,['drop table IF EXISTS  mysql.mc_slow_log_old','set global slow_query_log=off','rename table mysql.slow_log to mysql.mc_slow_log_old',
                'create table mysql.slow_log like mysql.mc_slow_log_old','set global slow_query_log=on','drop table IF EXISTS  mysql.mc_slow_log_old'])
                print("\nINFO:\nMySQL慢日志表清理完毕.")
                slow_log(mysql_args,os_args)
        elif 'FILE' in log_param[1] and 'TABLE' in log_param[1]:
            # Combined FILE,TABLE output: measure and (if needed) clean both.

            log_size = round(int(ssh_input_noprint(os_args,f"du -sk {log_path[1]}|awk '{{print $1}}'")[0].replace('\n',''))/1024,3)
            slowlog.append(('Size of file(MB)',log_size))
            table_file_path = f"{data_dir[0][1]}/mysql/slow_log.CSV".replace('//','/')
            table_size = round(int(ssh_input_noprint(os_args,f"ls -l {table_file_path}|awk '{{print $5}}'")[0].replace('\n',''))/1024/1024,3)
            slowlog.append(('Size of table(MB)',table_size))
            slowlog_table = res_table(slowlog,title)
            print (slowlog_table)
            if log_size > 1024:
                print("\nINFO:\n清理MySQL慢日志:")
                clean_log_file(os_args,log_path[1])
                run_noprint(mysql_args,"flush slow logs")
                print("\nINFO:\nMySQL慢日志清理完毕.")
                
            if table_size > 1024:
                print("\nINFO:\n清理MySQL慢日志表:")
                more_sql(mysql_args,['drop table IF EXISTS  mysql.mc_slow_log_old','set global slow_query_log=off','rename table mysql.slow_log to mysql.mc_slow_log_old',
                'create table mysql.slow_log like mysql.mc_slow_log_old','set global slow_query_log=on','drop table IF EXISTS mysql.mc_slow_log_old'])
                print("\nINFO:\nMySQL慢日志表清理完毕.")
            if table_size > 1024 or log_size > 1024:
                # Re-measure and re-print after cleaning (no recursion here).
                print ("\nINFO:\nMySQL慢日志参数如下:")
                slowlog_aft = get_all(mysql_args,"show global variables where variable_name in ('slow_query_log','slow_query_log_file','log_output')")
                log_size_aft = round(int(ssh_input_noprint(os_args,f"du -sk {log_path[1]}|awk '{{print $1}}'")[0].replace('\n',''))/1024,3)
                slowlog_aft.append(('Size of file(MB)',log_size_aft))
                table_size_aft = round(int(ssh_input_noprint(os_args,f"ls -l {table_file_path}|awk '{{print $5}}'")[0].replace('\n',''))/1024/1024,3)
                slowlog_aft.append(('Size of table(MB)',table_size_aft))
                slowlog_table_aft = res_table(slowlog_aft,title)
                print (slowlog_table_aft)
    

    return "clean slow log ok"
# 예제 #12
# 0
def get_datadir_free(db_args, os_args):
    """Return the free space of the filesystem holding the MySQL datadir.

    Queries the datadir variable, then runs `df -m` over SSH; the last
    line of awk's column-4 output is the available space in MB (as a
    raw string, newline included).
    """
    data_dir = get_all(db_args, " show variables like 'datadir'")[0][1]
    df_cmd = f"df -m {data_dir}|awk '{{ print $4}}'"
    df_output = ssh_input_noprint(os_args, df_cmd)
    return df_output[-1]