Example #1
def main():

    parser = get_parser()
    args = parser.parse_args()

    if args.test_conf:
        test_config(args.path_to_config)
    elif args.cmd == 'start':
        try:
            do_backup(args.path_to_config, args.jobs_name)
        except Exception:
            full_traceback = traceback.format_exc()
            log_and_mail.writelog(
                'ERROR', f"An unexpected error occurred: {full_traceback}",
                config.filelog_fd)
        finally:
            if config.filelog_fd:
                log_and_mail.send_report()
                config.filelog_fd.close()
            if config.lock_file_fd:
                general_function.get_unlock()
    elif args.cmd == 'generate':
        generate_config.generate(args.backup_type, args.storages,
                                 args.path_to_generate_file)
    else:
        parser.print_help()
Example #2
def get_job_parameters(job_data):
    """

    :param job_data:
    :return:
    """
    job_name = job_data.get('job', 'Unknown')
    options = {}
    try:
        options['backup_type'] = job_data['type']
        options['tmp_dir'] = job_data['tmp_dir']
        options['sources'] = job_data['sources']
        options['storages'] = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
        # Keep the same arity as the success return so callers can unpack safely.
        return False, job_name, None

    options['safety_backup'] = job_data.get('safety_backup', False)
    deferred_copying_level = job_data.get('deferred_copying_level', 0)
    try:
        options['deferred_copying_level'] = int(deferred_copying_level)
    except (TypeError, ValueError):
        options['deferred_copying_level'] = 0

    if options['backup_type'] == 'inc_files':
        try:
            months_to_store = job_data.get('inc_months_to_store')
            options['months_to_store'] = int(months_to_store)
        except (TypeError, ValueError):
            options['months_to_store'] = 12

    return True, job_name, options
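A minimal usage sketch (hypothetical caller; with the padded error return above, unpacking is safe on both paths):

ok, job_name, options = get_job_parameters(job_data)
if ok:
    print(job_name, options['backup_type'], options['deferred_copying_level'])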
Example #3
def is_time_to_backup(job_data):
    """ A function that determines whether or not to run copy collection according to the plan.
    It receives a dictionary with data for a particular section at the input.

    """

    job_name = job_data['job']
    job_type = job_data['type']
    storages = job_data['storages']

    result = True

    if job_type == 'inc_files':
        return result

    dow = general_function.get_time_now("dow")
    dom = general_function.get_time_now("dom")

    day_flag = False
    week_flag = False
    month_flag = False

    for storage in storages:
        if not storage['enable']:
            continue
        store = storage['store']
        if not (store['days'] or store['weeks'] or store['month']):
            log_and_mail.writelog(
                'ERROR',
                f'There is no store data for storage {job_type} in the job {job_name}!',
                config.filelog_fd, job_name)
            continue
        if int(store['days']) > 0:
            day_flag = True
        if int(store['weeks']) > 0:
            week_flag = True
        if int(store['month']) > 0:
            month_flag = True
    if day_flag:
        result = True
    elif week_flag and dow == config.dow_backup:
        result = True
    elif month_flag and dom == config.dom_backup:
        result = True
    else:
        result = False

    return result
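For reference, a storages entry in the shape this function reads (field names from the code above, values illustrative):

storages = [{
    'enable': True,
    'store': {'days': '6', 'weeks': '4', 'month': '6'},  # retention counts; a value > 0 enables that period
}]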
Example #4
def validation_storage_data(job_data):
    ''' The function checks that the job has at least one active storage
    whose schedule requires collecting a backup today.

    '''

    result = True
    job_name = job_data['job']

    flag = any(storage['enable'] for storage in job_data['storages'])

    if not flag:
        log_and_mail.writelog(
            'ERROR', f'There are no active storages in the job {job_name}!',
            config.filelog_fd, job_name)
        result = False
    else:
        if not is_time_to_backup(job_data):
            log_and_mail.writelog(
                'INFO',
                "According to the backup plan today new backups are not created in this job.",
                config.filelog_fd, job_name)
            result = False

    return result
Example #5
def is_time_to_backup(job_data):
    ''' A function that determines whether backup collection should run today according to the plan.
    It receives a dictionary with the data of a particular section.

    '''

    job_name = job_data['job']
    job_type = job_data['type']
    storages = job_data['storages']

    if job_type == 'inc_files':
        return True

    dow = general_function.get_time_now("dow")
    dom = general_function.get_time_now("dom")

    day_flag = False
    week_flag = False
    month_flag = False

    for storage in storages:
        if not storage['enable']:
            continue
        store = storage['store']
        if not (store['days'] or store['weeks'] or store['month']):
            log_and_mail.writelog(
                'ERROR',
                f'There is no store data for storage {job_type} in the job {job_name}!',
                config.filelog_fd, job_name)
            continue
        if int(store['days']) > 0:
            day_flag = True
        if int(store['weeks']) > 0:
            week_flag = True
        if int(store['month']) > 0:
            month_flag = True
    if day_flag:
        result = True
    elif week_flag and dow == config.dow_backup:
        result = True
    elif month_flag and dom == config.dom_backup:
        result = True
    else:
        result = False

    return result
Example #6
def external_backup(job_data):
    """ Function that creates an external backup.
    It receives a dictionary with the job data.

    """

    job_name = 'undefined'
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        dump_cmd = job_data['dump_cmd']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % e,
                              config.filelog_fd, job_name)
        return 1

    periodic_backup.remove_old_local_file(storages, '', job_name)

    command = general_function.exec_cmd(dump_cmd)
    stderr = command['stderr']
    stdout = command['stdout']
    code = command['code']

    if code != 0:
        log_and_mail.writelog(
            'ERROR',
            "Bad result code external process '%s':'%s'" % (dump_cmd, code),
            config.filelog_fd, job_name)
        return 1

    source_dict = get_value_from_stdout(stderr, stdout, job_name)

    if source_dict is None:
        return 1

    full_tmp_path = source_dict['full_path']
    basename = source_dict['basename']
    extension = source_dict['extension']
    gzip = source_dict['gzip']

    new_name = os.path.basename(
        general_function.get_full_path('', basename, extension, gzip))
    new_full_tmp_path = os.path.join(os.path.dirname(full_tmp_path), new_name)

    general_function.move_ofs(full_tmp_path, new_full_tmp_path)

    periodic_backup.general_desc_iteration(new_full_tmp_path, storages, '',
                                           job_name)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, '/var/cache/davfs2/*')
Example #7
def get_lock():
    try:
        create_lock_file()
    except BlockingIOError:
        if config.loop_timeout is None:
            raise MyError("Script is already running!")
        msg = "Script is already running! Waiting until completion."
        log_and_mail.writelog('WARNING', msg, config.filelog_fd, '')
        print_info(msg)
        unlock_waiting()

    return 1
Example #8
def get_lock():
    """
    Creates a lock file to prevent more than one nxs-backup instance from executing.
    """
    try:
        create_lock_file()
    except BlockingIOError:
        if config.loop_timeout is None:
            raise MyError("Script is already running!")
        msg = "Script is already running! Waiting until completion."
        log_and_mail.writelog('WARNING', msg, config.filelog_fd, '')
        print_info(msg)
        unlock_waiting()
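create_lock_file() is not shown here; a minimal sketch of one possible implementation, assuming an fcntl-based non-blocking lock (the lock path is hypothetical; fcntl.flock raises BlockingIOError when the lock is already held, which is what the handler above catches):

import fcntl

def create_lock_file():
    # Hypothetical: keep the descriptor open for the process lifetime so the lock persists.
    config.lock_file_fd = open('/tmp/nxs-backup.lock', 'w')
    fcntl.flock(config.lock_file_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)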
Example #9
def is_success_status_xtrabackup(status_file, job_name):
    try:
        with open(status_file) as f:
            status = list(deque(f, 1))[0]
    except Exception as e:
        log_and_mail.writelog('ERROR', f"Can't read status file '{status_file}':{e}",
                              config.filelog_fd, job_name)
        return False
    else:
        return bool(re.match("^.*completed OK!\n$", status, re.I))
Example #10
def is_success_pgdump(db, extra_keys, str_auth, backup_full_path, gzip,
                      job_name):
    if gzip:
        dump_cmd = f"pg_dump {extra_keys} {str_auth} | gzip > {backup_full_path}"
    else:
        dump_cmd = f"pg_dump {extra_keys} {str_auth} > {backup_full_path}"

    command = general_function.exec_cmd(dump_cmd)
    stderr = command['stderr']
    code = command['code']

    if stderr and 'error' in stderr:
        log_and_mail.writelog(
            'ERROR',
            f"Can't create '{db}' database dump in tmp directory with the following error: "
            f"{stderr}", config.filelog_fd, job_name)
        return False
    elif code != 0:
        log_and_mail.writelog(
            'ERROR', f"Bad result code external process '{dump_cmd}':'{code}'",
            config.filelog_fd, job_name)
        return False
    elif stderr:
        log_and_mail.writelog(
            'INFO',
            f"Successfully created '{db}' database dump in tmp directory with the following "
            f"message: {stderr}", config.filelog_fd, job_name)
        return True
    else:
        log_and_mail.writelog(
            'INFO',
            f"Successfully created '{db}' database dump in tmp directory.",
            config.filelog_fd, job_name)
        return True
Example #11
def is_success_bgsave(str_auth, backup_full_tmp_path, gzip, job_name):

    backup_full_tmp_path_tmp = backup_full_tmp_path.split('.gz')[0]

    dump_cmd = f"redis-cli {str_auth} --rdb {backup_full_tmp_path_tmp}"

    command = general_function.exec_cmd(dump_cmd)
    stderr = command['stderr']
    code = command['code']

    # A separate "echo $?" runs in a new shell and cannot see the dump command's
    # exit status, so check the code reported by exec_cmd instead.
    if code != 0:
        log_and_mail.writelog('ERROR', f"Can't create redis database dump '{backup_full_tmp_path_tmp}' in tmp directory:{stderr}",
                              config.filelog_fd, job_name)
        return False
    else:
        if gzip:
            try:
                general_files_func.gzip_file(backup_full_tmp_path_tmp, backup_full_tmp_path)
            except general_function.MyError as err:
                log_and_mail.writelog('ERROR', f"Can't gzip redis database dump '{backup_full_tmp_path_tmp}' in tmp directory:{err}.",
                                      config.filelog_fd, job_name)
                return False
            else:
                log_and_mail.writelog('INFO', f"Successfully created redis database dump '{backup_full_tmp_path}' in tmp directory.",
                                      config.filelog_fd, job_name)
                return True
            finally:
                general_function.del_file_objects(job_name, backup_full_tmp_path_tmp)
        else:
            log_and_mail.writelog('INFO', f"Successfully created redis database dump '{backup_full_tmp_path_tmp}' in tmp directory.",
                              config.filelog_fd, job_name)
            return True
Example #12
def create_files(backup_type, *files):
    ''' Function for creating files.

    '''

    for i in files:
        create_dirs(job_name=backup_type, dirs_pairs={os.path.dirname(i): ''})
        if not (os.path.isfile(i) or os.path.islink(i)):
            try:
                with open(i, 'tw', encoding='utf-8'):
                    pass
            except PermissionError as err:
                log_and_mail.writelog('ERROR', "Can't create file %s:%s!" % (i, err),
                                      config.filelog_fd)
Example #13
def mysql_xtradb_backup(job_data):
    job_name = 'undefined'
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % e,
                              config.filelog_fd, job_name)
        return 1

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for source in sources:
        try:
            connect = source['connect']
            gzip = source['gzip']
            extra_keys = source['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % e,
                                  config.filelog_fd, job_name)
            continue

        db_user = connect.get('db_user')
        db_password = connect.get('db_password')
        path_to_conf = connect.get('path_to_conf')

        if not (path_to_conf and db_user and db_password):
            log_and_mail.writelog(
                'ERROR',
                "Can't find the authentication data, please fill the required fields",
                config.filelog_fd, job_name)
            continue

        if not os.path.isfile(path_to_conf):
            log_and_mail.writelog('ERROR',
                                  "Configuration file '%s' not found!" % path_to_conf,
                                  config.filelog_fd, job_name)
            continue

        str_auth = '--defaults-file=%s --user=%s --password=%s' % (
            path_to_conf, db_user, db_password)

        backup_full_tmp_path = general_function.get_full_path(
            full_path_tmp_dir, 'xtrabackup', 'tar', gzip)

        periodic_backup.remove_old_local_file(storages, '', job_name)

        if is_success_mysql_xtrabackup(extra_keys, str_auth,
                                       backup_full_tmp_path, gzip, job_name):
            periodic_backup.general_desc_iteration(backup_full_tmp_path,
                                                   storages, '', job_name)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir,
                                      '/var/cache/davfs2/*')
Example #14
def create_links_and_copies(link_dict, copy_dict, job_name):
    if link_dict:
        for dst, src in link_dict.items():
            try:
                general_function.create_symlink(src, dst)
            except general_function.MyError as err:
                log_and_mail.writelog('ERROR', f"Can't create symlink {src} -> {dst}: {err}",
                                      config.filelog_fd, job_name)

    if copy_dict:
        for dst, src in copy_dict.items():
            try:
                general_function.copy_ofs(src, dst)
            except general_function.MyError as err:
                log_and_mail.writelog('ERROR', f"Can't copy {src} -> {dst}: {err}",
                                      config.filelog_fd, job_name)
Example #15
def execute_job(jobs_name, jobs_data):
    ''' The function runs the backup for a particular job.
    It receives a dictionary with the data of this job.

    '''

    log_and_mail.writelog('INFO', "Starting backup for job '%s'." % jobs_name,
                          config.filelog_fd, jobs_name)
    config.all_executed_jobs.add(jobs_name)

    if not specific_function.validation_storage_data(jobs_data):
        return 1

    backup_type = jobs_data['type']

    if backup_type == 'mysql':
        mysql_backup.mysql_backup(jobs_data)

    elif backup_type == 'mysql_xtradb':
        mysql_xtradb_backup.mysql_xtradb_backup(jobs_data)

    elif backup_type == 'postgresql':
        postgresql_backup.postgresql_backup(jobs_data)

    elif backup_type == 'postgresql_hot':
        postgresql_hot_backup.postgresql_hot_backup(jobs_data)

    elif backup_type == 'mongodb':
        mongodb_backup.mongodb_backup(jobs_data)

    elif backup_type == 'redis':
        redis_backup.redis_backup(jobs_data)

    elif backup_type == 'desc_files':
        desc_files_backup.desc_files_backup(jobs_data)

    elif backup_type == 'inc_files':
        inc_files_backup.inc_files_backup(jobs_data)

    else:
        external_backup.external_backup(jobs_data)

    log_and_mail.writelog('INFO', "Finishing backup for job '%s'." % jobs_name,
                          config.filelog_fd, jobs_name)

    return 0
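The if/elif chain above can also be expressed as a dispatch table; a sketch using the same module and function names (behavior unchanged, with external_backup as the fallback):

BACKUP_HANDLERS = {
    'mysql': mysql_backup.mysql_backup,
    'mysql_xtradb': mysql_xtradb_backup.mysql_xtradb_backup,
    'postgresql': postgresql_backup.postgresql_backup,
    'postgresql_hot': postgresql_hot_backup.postgresql_hot_backup,
    'mongodb': mongodb_backup.mongodb_backup,
    'redis': redis_backup.redis_backup,
    'desc_files': desc_files_backup.desc_files_backup,
    'inc_files': inc_files_backup.inc_files_backup,
}

BACKUP_HANDLERS.get(backup_type, external_backup.external_backup)(jobs_data)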
Example #16
def create_dirs(**kwargs):
    ''' Function for creating directories.

    '''

    job_name = kwargs['job_name']
    dirs_pairs = kwargs['dirs_pairs']  # Dictionary with pairs 'local_dir' = 'remote_dir'

    for local_dir, remote_dir in dirs_pairs.items():
        if not os.path.exists(local_dir):
            try:
                os.makedirs(local_dir)
            except PermissionError as err:
                if remote_dir:  # The directory lives on the remote storage; report that path instead.
                    local_dir = remote_dir
                log_and_mail.writelog('ERROR', "Can't create directory %s:%s!" % (local_dir, err),
                                      config.filelog_fd, job_name)
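A hypothetical call matching the keyword interface above (an empty value means the directory is local only):

create_dirs(job_name='site-files', dirs_pairs={'/var/tmp/backup': ''})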
Example #17
def get_job_parameters(job_data):
    job_name = job_data.get('job', 'Unknown')
    try:
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
        # Keep the same arity as the success return so callers can unpack safely.
        return False, job_name, None, None, None, None, False, 0

    safety_backup = job_data.get('safety_backup', False)
    deferred_copying_level = job_data.get('deferred_copying_level', 0)

    try:
        deferred_copying_level = int(deferred_copying_level)
    except (TypeError, ValueError):
        deferred_copying_level = 0
    return True, job_name, backup_type, tmp_dir, sources, storages, safety_backup, deferred_copying_level
Example #18
def get_value_from_stdout(stderr, stdout, job_name):
    ''' Receives the data that the script sent to stdout and stderr.
    Analyzes them and, if everything is OK, returns the dictionary parsed from stdout.

    '''

    if stderr:
        log_and_mail.writelog(
            'ERROR',
            "Can't create external backup in tmp directory:%s" % stderr,
            config.filelog_fd, job_name)
        return None

    try:
        source_dict = json.loads(stdout)
    except ValueError as err:
        log_and_mail.writelog('ERROR',
                              "Can't parse output str: %s" % err,
                              config.filelog_fd, job_name)
        return None

    try:
        full_path = source_dict['full_path']
        # Touch the remaining required keys so a missing one raises KeyError.
        source_dict['basename']
        source_dict['extension']
        source_dict['gzip']
    except KeyError as err:
        log_and_mail.writelog('ERROR',
                              "Can't find required key: %s" % err,
                              config.filelog_fd, job_name)
        return None

    if not os.path.isfile(full_path):
        log_and_mail.writelog('ERROR',
                              "File '%s' not found!" % full_path,
                              config.filelog_fd, job_name)
        return None

    log_and_mail.writelog(
        'INFO',
        "Successfully created external backup in tmp directory.",
        config.filelog_fd, job_name)
    return source_dict
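For reference, the stdout the function expects is a JSON object with at least the keys checked above (values illustrative):

{"full_path": "/var/tmp/dump.tar", "basename": "dump", "extension": "tar", "gzip": false}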
Example #19
def external_backup(job_data):
    """ Function, creates a external backup.
    At the entrance receives a dictionary with the data of the job.

    """

    job_name = 'undefined'
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        dump_cmd = job_data['dump_cmd']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!",
                              config.filelog_fd, job_name)
        return

    safety_backup = job_data.get('safety_backup', False)
    skip_backup_rotate = job_data.get('skip_backup_rotate', False)

    periodic_backup.remove_local_file(storages, '', job_name)

    command = general_function.exec_cmd(dump_cmd)
    stderr = command['stderr']
    stdout = command['stdout']
    code = command['code']

    if code != 0:
        log_and_mail.writelog('ERROR',
                              f"Bad result code external process '{dump_cmd}': '{code}' with next STDERR:\n"
                              f"'{stderr}'",
                              config.filelog_fd, job_name)
        return

    if skip_backup_rotate:
        log_and_mail.writelog('INFO', f"Command '{dump_cmd}' finished success with the next result:\n{stdout}",
                              config.filelog_fd, job_name)
        return

    source_dict = get_value_from_stdout(stderr, stdout, job_name)

    if source_dict is None:
        return

    full_tmp_path = source_dict['full_path']
    basename = source_dict['basename']
    extension = source_dict['extension']
    gzip = source_dict['gzip']

    new_name = os.path.basename(general_function.get_full_path('', basename, extension, gzip))
    new_full_tmp_path = os.path.join(os.path.dirname(full_tmp_path), new_name)

    general_function.move_ofs(full_tmp_path, new_full_tmp_path)

    periodic_backup.general_desc_iteration(new_full_tmp_path, storages, '',
                                           job_name, safety_backup)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, '/var/cache/davfs2/*')
Example #20
def is_success_mysql_xtrabackup(extra_keys, str_auth, backup_full_path, gzip, job_name):
    date_now = general_function.get_time_now('backup')
    tmp_status_file = f'/tmp/xtrabackup_status/{date_now}.log'

    dom = int(general_function.get_time_now('dom'))
    if dom == 1:
        dir_for_status_file = os.path.dirname(tmp_status_file)
        if os.path.isdir(dir_for_status_file):
            # Glob the directory contents, not the directory path itself.
            listing = glob.glob(f'{dir_for_status_file}/*')
            periodic_backup.delete_oldest_files(listing, 31, job_name)

    general_function.create_files(job_name, tmp_status_file)

    if gzip:
        dump_cmd = f"innobackupex {str_auth} {extra_keys} 2>{tmp_status_file} | gzip > {backup_full_path}"
    else:
        dump_cmd = f"innobackupex {str_auth} {extra_keys} > {backup_full_path} 2>{tmp_status_file} "

    command = general_function.exec_cmd(dump_cmd)
    code = command['code']

    if not is_success_status_xtrabackup(tmp_status_file, job_name):
        log_and_mail.writelog(
            'ERROR', f"Can't create xtrabackup in tmp directory! More information in status file {tmp_status_file}.",
            config.filelog_fd, job_name)
        return False
    elif code != 0:
        log_and_mail.writelog('ERROR', f"Bad result code external process '{dump_cmd}':'{code}'",
                              config.filelog_fd, job_name)
        return False
    else:
        log_and_mail.writelog('INFO', "Successfully created xtrabackup in tmp directory.",
                              config.filelog_fd, job_name)
        return True
Example #21
def get_connection(db_host, db_port, db_user, db_password, auth_file, socket, job_name):
    if auth_file:
        try:
            connection = MySQLdb.connect(read_default_file=auth_file)
        except MySQLdb.Error as err:
            log_and_mail.writelog('ERROR', f"Can't connect to MySQL instances with '{auth_file}' auth file:{err}",
                                  config.filelog_fd, job_name)
            return
        str_auth = f' --defaults-extra-file={auth_file} '
    else:
        if db_host:
            try:
                connection = MySQLdb.connect(host=db_host, port=int(db_port), user=db_user, passwd=db_password)
            except MySQLdb.Error as err:
                log_and_mail.writelog('ERROR',
                                      f"Can't connect to MySQL instances with following data host='{db_host}', "
                                      f"port='{db_port}', user='{db_user}', passwd='{db_password}':{err}",
                                      config.filelog_fd, job_name)
                return
            str_auth = f' --host={db_host} --port={db_port} --user={db_user} --password={db_password} '
        else:
            try:
                connection = MySQLdb.connect(unix_socket=socket, user=db_user, passwd=db_password)
            except MySQLdb.Error as err:
                log_and_mail.writelog('ERROR',
                                      f"Can't connect to MySQL instances with following data: socket='{socket}', "
                                      f"user='{db_user}', passwd='{db_password}':{err}",
                                      config.filelog_fd, job_name)
                return
            str_auth = f' --socket={socket} --user={db_user} --password={db_password} '

    return connection, str_auth
Example #22
def is_success_pgbasebackup(extra_keys, str_auth, backup_full_path, gzip,
                            job_name):

    if gzip:
        dump_cmd = f"pg_basebackup {str_auth} {extra_keys} | gzip > {backup_full_path}"
    else:
        dump_cmd = f"pg_basebackup {str_auth} {extra_keys} > {backup_full_path}"

    command = general_function.exec_cmd(dump_cmd)
    stderr = command['stderr']
    code = command['code']

    if stderr:
        log_and_mail.writelog(
            'ERROR',
            f"Can't create postgresql_basebackup in tmp directory:{stderr}",
            config.filelog_fd, job_name)
        return False
    elif code != 0:
        log_and_mail.writelog(
            'ERROR', f"Bad result code external process '{dump_cmd}':'{code}'",
            config.filelog_fd, job_name)
        return False
    else:
        log_and_mail.writelog(
            'INFO',
            f"Successfully created postgresql_basebackup in tmp directory.",
            config.filelog_fd, job_name)
        return True
Example #23
def is_success_mongodump(collection, db, extra_keys, str_auth, backup_full_path, gzip, job_name):
    if gzip:
        dump_cmd = f"mongodump --db {db} {extra_keys} {str_auth} --out - | gzip > {backup_full_path}"
    else:
        dump_cmd = f"mongodump --db {db} {extra_keys} {str_auth} --out - > {backup_full_path}"

    command = general_function.exec_cmd(dump_cmd)

    stderr = command['stderr']
    code = command['code']

    if stderr and is_real_mongo_err(stderr):
        log_and_mail.writelog(
            'ERROR',
            f"Can't create collection '{collection}' in '{db}' database dump in tmp directory:{stderr}",
            config.filelog_fd, job_name)
        return False
    elif code != 0:
        log_and_mail.writelog(
            'ERROR', f"Bad result code external process '{dump_cmd}':'{code}'",
            config.filelog_fd, job_name)
        return False
    else:
        log_and_mail.writelog(
            'INFO',
            f"Successfully created collection '{collection}' in '{db}' database dump in tmp directory.",
            config.filelog_fd, job_name)
        return True
Example #24
def is_save_to_storage(job_name, storage_data):
    ''' Checks the need for collection in a SPECIFIC storage.

    '''

    try:
        storage = storage_data['storage']
        enable_storage = storage_data['enable']
        backup_dir = storage_data['backup_dir']

        if storage not in config.supported_storages:
            log_and_mail.writelog('ERROR', f"Incorrect storage type set for the '{job_name}' job. " +
                                  f"Only these storage types are allowed:{config.supported_storages}",
                                  config.filelog_fd, job_name)
            result = False

        elif not enable_storage:
            result = False
        elif not backup_dir:
            log_and_mail.writelog(
                'ERROR',
                f"Field 'backup_dir' in job '{job_name}' for storage '{storage_data['storage']}' can't be empty!",
                config.filelog_fd, job_name)
            result = False
        else:
            result = True
    except KeyError as err:
        log_and_mail.writelog(
            'ERROR',
            f"Missing required key '{err}' in '{job_name}' job storages block.",
            config.filelog_fd, job_name)
        result = False

    return result
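For reference, a hypothetical storage_data block with the keys checked above ('local' stands in for any type listed in config.supported_storages):

storage_data = {
    'storage': 'local',
    'enable': True,
    'backup_dir': '/var/backups',
}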
Example #25
def is_success_pgdump(db, extra_keys, str_auth, backup_full_path, gzip,
                      job_name):

    if gzip:
        dump_cmd = "pg_dump %s %s | gzip > %s" % (extra_keys, str_auth,
                                                  backup_full_path)
    else:
        dump_cmd = "pg_dump %s %s > %s" % (extra_keys, str_auth,
                                           backup_full_path)

    command = general_function.exec_cmd(dump_cmd)
    stderr = command['stderr']
    code = command['code']

    if stderr:
        log_and_mail.writelog(
            'ERROR', "Can't create '%s' database dump in tmp directory:%s" %
            (db, stderr), config.filelog_fd, job_name)
        return False
    elif code != 0:
        log_and_mail.writelog(
            'ERROR',
            "Bad result code external process '%s':'%s'" % (dump_cmd, code),
            config.filelog_fd, job_name)
        return False
    else:
        log_and_mail.writelog(
            'INFO',
            "Successfully created '%s' database dump in tmp directory." % (db),
            config.filelog_fd, job_name)
        return True
Example #26
def is_success_mysql_xtrabackup(extra_keys, str_auth, backup_full_path, gzip,
                                job_name):

    date_now = general_function.get_time_now('backup')
    tmp_status_file = '/tmp/xtrabackup_status/%s.log' % (date_now)

    dom = int(general_function.get_time_now('dom'))
    if dom == 1:
        dir_for_status_file = os.path.dirname(tmp_status_file)
        if os.path.isdir(dir_for_status_file):
            # Glob the directory contents, not the directory path itself.
            listing = glob.glob(dir_for_status_file + '/*')
            periodic_backup.delete_oldest_files(listing, 31, job_name)

    general_function.create_files(job_name, tmp_status_file)

    if gzip:
        dump_cmd = "innobackupex %s %s 2>%s | gzip > %s" % (
            str_auth, extra_keys, tmp_status_file, backup_full_path)
    else:
        dump_cmd = "innobackupex %s %s > %s 2>%s " % (
            str_auth, extra_keys, backup_full_path, tmp_status_file)

    command = general_function.exec_cmd(dump_cmd)
    code = command['code']

    if code != 0:
        log_and_mail.writelog(
            'ERROR',
            "Bad result code external process '%s':'%s'" % (dump_cmd, code),
            config.filelog_fd, job_name)
        return False

    if not is_success_status_xtrabackup(tmp_status_file, job_name):
        log_and_mail.writelog(
            'ERROR',
            "Can't create xtrabackup in tmp directory! More information in status file %s."
            % tmp_status_file, config.filelog_fd, job_name)
        return False
    else:
        log_and_mail.writelog(
            'INFO', "Successfully created xtrabackup in tmp directory.",
            config.filelog_fd, job_name)
        return True
Example #27
def get_connection(db_host, db_port, db_user, db_password, auth_file, socket,
                   job_name):

    if auth_file:
        try:
            connection = MySQLdb.connect(read_default_file=auth_file)
        except MySQLdb.Error as err:
            log_and_mail.writelog(
                'ERROR',
                "Can't connect to MySQL instances with '%s' auth file:%s" %
                (auth_file, err), config.filelog_fd, job_name)
            return 1
        str_auth = ' --defaults-extra-file=%s ' % (auth_file)
    else:
        if db_host:
            try:
                connection = MySQLdb.connect(host=db_host,
                                             port=int(db_port),
                                             user=db_user,
                                             passwd=db_password)
            except MySQLdb.Error as err:
                log_and_mail.writelog(
                    'ERROR',
                    "Can't connect to MySQL instances with following data host='%s', port='%s', user='%s', passwd='%s':%s"
                    % (db_host, db_port, db_user, db_password, err),
                    config.filelog_fd, job_name)
                return 1
            str_auth = ' --host=%s --port=%s --user=%s --password=%s ' % (
                db_host, db_port, db_user, db_password)
        else:
            try:
                connection = MySQLdb.connect(unix_socket=socket,
                                             user=db_user,
                                             passwd=db_password)
            except MySQLdb.Error as err:
                log_and_mail.writelog(
                    'ERROR',
                    "Can't connect to MySQL instances with following data: socket='%s', user='%s', passwd='%s':%s"
                    % (socket, db_user, db_password, err), config.filelog_fd,
                    job_name)
                return 1
            str_auth = ' --socket=%s --user=%s --password=%s ' % (
                socket, db_user, db_password)

    return (connection, str_auth)
Example #28
def mysql_xtrabackup(job_data):
    is_params_read, job_name, backup_type, tmp_dir, sources, storages, safety_backup, deferred_copying_level = \
        general_function.get_job_parameters(job_data)
    if not is_params_read:
        return

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for source in sources:
        try:
            connect = source['connect']
            gzip = source['gzip']
            extra_keys = source['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
            continue

        db_user = connect.get('db_user')
        db_password = connect.get('db_password')
        path_to_conf = connect.get('path_to_conf')

        if not (path_to_conf and db_user and db_password):
            log_and_mail.writelog('ERROR', "Can't find the authentication data, please fill the required fields",
                                  config.filelog_fd, job_name)
            continue

        if not os.path.isfile(path_to_conf):
            log_and_mail.writelog('ERROR', f"Configuration file '{path_to_conf}' not found!",
                                  config.filelog_fd, job_name)
            continue

        str_auth = f'--defaults-file={path_to_conf} --user={db_user} --password={db_password}'

        backup_full_tmp_path = general_function.get_full_path(full_path_tmp_dir, 'xtrabackup', 'tar', gzip)

        periodic_backup.remove_old_local_file(storages, '', job_name)

        if is_success_mysql_xtrabackup(extra_keys, str_auth, backup_full_tmp_path, gzip, job_name):
            periodic_backup.general_desc_iteration(backup_full_tmp_path, storages, '', job_name, safety_backup)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
Example #29
def is_success_mongodump(collection, db, extra_keys, str_auth, backup_full_path, gzip, job_name):

    if gzip:
        dump_cmd = "mongodump --db %s %s %s --out - | gzip > %s" % (db, extra_keys, str_auth, backup_full_path)
    else:
        dump_cmd = "mongodump --db %s %s %s --out - > %s" % (db, extra_keys, str_auth, backup_full_path)

    command = general_function.exec_cmd(dump_cmd)

    stderr = command['stderr']
    code = command['code']

    if stderr and is_real_mongo_err(stderr):
        log_and_mail.writelog('ERROR', "Can't create collection '%s' in '%s' database dump in tmp directory:%s" % (collection, db, stderr),
                              config.filelog_fd, job_name)
        return False
    elif code != 0:
        log_and_mail.writelog('ERROR', "Bad result code external process '%s':'%s'" % (dump_cmd, code),
                              config.filelog_fd, job_name)
        return False
    else:
        log_and_mail.writelog('INFO', "Successfully created collection '%s' in '%s' database dump in tmp directory." % (collection, db),
                              config.filelog_fd, job_name)
        return True
Example #30
def postgresql_basebackup(job_data):
    job_name = 'undefined'
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % e, config.filelog_fd, job_name)
        return 1

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for source in sources:
        try:
            connect = source['connect']
            gzip = source['gzip']
            extra_keys = source['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % e, config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')

        if not (db_user and db_host and db_password):
            log_and_mail.writelog('ERROR', "Can't find the authentication data, please fill in the required fields",
                                  config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('postgresql')

        try:
            connection = psycopg2.connect(dbname="postgres", user=db_user, password=db_password, host=db_host, port=db_port)
        except psycopg2.Error as err:
            log_and_mail.writelog('ERROR', "Can't connect to PostgreSQL instances with following data host='%s', port='%s', user='%s', passwd='%s':%s" % (db_host, db_port, db_user, db_password, err),
                                  config.filelog_fd, job_name)
            continue
        else:
            connection.close()

        backup_full_tmp_path = general_function.get_full_path(full_path_tmp_dir, 'postgresql_hot', 'tar', gzip)

        periodic_backup.remove_old_local_file(storages, '', job_name)

        str_auth = ' --dbname=postgresql://%s:%s@%s:%s/ ' % (db_user, db_password, db_host, db_port)

        if is_success_pgbasebackup(extra_keys, str_auth, backup_full_tmp_path, gzip, job_name):
            periodic_backup.general_desc_iteration(backup_full_tmp_path, storages, '', job_name)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type,
                                      full_path_tmp_dir, '/var/cache/davfs2/*')