Code example #1
    def include(self, node):
        # A scalar !include names a single file to pull in
        if isinstance(node, yaml.ScalarNode):
            return self.extractFile(self.construct_scalar(node))

        # A sequence !include expands each entry as a glob relative to the root
        elif isinstance(node, yaml.SequenceNode):
            result = []

            for i in self.construct_sequence(node):
                i = general_function.get_absolute_path(i, self._root)
                for j in general_files_func.get_ofs(i):
                    result += self.extractFile(j)
            return result

        # A mapping !include reads each value's file under its key
        elif isinstance(node, yaml.MappingNode):
            result = {}
            for k, v in self.construct_mapping(node).items():
                result[k] = self.extractFile(v)
            return result

        else:
            print('Error: unrecognised node type in !include statement')
            raise yaml.constructor.ConstructorError
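
For context, a constructor like `include` is typically defined on a loader subclass and registered for the `!include` tag. The sketch below shows only the registration mechanics; the `IncludeLoader` name and the way `_root` is captured are illustrative assumptions, not code from the original source.

import os
import yaml

class IncludeLoader(yaml.SafeLoader):
    # Hypothetical loader that would carry the include/extractFile methods above
    def __init__(self, stream):
        # Remember the directory of the top-level file so relative
        # !include paths can be resolved against it
        self._root = os.path.split(getattr(stream, 'name', '.'))[0]
        super().__init__(stream)

# Register the tag; PyYAML passes (loader, node) to the constructor,
# which matches the include(self, node) method above
IncludeLoader.add_constructor('!include', IncludeLoader.include)

with open('config.yaml') as f:
    config = yaml.load(f, Loader=IncludeLoader)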
Code example #2
def desc_files_backup(job_data):
    ''' Creates a desc backup of directories.
    Takes a dictionary with the job data as input.

    '''

    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        # 'job' itself may be the missing key, so don't rely on job_name here
        log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!",
                              config.filelog_fd, job_data.get('job', ''))
        return 1


    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for i in range(len(sources)):
        exclude_list = sources[i].get('excludes', '')
        try:
            target_list = sources[i]['target']
            gzip = sources[i]['gzip']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!",
                                  config.filelog_fd, job_name)
            continue

        # Keep the exclude list in a module-level global because the `filter`
        # callback of tarfile's `add` method receives no extra arguments
        general_files_func.EXCLUDE_FILES = general_files_func.get_exclude_ofs(target_list,
                                                                              exclude_list)

        # The backup name is selected depending on the particular glob patterns from
        # the list `target_list`
        for regex in target_list:
            target_ofs_list = general_files_func.get_ofs(regex)

            if not target_ofs_list:
                log_and_mail.writelog('ERROR', "No file system objects found that" +\
                                      f"match the regular expression '{regex}'!",
                                      config.filelog_fd, job_name)
                continue

            for i in target_ofs_list:
                # Create a backup only if the directory is not in the exclude list,
                # so as not to generate empty backups
                if not general_files_func.is_excluded_ofs(i):
                    # Derive the backup name from the glob pattern,
                    # WITHOUT EXTENSION AND DATE
                    backup_file_name = general_files_func.get_name_files_backup(regex, i)
                    # Get the part of the backup storage path for this archive relative to
                    # the backup dir
                    part_of_dir_path = backup_file_name.replace('___', '/')

                    backup_full_tmp_path = general_function.get_full_path(
                                                                    full_path_tmp_dir,
                                                                    backup_file_name, 
                                                                    'tar',
                                                                    gzip)

                    periodic_backup.remove_old_local_file(storages, part_of_dir_path, job_name)

                    if general_files_func.create_tar('files', backup_full_tmp_path, i,
                                                     gzip, backup_type, job_name):
                        # If the dump collection in the temporary directory has successfully
                        # transferred the data to the specified storage
                        periodic_backup.general_desc_iteration(backup_full_tmp_path,
                                                               storages, part_of_dir_path,
                                                               job_name)
                else:
                    continue

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type,
                                      full_path_tmp_dir, '/var/cache/davfs2/*')
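
The repeated comment about keeping the exclude list in a global refers to the signature of tarfile's `TarFile.add()`: its `filter` callback receives a single `TarInfo` argument and no extra state, so the exclusion set has to be reachable at module level. A minimal sketch of that mechanism, with the helper names assumed rather than taken from the project:

import tarfile

EXCLUDE_FILES = set()  # filled per source before archiving, as in the listings above

def _exclude_filter(tarinfo):
    # add() calls this with only a TarInfo; returning None drops the member
    # (path normalization between tar member names and the exclude set omitted)
    return None if tarinfo.name in EXCLUDE_FILES else tarinfo

def create_tar_sketch(archive_path, target, gzip_enabled):
    # Roughly what a create_tar('files', ...) helper has to do
    mode = 'w:gz' if gzip_enabled else 'w'
    with tarfile.open(archive_path, mode) as tar:
        tar.add(target, filter=_exclude_filter)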
Code example #3
def inc_files_backup(job_data):
    ''' The function collects an incremental backup for the specified partition.

    '''

    try:
        job_name = job_data['job']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        # 'job' itself may be the missing key, so don't rely on job_name here
        log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % e,
                              config.filelog_fd, job_data.get('job', ''))
        return 1

    for i in range(len(sources)):
        target_list = sources[i]['target']
        exclude_list = sources[i].get('excludes', '')
        gzip = sources[i]['gzip']

        # Keep the exclude list in a module-level global because the `filter`
        # callback of tarfile's `add` method receives no extra arguments
        general_files_func.EXCLUDE_FILES = general_files_func.get_exclude_ofs(
            target_list, exclude_list)

        # The backup name is selected depending on the particular glob patterns from
        # the list `target_list`
        for regex in target_list:
            target_ofs_list = general_files_func.get_ofs(regex)

            for i in target_ofs_list:
                if not general_files_func.is_excluded_ofs(i):
                    # Create a backup only if the directory is not in the exclude list,
                    # so as not to generate empty backups

                    # Derive the backup name from the glob pattern,
                    # WITHOUT EXTENSION AND DATE
                    backup_file_name = general_files_func.get_name_files_backup(
                        regex, i)

                    # Get the part of the backup storage path for this archive relative to
                    # the backup dir
                    part_of_dir_path = backup_file_name.replace('___', '/')

                    for j in range(len(storages)):
                        if specific_function.is_save_to_storage(
                                job_name, storages[j]):
                            try:
                                current_storage_data = mount_fuse.get_storage_data(
                                    job_name, storages[j])
                            except general_function.MyError as err:
                                log_and_mail.writelog('ERROR', '%s' % (err),
                                                      config.filelog_fd,
                                                      job_name)
                                continue
                            else:
                                storage = current_storage_data['storage']
                                backup_dir = current_storage_data['backup_dir']
                                # If the storage is active, mount it
                                try:
                                    mount_fuse.mount(current_storage_data)
                                except general_function.MyError as err:
                                    log_and_mail.writelog(
                                        'ERROR',
                                        "Can't mount remote '%s' storage :%s" %
                                        (storage, err), config.filelog_fd,
                                        job_name)
                                    continue
                                else:
                                    remote_dir = ''  # Only for logging
                                    if storage != 'local':
                                        local_dst_dirname = mount_fuse.mount_point
                                        remote_dir = backup_dir
                                        if storage != 's3':
                                            host = current_storage_data['host']
                                        else:
                                            host = ''
                                        share = current_storage_data.get(
                                            'share')
                                    else:
                                        host = ''
                                        share = ''
                                        local_dst_dirname = backup_dir
                                    # Collect an incremental copy.
                                    # For local, scp (sshfs) and nfs storages, backup_dir is the
                                    # mount point itself and must exist before mounting.
                                    # For ftp, smb, webdav and s3 storages, backup_dir is not a
                                    # mount point but a path relative to the mount point.
                                    if storage not in ('local', 'scp', 'nfs'):
                                        local_dst_dirname = os.path.join(
                                            local_dst_dirname,
                                            backup_dir.lstrip('/'))

                                    create_inc_file(
                                        local_dst_dirname, remote_dir,
                                        part_of_dir_path, backup_file_name, i,
                                        exclude_list, gzip, job_name, storage,
                                        host, share)  #general_inc_iteration

                                    try:
                                        mount_fuse.unmount()
                                    except general_function.MyError as err:
                                        log_and_mail.writelog(
                                            'ERROR',
                                            "Can't umount remote '%s' storage :%s"
                                            % (storage, err),
                                            config.filelog_fd, job_name)
                                        continue
                        else:
                            continue
                else:
                    continue
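
The mount/unmount pairing above is easy to get wrong: an exception raised between the two calls would leak a mount. As an illustration only (not code from the project), the same pairing expressed as a context manager over the mount_fuse module used above:

from contextlib import contextmanager

@contextmanager
def mounted(current_storage_data):
    # Mount on entry, and always unmount on exit, even if archiving raises
    mount_fuse.mount(current_storage_data)
    try:
        yield mount_fuse.mount_point
    finally:
        mount_fuse.unmount()

A call site would then read `with mounted(current_storage_data) as mount_point: ...`, replacing the explicit try/except blocks around mount() and unmount(); the MyError logging would move to a try/except around the with-statement.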
Code example #4
def inc_files_backup(job_data):
    """ The function collects an incremental backup for the specified partition.

    """

    is_params_read, job_name, backup_type, tmp_dir, sources, storages, safety_backup, deferred_copying_level = \
        general_function.get_job_parameters(job_data)
    if not is_params_read:
        return

    for i in range(len(sources)):
        target_list = sources[i]['target']
        exclude_list = sources[i].get('excludes', '')
        gzip = sources[i]['gzip']

        # Keep the exclude list in a module-level global because the `filter`
        # callback of tarfile's `add` method receives no extra arguments
        general_files_func.EXCLUDE_FILES = general_files_func.get_exclude_ofs(target_list,
                                                                              exclude_list)

        # The backup name is selected depending on the particular glob patterns from
        # the list `target_list`
        for regex in target_list:
            target_ofs_list = general_files_func.get_ofs(regex)

            for ofs in target_ofs_list:
                if not general_files_func.is_excluded_ofs(ofs):
                    # Create a backup only if the directory is not in the exclude list,
                    # so as not to generate empty backups

                    # Derive the backup name from the glob pattern,
                    # WITHOUT EXTENSION AND DATE
                    backup_file_name = general_files_func.get_name_files_backup(regex, ofs)

                    # Get the part of the backup storage path for this archive relative to
                    # the backup dir
                    part_of_dir_path = backup_file_name.replace('___', '/')

                    for j in range(len(storages)):
                        if specific_function.is_save_to_storage(job_name, storages[j]):
                            try:
                                current_storage_data = mount_fuse.get_storage_data(job_name,
                                                                                   storages[j])
                            except general_function.MyError as err:
                                log_and_mail.writelog('ERROR', f'{err}',
                                                      config.filelog_fd, job_name)
                                continue
                            else:
                                storage = current_storage_data['storage']
                                backup_dir = current_storage_data['backup_dir']
                                # If the storage is active, mount it
                                try:
                                    mount_fuse.mount(current_storage_data)
                                except general_function.MyError as err:
                                    log_and_mail.writelog('ERROR', f"Can't mount remote '{storage}' storage :{err}",
                                                          config.filelog_fd, job_name)
                                    continue
                                else:
                                    remote_dir = ''  # Only for logging
                                    if storage != 'local':
                                        local_dst_dirname = mount_fuse.mount_point
                                        remote_dir = backup_dir
                                        if storage != 's3':
                                            host = current_storage_data['host']
                                        else:
                                            host = ''
                                        share = current_storage_data.get('share')
                                    else:
                                        host = ''
                                        share = ''
                                        local_dst_dirname = backup_dir

                                    if storage not in ('local', 'scp', 'nfs'):
                                        local_dst_dirname = os.path.join(local_dst_dirname, backup_dir.lstrip('/'))

                                    create_inc_file(local_dst_dirname, remote_dir, part_of_dir_path, backup_file_name,
                                                    ofs, exclude_list, gzip, job_name, storage, host, share)

                                    try:
                                        mount_fuse.unmount()
                                    except general_function.MyError as err:
                                        log_and_mail.writelog('ERROR',
                                                              f"Can't umount remote '{storage}' storage :{err}",
                                                              config.filelog_fd, job_name)
                                        continue
                        else:
                            continue
                else:
                    continue
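
For reference, general_function.get_job_parameters is expected to return a success flag followed by the unpacked job fields. Below is a hypothetical sketch consistent with the call sites above; the real implementation lives in the project's general_function module and may differ.

def get_job_parameters(job_data):
    # Returns (ok, job_name, backup_type, tmp_dir, sources, storages,
    # safety_backup, deferred_copying_level); the defaults here are guesses
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data.get('tmp_dir', '')
        sources = job_data['sources']
        storages = job_data['storages']
        safety_backup = job_data.get('safety_backup', False)
        deferred_copying_level = job_data.get('deferred_copying_level', 0)
    except KeyError as e:
        log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!",
                              config.filelog_fd, job_data.get('job', ''))
        return False, '', '', '', [], [], False, 0
    return (True, job_name, backup_type, tmp_dir, sources, storages,
            safety_backup, deferred_copying_level)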
Code example #5
def desc_files_backup(job_data):
    """ Function, creates a desc backup of directories.
    At the entrance receives a dictionary with the data of the job.

    """
    is_params_read, job_name, backup_type, tmp_dir, sources, storages, safety_backup, deferred_copying_level = \
        general_function.get_job_parameters(job_data)
    if not is_params_read:
        return

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    dumped_ofs = {}
    for i in range(len(sources)):
        exclude_list = sources[i].get('excludes', '')
        try:
            target_list = sources[i]['target']
            gzip = sources[i]['gzip']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!",
                                  config.filelog_fd, job_name)
            continue

        # Keep the exclude list in a module-level global because the `filter`
        # callback of tarfile's `add` method receives no extra arguments
        general_files_func.EXCLUDE_FILES = general_files_func.get_exclude_ofs(
            target_list, exclude_list)

        # The backup name is selected depending on the particular glob patterns from
        # the list `target_list`
        for regex in target_list:
            target_ofs_list = general_files_func.get_ofs(regex)

            if not target_ofs_list:
                log_and_mail.writelog(
                    'ERROR', "No file system objects found that" +
                    f"match the regular expression '{regex}'!",
                    config.filelog_fd, job_name)
                continue

            for ofs in target_ofs_list:
                # Create a backup only if the directory is not in the exclude list,
                # so as not to generate empty backups
                if not general_files_func.is_excluded_ofs(ofs):
                    # Derive the backup name from the glob pattern,
                    # WITHOUT EXTENSION AND DATE
                    backup_file_name = general_files_func.get_name_files_backup(
                        regex, ofs)
                    # Get the part of the backup storage path for this archive relative to
                    # the backup dir
                    part_of_dir_path = backup_file_name.replace('___', '/')

                    backup_full_tmp_path = general_function.get_full_path(
                        full_path_tmp_dir, backup_file_name, 'tar', gzip)

                    periodic_backup.remove_old_local_file(
                        storages, part_of_dir_path, job_name)

                    if general_files_func.create_tar('files',
                                                     backup_full_tmp_path, ofs,
                                                     gzip, backup_type,
                                                     job_name):
                        dumped_ofs[ofs] = {
                            'success': True,
                            'tmp_path': backup_full_tmp_path,
                            'part_of_dir_path': part_of_dir_path
                        }
                    else:
                        dumped_ofs[ofs] = {'success': False}

                    if (deferred_copying_level <= 0
                            and dumped_ofs[ofs]['success']):
                        periodic_backup.general_desc_iteration(
                            backup_full_tmp_path, storages, part_of_dir_path,
                            job_name, safety_backup)
                else:
                    continue

            for ofs, result in dumped_ofs.items():
                if deferred_copying_level == 1 and result['success']:
                    periodic_backup.general_desc_iteration(
                        result['tmp_path'], storages,
                        result['part_of_dir_path'], job_name, safety_backup)

        for ofs, result in dumped_ofs.items():
            if deferred_copying_level == 2 and result['success']:
                periodic_backup.general_desc_iteration(
                    result['tmp_path'], storages, result['part_of_dir_path'],
                    job_name, safety_backup)

    for ofs, result in dumped_ofs.items():
        if deferred_copying_level >= 3 and result['success']:
            periodic_backup.general_desc_iteration(result['tmp_path'],
                                                   storages,
                                                   result['part_of_dir_path'],
                                                   job_name, safety_backup)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir,
                                      '/var/cache/davfs2/*')
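
Read together, the three flush loops show what deferred_copying_level controls: with a value <= 0 each archive is copied to storage immediately after it is created, with 1 copying happens after each glob pattern, with 2 after each source, and with 3 or more only after all sources are processed. The loops differ only in where they sit; below is a sketch of the shared body factored into a helper (illustrative, not part of the original module):

def flush_dumped(dumped_ofs, storages, job_name, safety_backup):
    # Copy every successfully dumped archive to the configured storages;
    # mirrors each of the three flush loops above (dumped_ofs is not
    # cleared between calls, matching the original control flow)
    for ofs, result in dumped_ofs.items():
        if result['success']:
            periodic_backup.general_desc_iteration(
                result['tmp_path'], storages, result['part_of_dir_path'],
                job_name, safety_backup)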