Example #1
0
    def test_validate_all_args(self):
        """All-truthy input validates; any falsy element or a None
        argument must fail or raise."""
        all_truthy = ['test1', 'test2', 'test3']
        contains_falsy = ['test1', '', False, None]

        assert validate_all_args(all_truthy) is True
        assert validate_all_args(contains_falsy) is False

        # A non-iterable argument is expected to raise
        with pytest.raises(Exception):
            validate_all_args(None)
Example #2
0
def object_to_stream(backup_opt_dict, write_pipe, obj_name):
    '''
    Take a payload downloaded from Swift
    and generate a stream to be consumed from other processes.

    :param backup_opt_dict: options object providing at least
        ``container``, ``sw_connector`` and ``max_seg_size``
    :param write_pipe: pipe-like object whose ``send`` method receives
        each downloaded chunk
    :param obj_name: name of the Swift object to download
    :raises ValueError: when ``backup_opt_dict.container`` is unset
    '''

    required_list = [
        backup_opt_dict.container]

    if not validate_all_args(required_list):
        # BUG FIX: the old ','.join(required_list) raised TypeError in
        # this error path, because a failed validation means the list
        # contains None/False, which str.join rejects.
        err_msg = ('[*] Error: Please provide ALL the following '
                   'arguments: --container')
        logging.critical(err_msg)
        raise ValueError(err_msg)

    sw_connector = backup_opt_dict.sw_connector
    logging.info('[*] Downloading data stream...')

    # The object is downloaded in chunks of max_seg_size bytes; each
    # chunk is forwarded into the pipe as soon as it arrives, so
    # consumers can start processing before the download completes.
    for obj_chunk in sw_connector.get_object(
            backup_opt_dict.container, obj_name,
            resp_chunk_size=backup_opt_dict.max_seg_size)[1]:

        write_pipe.send(obj_chunk)
Example #3
0
def object_to_stream(backup_opt_dict, write_pipe, obj_name):
    '''
    Take a payload downloaded from Swift
    and generate a stream to be consumed from other processes.

    :param backup_opt_dict: options object providing ``container``,
        ``sw_connector`` and ``max_seg_size``
    :param write_pipe: pipe-like object with a ``send`` method
    :param obj_name: name of the Swift object to download
    :raises ValueError: when ``backup_opt_dict.container`` is unset
    '''

    required_list = [backup_opt_dict.container]

    if not validate_all_args(required_list):
        # BUG FIX: ','.join(required_list) used to raise TypeError here
        # because a failed validation means the list holds None/False,
        # which str.join cannot format.
        err_msg = ('[*] Error: Please provide ALL the following '
                   'arguments: --container')
        logging.critical(err_msg)
        raise ValueError(err_msg)

    sw_connector = backup_opt_dict.sw_connector
    logging.info('[*] Downloading data stream...')

    # Download the object in max_seg_size chunks and push every chunk
    # straight into the pipe so downstream consumers can stream it.
    for obj_chunk in sw_connector.get_object(
            backup_opt_dict.container,
            obj_name,
            resp_chunk_size=backup_opt_dict.max_seg_size)[1]:

        write_pipe.send(obj_chunk)
Example #4
0
def object_to_stream(backup_opt_dict, write_pipe, read_pipe, obj_name):
    """
    Take a payload downloaded from Swift
    and generate a stream to be consumed from other processes
    """

    if not validate_all_args([backup_opt_dict.container]):
        raise ValueError('Error in object_to_stream(): Please provide '
                         'ALL the following argument: --container')

    backup_opt_dict = get_client(backup_opt_dict)
    logging.info('[*] Downloading data stream...')

    # This child only writes: close the unused read end, then forward
    # the object into the write end in RESP_CHUNK_SIZE-sized chunks.
    read_pipe.close()
    swift_conn = backup_opt_dict.sw_connector
    for chunk in swift_conn.get_object(
            backup_opt_dict.container, obj_name,
            resp_chunk_size=RESP_CHUNK_SIZE)[1]:
        write_pipe.send_bytes(chunk)

    # Wait for the pipe to drain completely before closing it.
    while write_pipe.poll():
        time.sleep(1)
    write_pipe.close()
Example #5
0
def show_objects(backup_opt_dict):
    """
    Retrieve the list of backups from backup_opt_dict for the specified
    container and print them nicely to std out.

    :param backup_opt_dict: options object; ``list_objects`` enables the
        listing and ``remote_obj_list`` holds the objects to display
    :return: False when listing is disabled, True otherwise
    :raises Exception: when the remote object list is not available
    """

    if not backup_opt_dict.list_objects:
        return False

    required_list = [
        backup_opt_dict.remote_obj_list]

    if not validate_all_args(required_list):
        # Message typo fixed ("avaiblale" -> "available")
        raise Exception('Remote Object list not available')

    remote_obj = backup_opt_dict.remote_obj_list

    for obj in remote_obj:
        # Build a fresh dict per object instead of mutating a shared one
        ordered_objects = {
            'object_name': obj['name'],
            'upload_date': obj['last_modified']}
        # BUG FIX: print as a function call -- the bare print statement
        # is a syntax error on Python 3; print(x) works on 2 and 3.
        print(json.dumps(
            ordered_objects, indent=4,
            separators=(',', ': '), sort_keys=True))

    return True
Example #6
0
def show_objects(backup_opt_dict):
    '''
    Retrieve the list of backups from backup_opt_dict for the specified
    container and print them nicely to std out.

    :return: False when --list-objects is not set, True otherwise
    :raises Exception: when the remote object list is not available
    '''

    if not backup_opt_dict.list_objects:
        return False

    required_list = [
        backup_opt_dict.remote_obj_list]

    if not validate_all_args(required_list):
        # Message typo fixed; raise with the same text so the exception
        # is self-describing instead of a bare Exception
        err_msg = '[*] Error: Remote Object list not available'
        logging.critical(err_msg)
        raise Exception(err_msg)

    remote_obj = backup_opt_dict.remote_obj_list

    for obj in remote_obj:
        # Build a fresh dict per object instead of mutating a shared one
        ordered_objects = {
            'object_name': obj['name'],
            'upload_date': obj['last_modified']}
        # BUG FIX: print as a function call -- the bare print statement
        # is a syntax error on Python 3; print(x) works on 2 and 3.
        print(json.dumps(
            ordered_objects, indent=4,
            separators=(',', ': '), sort_keys=True))

    return True
Example #7
0
def object_to_file(backup_opt_dict, file_name_abs_path):
    """
    Take a payload downloaded from Swift
    and save it to the disk as file_name
    """

    if not validate_all_args([backup_opt_dict.container,
                              file_name_abs_path]):
        raise ValueError('Error in object_to_file(): Please provide ALL the '
                         'following arguments: --container file_name_abs_path')

    sw_connector = backup_opt_dict.sw_connector
    file_name = file_name_abs_path.split('/')[-1]
    logging.info('[*] Downloading object {0} on {1}'.format(
        file_name, file_name_abs_path))

    # Chunks are appended to file_name_abs_path, so any pre-existing
    # file must be removed first to avoid mixing old and new data.
    if os.path.exists(file_name_abs_path):
        os.remove(file_name_abs_path)

    with open(file_name_abs_path, 'ab') as destination:
        object_stream = sw_connector.get_object(
            backup_opt_dict.container, file_name,
            resp_chunk_size=16000000)[1]
        for chunk in object_stream:
            destination.write(chunk)

    return True
Example #8
0
def object_to_file(backup_opt_dict, file_name_abs_path):
    '''
    Take a payload downloaded from Swift
    and save it to the disk as file_name.

    :param backup_opt_dict: options object providing ``container`` and
        ``sw_connector``
    :param file_name_abs_path: absolute path where the object is saved;
        the object name is its last path component
    :return: True on success
    :raises ValueError: when container or file_name_abs_path is unset
    '''

    required_list = [
        backup_opt_dict.container,
        file_name_abs_path]

    if not validate_all_args(required_list):
        # BUG FIX: ','.join(required_list) used to raise TypeError here,
        # since a failed validation means the list contains None/False.
        err_msg = ('[*] Error: Please provide ALL the following '
                   'arguments: --container file_name_abs_path')
        logging.critical(err_msg)
        raise ValueError(err_msg)

    sw_connector = backup_opt_dict.sw_connector
    file_name = file_name_abs_path.split('/')[-1]
    logging.info('[*] Downloading object {0} on {1}'.format(
        file_name, file_name_abs_path))

    # The file is downloaded in chunks and each chunk is appended to
    # file_name_abs_path, so make sure the target does not already
    # exist by removing it first
    if os.path.exists(file_name_abs_path):
        os.remove(file_name_abs_path)

    with open(file_name_abs_path, 'ab') as obj_fd:
        for obj_chunk in sw_connector.get_object(
                backup_opt_dict.container, file_name,
                resp_chunk_size=16000000)[1]:
            obj_fd.write(obj_chunk)

    return True
Example #9
0
def gen_tar_command(
    opt_dict, meta_data_backup_file=False, time_stamp=None,
    remote_manifest_meta=False):
    '''
    Generate tar command options.

    :param opt_dict: options object; backup_name, src_file, tar_path,
        no_incremental, exclude, encrypt_pass_file and openssl_path
        are read
    :param meta_data_backup_file: ignored on input; recomputed below
        from the generated file name (kept for backward compatibility)
    :param time_stamp: epoch seconds used to build the backup file
        name; defaults to the current time at call time
    :param remote_manifest_meta: passed through to tar_incremental()
    :return: (opt_dict, tar_command, remote_manifest_meta)
    :raises Exception: when backup_name/src_file are unset or src_file
        does not exist
    '''

    # BUG FIX: the previous default ``time_stamp=int(time.time())`` was
    # evaluated once at import time, so every call reused the same
    # frozen timestamp. Evaluate it per call instead.
    if time_stamp is None:
        time_stamp = int(time.time())

    required_list = [
        opt_dict.backup_name,
        opt_dict.src_file,
        os.path.exists(opt_dict.src_file)]

    if not validate_all_args(required_list):
        # BUG FIX: ','.join(required_list) used to raise TypeError here
        # because the list holds booleans/None on failure.
        err_msg = ('Error: Please provide ALL the following options: '
                   '--backup-name --src-file (must be an existing path)')
        logging.critical(err_msg)
        raise Exception(err_msg)

    # Change the current working directory to opt_dict.src_file
    os.chdir(os.path.normpath(opt_dict.src_file.strip()))

    logging.info('[*] Changing current working directory to: {0} \
    '.format(opt_dict.src_file))
    logging.info('[*] Backup started for: {0} \
    '.format(opt_dict.src_file))

    # Tar options for default behaviour. Please refer to man tar for
    # a better explanation of each option
    tar_command = ' {0} --create -z --warning=none \
        --dereference --hard-dereference --no-check-device --one-file-system \
        --preserve-permissions --same-owner --seek \
        --ignore-failed-read '.format(opt_dict.tar_path)

    file_name = add_host_name_ts_level(opt_dict, time_stamp)
    meta_data_backup_file = u'tar_metadata_{0}'.format(file_name)
    # Incremental backup section
    if not opt_dict.no_incremental:
        (tar_command, opt_dict, remote_manifest_meta) = tar_incremental(
            tar_command, opt_dict, meta_data_backup_file,
            remote_manifest_meta)

    # End incremental backup section
    if opt_dict.exclude:
        tar_command = ' {0} --exclude="{1}" '.format(
            tar_command,
            opt_dict.exclude)

    tar_command = ' {0} . '.format(tar_command)
    # Encrypt data if passfile is provided
    if opt_dict.encrypt_pass_file:
        openssl_cmd = "{0} enc -aes-256-cfb -pass file:{1}".format(
            opt_dict.openssl_path, opt_dict.encrypt_pass_file)
        tar_command = '{0} | {1} '.format(tar_command, openssl_cmd)

    return opt_dict, tar_command, remote_manifest_meta
Example #10
0
def gen_tar_command(opt_dict,
                    meta_data_backup_file=False,
                    time_stamp=None,
                    remote_manifest_meta=False):
    '''
    Generate tar command options.

    :param time_stamp: epoch seconds used in the backup file name;
        defaults to the current time at call time
    :return: (opt_dict, tar_command, remote_manifest_meta)
    :raises Exception: when backup_name/src_file are missing or
        src_file does not exist
    '''

    # BUG FIX: ``time_stamp=int(time.time())`` as a default was
    # evaluated once at import time, freezing the timestamp for every
    # call; compute it per call instead.
    if time_stamp is None:
        time_stamp = int(time.time())

    required_list = [
        opt_dict.backup_name, opt_dict.src_file,
        os.path.exists(opt_dict.src_file)
    ]

    if not validate_all_args(required_list):
        # BUG FIX: ','.join(required_list) used to raise TypeError here
        # because the list holds booleans/None on failure.
        err_msg = ('Error: Please provide ALL the following options: '
                   '--backup-name --src-file (must be an existing path)')
        logging.critical(err_msg)
        raise Exception(err_msg)

    # Change the current working directory to opt_dict.src_file
    os.chdir(os.path.normpath(opt_dict.src_file.strip()))

    logging.info('[*] Changing current working directory to: {0} \
    '.format(opt_dict.src_file))
    logging.info('[*] Backup started for: {0} \
    '.format(opt_dict.src_file))

    # Tar options for default behaviour. Please refer to man tar for
    # a better explanation of each option
    tar_command = ' {0} --create -z --warning=none \
        --dereference --hard-dereference --no-check-device --one-file-system \
        --preserve-permissions --same-owner --seek \
        --ignore-failed-read '.format(opt_dict.tar_path)

    file_name = add_host_name_ts_level(opt_dict, time_stamp)
    meta_data_backup_file = u'tar_metadata_{0}'.format(file_name)
    # Incremental backup section
    if not opt_dict.no_incremental:
        (tar_command, opt_dict,
         remote_manifest_meta) = tar_incremental(tar_command, opt_dict,
                                                 meta_data_backup_file,
                                                 remote_manifest_meta)

    # End incremental backup section
    if opt_dict.exclude:
        tar_command = ' {0} --exclude="{1}" '.format(tar_command,
                                                     opt_dict.exclude)

    tar_command = ' {0} . '.format(tar_command)
    # Encrypt data if passfile is provided
    if opt_dict.encrypt_pass_file:
        openssl_cmd = "{0} enc -aes-256-cfb -pass file:{1}".format(
            opt_dict.openssl_path, opt_dict.encrypt_pass_file)
        tar_command = '{0} | {1} '.format(tar_command, openssl_cmd)

    return opt_dict, tar_command, remote_manifest_meta
Example #11
0
def tar_restore_args_valid(backup_opt_dict):
    """
    Validate the arguments needed by a tar restore.

    Only --restore-abs-path is checked: it must refer to an existing
    path. Returns True when valid, False otherwise; failures are
    logged as critical errors instead of propagating.
    """
    restore_path_found = os.path.exists(backup_opt_dict.restore_abs_path)
    try:
        valid_args = validate_all_args([restore_path_found])  # might raise
        if not valid_args:
            raise Exception(('please provide ALL of the following '
                             'argument: --restore-abs-path'))
    except Exception as err:
        valid_args = False
        logging.critical('[*] Critical Error: {0}'.format(err))
    return valid_args
Example #12
0
def tar_restore_args_valid(backup_opt_dict):
    """
    Check that --restore-abs-path points at an existing path.

    Never raises: any validation failure is logged as a critical error
    and reported as a False return value.
    """
    path_checks = [os.path.exists(backup_opt_dict.restore_abs_path)]
    try:
        valid_args = utils.validate_all_args(path_checks)   # might raise
        if not valid_args:
            raise Exception(('please provide ALL of the following '
                             'argument: --restore-abs-path'))
    except Exception as err:
        valid_args = False
        logging.critical('[*] Critical Error: {0}'.format(err))
    return valid_args
Example #13
0
def check_container_existance(backup_opt_dict):
    '''
    Check if the provided container is already available on Swift.
    The verification is done by exact matching between the provided
    container name and the whole list of containers available for the
    swift account. If a container is not found, it is automatically
    created and used to execute the backup.

    :return: backup_opt_dict, unchanged
    :raises Exception: when container/container_segments are unset
    '''

    required_list = [
        backup_opt_dict.container_segments, backup_opt_dict.container
    ]

    if not validate_all_args(required_list):
        # BUG FIX: ','.join(required_list) used to raise TypeError here
        # because a failed validation means the list holds None/False.
        err_msg = ('[*] Error: please provide ALL the following args: '
                   '--container --container-segments')
        logging.critical(err_msg)
        raise Exception(err_msg)
    logging.info("[*] Retrieving container {0}".format(
        backup_opt_dict.container))
    sw_connector = backup_opt_dict.sw_connector
    containers_list = sw_connector.get_account()[1]

    # Exact-match the configured names against the account's containers
    # (redundant None pre-initializations removed)
    match_container = [
        container_object['name'] for container_object in containers_list
        if container_object['name'] == backup_opt_dict.container
    ]
    match_container_seg = [
        container_object['name'] for container_object in containers_list
        if container_object['name'] == backup_opt_dict.container_segments
    ]

    # If no container is available, create it and write to logs
    if not match_container:
        logging.warning("[*] No such container {0} available... ".format(
            backup_opt_dict.container))
        logging.warning("[*] Creating container {0}".format(
            backup_opt_dict.container))
        sw_connector.put_container(backup_opt_dict.container)
    else:
        logging.info("[*] Container {0} found!".format(
            backup_opt_dict.container))

    if not match_container_seg:
        logging.warning("[*] Creating segments container {0}".format(
            backup_opt_dict.container_segments))
        sw_connector.put_container(backup_opt_dict.container_segments)
    else:
        logging.info("[*] Container Segments {0} found!".format(
            backup_opt_dict.container_segments))

    return backup_opt_dict
Example #14
0
def restore_fs(backup_opt_dict):
    '''
    Restore data from swift server to your local node. Data will be
    restored in the directory specified in
    backup_opt_dict.restore_abs_path. The object specified with the
    --get-object option will be downloaded from the Swift server and
    will be downloaded inside the parent directory of
    backup_opt_dict.restore_abs_path. If the object was compressed
    during backup time, then it is decrypted, decompressed and
    de-archived to backup_opt_dict.restore_abs_path. Before downloading
    the file, the size of the local volume/disk/partition will be
    computed. If there is enough space the full restore will be
    executed. Please remember to stop any service that requires access
    to the data before starting the restore execution and to start the
    service at the end of the restore execution.

    :raises ValueError: when a mandatory argument is missing or when no
        backup matches the requested name/hostname
    '''

    # List of mandatory values
    required_list = [
        os.path.exists(backup_opt_dict.restore_abs_path),
        backup_opt_dict.remote_obj_list,
        backup_opt_dict.container,
        backup_opt_dict.backup_name
        ]

    # Argument validation. Raise ValueError if any argument is not True
    if not validate_all_args(required_list):
        # BUG FIX: ' '.join(required_list) used to raise TypeError here
        # because the list holds booleans/None on failure.
        err_msg = ('[*] Error: please provide ALL the following arguments: '
                   'a valid --restore-abs-path --container --backup-name')
        logging.critical(err_msg)
        raise ValueError(err_msg)

    if not backup_opt_dict.restore_from_date:
        logging.warning('[*] Restore date time not available. Setting to \
            current datetime')
        # Convert "YYYY-MM-DD HH:MM:SS.micro" to "YYYY-MM-DDTHH:MM:SS"
        backup_opt_dict.restore_from_date = \
            re.sub(
                r'^(\S+?) (.+?:\d{,2})\.\d+?$', r'\1T\2',
                str(datetime.datetime.now()))

    # If restore_from_host is set, impersonate that hostname so the
    # matching below looks at the right host's backups
    if backup_opt_dict.restore_from_host:
        backup_opt_dict.hostname = backup_opt_dict.restore_from_host

    # Check if there's a backup matching. If not raise ValueError
    backup_opt_dict = get_match_backup(backup_opt_dict)
    if not backup_opt_dict.remote_match_backup:
        # Message fixed: was "Not backup found matching with name"
        # with stray continuation whitespace embedded in the string
        err_msg = ('[*] No backup found matching with name: {0}, '
                   'hostname: {1}'.format(
                       backup_opt_dict.backup_name,
                       backup_opt_dict.hostname))
        logging.critical(err_msg)
        raise ValueError(err_msg)

    restore_fs_sort_obj(backup_opt_dict)
Example #15
0
def check_container_existance(backup_opt_dict):
    '''
    Check if the provided container is already available on Swift.
    The verification is done by exact matching between the provided
    container name and the whole list of containers available for the
    swift account. If a container is not found, it is automatically
    created and used to execute the backup.

    :return: backup_opt_dict, unchanged
    :raises Exception: when container/container_segments are unset
    '''

    required_list = [
        backup_opt_dict.container_segments,
        backup_opt_dict.container]

    if not validate_all_args(required_list):
        # BUG FIX: ','.join(required_list) used to raise TypeError here
        # because a failed validation means the list holds None/False.
        err_msg = ('[*] Error: please provide ALL the following args: '
                   '--container --container-segments')
        logging.critical(err_msg)
        raise Exception(err_msg)
    logging.info(
        "[*] Retrieving container {0}".format(backup_opt_dict.container))
    sw_connector = backup_opt_dict.sw_connector
    containers_list = sw_connector.get_account()[1]

    # Exact-match the configured names against the account's containers
    # (redundant None pre-initializations removed)
    match_container = [
        container_object['name'] for container_object in containers_list
        if container_object['name'] == backup_opt_dict.container]
    match_container_seg = [
        container_object['name'] for container_object in containers_list
        if container_object['name'] == backup_opt_dict.container_segments]

    # If no container is available, create it and write to logs
    if not match_container:
        logging.warning("[*] No such container {0} available... ".format(
            backup_opt_dict.container))
        logging.warning(
            "[*] Creating container {0}".format(backup_opt_dict.container))
        sw_connector.put_container(backup_opt_dict.container)
    else:
        logging.info(
            "[*] Container {0} found!".format(backup_opt_dict.container))

    if not match_container_seg:
        logging.warning("[*] Creating segments container {0}".format(
            backup_opt_dict.container_segments))
        sw_connector.put_container(backup_opt_dict.container_segments)
    else:
        logging.info("[*] Container Segments {0} found!".format(
            backup_opt_dict.container_segments))

    return backup_opt_dict
Example #16
0
def restore_fs(backup_opt_dict):
    """
    Restore data from the Swift server onto the local node.

    The object selected with --get-object is downloaded into the parent
    directory of backup_opt_dict.restore_abs_path and, when it was
    compressed/encrypted at backup time, decrypted, decompressed and
    de-archived into backup_opt_dict.restore_abs_path. Stop any service
    that needs the data before starting the restore and start it again
    once the restore has completed.
    """

    # Every one of these must be truthy for the restore to proceed
    mandatory_values = [
        os.path.exists(backup_opt_dict.restore_abs_path),
        backup_opt_dict.remote_obj_list, backup_opt_dict.container,
        backup_opt_dict.backup_name
    ]
    if not validate_all_args(mandatory_values):
        raise ValueError('[*] Error: please provide ALL the following '
                         'arguments: a valid --restore-abs-path '
                         '--container --backup-name')

    # Default the restore point to "now" in YYYY-MM-DDTHH:MM:SS form
    if not backup_opt_dict.restore_from_date:
        logging.warning(('[*] Restore date time not available. Setting to '
                         'current datetime'))
        now_repr = str(datetime.datetime.now())
        backup_opt_dict.restore_from_date = re.sub(
            r'^(\S+?) (.+?:\d{,2})\.\d+?$', r'\1T\2', now_repr)

    # Restoring another host's backup: impersonate that hostname
    if backup_opt_dict.restore_from_host:
        backup_opt_dict.hostname = backup_opt_dict.restore_from_host

    # Abort when no backup matches the requested name/hostname pair
    backup_opt_dict = get_match_backup(backup_opt_dict)
    if not backup_opt_dict.remote_match_backup:
        raise ValueError('No backup found matching for '
                         'backup name: {0}, hostname: {1}'.format(
                             backup_opt_dict.backup_name,
                             backup_opt_dict.hostname))
    restore_fs_sort_obj(backup_opt_dict)
Example #17
0
def check_container_existance(backup_opt_dict):
    """
    Check whether the backup container and its segments container exist
    on Swift, by exact name match against the account's container list.

    Returns a dict with boolean flags under 'main_container' and
    'container_segments'.
    """

    if not validate_all_args([backup_opt_dict.container_segments,
                              backup_opt_dict.container]):
        raise Exception('please provide the following arg: --container')

    logging.info(
        "[*] Retrieving container {0}".format(backup_opt_dict.container))
    sw_connector = backup_opt_dict.sw_connector
    containers_list = sw_connector.get_account()[1]

    wanted_name = backup_opt_dict.container
    wanted_seg_name = backup_opt_dict.container_segments
    match_container = [
        entry['name'] for entry in containers_list
        if entry['name'] == wanted_name]
    match_container_seg = [
        entry['name'] for entry in containers_list
        if entry['name'] == wanted_seg_name]

    # Both flags start pessimistic and flip to True on a match
    containers = {'main_container': False, 'container_segments': False}

    if match_container:
        logging.info(
            "[*] Container {0} found!".format(backup_opt_dict.container))
        containers['main_container'] = True
    else:
        logging.warning("[*] No such container {0} available... ".format(
            backup_opt_dict.container))

    if match_container_seg:
        logging.info("[*] Container Segments {0} found!".format(
            backup_opt_dict.container_segments))
        containers['container_segments'] = True
    else:
        logging.warning(
            "[*] No segments container {0} available...".format(
                backup_opt_dict.container_segments))

    return containers
Example #18
0
def tar_restore(backup_opt_dict, read_pipe):
    '''
    Restore the provided file into backup_opt_dict.restore_abs_path.
    Decrypt the file if backup_opt_dict.encrypt_pass_file key is
    provided.

    :param backup_opt_dict: options object; tar_path, restore_abs_path,
        encrypt_pass_file, openssl_path, bash_path and max_seg_size
        are read
    :param read_pipe: pipe-like object whose ``recv`` yields the data
        stream chunks
    :raises ValueError: when restore_abs_path does not exist
    :raises Exception: when tar reports an error on stderr
    '''

    # Validate mandatory arguments
    required_list = [
        os.path.exists(backup_opt_dict.restore_abs_path)]

    if not validate_all_args(required_list):
        # BUG FIX: ' '.join(required_list) used to raise TypeError here
        # because the list holds a boolean, which str.join rejects.
        err_msg = ('[*] Error: please provide ALL of the following '
                   'arguments: a valid --restore-abs-path')
        logging.critical(err_msg)
        raise ValueError(err_msg)

    # Set the default values for tar restore
    tar_cmd = ' {0} -z --incremental --extract  \
        --unlink-first --ignore-zeros --warning=none --overwrite \
        --directory {1} '.format(
            backup_opt_dict.tar_path, backup_opt_dict.restore_abs_path)

    # Check if encryption file is provided and set the openssl decrypt
    # command accordingly
    if backup_opt_dict.encrypt_pass_file:
        openssl_cmd = " {0} enc -d -aes-256-cfb -pass file:{1}".format(
            backup_opt_dict.openssl_path,
            backup_opt_dict.encrypt_pass_file)
        tar_cmd = ' {0} | {1} '.format(openssl_cmd, tar_cmd)

    tar_cmd_proc = subprocess.Popen(
        tar_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
        stderr=subprocess.PIPE, shell=True,
        executable=backup_opt_dict.bash_path)

    # Read the pipe and feed tar's stdin; a chunk shorter than
    # max_seg_size marks the end of the stream
    while True:
        data_stream = read_pipe.recv()
        tar_cmd_proc.stdin.write(data_stream)
        if len(data_stream) < int(backup_opt_dict.max_seg_size):
            break

    tar_err = tar_cmd_proc.communicate()[1]

    if 'error' in tar_err.lower():
        logging.critical('[*] Restore error: {0}'.format(tar_err))
        raise Exception('[*] Restore error: {0}'.format(tar_err))
Example #19
0
def tar_restore(backup_opt_dict, read_pipe):
    '''
    Restore the provided file into backup_opt_dict.restore_abs_path.
    Decrypt the file if backup_opt_dict.encrypt_pass_file key is
    provided.

    :raises ValueError: when restore_abs_path does not exist
    :raises Exception: when tar reports an error on stderr
    '''

    # Validate mandatory arguments
    required_list = [os.path.exists(backup_opt_dict.restore_abs_path)]

    if not validate_all_args(required_list):
        # BUG FIX: ' '.join(required_list) used to raise TypeError here
        # because the list holds a boolean, which str.join rejects.
        err_msg = ('[*] Error: please provide ALL of the following '
                   'arguments: a valid --restore-abs-path')
        logging.critical(err_msg)
        raise ValueError(err_msg)

    # Set the default values for tar restore
    tar_cmd = ' {0} -z --incremental --extract  \
        --unlink-first --ignore-zeros --warning=none --overwrite \
        --directory {1} '.format(backup_opt_dict.tar_path,
                                 backup_opt_dict.restore_abs_path)

    # Check if encryption file is provided and set the openssl decrypt
    # command accordingly
    if backup_opt_dict.encrypt_pass_file:
        openssl_cmd = " {0} enc -d -aes-256-cfb -pass file:{1}".format(
            backup_opt_dict.openssl_path, backup_opt_dict.encrypt_pass_file)
        tar_cmd = ' {0} | {1} '.format(openssl_cmd, tar_cmd)

    tar_cmd_proc = subprocess.Popen(tar_cmd,
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True,
                                    executable=backup_opt_dict.bash_path)

    # Read the pipe and feed tar's stdin; a chunk shorter than
    # max_seg_size marks the end of the stream
    while True:
        data_stream = read_pipe.recv()
        tar_cmd_proc.stdin.write(data_stream)
        if len(data_stream) < int(backup_opt_dict.max_seg_size):
            break

    tar_err = tar_cmd_proc.communicate()[1]

    if 'error' in tar_err.lower():
        logging.critical('[*] Restore error: {0}'.format(tar_err))
        raise Exception('[*] Restore error: {0}'.format(tar_err))
Example #20
0
def lvm_eval(backup_opt_dict):
    """
    Decide whether the backup must run on top of an lvm snapshot or
    directly on the plain filesystem. When none of the lvm options are
    set, the backup runs straight on the file system; when they are
    used, all three (volgroup, srcvol, dirmount) must be set together.

    Returns True when the lvm snapshot path should be used.
    """

    lvm_options = [
        backup_opt_dict.lvm_volgroup,
        backup_opt_dict.lvm_srcvol,
        backup_opt_dict.lvm_dirmount]

    if not validate_all_args(lvm_options):
        logging.warning('[*] Required lvm options not set. The backup will \
            execute without lvm snapshot.')
        return False

    # Make sure the snapshot mount point exists (logged by create_dir)
    create_dir(backup_opt_dict.lvm_dirmount)
    return True