Example #1
def cleanup(state):
    '''Cleanup state'''

    # Dump DBr
    #dbr.dump(state.states.dbr)

    # Disconnect DBr
    dbr.cleanup(state.states.dbr)

    # Get DB file size for statistics
    db_size = fmtutil.byte_size(dbr.get_db_file_size(state.states.dbr))
    state.set_stats_category('Tracking DB', 'Size', db_size)

    # Store DB (local --> storage)
    err, msg = storager.store(
            state.states.storager,
            state.states.dbr.model.filepath,
            "{}/{}".format(state.model.dirstorage, '_aeroback'),
            None)
    if err:
        _D.ERROR(
                __name__,
                "Error storing File Versioned Tracking DB",
                'file', state.states.dbr.model.filename,
                'msg', msg
                )
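Note: fmtutil.byte_size is used throughout these examples but its source is not shown. A minimal sketch of the presumed behavior, assuming a 1024-based human-readable format (the real fmtutil may differ):

def byte_size(size):
    # Render a byte count as a human-readable string (assumption)
    size = float(size)
    for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
        if size < 1024.0:
            return "{:.1f} {}".format(size, unit)
        size /= 1024.0
    return "{:.1f} PB".format(size)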
Example #2
def _store_archive(state):
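    # Upload one archive, record it in the tracking DB, then prune
    # stored versions beyond the configured history size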
    err, msg = storager.store(
            state.states.storager,
            state.archive_filepath,
            state.model.dirstorage,
            state.model.atype)
    if err:
        return 1, "Error storing archive: {}".format(msg)

    state.total_stored_files += 1
    size = os.stat(state.archive_filepath).st_size
    state.set_stats('Uploaded', fmtutil.byte_size(size))

    # Add file to DB
    dbr.add_version(
            state.states.dbr,
            state.archive_filename,
            size)

    # Get list of older versions to remove
    filenames = dbr.remove_versions_older_than(
            state.states.dbr,
            state.model.history_size)

    # Remove files from storage
    for filename in filenames:
        storager.unstore(
                state.states.storager,
                state.model.dirstorage,
                filename)

    return 0, None
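The dbr module is not shown here. A plausible in-memory sketch of the versioning contract used above (add a version, then drop everything older than the newest history_size entries); the names and fields are assumptions:

def remove_versions_older_than(versions, history_size):
    # versions: list of {'filename': ..., 'added': ...} dicts (hypothetical)
    # Keep only the newest `history_size` entries; return the filenames
    # of the pruned versions so the caller can unstore them
    versions.sort(key=lambda v: v['added'])
    stale = versions[:-history_size] if history_size > 0 else list(versions)
    del versions[:len(stale)]
    return [v['filename'] for v in stale]

# e.g. with history_size=2 and versions added at times 1, 2, 3,
# this returns the oldest filename and leaves the two newest in place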
Example #3
def cleanup(state):
    '''Cleanup state'''

    # Clear DB list of local files and uploaded files to minimize DB size
    dbr.clear_locals_uploads(state.states.dbr)

    # Dump DB
    dbr.dump_params(state.states.dbr)
    #dbr.dump_files_storage(state.states.dbr)

    # Get statistics of what's in storage now
    storfiles, storsize = dbr.stats_storage(state.states.dbr)
    print '#### STOR FILES', storfiles
    print '#### STOR SIZE', storsize

    # Guard against an empty local scan to avoid division by zero
    if state.total_local_size:
        progress = int(float(storsize) / float(state.total_local_size) * 100.0)
    else:
        progress = 0
    storsize = fmtutil.byte_size(storsize)

    state.set_stats_category('Storage Total', 'Progress', '{}%'.format(progress))
    state.set_stats_category('Storage Total', 'Files count', storfiles)
    state.set_stats_category('Storage Total', 'Files size', storsize)

    # Disconnect DBr
    dbr.cleanup(state.states.dbr)

    # Get DB file size for statistics
    db_size = fmtutil.byte_size(dbr.get_db_file_size(state.states.dbr))
    state.set_stats_category('Tracking DB', 'DB size', db_size)

    # Store DB (local --> storage)
    err, msg = storager.store(
            state.states.storager,
            state.states.dbr.model.filepath,
            "{}/{}".format(state.model.dirstorage, '_aeroback'),
            None)
    if err:
        _D.ERROR(
                __name__,
                "Error storing File Incremental Tracking DB",
                'file', state.states.dbr.model.filename,
                'msg', msg
                )
Example #4
def init(date_str, date_int, dir_temp, storstate, params):
    '''Initialize model and state'''

    # Model
    model, err, msg = _init_model(date_str, date_int, dir_temp, params)
    if err:
        return State(None), err, msg

    #_D.OBJECT(
    #        __name__,
    #        "Feeder Dir Compress Model",
    #        model
    #        )

    # State
    state, err, msg = _init_state(model, storstate)
    if err:
        return State(None), err, msg

    # Description for reporting
    state.set_description(state.model.description)

    # Descriptors for reporting
    state.set_descriptor('Type', state.model.atype)
    state.set_descriptor('Storage dir', state.model.dirstorage)
    state.set_descriptor('Local dir', state.model.directory)
    state.set_descriptor('Includes', state.model.includes)
    state.set_descriptor('Excludes', state.model.excludes)
    state.set_descriptor('Max session upload', fmtutil.byte_size(state.model.maxupload))

    # Stats for reporting
    state.set_stats('Stats:', '&nbsp;')

    #OK
    state.set_stats_category('Storage Total', 'Progress', 0)
    state.set_stats_category('Storage Total', 'Files count', 0)
    state.set_stats_category('Storage Total', 'Files size', 0)

    state.set_stats_category('Session Uploaded', 'Files count', 0)
    state.set_stats_category('Session Uploaded', 'Files size', 0)

    #OK
    state.set_stats_category('Local Total', 'Files count', 0)
    state.set_stats_category('Local Total', 'Files size', 0)

    #OK
    state.set_stats_category('Tracking DB', 'DB size', 0)

    return state, err, msg
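The State object's reporting API is not shown in these excerpts. A minimal hypothetical store matching how set_descriptor and set_stats_category are called above:

class ReportingState(object):
    # Hypothetical sketch; the real State also carries model/states
    def __init__(self):
        self.descriptors = {}
        self.stats = {}

    def set_descriptor(self, name, value):
        self.descriptors[name] = value

    def set_stats_category(self, category, name, value):
        # Stats are grouped by category for the report
        self.stats.setdefault(category, {})[name] = value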
Example #5
def dump_stats(state):
    c = state.db_conn.cursor()
    if not c:
        return

    sql = "SELECT * FROM stats"
    c.execute(sql)

    entries = []
    for row in c:
        entries.append("{}\t{}".format(row[0], fmtutil.byte_size(row[1])))

    _D.DEBUG(
        __name__,
        "File Incremental Tracking DB dump",
        'stats', entries
        )
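The stats table itself is not defined in these excerpts. Judging from dump_stats above and the dbr.add_stats(state.states.dbr, state.model.date_str, total_size) call in Example #7 below, a plausible schema is one row per session date with the total bytes uploaded; this sqlite3 sketch is an assumption:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute(
    "CREATE TABLE IF NOT EXISTS stats ("
    " date_str TEXT PRIMARY KEY,"
    " size INTEGER)")
conn.execute("INSERT INTO stats VALUES (?, ?)", ('2014-01-31', 1048576))
conn.commit()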
Example #6
def cleanup(state):
    """Cleanup state"""

    # Dump DBr
    # dbr.dump(state.states.dbr)

    # Disconnect DBr
    dbr.cleanup(state.states.dbr)

    # Get DB file size for statistics
    db_size = fmtutil.byte_size(dbr.get_db_file_size(state.states.dbr))
    state.set_stats_category("Tracking DB", "Size", db_size)

    # Store DB (local --> storage)
    err, msg = storager.store(state.states.storager, state.states.dbr.model.filepath, state.model.dirstorage, None)
    if err:
        _D.ERROR(__name__, "Error storing DB", "file", state.states.dbr.model.filename, "msg", msg)
Example #7
def _store(state):
    """
    Store each file.
    Only allows max_fails failures before stopping and returning error.
    """
    max_fails = 5
    fails = 0

    i = 0
    total_size = 0

    # Get files to store
    rows = dbr.get_files_upload(state.states.dbr)

    directory = state.model.directory
    dirstorage = state.model.dirstorage
    #dir_str_len = len(state.model.directory)
    # Store each file
    for row in rows:
        #filepath = row[0]
        #modified = row[1]
        #size = row[2]
        filepath = row['filepath']
        modified = row['modified']
        size = row['size']

        # Extract path that is in between directory and filename
        filedir, filename = fsutil.path_to_body_tail(filepath)
        # (disabled) earlier logic that computed a per-file dirstorage:
        # if len(filedir) > dir_str_len:
        #     # Case of file in subdirectory of directory
        #     dirstorage = os.path.join(
        #             state.model.dirstorage,
        #             filedir[dir_str_len + 1:])
        # else:
        #     # Case of file located in directory itself
        #     dirstorage = state.model.dirstorage

        err, msg = storager.store(
                state.states.storager,
                os.path.join(directory, filepath),
                "{}/{}".format(dirstorage, filedir),
                state.model.atype)
        if err:
            # Log error, including the storager message
            _D.ERROR(
                    __name__,
                    "Error storing file",
                    'file', filepath,
                    'msg', msg
                    )
            fails += 1
            if fails == max_fails:
                break

        else:
            # Update DB on file store success
            print "\t+ ", filepath
            dbr.add_update_storage_file(state.states.dbr, filepath, modified, size)
            i += 1
            total_size += size

    # Commit all added storage files, if any stores happened
    if i:
        dbr.finish_adding_storage_files(state.states.dbr)
        dbr.add_stats(state.states.dbr, state.model.date_str, total_size)

    # Dump stats
    #dbr.dump_stats(state.states.dbr)

    state.total_stored_files = i
    state.total_stored_size = total_size

    state.set_stats_category('Session Uploaded', 'Files count', i)
    state.set_stats_category('Session Uploaded', 'Files size', fmtutil.byte_size(total_size))

    if fails:
        return 1, "Error storing files: {} failure(s), max allowed {}".format(fails, max_fails)

    return 0, None
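fsutil.path_to_body_tail is not shown; given how filedir and filename are used above, it presumably splits a relative path into its directory portion and final component, i.e. roughly os.path.split:

import os

def path_to_body_tail(path):
    # Presumed equivalent of fsutil.path_to_body_tail (assumption)
    return os.path.split(path)

# path_to_body_tail('photos/2014/img.jpg') -> ('photos/2014', 'img.jpg')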
Example #8
def _scan_local_files(state):
    # List of directories to ignore
    # Usually those that are hidden (start with '.')
    ignored = []

    # Scan directory
    count = 0
    total_size = 0
    directory = state.model.directory
    includes = state.model.includes
    excludes = state.model.excludes
    ignore_patterns = state.model.ignore_patterns

    for root, _dirs, files in os.walk(directory):

        # Check this directory is not a child of ignored
        skip = False
        for ign in ignored:
            if root.startswith(ign):
                skip = True
                break

        if skip:
            continue

        # Add root directory to ignored if basename matches one of ignore patterns
        base = os.path.basename(root)
        if _matches(base, ignore_patterns):
            print "\t- {}".format(base)
            ignored.append(root)
            continue

        if not _is_dir_valid_for_backup(root, directory, includes, excludes):
            continue

        print "\t  {}".format(root)

        # Legitimate directory, process its files
        for f in files:

            # Skip files matching ignore patterns
            if _matches(f, ignore_patterns):
                print "\t\t- {}".format(f)
                continue

            print "\t\t  {}".format(f)

            # Process file
            filepath = os.path.join(root, f)
            modified = int(os.path.getmtime(filepath))
            size = os.path.getsize(filepath)
            total_size += size

            #print "\t\t  = {}".format(filepath[len(directory) + 1:])
            dbr.add_local_file(
                    state.states.dbr,
                    filepath[len(directory) + 1:],
                    modified,
                    size)
            count += 1

    # Commit, one for all adds
    dbr.commit_added_local_files(state.states.dbr)

    #_D.DEBUG(
    #        __name__,
    #        "Finished scanning local files",
    #        'Number of files', count,
    #        'Total size', fmtutil.byte_size(total_size)
    #        )
    #dbr.dump_files_local(state.states.dbr)

    state.total_local_files = count
    state.total_local_size = total_size

    state.set_stats_category('Local Total', 'Files count', count)
    state.set_stats_category('Local Total', 'Files size', fmtutil.byte_size(total_size))

    return 0, None
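The _matches helper is not shown. Given the "start with '.'" comment above, a likely implementation is a glob-style match over the ignore patterns; this fnmatch-based version is an assumption:

import fnmatch

def _matches(name, patterns):
    # True if name matches any glob-style ignore pattern, e.g. '.*'
    for pattern in patterns:
        if fnmatch.fnmatch(name, pattern):
            return True
    return False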
Example #9
def _exec_job(state, job, job_n, jobs_count):
    # Set job-level context parameters
    err, msg = context.set_param("gsutil", job["gsutil"])
    if err:
        return 1, msg

    # Init & exec each active storage.
    # Report errors and keep working, allowing the job to run on the healthy storages.
    job["storstates"] = []
    for storparams in job["storages"]:
        if storparams["active"]:

            # Create temp storager directory
            dir_temp = tempfile.mkdtemp(dir=state.model.dir_temp)

            # Init storager
            storstate, err, msg = storager.init(state.model.date_str, state.model.date_int, dir_temp, storparams)
            if err:
                _D.ERROR(__name__, "Skipping storage due to error in init", "msg", msg)
                continue

            # Exec storager
            err, msg = storager.execute(storstate)
            if err:
                _D.ERROR(__name__, "Skipping storage due to error in exec", "msg", msg)
                continue

            # Add to list of storagers states
            job["storstates"].append(storstate)

    # Any storagers configured?
    if not job["storstates"]:
        return 1, "No storagers configured, aborting job"

    # ... do backup jobs ...

    # Run each backup type for each storage
    # Report errors and keep working
    for backup in job["backups"]:
        if backup["active"]:

            # Check previous backup finished
            section_name = "{}:{}".format(backup["type"], backup["dirstorage"])
            running = runlogr.get_section_item(state.states.runlogr, section_name, "running<bool>")
            if running:
                msg = "Previous run of the backup is still marked as running. If you believe that's not the case then manually change in section [{}] parameter 'running' to False in file: {}".format(
                    backup["type"], runlogr.get_filepath(state.states.runlogr)
                )
                state.add_msg_error(msg)
                _D.ERROR(
                    __name__,
                    "Previous run of backup not yet finished",
                    "backup type",
                    backup["type"],
                    "distorage",
                    backup["dirstorage"],
                    "msg",
                    msg,
                )
                continue

            # Check if it is time to run:
            # _time_to_run(backup_type, last_run, now, frequency)
            if not _time_to_run(
                backup["type"],
                runlogr.get_section_item(state.states.runlogr, section_name, "last_run<time>"),
                state.model.date,
                backup.get("frequency", None),
            ):
                # Time hasn't come yet, skip this backup
                continue

            # Update runlog with backup type
            runlogr.set_section(
                state.states.runlogr, section_name, {"last_run<time>": state.model.date, "running<bool>": True}
            )

            # Run backup on each storage
            errs = False
            for storstate in job["storstates"]:
                # Create unique temp directory inside dir_temp
                dir_temp = tempfile.mkdtemp(dir=state.model.dir_temp)
                # Execute
                err, msg = _exec_backup_type(state, storstate, backup, dir_temp)
                # Delete temp directory
                fsutil.remove_dir_tree(dir_temp)

                if err:
                    errs = True
                    _D.ERROR(__name__, "Error executing backup", "msg", msg, "params", backup)
                    continue

            # Update runlog with backup finish
            _runlog_update_finish(state, section_name, err_bool=errs)

    # ... done backup jobs ...

    # Add storage stats to reporting
    for storstate in job["storstates"]:
        cat = "Job {}/{} uploaded".format(job_n, jobs_count)
        state.set_descriptor_category(
            cat, storstate.model.atype, fmtutil.byte_size(storager.get_stored_stats(storstate))
        )

    # Cleanup storagers
    # Delete temp directory
    for storstate in job["storstates"]:
        storager.cleanup(storstate)
        fsutil.remove_dir_tree(storstate.model.dir_temp)

    return 0, None
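_time_to_run and _exec_backup_type are not shown. A hedged sketch of the scheduling check, assuming last_run/now are datetimes and frequency is a timedelta (None meaning "run every time"):

def _time_to_run(backup_type, last_run, now, frequency):
    # backup_type kept for parity with the call site (likely logging);
    # run if never run before, unscheduled, or the period has elapsed
    if last_run is None or frequency is None:
        return True
    return now - last_run >= frequency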