Example #1
def test_exclude(self):
    ctx = Context()
    ctx.set_exclude_extensions(('jpg', 'png'))  # Include everything, exclude JPG and PNG files
    file_filter = FileFilter(ctx)
    EXCLUDED = ('x.jpg', 'X.JPG', 'z.PNG', '.algojpg', '.frockup', '.frockup-yadayadayada')
    INCLUDED = ('x.zip', 'X.ZIP', 'xjpg', 'XJPG')
    for filename in INCLUDED:
        self.assertTrue(file_filter.include_file('/', filename),
                        "File {} should be included".format(filename))
    for filename in EXCLUDED:
        self.assertFalse(file_filter.include_file('/', filename),
                         "File {} should be excluded".format(filename))
Example #2
def __init__(self):
    self.ctx = Context()
    self.ctx.set_include_extensions(("jpg",))
    self.file_filter = FileFilter(self.ctx)
    self.local_metadata = LocalMetadata(self.ctx)
    self.logger = logging.getLogger("Remote")
Example #3
class Remote(object):
    def __init__(self):
        self.ctx = Context()
        self.ctx.set_include_extensions(("jpg",))
        self.file_filter = FileFilter(self.ctx)
        self.local_metadata = LocalMetadata(self.ctx)
        self.logger = logging.getLogger("Remote")

    def get_background_process_status(self, function_args):
        self.logger.debug("get_background_process_status() - %s", function_args)
        data = PROCESS_CONTROLLER.get_background_process_status()
        return data

    def launch_backup(self, function_args):
        self.logger.info("launch_backup() - %s", function_args)
        directory_name = function_args[0]
        assert os.path.exists(directory_name)
        data = PROCESS_CONTROLLER.launch_backup(directory_name)
        return data

    def stop_all_processes(self, function_args):
        self.logger.info("stop_all_processes() - %s", function_args)
        data = PROCESS_CONTROLLER.stop_all_processes()
        return data

    def load_directory(self, function_args):
        base_dir = function_args[0]
        assert os.path.exists(base_dir)

        directories = []
        for root, _, files in os.walk(base_dir):
            try:
                self.local_metadata._opendb(root, try_ro_on_error=True)
                file_list = []
                ignored_count = 0
                updated_count = 0
                pending_count = 0
                pending_bytes = 0
                for a_file in files:
                    should_proc, file_stats = _should_process_file(
                        root, a_file, self.file_filter, self.local_metadata, self.ctx
                    )

                    if (should_proc, file_stats) == (False, None):
                        # Excluded by the filter
                        ignored_count += 1
                    else:
                        if should_proc is True:
                            pending_count += 1
                            pending_bytes += file_stats.stats.st_size
                        elif should_proc is False:
                            updated_count += 1
                        else:
                            assert False, "Invalid value for should_proc: {}".format(should_proc)

                directory = {
                    "name": root,
                    "files": files,
                    "files_count": len(files),
                    "file_list": file_list,
                    "ignored_count": ignored_count,
                    "updated_count": updated_count,
                    "pending_count": pending_count,
                    "pending_bytes": pending_bytes,
                }
                directories.append(directory)
            finally:
                self.local_metadata.close()

        return {"directories": directories}
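A hedged sketch of consuming load_directory()'s result, using the Remote class and the argument convention (a single-element list holding the base directory) from the example above; the directory path and the print formatting are illustrative only.

remote = Remote()
result = remote.load_directory(['/home/user/photos'])  # hypothetical path
for entry in result['directories']:
    # Each entry carries the per-directory counters built in load_directory().
    print("{name}: {pending_count} pending ({pending_bytes} bytes), "
          "{updated_count} up to date, {ignored_count} ignored".format(**entry))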
Example #4
def main():
    parser = argparse.ArgumentParser(description='Backup files to Glacier')
    parser.add_argument('--info', dest='log_level', action='store_const', const='info',
        help="Set log level to info")
    parser.add_argument('--debug', dest='log_level', action='store_const', const='debug',
        help="Set log level to debug")
    parser.add_argument('--include', dest='include',
        help="File extensions to include, separated by commas (ej: jpg,JPG)")
    parser.add_argument('--exclude', dest='exclude',
        help="File extensions to exclude, separated by commas (ej: avi,AVI,mov,MOV,xcf,XCF)")
    parser.add_argument('--one-file', dest='one_file',
        help="To upload only one file, if needed")
    parser.add_argument('--dry-run', dest='dry_run', action='store_true',
        help="Simulate process and report, without uploading anything")
    parser.add_argument('directory', nargs='+', metavar='DIRECTORY',
        help="Directory to backup")

    args = parser.parse_args()
    if args.log_level == 'debug':
        logging_.basicConfig(level=logging_.DEBUG)
    elif args.log_level == 'info':
        logging_.basicConfig(level=logging_.INFO)
    else:
        logging_.basicConfig(level=logging_.WARN)

    ctx = Context()

    if args.include and args.exclude:
        parser.error("Can't use --include and --exclude at the same time.")
        return
    elif args.include:
        ctx.set_include_extensions(args.include.split(','))
    elif args.exclude:
        ctx.set_exclude_extensions(args.exclude.split(','))

    if args.dry_run:
        ctx.dry_run = True

    if 'FROKUP_FTP_MODE' in os.environ:
        main = Main(ctx=ctx, glacier=GlacierFtpBased)
        try:
            main.glacier.launch()
            main.glacier.wait_for_ftpserver()
            for a_directory in args.directory:
                main.process_directory(a_directory)
            main.close()
        finally:
            main.glacier.kill_ftp()
    else:
        main = Main(ctx=ctx)
        if args.one_file:
            main.process_file(args.directory[0], args.one_file)
        else:
            for a_directory in args.directory:
                main.process_directory(a_directory)
        main.close()

    included, excluded = ctx.get_log_processed()
    print "{0} included file(s):".format(len(included))
    for dirname, filename in included:
        print " + {0}/{1}".format(dirname, filename)

    print "{0} excluded file(s):".format(len(excluded))
    for dirname, filename in excluded:
        print " - {0}/{1}".format(dirname, filename)
Example #5
def action_upload_directory(_child_conn, directory):
    """
    Uploads a directory.
    """
    logger = logging.getLogger('action_upload_directory[{}]'.format(os.getpid()))
    try:
        logger.info("action_upload_directory(directory=%s)", directory)
        _child_conn.send(PROCESS_STARTED)

        ctx = Context()
        ctx.set_include_extensions(('jpg',))
        file_filter = FileFilter(ctx)
        local_metadata = LocalMetadata(ctx)
        glacier = Glacier(ctx)

        file_list_to_proc = []
        bytes_to_backup = 0
        for a_file in os.listdir(directory):
            if not os.path.isfile(os.path.join(directory, a_file)):
                continue
            should_proc, file_stats = _should_process_file(
                directory, a_file, file_filter, local_metadata, ctx)
            if should_proc:
                logger.info("INCLUDING %s/%s", directory, a_file)
                file_list_to_proc.append((a_file, file_stats))
                bytes_to_backup += file_stats.stats.st_size
            else:
                logger.info("EXCLUDING %s/%s", directory, a_file)

        bytes_uploaded = 0
        start_time = time.time()
        msg_template = "Uploading file {}/{} - ({} of {}) - ({} kb uploaded / {} kb pending)" + \
            " - Speed: {} kb/sec"
        try:
            num = 0
            for a_file, file_stats in file_list_to_proc:
                num += 1
                if _child_conn.poll():
                    received = _child_conn.recv()
                    if received == 'STOP':
                        _child_conn.send(PROCESS_CANCELLED)
                        return
                    else:
                        logger.warn("Ignoring received text '{}'".format(received))

                bytes_pending_upload = bytes_to_backup - bytes_uploaded
                bytes_per_sec = int(bytes_uploaded / (time.time() - start_time))
                _child_conn.send(msg_template.format(directory, a_file,
                    num, len(file_list_to_proc), (bytes_uploaded / 1024),
                    (bytes_pending_upload / 1024), (bytes_per_sec / 1024)))
                logger.info("Starting upload of %s/%s", directory, a_file)
                if DRY_RUN:
                    time.sleep(2)
                else:
                    glacier_data = glacier.upload_file(directory, a_file)
                logger.info("Finished upload of %s/%s", directory, a_file)
                bytes_uploaded += file_stats.stats.st_size

                if DRY_RUN:
                    pass
                else:
                    local_metadata.update_metadata(directory, a_file, file_stats, glacier_data)
            _child_conn.send(PROCESS_FINISH_OK)
        except Exception:
            logger.exception("Error while uploading files")
            _child_conn.send(PROCESS_FINISH_WITH_ERROR)
    except:
        logger.exception("Exception detected")
        raise
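A hedged sketch of the parent side of this pipe protocol (not shown in the example): it spawns the worker, relays the status strings it sends, and stops when a terminal sentinel arrives. It assumes action_upload_directory and the PROCESS_* constants from the example's module are in scope; parent_conn.send('STOP') could be used to request cancellation, as handled in the loop above.

import multiprocessing


def run_upload(directory):
    # action_upload_directory and the PROCESS_* sentinels come from the module shown above.
    parent_conn, child_conn = multiprocessing.Pipe()
    worker = multiprocessing.Process(target=action_upload_directory,
                                     args=(child_conn, directory))
    worker.start()
    while True:
        message = parent_conn.recv()  # blocks until the worker reports progress
        print(message)
        if message in (PROCESS_FINISH_OK, PROCESS_FINISH_WITH_ERROR, PROCESS_CANCELLED):
            break
    worker.join()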