def test_a_present_file_whose_content_has_changed_will_be_synced():
    """A file present in both repos whose content differs is copied over."""
    decider = sd.SyncDecider()
    source = r.TestRepo("from", [r.Entry("a", "2")])
    target = r.TestRepo("to", [r.Entry("a", "1")])

    operations = list(decider.sync(source, target))

    assert operations == [o.Copy("a", source, target)]
def test_an_excludes_can_be_globs():
    """Exclude patterns are treated as globs, not literal names."""
    decider = sd.SyncDecider(excludes=["b*", "c*"])
    source = r.TestRepo("from", ["a"])
    target = r.TestRepo("to", ["a", "be"])

    operations = list(decider.sync(source, target))

    # "be" matches the "b*" glob, so no Delete op is emitted for it.
    assert operations == [o.Nop("a", source, target)]
def test_a_new_will_be_synced():
    """A file that exists only in the source repo is copied to the target."""
    decider = sd.SyncDecider()
    source = r.TestRepo("from", ["a"])
    target = r.TestRepo("to")

    operations = list(decider.sync(source, target))

    assert operations == [o.Copy("a", source, target)]
def test_an_excluded_file_is_not_deleted():
    """A target-only file matching an exclude pattern is left alone."""
    decider = sd.SyncDecider(excludes=["b"])
    source = r.TestRepo("from", ["a"])
    target = r.TestRepo("to", ["a", "b"])

    operations = list(decider.sync(source, target))

    # "b" is excluded, so only the Nop for "a" appears — no Delete("b").
    assert operations == [o.Nop("a", source, target)]
def test_a_missing_file_will_be_deleted():
    """A file present only in the target repo gets a Delete operation."""
    decider = sd.SyncDecider()
    source = r.TestRepo("from", ["a"])
    target = r.TestRepo("to", ["a", "b"])

    operations = list(decider.sync(source, target))

    expected = [
        o.Nop("a", source, target),
        o.Delete("b", source, target),
    ]
    assert operations == expected
def test_an_identical_file_will_not_be_synced():
    """A file identical in both repos yields a Nop rather than a Copy."""
    decider = sd.SyncDecider()
    source = r.TestRepo("from", ["a"])
    target = r.TestRepo("to", ["a"])

    operations = list(decider.sync(source, target))

    assert operations == [o.Nop("a", source, target)]
def run(args):
    """Run the S3-to-local sync loop until a termination signal arrives.

    Starts a Prometheus metrics endpoint on port 8087, then repeatedly
    syncs the S3 prefix into the local path, recording per-operation
    success/failure counters and timing gauges.

    Args:
        args: Parsed CLI namespace providing ``s3uri``, ``localpath``,
            ``exclude`` (patterns passed to SyncDecider) and ``interval``
            (target seconds between sync starts).
    """
    s3uri = args.s3uri
    localpath = args.localpath
    excludes = args.exclude
    interval = args.interval

    # Static version/config information exposed as an info metric.
    i = pc.Info('s3insync_version',
                'Version and config information for the client')
    i.info({
        'version': s3insync.__version__,
        'aws_repo': s3uri,
        'localpath': localpath,
    })

    start_time = pc.Gauge('s3insync_start_time',
                          'Time the sync process was started')
    start_time.set_to_current_time()
    last_sync = pc.Gauge('s3insync_last_sync_time',
                         'Time the last sync completed')
    op_count = pc.Counter('s3insync_operations', 'Count of operations',
                          labelnames=('type', ))
    failed_op_count = pc.Counter('s3insync_failed_operations',
                                 'Count of failed operations',
                                 labelnames=('type', ))
    files_in_s3 = pc.Gauge(
        's3insync_files_in_s3',
        'Number of files in S3',
    )
    pc.start_http_server(8087)

    src = r.S3Repo('s3', s3uri)
    # NOTE(review): os.getenv('HOME') can be None (e.g. a service unit with
    # no login environment), which would make os.path.join raise a TypeError
    # here — confirm HOME is always set in deployment.
    dest = r.LocalFSRepo('fs', localpath,
                         os.path.join(os.getenv('HOME'), ".s3insync"))
    dest.ensure_directories()

    sync = sd.SyncDecider(excludes)
    set_exit = setup_signals()

    while not set_exit.is_set():
        logger.debug("Starting sync")
        start = time.monotonic()
        try:
            success, failures = sync.execute_sync(src, dest)
            # 'total' is the overall S3 file count; the remaining keys are
            # per-operation-type counts fed into the Prometheus counters.
            files_in_s3.set(success.pop('total', 0))
            set_op_counts(success, op_count)
            set_op_counts(failures, failed_op_count)
            last_sync.set_to_current_time()
        except Exception:
            # Fixed typo in log message ("excute" -> "execute").
            logger.exception("Failed to execute sync")
        duration = time.monotonic() - start
        logger.debug("Stopping sync after %g secs", duration)
        # NOTE(review): max(30, ...) enforces a 30-second minimum pause, so
        # any `interval` below 30 is silently ignored. If the intent was to
        # clamp negative waits, this should be max(0, interval - duration) —
        # behavior left unchanged pending confirmation.
        set_exit.wait(max(30, interval - duration))