def test_saves_to_file(self):
    state = SyncState(self.state_path)
    state.push_path("a")
    state.push_path("b")
    state.sync_path("c")
    state.sync_path("d")
    self.assertStateFile("""\
2016-01-01T12:00:00 +push a
2016-01-01T12:00:00 +push b
2016-01-01T12:00:00 +sync c
2016-01-01T12:00:00 +sync d
""")
    state.push_done("a")
    state.sync_done("d")
    self.assertStateFile("""\
2016-01-01T12:00:00 +push a
2016-01-01T12:00:00 +push b
2016-01-01T12:00:00 +sync c
2016-01-01T12:00:00 +sync d
2016-01-01T12:00:00 -push a
2016-01-01T12:00:00 -sync d
""")
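# Hypothetical sketch of the storeToState/assertStateFile helpers the
# tests here rely on; the real fixture may differ. The assumption is that
# assertStateFile normalizes whatever timestamps were actually written to
# the fixed 2016-01-01T12:00:00 placeholder, unless raw=True, in which
# case the file must match byte for byte (i.e. it was never rewritten).
import re

TIMESTAMP_RE = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}")

class StateFileAssertionsSketch(object):
    def storeToState(self, contents):
        # Seed the state file with a known sequence of records.
        with open(self.state_path, "w") as state_file:
            state_file.write(contents)

    def assertStateFile(self, expected, raw=False):
        with open(self.state_path) as state_file:
            actual = state_file.read()
        if not raw:
            # Collapse real timestamps to the placeholder used in the
            # expected strings.
            actual = TIMESTAMP_RE.sub("2016-01-01T12:00:00", actual)
        self.assertEqual(actual, expected)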
def test_raises_error_if_syncing_nonexistent_path(self):
    state = SyncState(self.state_path)
    s3 = S3Null({
        "S3_URI_PREFIX": "s3://foo",
        "ROOT_PATH": root_dir,
    }, state)
    with self.assertRaisesRegex(
            ValueError,
            r"^trying to sync a non-existent path: nonexistent"):
        state.sync_path("nonexistent")
def test_raises_error_if_syncing_a_non_directory(self):
    state = SyncState(self.state_path)
    s3 = S3Null({
        "S3_URI_PREFIX": "s3://foo",
        "ROOT_PATH": root_dir,
    }, state)
    with open(os.path.join(root_dir, "foo"), 'w') as foo:
        pass
    with self.assertRaisesRegex(
            ValueError,
            r"^trying to sync a path which is not a directory: foo"):
        state.sync_path("foo")
def test_raises_if_s3_uri_prefix_missing(self):
    state = SyncState(self.state_path)
    with self.assertRaisesRegex(
            ImproperlyConfigured,
            r"^you must specify S3_URI_PREFIX in the general "
            r"configuration$"):
        S3Null({}, state)
def test_records_state_in_memory(self):
    state = SyncState(self.state_path)
    state.push_path("a")
    state.push_path("b")
    state.sync_path("c")
    state.sync_path("d")
    self.assertEqual(state.current_state, {
        "push": ["a", "b"],
        "sync": ["c", "d"],
    })
    state.push_done("a")
    state.sync_done("d")
    self.assertEqual(state.current_state, {
        "push": ["b"],
        "sync": ["c"],
    })
def test_raises_if_root_path_missing(self):
    state = SyncState(self.state_path)
    with self.assertRaisesRegex(
            ImproperlyConfigured,
            r"^you must specify ROOT_PATH in the general "
            r"configuration$"):
        S3Null({
            "S3_URI_PREFIX": "foo"
        }, state)
def test_is_mutually_exclusive(self):
    state = SyncState(self.state_path)

    # We have to do this in a different process because our own process
    # can reopen the same file multiple times.
    def target():
        with self.assertRaises(IOError):
            SyncState(self.state_path)

    p = multiprocessing.Process(target=target)
    p.start()
    p.join()
    # A failed assertion in the child only surfaces as a non-zero exit
    # code, so check it.
    self.assertEqual(p.exitcode, 0)
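# The in-process caveat above matches the semantics of POSIX fcntl locks,
# which are held per process rather than per file descriptor. One
# plausible way SyncState could take its exclusive lock (an assumption
# for illustration, not necessarily the actual implementation):
import fcntl

def acquire_exclusive_lock(state_file):
    # Raises IOError/OSError immediately if another process already holds
    # the lock; a second call from the same process succeeds.
    fcntl.lockf(state_file, fcntl.LOCK_EX | fcntl.LOCK_NB)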
def test_reads_from_file(self):
    self.storeToState("""\
2016-01-01T12:00:00 +push a
2016-01-01T12:00:00 +push b
2016-01-01T12:00:00 +sync c
2016-01-01T12:00:00 +sync d
""")
    state = SyncState(self.state_path).current_state
    self.assertEqual(state, {
        "push": ["a", "b"],
        "sync": ["c", "d"],
    })
    self.storeToState("""\
2016-01-01T12:00:00 +push a
2016-01-01T12:00:00 +push b
2016-01-01T12:00:00 +sync c
2016-01-01T12:00:00 +sync d
2016-01-01T12:00:00 -push a
2016-01-01T12:00:00 -sync d
""")
    state = SyncState(self.state_path).current_state
    self.assertEqual(state, {
        "push": ["b"],
        "sync": ["c"],
    })
    # This simulates a sync that was requested on the path "", which is
    # a valid path. It is equivalent to ROOT_PATH.
    self.storeToState("""\
2016-01-01T12:00:00 +sync \n\
""")
    state = SyncState(self.state_path).current_state
    self.assertEqual(state, {
        "push": [],
        "sync": [""],
    })
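# Each record is "<ISO timestamp> <op> <path>": +push/+sync schedules a
# path and -push/-sync marks it done, and the path may be empty (meaning
# ROOT_PATH). A hypothetical parser, only to illustrate the format:
def parse_record(line):
    # maxsplit=2 keeps an empty path intact, e.g. for "... +sync ".
    timestamp, op, path = line.rstrip("\n").split(" ", 2)
    return timestamp, op[0], op[1:], path  # ("+"/"-", "push"/"sync", path)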
def test_survives_fatal_errors(self):
    state = SyncState(self.state_path)
    s3 = S3Null({
        "S3_URI_PREFIX": "s3://foo",
        "ROOT_PATH": root_dir,
    }, state)
    os.mkdir(os.path.join(root_dir, "server"))
    state.sync_path("server")
    state.sync_path("")
    # We can abuse push_path because pushed paths are not checked.
    state.push_path("a")
    state.push_path("b")
    stderr = StringIO()
    s3._cached_stderr = stderr
    s3._fail_on = set(("a", ""))
    s3.run()
    # Some went through.
    self.assertEqual(s3._pushed, set(("b",)))
    self.assertEqual(s3._synced, set(("server",)))
    # Some failed.
    self.assertRegex(
        stderr.getvalue(),
        re.compile(r"^Error while processing: a$", re.MULTILINE))
    self.assertRegex(
        stderr.getvalue(),
        re.compile(r"^Error while processing: $", re.MULTILINE))
    # Those that failed still need to be done.
    self.assertEqual(state.current_state, {
        "push": ["a"],
        "sync": [""],
    })
    # The state on disk does not show the failures as done.
    # The funky "\n\" in what follows is to prevent git from swallowing
    # the space at the end of the line.
    self.assertStateFile("""\
2016-01-01T12:00:00 +sync server
2016-01-01T12:00:00 +sync \n\
2016-01-01T12:00:00 +push a
2016-01-01T12:00:00 +push b
2016-01-01T12:00:00 -push b
2016-01-01T12:00:00 -sync server
""")
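# A rough sketch of the S3Null test double these tests assume: it drains
# the pending operations, records what it would have pushed or synced,
# and treats anything listed in _fail_on as a per-path failure that is
# reported on stderr without aborting the run. Hypothetical, for
# illustration only.
import sys

class S3NullSketch(object):
    def __init__(self, config, state):
        self.state = state
        self._pushed = set()
        self._synced = set()
        self._fail_on = set()
        self._cached_stderr = sys.stderr

    def run(self):
        for verb, mark_done, seen in (
                ("push", self.state.push_done, self._pushed),
                ("sync", self.state.sync_done, self._synced)):
            for path in list(self.state.current_state[verb]):
                if path in self._fail_on:
                    self._cached_stderr.write(
                        "Error while processing: %s\n" % path)
                    continue  # failed paths stay pending
                seen.add(path)
                mark_done(path)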
def test_does_not_modify_a_file_if_nothing_changes(self):
    self.storeToState("""\
2016-01-01T12:00:00 +push a
2016-01-01T12:00:00 +push b
2016-01-01T12:00:00 +sync c
2016-01-01T12:00:00 +sync d
""")
    state = SyncState(self.state_path)
    # Reading current_state forces the file to be loaded; it must not
    # cause a rewrite.
    state.current_state
    self.assertStateFile("""\
2016-01-01T12:00:00 +push a
2016-01-01T12:00:00 +push b
2016-01-01T12:00:00 +sync c
2016-01-01T12:00:00 +sync d
""", raw=True)
def test_pushes_and_syncs(self):
    state = SyncState(self.state_path)
    s3 = S3Null({
        "S3_URI_PREFIX": "s3://foo",
        "ROOT_PATH": root_dir,
    }, state)
    os.mkdir(os.path.join(root_dir, "server"))
    state.sync_path("server")
    state.sync_path("")
    # We can abuse push_path because pushed paths are not checked.
    state.push_path("a")
    state.push_path("b")
    s3.run()
    self.assertEqual(s3._pushed, set(("a", "b")))
    self.assertEqual(s3._synced, set(("server", "")))
def test_emits_on_sync_path(self):
    state = SyncState(self.state_path)
    paths = []
    state.ee.on('sync', paths.append)
    state.sync_path("a")
    self.assertEqual(paths, ["a"])