def test_previously_deleted_now_created(self, local_client, s3_client):
    """A file deleted on both sides can be recreated locally, synced, then deleted again.

    Fix: the local index key was misspelled 'local_timstamp', so the local
    index entry never carried the intended None local timestamp.
    """
    utils.set_s3_index(s3_client, {
        'foo': {
            'local_timestamp': None,
            'remote_timestamp': 4000,
        }
    })
    utils.set_local_index(local_client, {
        'foo': {
            'local_timestamp': None,
            'remote_timestamp': 4000,
        }
    })
    utils.set_local_contents(local_client, 'foo', timestamp=7000)

    # Will create previously deleted file
    worker = sync.SyncWorker(local_client, s3_client)
    worker.sync()

    clients = [local_client, s3_client]
    assert_local_keys(clients, ['foo'])
    assert_remote_timestamp(clients, 'foo', 7000)

    # delete the file again and check that it is successful
    utils.delete_local(local_client, 'foo')
    worker.sync()
    assert_local_keys(clients, [])
    assert_remote_timestamp(clients, 'foo', 7000)
def test_correct_output_nonempty(self, s3_client, local_client, capsys):
    """ls prints the merged local/s3 timestamp table, key-sorted, plus a size total."""
    target = {
        "local_folder": local_client.get_uri(),
        "s3_uri": s3_client.get_uri(),
        "aws_access_key_id": "",
        "aws_secret_access_key": "",
        "region_name": "eu-west-2",
    }
    config = {"targets": {"foo": target}}

    set_s3_index(s3_client, {
        "milk": {"local_timestamp": get_timestamp(1989, 10, 23, 11, 30)},
        "honey": {"local_timestamp": get_timestamp(2016, 12, 12, 8, 30)},
        "ginger": {"local_timestamp": None},
    })
    set_local_index(local_client, {
        "milk": {"local_timestamp": get_timestamp(2016, 12, 12, 8, 30)},
        "honey": {"local_timestamp": get_timestamp(2016, 11, 10, 18, 40)},
        "lemon": {"local_timestamp": get_timestamp(2017, 2, 2, 8, 30)},
        "ginger": {"local_timestamp": None},
    })

    cli_args = argparse.Namespace(
        target="foo", sort_by="key", show_all=False, descending=False)
    LsCommand(cli_args, config, create_logger()).run()

    out, err = capsys.readouterr()
    assert err == ""
    # "ginger" is None on both sides and show_all is off, so it is omitted.
    assert out == ("key local s3\n"
                   "----- ------------------- -------------------\n"
                   "honey 2016-11-10 18:40:00 2016-12-12 08:30:00\n"
                   "lemon 2017-02-02 08:30:00\n"
                   "milk 2016-12-12 08:30:00 1989-10-23 11:30:00\n"
                   "Total Size: 0.00Mb\n")
def test_nochanges_but_different_remote_timestamps(self, local_client, s3_client):
    """Unchanged content with a newer remote timestamp resolves to an UPDATE.

    Fix: build the expected Resolution from the Resolution.UPDATE constant
    rather than the hard-coded string "UPDATE", matching how the other
    tests in this file construct expected Resolution objects.
    """
    utils.set_local_index(local_client, {
        'german.txt': {
            'local_timestamp': 4000,
            'remote_timestamp': 4000,
        }
    })
    utils.set_s3_index(s3_client, {
        'german.txt': {
            'local_timestamp': 6000,
            'remote_timestamp': 6000,
        }
    })
    utils.set_local_contents(local_client, 'german.txt', timestamp=4000)
    utils.set_s3_contents(s3_client, 'german.txt', timestamp=6000)

    worker = sync.SyncWorker(local_client, s3_client)
    resolutions, unhandled_events = worker.get_sync_states()

    expected_resolutions = {
        'german.txt': Resolution(
            Resolution.UPDATE, local_client, s3_client, 'german.txt', 6000
        )
    }
    assert resolutions == expected_resolutions
    assert unhandled_events == {}
def test_set_index_timestamps(self, s3_client):
    """Timestamp setters overwrite existing index entries and create new ones."""
    # given: one pre-existing entry
    utils.set_s3_index(s3_client, {
        'red': {'remote_timestamp': 1234, 'local_timestamp': 1200},
    })

    # when: overwrite both fields of 'red' and touch two brand-new keys
    s3_client.set_index_local_timestamp('red', 3000)
    s3_client.set_remote_timestamp('red', 4000)
    s3_client.set_index_local_timestamp('green', 5000)
    s3_client.set_remote_timestamp('yellow', 6000)

    # then: each key holds exactly the fields that were set
    assert s3_client.index == {
        'red': {'local_timestamp': 3000, 'remote_timestamp': 4000},
        'green': {'local_timestamp': 5000},
        'yellow': {'remote_timestamp': 6000},
    }
def test_set_index_timestamps(self, s3_client):
    """Setting local/remote timestamps updates or creates index entries."""
    # given: a single seeded entry
    seed = {"red": {"remote_timestamp": 1234, "local_timestamp": 1200}}
    utils.set_s3_index(s3_client, seed)

    # when: rewrite "red" and write to two keys that do not exist yet
    s3_client.set_index_local_timestamp("red", 3000)
    s3_client.set_remote_timestamp("red", 4000)
    s3_client.set_index_local_timestamp("green", 5000)
    s3_client.set_remote_timestamp("yellow", 6000)

    # then: new keys only carry the single field that was written
    expected = {
        "red": {"local_timestamp": 3000, "remote_timestamp": 4000},
        "green": {"local_timestamp": 5000},
        "yellow": {"remote_timestamp": 6000},
    }
    assert s3_client.index == expected
def test_updated_but_different_remote_timestamp(self, local_client, s3_client):
    """A local update racing a newer remote copy is reported as an unhandled conflict."""
    local_index = {
        'biology.txt': {'local_timestamp': 4000, 'remote_timestamp': 3000},
    }
    remote_index = {
        'biology.txt': {'local_timestamp': 6000, 'remote_timestamp': 6000},
    }
    utils.set_local_index(local_client, local_index)
    utils.set_s3_index(s3_client, remote_index)
    utils.set_local_contents(local_client, 'biology.txt', timestamp=4500)
    utils.set_s3_contents(s3_client, 'biology.txt', timestamp=6000)

    worker = sync.SyncWorker(local_client, s3_client)
    resolutions, unhandled_events = worker.get_sync_states()

    # Nothing is resolvable automatically; the conflicting pair is surfaced.
    assert resolutions == {}
    assert unhandled_events == {
        'biology.txt': (
            SyncState(SyncState.UPDATED, 4500, 3000),
            SyncState(SyncState.NOCHANGES, 6000, 6000),
        )
    }
def test_deleted_but_different_remote_timestamp(self, local_client, s3_client):
    """A local delete racing a newer remote copy is left as an unhandled conflict."""
    utils.set_local_index(
        local_client,
        {"chemistry.txt": {"local_timestamp": 4000, "remote_timestamp": 3000}},
    )
    utils.set_s3_index(
        s3_client,
        {"chemistry.txt": {"local_timestamp": 6000, "remote_timestamp": 6000}},
    )
    # Only the s3 side has actual contents; no local file is created.
    utils.set_s3_contents(s3_client, "chemistry.txt", timestamp=6000)

    worker = sync.SyncWorker(local_client, s3_client)
    resolutions, unhandled_events = worker.get_sync_states()

    assert resolutions == {}
    assert unhandled_events == {
        "chemistry.txt": (
            SyncState(SyncState.DELETED, None, 3000),
            SyncState(SyncState.NOCHANGES, 6000, 6000),
        )
    }
def test_correct_output_nonempty(self, s3_client, local_client, capsys):
    """ls lists merged local and s3 timestamps for live keys, sorted by key."""
    config = {
        'targets': {
            'foo': {
                'local_folder': local_client.get_uri(),
                's3_uri': s3_client.get_uri(),
                'aws_access_key_id': '',
                'aws_secret_access_key': '',
                'region_name': 'eu-west-2',
            }
        }
    }
    set_s3_index(s3_client, {
        'milk': {'local_timestamp': get_timestamp(1989, 10, 23, 11, 30)},
        'honey': {'local_timestamp': get_timestamp(2016, 12, 12, 8, 30)},
        'ginger': {'local_timestamp': None},
    })
    set_local_index(local_client, {
        'milk': {'local_timestamp': get_timestamp(2016, 12, 12, 8, 30)},
        'honey': {'local_timestamp': get_timestamp(2016, 11, 10, 18, 40)},
        'lemon': {'local_timestamp': get_timestamp(2017, 2, 2, 8, 30)},
        'ginger': {'local_timestamp': None},
    })

    cli_args = argparse.Namespace(
        target='foo',
        sort_by='key',
        show_all=False,
        descending=False,
    )
    LsCommand(cli_args, config, create_logger()).run()

    out, err = capsys.readouterr()
    assert err == ""
    # 'ginger' has None timestamps on both sides and show_all is off,
    # so it does not appear in the table.
    assert out == ('key local s3\n'
                   '----- ------------------- -------------------\n'
                   'honey 2016-11-10 18:40:00 2016-12-12 08:30:00\n'
                   'lemon 2017-02-02 08:30:00\n'
                   'milk 2016-12-12 08:30:00 1989-10-23 11:30:00\n')
def test_get_index_keys(self, s3_client):
    """get_index_keys returns every key stored in the s3 index."""
    entry = {'local_timestamp': 4000, 'remote_timestamp': 3000}
    # Independent copies so each key owns its own dict.
    utils.set_s3_index(s3_client, {
        'cow': dict(entry),
        'chicken': dict(entry),
    })

    assert sorted(s3_client.get_index_keys()) == ['chicken', 'cow']
def test_show_all(self, s3_client, local_client, capsys):
    """With show_all=True, deleted keys are listed with a <deleted> marker."""
    target = {
        "local_folder": local_client.get_uri(),
        "s3_uri": s3_client.get_uri(),
        "aws_access_key_id": "",
        "aws_secret_access_key": "",
        "region_name": "eu-west-2",
    }
    config = {"targets": {"foo": target}}

    set_s3_index(s3_client, {
        "cheese": {"local_timestamp": get_timestamp(2017, 12, 12, 8, 30)},
        "crackers": {"local_timestamp": None},
    })
    set_local_index(local_client, {
        "cheese": {"local_timestamp": get_timestamp(2017, 2, 2, 8, 30)},
        "crackers": {"local_timestamp": None},
    })

    cli_args = argparse.Namespace(
        target="foo", sort_by="key", show_all=True, descending=False)
    LsCommand(cli_args, config, create_logger()).run()

    out, err = capsys.readouterr()
    assert err == ""
    assert out == ("key local s3\n"
                   "-------- ------------------- -------------------\n"
                   "cheese 2017-02-02 08:30:00 2017-12-12 08:30:00\n"
                   "crackers <deleted>\n"
                   "Total Size: 0.00Mb\n")
def test_get_index_keys(self, s3_client):
    """Every key written to the index comes back from get_index_keys."""
    utils.set_s3_index(
        s3_client,
        {
            "cow": {"local_timestamp": 4000, "remote_timestamp": 3000},
            "chicken": {"local_timestamp": 4000, "remote_timestamp": 3000},
        },
    )

    returned_keys = sorted(s3_client.get_index_keys())
    assert returned_keys == sorted(["cow", "chicken"])
def test_show_all(self, s3_client, local_client, capsys):
    """show_all=True includes keys deleted on both sides, marked <deleted>."""
    config = {
        'targets': {
            'foo': {
                'local_folder': local_client.get_uri(),
                's3_uri': s3_client.get_uri(),
                'aws_access_key_id': '',
                'aws_secret_access_key': '',
                'region_name': 'eu-west-2',
            }
        }
    }
    set_s3_index(s3_client, {
        'cheese': {'local_timestamp': get_timestamp(2017, 12, 12, 8, 30)},
        'crackers': {'local_timestamp': None},
    })
    set_local_index(local_client, {
        'cheese': {'local_timestamp': get_timestamp(2017, 2, 2, 8, 30)},
        'crackers': {'local_timestamp': None},
    })

    cli_args = argparse.Namespace(
        target='foo',
        sort_by='key',
        show_all=True,
        descending=False,
    )
    command = LsCommand(cli_args, config, create_logger())
    command.run()

    out, err = capsys.readouterr()
    assert err == ""
    assert out == ('key local s3\n'
                   '-------- ------------------- -------------------\n'
                   'cheese 2017-02-02 08:30:00 2017-12-12 08:30:00\n'
                   'crackers <deleted>\n')
def test_get_all_index_timestamps(self, s3_client):
    """All local timestamps are returned as a flat key -> timestamp mapping."""
    # given
    utils.set_s3_index(s3_client, {
        'hello': {'local_timestamp': 1200},
        'world': {'local_timestamp': 4000},
    })

    # then
    assert s3_client.get_all_index_local_timestamps() == {
        'hello': 1200,
        'world': 4000,
    }
def test_get_all_index_timestamps(self, s3_client):
    """get_all_index_local_timestamps flattens the index to key -> local ts."""
    # given: two entries carrying only a local timestamp
    index = {
        "hello": {"local_timestamp": 1200},
        "world": {"local_timestamp": 4000},
    }
    utils.set_s3_index(s3_client, index)

    # then
    actual_output = s3_client.get_all_index_local_timestamps()
    assert actual_output == {"hello": 1200, "world": 4000}
def test_get_index_timestamps(self, s3_client):
    """Timestamp getters read back stored values; a missing field reads as None."""
    utils.set_s3_index(s3_client, {
        'hello': {'remote_timestamp': 1234, 'local_timestamp': 1200},
        'world': {'remote_timestamp': 5000},
    })

    # 'hello' has both fields set.
    assert s3_client.get_remote_timestamp('hello') == 1234
    assert s3_client.get_index_local_timestamp('hello') == 1200
    # 'world' has no local timestamp, which reads back as None.
    assert s3_client.get_remote_timestamp('world') == 5000
    assert s3_client.get_index_local_timestamp('world') is None
def test_update_index(self, s3_client):
    """update_index reconciles the index with the actual bucket contents."""
    # given: an index with two entries, plus bucket objects the index
    # does not know about yet ("yellow", "orange")
    utils.set_s3_index(
        s3_client,
        {
            "red": {"remote_timestamp": 1234, "local_timestamp": 1200},
            "green": {"remote_timestamp": 5000},
        },
    )
    utils.set_s3_contents(s3_client, "red", timestamp=5001)
    utils.set_s3_contents(s3_client, "yellow", timestamp=1000)
    utils.set_s3_contents(s3_client, "orange", timestamp=2000)

    # when
    s3_client.update_index()

    # then: local timestamps track the bucket; keys without contents or
    # without a prior remote timestamp get None for the missing field
    assert s3_client.index == {
        "red": {"remote_timestamp": 1234, "local_timestamp": 5001},
        "green": {"remote_timestamp": 5000, "local_timestamp": None},
        "yellow": {"remote_timestamp": None, "local_timestamp": 1000},
        "orange": {"remote_timestamp": None, "local_timestamp": 2000},
    }
def test_update_index(self, s3_client):
    """After update_index, each entry's local timestamp matches the bucket."""
    # given
    utils.set_s3_index(s3_client, {
        'red': {'remote_timestamp': 1234, 'local_timestamp': 1200},
        'green': {'remote_timestamp': 5000},
    })
    for key, stamp in (('red', 5001), ('yellow', 1000), ('orange', 2000)):
        utils.set_s3_contents(s3_client, key, timestamp=stamp)

    # when
    s3_client.update_index()

    # then
    expected_index = {
        'red': {'remote_timestamp': 1234, 'local_timestamp': 5001},
        'green': {'remote_timestamp': 5000, 'local_timestamp': None},
        'yellow': {'remote_timestamp': None, 'local_timestamp': 1000},
        'orange': {'remote_timestamp': None, 'local_timestamp': 2000},
    }
    assert s3_client.index == expected_index
def test_get_index_timestamps(self, s3_client, compression):
    """Timestamp getters behave the same for every compression setting."""
    # given: index written under the parametrized compression mode
    index = {
        "hello": {"remote_timestamp": 1234, "local_timestamp": 1200},
        "world": {"remote_timestamp": 5000},
    }
    utils.set_s3_index(s3_client, index, compression=compression)

    # then: stored fields read back; the absent local timestamp is None
    assert s3_client.get_remote_timestamp("hello") == 1234
    assert s3_client.get_index_local_timestamp("hello") == 1200
    assert s3_client.get_remote_timestamp("world") == 5000
    assert s3_client.get_index_local_timestamp("world") is None
def test_correct_output(self, local_client, s3_client):
    """get_sync_states splits keys into automatic resolutions and unhandled conflicts."""
    utils.set_local_index(
        local_client,
        {
            "chemistry.txt": {"local_timestamp": 9431, "remote_timestamp": 9431},
            "physics.txt": {"local_timestamp": 10000, "remote_timestamp": 10000},
            "maltese.txt": {"local_timestamp": 7000, "remote_timestamp": 6000},
        },
    )
    utils.set_s3_index(
        s3_client,
        {
            "chemistry.txt": {"local_timestamp": 10000, "remote_timestamp": 9431},
            "physics.txt": {"local_timestamp": 13000, "remote_timestamp": 12000},
            "maltese.txt": {"local_timestamp": 6000, "remote_timestamp": 6000},
        },
    )
    # New on exactly one side.
    utils.set_local_contents(local_client, "history.txt", timestamp=5000)
    utils.set_s3_contents(s3_client, "art.txt", timestamp=200000)
    # Created independently on both sides.
    utils.set_local_contents(local_client, "english.txt", timestamp=90000)
    utils.set_s3_contents(s3_client, "english.txt", timestamp=93000)
    # Present on s3 only; the local index entry has no backing file.
    utils.set_s3_contents(s3_client, "chemistry.txt", timestamp=10000)
    # Changed on both sides.
    utils.set_local_contents(local_client, "physics.txt", timestamp=11000)
    utils.set_s3_contents(s3_client, "physics.txt", timestamp=13000)
    # Newer copy on s3 only.
    utils.set_local_contents(local_client, "maltese.txt", timestamp=7000)
    utils.set_s3_contents(s3_client, "maltese.txt", timestamp=8000)

    worker = sync.SyncWorker(local_client, s3_client)
    resolutions, unhandled_events = worker.get_sync_states()

    assert unhandled_events == {
        "english.txt": (
            SyncState(SyncState.CREATED, 90000, None),
            SyncState(SyncState.CREATED, 93000, None),
        ),
        "physics.txt": (
            SyncState(SyncState.UPDATED, 11000, 10000),
            SyncState(SyncState.NOCHANGES, 13000, 12000),
        ),
    }
    assert resolutions == {
        "maltese.txt":
            Resolution(Resolution.UPDATE, local_client, s3_client,
                       "maltese.txt", 8000),
        "chemistry.txt":
            Resolution(Resolution.DELETE, s3_client, None,
                       "chemistry.txt", 9431),
        "history.txt":
            Resolution(Resolution.CREATE, s3_client, local_client,
                       "history.txt", 5000),
        "art.txt":
            Resolution(Resolution.CREATE, local_client, s3_client,
                       "art.txt", 200000),
    }
def test_correct_output(self, local_client, s3_client):
    """Mixed create/update/delete scenarios produce the expected resolutions
    and leave genuine conflicts in unhandled_events."""
    utils.set_local_index(local_client, {
        'chemistry.txt': {'local_timestamp': 9431, 'remote_timestamp': 9431},
        'physics.txt': {'local_timestamp': 10000, 'remote_timestamp': 10000},
        'maltese.txt': {'local_timestamp': 7000, 'remote_timestamp': 6000},
    })
    utils.set_s3_index(s3_client, {
        'chemistry.txt': {'local_timestamp': 10000, 'remote_timestamp': 9431},
        'physics.txt': {'local_timestamp': 13000, 'remote_timestamp': 12000},
        'maltese.txt': {'local_timestamp': 6000, 'remote_timestamp': 6000},
    })
    utils.set_local_contents(local_client, 'history.txt', timestamp=5000)
    utils.set_s3_contents(s3_client, 'art.txt', timestamp=200000)
    utils.set_local_contents(local_client, 'english.txt', timestamp=90000)
    utils.set_s3_contents(s3_client, 'english.txt', timestamp=93000)
    utils.set_s3_contents(s3_client, 'chemistry.txt', timestamp=10000)
    utils.set_local_contents(local_client, 'physics.txt', timestamp=11000)
    utils.set_s3_contents(s3_client, 'physics.txt', timestamp=13000)
    utils.set_local_contents(local_client, 'maltese.txt', timestamp=7000)
    utils.set_s3_contents(s3_client, 'maltese.txt', timestamp=8000)

    sync_worker = sync.SyncWorker(local_client, s3_client)
    resolutions, unhandled_events = sync_worker.get_sync_states()

    # Both sides created/changed these independently -> cannot auto-resolve.
    expected_unhandled_events = {
        'english.txt': (
            SyncState(SyncState.CREATED, 90000, None),
            SyncState(SyncState.CREATED, 93000, None),
        ),
        'physics.txt': (
            SyncState(SyncState.UPDATED, 11000, 10000),
            SyncState(SyncState.NOCHANGES, 13000, 12000),
        ),
    }
    expected_resolutions = {
        'maltese.txt': Resolution(
            Resolution.UPDATE, local_client, s3_client, 'maltese.txt', 8000),
        'chemistry.txt': Resolution(
            Resolution.DELETE, s3_client, None, 'chemistry.txt', 9431),
        'history.txt': Resolution(
            Resolution.CREATE, s3_client, local_client, 'history.txt', 5000),
        'art.txt': Resolution(
            Resolution.CREATE, local_client, s3_client, 'art.txt', 200000),
    }
    assert unhandled_events == expected_unhandled_events
    assert resolutions == expected_resolutions