def test_move_on_disk(test_dataset: DatasetForTests,
                      integration_test_data: Path,
                      other_dataset: DatasetForTests):
    """
    An indexed dataset was moved on disk over the top of another indexed dataset.
    """
    # Both datasets start out in the index.
    test_dataset.add_to_index()
    other_dataset.add_to_index()

    # Physically move the second dataset onto the first dataset's location.
    shutil.move(other_dataset.path, str(uri_to_local_path(test_dataset.uri)))

    _check_sync(
        collection=test_dataset.collection,
        expected_paths=[
            test_dataset.uri,
            other_dataset.path.as_uri(),
        ],
        expected_mismatches=[
            # The first dataset's indexed location no longer holds it...
            mm.LocationMissingOnDisk(test_dataset.dataset, test_dataset.uri),
            # ...the second dataset is now at a location the index doesn't record...
            mm.LocationNotIndexed(other_dataset.dataset, test_dataset.uri),
            # ...and the second dataset's indexed location is now empty.
            mm.LocationMissingOnDisk(other_dataset.dataset, other_dataset.path.as_uri()),
        ],
        expected_index_result={
            test_dataset.dataset: (),
            other_dataset.dataset: (test_dataset.uri,),
            test_dataset.parent: (),
        },
        cache_path=integration_test_data,
        fix_settings=dict(index_missing=True, update_locations=True),
    )
def test_replace_on_disk(test_dataset: DatasetForTests,
                         integration_test_data: Path,
                         other_dataset: DatasetForTests):
    """
    The file on disk has a different id to the one in the index
    (ie. it was quietly reprocessed).
    """
    # Only the first dataset is indexed.
    test_dataset.add_to_index()

    # Move a new (unindexed) dataset over the top of the indexed one.
    shutil.move(other_dataset.path, str(uri_to_local_path(test_dataset.uri)))

    _check_sync(
        collection=test_dataset.collection,
        expected_paths=[
            test_dataset.uri
        ],
        expected_mismatches=[
            # The indexed dataset's location no longer holds it...
            mm.LocationMissingOnDisk(test_dataset.dataset, test_dataset.uri),
            # ...and the replacement dataset at that location is unknown to the index.
            mm.DatasetNotIndexed(other_dataset.dataset, test_dataset.uri),
        ],
        expected_index_result={
            test_dataset.dataset: (),
            other_dataset.dataset: (test_dataset.uri,),
            test_dataset.parent: (),
        },
        cache_path=integration_test_data,
        fix_settings=dict(index_missing=True, update_locations=True),
    )
def test_new_and_old_on_disk(test_dataset: DatasetForTests,
                             integration_test_data: Path,
                             other_dataset: DatasetForTests):
    """
    One indexed dataset is missing from disk, while another dataset on disk
    is missing from the index.
    """
    # An indexed file not on disk, and a disk file not in the index.
    missing_dataset = other_dataset
    missing_dataset.add_to_index()

    # Remove it from disk, so its indexed location is now stale.
    shutil.rmtree(str(missing_dataset.copyable_path))

    _check_sync(
        collection=test_dataset.collection,
        expected_paths=[
            missing_dataset.uri,
            test_dataset.uri
        ],
        expected_mismatches=[
            # FIX: assert against the dataset actually added to the index
            # (missing_dataset.dataset), not a freshly constructed DatasetLite
            # with an arbitrary hard-coded uuid that was never indexed —
            # consistent with how the sibling tests reference other_dataset.dataset.
            mm.LocationMissingOnDisk(missing_dataset.dataset, missing_dataset.uri),
            mm.DatasetNotIndexed(test_dataset.dataset, test_dataset.uri)
        ],
        expected_index_result={
            test_dataset.dataset: (test_dataset.uri,),
            missing_dataset.dataset: (),
            test_dataset.parent: (),
        },
        cache_path=integration_test_data,
        fix_settings=dict(index_missing=True, update_locations=True)
    )