def test_collection_ingest():
    """Ingest DATA_FILE into an S3-backed collection and verify the result.

    Checks that exactly one package is created, that its id equals the
    content checksum of the ingested file, and that it carries a single
    Source named 'test.csv'.
    """
    store = S3Store(bucket_name='test_bucket')
    coll = Collection('test', store)
    coll.ingest(DATA_FILE)

    pkgs = list(coll)
    assert len(pkgs) == 1, pkgs
    pkg0 = pkgs[0]
    # Package id is the content checksum of the ingested file.
    assert pkg0.id == checksum(DATA_FILE), pkg0.id
    # FIX: was the Python 2 statement `print pkg0` — a SyntaxError on Python 3.
    print(pkg0)

    sources = list(pkg0.all(Source))
    assert len(sources) == 1, sources
    assert sources[0].name == 'test.csv', sources[0].name
def test_collection_ingest():
    """Ingest DATA_FILE into a file-backed collection and verify the result.

    Checks that exactly one package is created, that its id equals the
    content checksum of the ingested file, and that it carries a single
    Source named "test.csv".
    """
    path = mkdtemp()
    try:
        store = FileStore(path=path)
        coll = Collection("test", store)
        coll.ingest(DATA_FILE)

        pkgs = list(coll)
        assert len(pkgs) == 1, pkgs
        pkg0 = pkgs[0]
        # Package id is the content checksum of the ingested file.
        assert pkg0.id == checksum(DATA_FILE), pkg0.id

        sources = list(pkg0.all(Source))
        assert len(sources) == 1, sources
        assert sources[0].name == "test.csv", sources[0].name
    finally:
        # FIX: rmtree was unconditional-after-asserts, so a failing assertion
        # leaked the mkdtemp directory. Always clean up.
        rmtree(path)
def test_collection_ingest():
    """Verify that ingesting DATA_FILE into a file-backed collection
    produces one package whose id is the file checksum and which holds
    a single Source named 'test.csv'."""
    tmp_dir = mkdtemp()
    file_store = FileStore(path=tmp_dir)
    collection = Collection('test', file_store)
    collection.ingest(DATA_FILE)

    packages = list(collection)
    assert len(packages) == 1, packages
    package = packages[0]
    assert package.id == checksum(DATA_FILE), package.id

    package_sources = list(package.all(Source))
    assert len(package_sources) == 1, package_sources
    assert package_sources[0].name == 'test.csv', package_sources[0].name

    rmtree(tmp_dir)
def hash(self):
    """Return the SHA1 checksum of the ingested object.

    The checksum is computed from the local copy on first access and
    cached on the instance; subsequent calls return the cached value.
    """
    if self._hash is not None:
        return self._hash
    self._hash = checksum(self.local())
    return self._hash