def test_metadata_flush_reload(self, get_tmpdir):
    """Write a download result into the repo metadata, flush it to disk, drop the in-memory copy, and verify reload restores it."""
    tmpdir = get_tmpdir
    r = LocalFeedDataRepo(metadata=LocalFeedDataRepoMetadata(data_write_dir=tmpdir))
    r.initialize()

    ts = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
    r.metadata.download_result = DownloadOperationResult(
        started=ts, status=FeedDownloader.State.in_progress.value, results=[]
    )
    r.flush_metadata()

    # Clear the in-memory metadata and reload it from disk
    r.metadata = None
    r.reload_metadata()
    assert r.metadata.download_result.started == ts
    assert (
        r.metadata.download_result.status == FeedDownloader.State.in_progress.value
    )
def test_LocalFeedDataRepo():
    """Exercise the full LocalFeedDataRepo lifecycle: initialize, flush/reload metadata, write a data chunk, read it back, and tear down."""
    tmpdir = tempfile.mkdtemp(prefix="anchoretest_repo-")
    r = LocalFeedDataRepo(metadata=LocalFeedDataRepoMetadata(data_write_dir=tmpdir))
    try:
        assert os.listdir(tmpdir) == []
        r.initialize()
        assert os.listdir(tmpdir) == ["metadata.json"]

        ts = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
        r.metadata.download_result = DownloadOperationResult(
            started=ts, status=FeedDownloader.State.in_progress.value, results=[]
        )
        r.flush_metadata()
        r.metadata = None
        r.reload_metadata()
        assert r.metadata.download_result.started == ts
        assert (
            r.metadata.download_result.status == FeedDownloader.State.in_progress.value
        )

        # Write a single chunk of group data, then read it back
        r.write_data(
            "feed1",
            "group1",
            chunk_id=0,
            data=b'{"next_token": "something", "data": [{"somekey": "somevalue"}]}',
        )

        with timer("Read single record group", log_level="info"):
            found_count = 0
            for i in r.read("feed1", "group1", start_index=0):
                logger.info("Got record {}".format(i))
                found_count += 1

        logger.info("Repo metadata: {}".format(r.metadata))
        assert found_count > 0
    finally:
        logger.info("Done with repo test")
        r.teardown()
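# test_metadata_flush_reload above depends on a `get_tmpdir` pytest fixture that is
# not defined in this excerpt. The sketch below is only an assumption of what such a
# fixture might look like (a fresh temporary directory per test, removed afterwards);
# the real fixture likely lives in a shared conftest.py and may differ in detail.
# In the actual module these imports would sit at the top of the file.
import shutil
import tempfile

import pytest


@pytest.fixture
def get_tmpdir():
    """Yield a throwaway directory for repo tests and clean it up afterwards."""
    path = tempfile.mkdtemp(prefix="anchoretest_repo-")
    try:
        yield path
    finally:
        shutil.rmtree(path, ignore_errors=True)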