def test_get_backup_resource():
    """Backup resources require an initialized project; once set, exactly one LocalDisk resource exists."""
    # With no project path configured, asking for backup resources must fail.
    ProjectManager.set_path(None)
    with pytest.raises(NotInitilizedError):
        ResourceManager.backup_resources()

    # After pointing at the test project, a single LocalDisk resource is available.
    ProjectManager.set_path('tests/test-project')
    resources = ResourceManager.backup_resources()
    assert len(resources) == 1
    assert isinstance(resources[0], LocalDisk)
def test_do_backup(data):
    """Backing up each message individually writes one JSON file per message to the local disk resource."""
    ProjectManager.set_path('tests/test-project')

    for msg in data:
        ResourceManager._do_backup(msg)

    backup_dir = ResourceManager.backup_resources()[0].path
    written = os.listdir(backup_dir)
    assert len(written) == 8
    assert '2018-08-21 06:21:35.json' in written

    # Clean up so later tests start from an empty backup directory.
    for name in written:
        os.remove(os.path.join(backup_dir, name))
def test_do_backup_stream(data):
    """Streaming backup via DataManager.do_backup writes one JSON file per chunk.

    NOTE(review): this function was originally also named ``test_do_backup``,
    which shadowed the earlier test of the same name so pytest never ran it.
    Renamed to give both tests a unique identity.
    """
    ProjectManager.set_path('tests/test-project')

    # Seed the backup store with the first five messages, then stream the
    # rest through the DataManager in chunks of 5.
    ResourceManager._do_backup(data[0:5])
    dm = DataManager
    dm.do_backup(chunk=5, stream_from=data)

    bdup_files = [
        i for i in os.listdir('tests/test-project/kafka/backups')
        if i.endswith('json')
    ]
    assert len(bdup_files) == 5
def test_get_latest_backup(data):
    """get_latest_backups() reports the most recent backup per resource, keyed by resource class name."""
    ProjectManager.set_path('tests/test-project')

    for item in data:
        ResourceManager._do_backup(item)

    latest = ResourceManager.get_latest_backups()
    assert isinstance(latest, dict)
    assert 'LocalDisk' in latest.keys()

    # Remove everything we wrote so the backup directory is left clean.
    backup_dir = ResourceManager.backup_resources()[0].path
    for name in os.listdir(backup_dir):
        os.remove(os.path.join(backup_dir, name))
class DataManager(BaseConfigManager):
    """Facade over the resource manager and stream factory for backing up and replaying data."""

    # Timestamp format used for backed-up records — TODO confirm against
    # the backup filename convention used by ResourceManager.
    DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
    RESOURCE_MANAGER = ResourceManager()
    STREAMS = StreamFactory()

    @classmethod
    def get_first_time(cls):
        # Not implemented yet.
        pass

    @classmethod
    def do_backup(cls, chunk: int = 100, stream_from: Union[Iterable, AnyStr] = 'kafka'):
        """Stream records newer than the last backup and persist them in batches of *chunk*."""
        last_seen = cls.RESOURCE_MANAGER.get_latest_backed_up_time()
        source = cls.STREAMS.stream_from_datetime(last_seen, stream_from)
        batched = source.buffer_with_count(chunk)
        batched.subscribe(lambda batch: cls.RESOURCE_MANAGER._do_backup(batch))

    @classmethod
    def stream(cls, stream_from: Union[Iterable, AnyStr] = 'kafka'):
        """Return a stream of all records from the beginning of *stream_from*."""
        return cls.STREAMS.stream_from_start(stream_from)

    @classmethod
    def historic_blocking(cls, stream_from: Union[Iterable, AnyStr] = 'kafka'):
        """Like :meth:`stream`, but as a blocking observable for synchronous iteration."""
        full_stream = cls.STREAMS.stream_from_start(stream_from)
        return full_stream.to_blocking()
'side': 'buy' }, { 'price': 6379.99, 'ts': '2018-08-21 06:21:57', 'volume_24h': 8347.08598465, 'side': 'sell' }, { 'price': 6380.0, 'ts': '2018-08-21 06:22:00', 'volume_24h': 8347.21550695, 'side': 'buy' }, { 'price': 6380.28, 'ts': '2018-08-21 06:22:00', 'volume_24h': 8347.21668217, 'side': 'buy' }, { 'price': 6380.46, 'ts': '2018-08-21 06:22:04', 'volume_24h': 8347.06809264, 'side': 'buy' }] from kryptoflow.managers.project import ProjectManager ProjectManager.set_path('tests/test-project') # print(ResourceManager.get_latest_backed_up_time()) ResourceManager._do_backup(test_data[0:5]) dm = DataManager dm.do_backup(chunk=2, stream_from=test_data) print(os.listdir('tests/test-project/kafka/backups'))
def test_resource_factory():
    """Requesting an unknown resource kind raises ResourcedError."""
    with pytest.raises(ResourcedError):
        ResourceManager.get_resource('wrong', 'path')