def test_find(simple_file_config, tmpdir):
    """find(0) returns the newest revision; its data file lives in tmpdir."""
    backup = simple_file_config
    # NOTE(review): the original passed `backup` as a third positional
    # argument, which lands in the timestamp slot (compare the other
    # Revision(...) calls in this file). The timestamp is assigned
    # explicitly on the next line anyway, so the stray argument is dropped.
    rev = Revision(backup, '123-456')
    rev.timestamp = backy.utils.now()
    rev.materialize()
    backup.scan()
    assert str(tmpdir / '123-456') == backup.find(0).filename
def test_scrub_wrong_type(simple_file_config):
    """An unknown scrub type must raise RuntimeError."""
    backup = simple_file_config
    revision = Revision(backup)
    revision.materialize()
    backup.scan()
    with pytest.raises(RuntimeError):
        revision.backend.scrub(backup, 'asdf')
def test_revision_create_child(backup):
    """Revision.create() links the new revision to the newest history entry."""
    backup.history = [Revision(backup, 'asdf')]
    child = Revision.create(backup, tags={'test'})
    assert child.uuid is not None
    assert child.tags == {'test'}
    assert child.parent == 'asdf'
    # Timestamp is "now" - allow a generous margin for slow test hosts.
    assert (backy.utils.now() - child.timestamp).total_seconds() < 10
    assert child.backup is backup
def test_find_should_raise_if_not_found(simple_file_config):
    """Looking up a revision spec that does not exist raises KeyError."""
    backup = simple_file_config
    revision = Revision(backup, '123-456')
    revision.timestamp = backy.utils.now()
    revision.materialize()
    backup.scan()
    with pytest.raises(KeyError):
        backup.find('no such revision')
def add_revision(timestamp):
    """Materialize a 'daily'-tagged revision at *timestamp*.

    Uses the enclosing ``backup`` object; the uuid is simply the next
    history index. Keeps ``backup.history`` sorted by timestamp and
    returns the new revision.
    """
    rev = Revision(backup, len(backup.history) + 1, timestamp=timestamp)
    rev.tags = {'daily'}
    rev.materialize()
    backup.history.append(rev)
    backup.history.sort(key=lambda r: r.timestamp)
    return rev
def test_scrub_light(simple_file_config):
    """Light scrub passes on intact data and counts missing chunk files."""
    backup = simple_file_config
    revision = Revision(backup)
    revision.materialize()
    backup.scan()
    # Write some data so the store contains at least one chunk.
    # Use a context manager instead of manual open/close (same close
    # semantics; `with r.open('r')` is already used elsewhere in the file).
    with revision.open('w') as f:
        f.write(b'asdf')
    assert revision.backend.scrub(backup, 'light') == 0
    # Deleting the chunk files must be reported as one error.
    for filename, _, _ in revision.backend.store.ls():
        os.unlink(filename)
    assert revision.backend.scrub(backup, 'light') == 1
def test_scrub_deep(simple_file_config):
    """Deep scrub verifies chunk contents, not just their presence."""
    backup = simple_file_config
    revision = Revision(backup)
    revision.materialize()
    backup.scan()
    # Write some data so the store contains at least one chunk.
    with revision.open('w') as f:
        f.write(b'asdf')
    assert revision.backend.scrub(backup, 'deep') == 0
    # Corrupt every chunk in place; the deep scrub must notice.
    for filename, _, _ in revision.backend.store.ls():
        os.chmod(filename, 0o660)  # chunks are stored without write permission
        with open(filename, 'w') as f:
            f.write('foobar')
    assert revision.backend.scrub(backup, 'deep') == 1
def test_revision_create(backup):
    """Revision.create() on empty history: fresh uuid, given tags, recent timestamp."""
    backup.history = []
    # Set literals instead of set([...]) - same value, idiomatic form.
    r = Revision.create(backup, {'1', '2'})
    assert r.uuid is not None
    assert r.tags == {'1', '2'}
    assert (backy.utils.now() - r.timestamp).total_seconds() < 10
    assert r.backup is backup
def test_delete_revision(backup):
    """remove() deletes both the revision data file and its .rev metadata."""
    r = Revision(backup, '123-456', backy.utils.now())
    r.materialize()
    assert p.exists(backup.path + '/123-456.rev')
    backup.scan()
    # Ensure the revision data file exists - we do not implicitly create
    # it any longer. Close the handle immediately instead of leaking it
    # (the original left the file object unclosed).
    with open(backup.path + '/123-456', 'w'):
        pass
    assert p.exists(backup.path + '/123-456.rev')
    r.remove()
    assert not p.exists(backup.path + '/123-456')
    assert not p.exists(backup.path + '/123-456.rev')
def test_check_too_old(daemon, tmpdir, clock, capsys): job = daemon.jobs['test01'] revision = Revision(job.backup, '1') revision.timestamp = backy.utils.now() - datetime.timedelta(hours=48) revision.stats['duration'] = 60.0 revision.materialize() daemon._write_status_file() try: daemon.check() except SystemExit as exit: assert exit.code == 2 out, err = capsys.readouterr() assert out == """\
def test_purge(simple_file_config):
    """purge() drops chunks only once no revision references them."""
    backup = simple_file_config
    rev = Revision(backup)
    # Write one version into the file.
    with rev.open('w') as f:
        f.write(b'asdf')
    rev.materialize()
    backup.scan()
    # Reassign as the scan will create a new reference.
    rev = backup.history[0]
    assert len(list(rev.backend.store.ls())) == 1
    rev.backend.purge()
    # The chunk is still referenced by the revision - it must survive.
    assert len(list(rev.backend.store.ls())) == 1
    rev.remove()
    rev.backend.purge()
    assert len(list(rev.backend.store.ls())) == 0
def test_store_revision_data(backup, clock):
    """write_info() serializes all revision metadata to the .rev YAML file."""
    rev = Revision(backup, 'asdf2', backy.utils.now())
    rev.parent = 'asdf'
    rev.backup = backup
    rev.write_info()
    expected = {
        "parent": "asdf",
        "backend_type": "chunked",
        "uuid": "asdf2",
        "stats": {"bytes_written": 0},
        "tags": [],
        "trust": "trusted",
        "timestamp": datetime.datetime(
            2015, 9, 1, 7, 6, 47, tzinfo=datetime.timezone.utc),
    }
    with open(rev.info_filename, encoding='utf-8') as info:
        assert yaml.safe_load(info) == expected
def test_overlay(simple_file_config):
    """Overlay mode ('o') is read-write but changes vanish on close."""
    rev = Revision(simple_file_config)
    assert isinstance(rev.backend, ChunkedFileBackend)
    # Write one version to the file.
    with rev.open('w') as f:
        f.write(b'asdf')
    with rev.open('r') as f:
        assert f.read() == b'asdf'
    # Open the file in overlay mode and modify it in place.
    with rev.open('o') as f:
        assert f.read() == b'asdf'
        f.seek(0)
        f.write(b'bsdf')
        f.seek(0)
        assert f.read() == b'bsdf'
    # Reopening yields the original, unmodified content.
    with rev.open('r') as f:
        assert f.read() == b'asdf'
def test_incomplete_revs_dont_count_for_sla(daemon, clock, tmpdir):
    """A recent but incomplete revision does not satisfy the SLA."""
    job = daemon.jobs['test01']
    # One complete (has duration stats) but stale revision ...
    complete = Revision(job.backup, '1')
    complete.timestamp = backy.utils.now() - datetime.timedelta(hours=48)
    complete.stats['duration'] = 60.0
    complete.materialize()
    # ... and one fresh revision without stats (i.e. incomplete).
    incomplete = Revision(job.backup, '2')
    incomplete.timestamp = backy.utils.now() - datetime.timedelta(hours=1)
    incomplete.materialize()
    job.backup.scan()
    assert False is job.sla
def test_sla_over_time(daemon, clock, tmpdir):
    """SLA holds up to 1.5x the 24h cycle and fails one second beyond that."""
    job = daemon.jobs['test01']
    # No previous backups - we consider this to be OK initially.
    # I agree that this gives us a blind spot in the beginning. I'll
    # think of something when this happens. Maybe keeping a log of errors
    # or so to notice that we tried previously.
    revision = Revision(job.backup, '1')
    # We're on a 24h cycle. 6 hours old backup is fine.
    revision.timestamp = backy.utils.now() - datetime.timedelta(hours=6)
    revision.stats['duration'] = 60.0
    revision.materialize()
    job.backup.scan()
    assert len(job.backup.history) == 1
    assert job.sla is True

    def age_to(delta):
        # Backdate the existing revision and rescan.
        revision.timestamp = backy.utils.now() - delta
        revision.write_info()
        job.backup.scan()

    # 24h, 32h and 24*1.5h (the last acceptable age) are all still OK.
    for hours in (24, 32, 24 * 1.5):
        age_to(datetime.timedelta(hours=hours))
        assert job.sla is True
    # One second past 1.5 cycles is no longer good.
    age_to(datetime.timedelta(hours=24 * 1.5) + datetime.timedelta(seconds=1))
    assert job.sla is False
def test_revision_base(backup):
    """A freshly constructed revision remembers its uuid and backup."""
    rev = Revision(backup, 'uuid')
    assert rev.uuid == 'uuid'
    assert rev.backup is backup
def test_load_sample2(backup):
    """Loading sample2.rev restores uuid, timestamp and parent."""
    rev = Revision.load(SAMPLE_DIR + '/sample2.rev', backup)
    assert rev.uuid == 'asdf2'
    assert rev.timestamp == datetime.datetime(
        2015, 8, 1, 21, 0, tzinfo=pytz.UTC)
    assert rev.parent == 'asdf'
    assert rev.backup is backup
def test_filenames_based_on_uuid_and_backup_dir():
    """Data and info filenames derive from the backup path plus the uuid."""
    backup = mock.Mock()
    backup.path = '/srv/backup/foo'
    rev = Revision(backup, 'asdf')
    assert rev.filename == '/srv/backup/foo/asdf'
    assert rev.info_filename == '/srv/backup/foo/asdf.rev'