def test_read_database_cache(temp_user_cache, monkeypatch):
    """A second read of the same database should load tasks via the cache."""
    seen = set()

    def record_load(f):
        # Record the task name (cache filename minus the '.pkl' suffix).
        seen.add(f.name.split('/')[-1][:-4])

    monkeypatch.setattr(events_mod.pickle, 'load', record_load)
    in_dir = 'tests/data/test'
    Events.read(in_dir)
    events = Events.read(in_dir)
    assert seen == set(events.tasks())
def test_read_database_cache_broken(temp_user_cache, tmpdir):
    """A corrupt cache entry must be ignored and re-parsed from source."""
    in_dir = 'tests/data/test'
    first = Events.read(in_dir)
    cached = glob(tmpdir.join('cache', '**', '*.pkl').strpath,
                  recursive=True)
    with open(cached[0], 'w') as f:
        f.write('Broken data')
    second = Events.read(in_dir)
    assert first == second
def test_read_database_cache_broken(temp_user_cache, tmpdir):
    """Reading must survive a cache file containing garbage."""
    in_dir = 'tests/data/test'
    original = Events.read(in_dir)
    pattern = tmpdir.join('cache', '**', '*.pkl').strpath
    cache_files = glob(pattern, recursive=True)
    with open(cache_files[0], 'w') as broken:
        broken.write('Broken data')
    assert original == Events.read(in_dir)
def test_read_database_cache(temp_user_cache, monkeypatch):
    """Every task should come from pickle on a cache-warm second read."""
    loaded = set()

    def fake_load(f):
        # Task name is the cache filename with the '.pkl' suffix stripped.
        loaded.add(f.name.split('/')[-1][:-4])

    monkeypatch.setattr(events_mod.pickle, 'load', fake_load)
    in_dir = 'tests/data/test'
    Events.read(in_dir)
    events = Events.read(in_dir)
    assert loaded == set(events.tasks())
def test_check_events(n, task, start, delta):
    """Each parametrised event should match its expected fields."""
    # FIXME: Clean-ish way to perform check, with the caveat that it parses
    # the database on each entry.  Need a better solution.
    event = Events.read('tests/data/test')[n]
    expect(event.task) == task
    expect(event.start) == start
    expect(event.delta) == delta
def test_fail_start_when_task_typo():
    """Starting an unknown task must raise TaskNotExistError."""
    events = Events.read('tests/data/test_not_running', write_cache=False)
    message = 'Task non_existent does not exist! Use “--new” to create it'
    with raises(TaskNotExistError, match=message):
        events.start('non_existent')
def test_check_events(n: int, task: str, start: datetime, delta: timedelta):
    """Each parametrised event should match its expected fields."""
    # FIXME: Clean-ish way to perform check, with the caveat that it parses
    # the database on each entry.  Need a better solution.
    event = Events.read('tests/data/test', write_cache=False)[n]
    assert event.task == task
    assert event.start == start
    assert event.delta == delta
def test_check_events(n, task, start, delta):
    """Each parametrised event should match its expected fields."""
    # FIXME: Clean-ish way to perform check, with the caveat that it parses
    # the database on each entry.  Need a better solution.
    record = Events.read('tests/data/test', write_cache=False)[n]
    expect(record.task) == task
    expect(record.start) == start
    expect(record.delta) == delta
def test_fail_start_when_task_typo():
    """Unknown task names should be rejected with a helpful message."""
    events = Events.read('tests/data/test_not_running')
    message = ("Task non_existant does not exist! Use `--new' to "
               "create it")
    with expect.raises(TaskNotExistError, message):
        events.start(task='non_existant')
def test_write_database_cache(temp_user_cache, tmpdir):
    """Writing dirty tasks should drop one pickle cache file per task."""
    events = Events.read('tests/data/test')
    events._dirty = events.tasks()
    events.write(tmpdir.join('database').strpath)
    cached = glob(tmpdir.join('cache', '**', '*.pkl').strpath,
                  recursive=True)
    names = {f.split('/')[-1][:-4] for f in cached}
    assert names == set(events.tasks())
def test_write_database_cache(temp_user_cache, tmpdir):
    """One cache pickle must exist for every task after a write."""
    events = Events.read('tests/data/test')
    events._dirty = events.tasks()
    events.write(tmpdir.join('database').strpath)
    pattern = tmpdir.join('cache', '**', '*.pkl').strpath
    stems = {path.split('/')[-1][:-4]
             for path in glob(pattern, recursive=True)}
    assert stems == set(events.tasks())
def test_fail_start_when_task_typo():
    """Misspelled task names must not silently create new tasks."""
    events = Events.read('tests/data/test_not_running', write_cache=False)
    expected = 'Task non_existent does not exist! Use “--new” to create it'
    with raises(TaskNotExistError, match=expected):
        events.start('non_existent')
def test_write_database(tmpdir):
    """A read/write round trip must reproduce the database exactly."""
    in_dir = 'tests/data/test'
    events = Events.read(in_dir, write_cache=False)
    events._dirty = events.tasks()
    events.write(tmpdir.strpath)
    comp = dircmp(in_dir, tmpdir.strpath, [])
    for attr in ('diff_files', 'left_only', 'right_only', 'funny_files'):
        assert getattr(comp, attr) == []
    assert comp.subdirs == {}
def test_write_database():
    """Writing a freshly-read database should change nothing on disk."""
    runner = CliRunner()
    in_dir = abspath('tests/data/test')
    events = Events.read(in_dir, write_cache=False)
    events._dirty = events.tasks()
    with runner.isolated_filesystem() as tempdir:
        events.write(tempdir)
        comp = dircmp(in_dir, tempdir)
        for attr in ('diff_files', 'left_only', 'right_only',
                     'funny_files'):
            expect(getattr(comp, attr)) == []
def test_write_database():
    """Round-trip a database and compare each CSV file's contents.

    Fixes two issues with the original: file handles from
    ``open(old).read()`` were never closed (ResourceWarning under
    pytest), and ``zip`` over two possibly-empty lists would let the
    test pass vacuously if ``write`` produced no files at all.
    """
    events = Events.read('tests/data/test')
    events._dirty = events.tasks()
    try:
        events.write('tests/data/test_write')
        old_files = sorted(glob('tests/data/test/*.csv'))
        new_files = sorted(glob('tests/data/test_write/*.csv'))
        # Guard against a vacuous pass when nothing was written.
        expect(len(new_files)) == len(old_files)
        for old, new in zip(old_files, new_files):
            with open(old) as old_fh, open(new) as new_fh:
                expect(old_fh.read()) == new_fh.read()
    finally:
        # Clean up the scratch directory even if the assertions fail.
        for entry in os.listdir('tests/data/test_write'):
            os.unlink('tests/data/test_write/%s' % entry)
        os.rmdir('tests/data/test_write')
def test_write_database_event_backups(tmpdir):
    """Modified task files gain a ``~`` backup copy when written."""
    test_dir = tmpdir.join('test').strpath
    copytree('tests/data/test_not_running', test_dir)
    events = Events.read(test_dir, write_cache=False)
    events.start('task')
    events.write(test_dir)
    comp = dircmp('tests/data/test_not_running', test_dir, [])
    assert comp.diff_files == ['task.csv']
    assert comp.left_only == []
    assert comp.right_only == ['task.csv~']
    assert comp.funny_files == []
    assert comp.subdirs == {}
def test_read_datebase(database, events):
    """Each sample database parses to the expected event count."""
    loaded = Events.read('tests/data/' + database, write_cache=False)
    expect(len(loaded)) == events
def test_sum_durations_in_database():
    """Durations across all tasks should total 2h15m."""
    db = Events.read('tests/data/test_not_running', write_cache=False)
    total = db.sum()
    assert total == timedelta(hours=2, minutes=15)
def test_fetch_events_for_task():
    """Filtering by task name returns only that task's events."""
    db = Events.read('tests/data/test')
    matches = db.for_task(task='task2')
    expect(len(matches)) == 1
def test_read_last(database: str, result: Optional[Event]):
    """``last()`` returns the most recent event, or None when empty."""
    loaded = Events.read('tests/data/' + database, write_cache=False)
    assert loaded.last() == result
def test_read_datebase(database: str, events: int):
    """Each sample database parses to the expected event count."""
    loaded = Events.read('tests/data/' + database, write_cache=False)
    assert len(loaded) == events
def test_current_running_event():
    """An open event marks its task as currently running."""
    db = Events.read('tests/data/test', write_cache=False)
    running = db.running()
    assert running == 'task'
def test_non_existing_database():
    """A missing database directory reads as an empty Events store."""
    loaded = Events.read('I_NEVER_EXIST', write_cache=False)
    assert loaded == Events()
def test_no_currently_running_event():
    """A database with only closed events reports nothing running."""
    db = Events.read('tests/data/test_not_running', write_cache=False)
    running = db.running()
    assert not running
def test_non_existing_database():
    """Reading a missing directory yields an empty Events store."""
    loaded = Events.read("I_NEVER_EXIST")
    expect(Events()) == loaded
def test_store_messages_with_events():
    """Stop messages survive a round trip through the database."""
    db = Events.read('tests/data/test')
    last = db.last()
    expect(last.message) == 'finished'
def test_fetch_events_for_week():
    """ISO week filtering picks out a single matching event."""
    db = Events.read('tests/data/date_filtering')
    matches = db.for_week(year=2011, week=9)
    expect(len(matches)) == 1
def test_stop_event():
    """Stopping the running event leaves nothing in progress."""
    db = Events.read('tests/data/test', write_cache=False)
    db.stop()
    assert not db.running()
def test_fetch_events_for_date(date, expected):
    """Date filtering returns the expected number of events."""
    db = Events.read('tests/data/date_filtering')
    matches = db.for_date(**date)
    expect(len(matches)) == expected
def test_fail_stop_single_when_not_running():
    """Stopping an already-finished event must raise."""
    db = Events.read('tests/data/test_not_running', write_cache=False)
    last = db.last()
    with raises(TaskNotRunningError, match='No task running!'):
        last.stop()
def test_read_invalid_data():
    """Malformed CSV rows must abort the read with ValueError."""
    database = 'tests/data/faulty_csv'
    with raises(ValueError, match='Invalid data'):
        Events.read(database, write_cache=False)
def test_fetch_events_for_date(date: Dict[str, int], expected: int):
    """Date filtering returns the expected number of events."""
    db = Events.read('tests/data/date_filtering', write_cache=False)
    matches = db.for_date(**date)
    assert len(matches) == expected
def test_fail_start_with_overlap():
    """A start time inside an existing event must be rejected."""
    db = Events.read('tests/data/test_not_running', write_cache=False)
    overlap = datetime(2011, 5, 4, 9, 33)
    with raises(TaskRunningError, match='Start date overlaps'):
        db.start('task', start=overlap)
def test_list_tasks():
    """All task names in the database should be listed, sorted."""
    db = Events.read('tests/data/test', write_cache=False)
    assert db.tasks() == ['task', 'task2']
def test_fetch_events_for_week():
    """ISO week filtering picks out a single matching event."""
    db = Events.read('tests/data/date_filtering', write_cache=False)
    matches = db.for_week(2011, 9)
    assert len(matches) == 1
def test_write_database_no_change_noop(tmpdir):
    """Writing with no dirty tasks must touch no files at all."""
    events = Events.read('tests/data/test', write_cache=False)
    events.write(tmpdir.strpath)
    written = glob(tmpdir.join('*').strpath)
    assert written == []
def test_non_existing_database():
    """Reading a missing directory yields an empty Events store."""
    loaded = Events.read('I_NEVER_EXIST', write_cache=False)
    expect(Events()) == loaded
def test_list_tasks():
    """All task names in the database should be listed, sorted."""
    db = Events.read('tests/data/test')
    expect(db.tasks()) == ['task', 'task2']
def test_read_datebase():
    """The sample database contains exactly three events."""
    db = Events.read('tests/data/test')
    expect(len(db)) == 3
def test_store_messages_with_events():
    """Stop messages survive a round trip through the database."""
    db = Events.read('tests/data/test', write_cache=False)
    last = db.last()
    assert last.message == 'finished'
def test_current_running_event():
    """An open event marks its task as currently running."""
    db = Events.read('tests/data/test')
    running = db.running()
    expect(running) == 'task'
def test_start_event():
    """Starting a task makes it the currently running one."""
    db = Events.read('tests/data/test_not_running', write_cache=False)
    db.start('task2')
    assert db.running() == 'task2'
def test_no_currently_running_event():
    """A database with only closed events reports nothing running.

    The original assertion read ``expect(events.running()) is False``:
    ``is`` cannot be overloaded, so the identity comparison's result was
    computed and discarded — the test could never fail.  Comparing the
    truthiness through expect's overloaded ``==`` actually asserts.
    """
    events = Events.read('tests/data/test_not_running')
    # running() must be falsy (no open event) for this fixture.
    expect(bool(events.running())) == False
def test_fail_start_when_running():
    """Starting a second task while one runs must raise."""
    db = Events.read('tests/data/test', write_cache=False)
    with raises(TaskRunningError, match='Running task task!'):
        db.start('task2')
def test_sum_durations_in_database():
    """Durations across all tasks should total 2h15m."""
    db = Events.read('tests/data/test_not_running')
    total = db.sum()
    expect(total) == timedelta(hours=2, minutes=15)
def test_stop_event_with_message():
    """A stop message should be stored on the final event."""
    db = Events.read('tests/data/test', write_cache=False)
    db.stop(message='test')
    assert db.last().message == 'test'
def test_fetch_events_for_task():
    """Filtering by task name returns only that task's events."""
    db = Events.read('tests/data/test', write_cache=False)
    matches = db.for_task('task2')
    assert len(matches) == 1