def test_load_5(monkeypatch, capsys, tmp_path):
    """Everything ok, with tags: notes, tags and links are all returned."""
    # One result set per query, consumed in order: notes, tags, links.
    result_sets = [
        [(0, '', '', '{"Application": "NoteTree"}'),
         (1, '01-01-0001 00:00:00', 'title_1', 'text_1')],
        [('tagid_1', 'tag_title_1')],
        [(1, 'tagid_1')],
    ]

    def mock_connect(*args):
        return MockConn()

    def mock_iter(*args):
        # Each cursor iteration hands out the next prepared result set.
        return iter(result_sets.pop(0))

    dest = tmp_path / 'load5.sql'
    dest.touch()
    monkeypatch.setattr(dmls.sql, 'connect', mock_connect)
    monkeypatch.setattr(MockCursor, '__iter__', mock_iter)
    assert dmls.load_file(dest) == {
        0: {
            'Application': 'NoteTree',
            'Keywords': ['tag_title_1']
        },
        '01-01-0001 00:00:00': ['title_1', 'text_1', ['tag_title_1']]
    }
    assert capsys.readouterr().out == (
        'execute SQL:'
        ' `SELECT noteid, created, title, text FROM notes`\n'
        'execute SQL: `SELECT tagid, tagname FROM tags`\n'
        'execute SQL: `SELECT doc_id, tag_id FROM links`\n')
def test_load_2(monkeypatch, capsys, tmp_path):
    """File exists but the Application option is missing: expect EOFError."""
    def mock_connect(*args):
        return MockConn()

    def mock_iter(*args):
        # A notes row without the key-0 settings record preceding it.
        rows = [('1', '01-01-0001 00:00:00', 'title_1', 'text_1')]
        return iter(rows)

    dest = tmp_path / 'load2.sql'
    dest.touch()
    monkeypatch.setattr(dmls.sql, 'connect', mock_connect)
    monkeypatch.setattr(MockCursor, '__iter__', mock_iter)
    with pytest.raises(EOFError):
        dmls.load_file(dest)
    assert capsys.readouterr().out == (
        'execute SQL:'
        ' `SELECT noteid, created, title, text FROM notes`\n')
def test_load_1(monkeypatch, capsys, tmp_path):
    """File does not exist: load_file returns an empty dict, no output."""
    dest = tmp_path / 'load1.sql'
    if dest.exists():
        dest.unlink()
    assert dmls.load_file(dest) == {}
    assert capsys.readouterr().out == ''
def test_main():
    """Run the sample scenario once per backend, dump each backend's data
    to a text file, and open meld to compare the three dumps.

    The settings dict stored under key 0 is expanded and sorted so the
    dumps are comparable regardless of backend-specific key ordering.
    """
    def dump_sorted(data, outname):
        # Write one line per note; expand the key-0 settings dict sorted
        # so dict ordering differences between backends don't show up.
        with open(outname, 'w') as out:
            for key, value in data.items():
                if key == 0:
                    for name, setting in sorted(value.items()):
                        print(name, setting, file=out)
                else:
                    print(key, value, file=out)

    startdir = pathlib.Path('/tmp/notetree')
    startdir.mkdir(exist_ok=True)
    for path in startdir.iterdir():
        # iterdir() already yields full paths; re-joining with startdir
        # (as the old code did) only worked because joining with an
        # absolute path discards the left operand.
        path.unlink()
    filename = startdir / 'testdata'

    # Run the sample scenario for each of the three backends in turn.
    startvalue = backend_types.index(backend)
    for count in range(3):
        subprocess.run(['pytest', 'sample.py'])
        next_backend(startvalue, startvalue + 1)
        startvalue += 1

    p_filename = str(filename.with_suffix('.pck'))
    dump_sorted(dmlp.load_file(p_filename), p_filename + '.out')
    s_filename = str(filename.with_suffix('.db'))
    dump_sorted(dmls.load_file(s_filename), s_filename + '.out')
    j_filename = str(filename.with_suffix('.json'))
    dump_sorted(dmlj.load_file(j_filename), j_filename + '.out')

    subprocess.run([
        'meld', p_filename + '.out', s_filename + '.out', j_filename + '.out'
    ])
def _dump(data, outname):
    # Pretty-print one data snapshot to its own file for later comparison.
    with open(outname, 'w') as out:
        pprint.pprint(data, stream=out)


# Round-trip the pickle data through every backend, dumping the data
# after the first load and again after a save/reload cycle.
base_filename = 'MyNotes.pck'
p_filename = '/tmp/notetree/' + base_filename
s_filename = p_filename.replace('.pck', '.db')
j_filename = p_filename.replace('.pck', '.json')

# Pickle backend: load the original file, then save/reload it.
nt_data = dmlp.load_file(base_filename)
_dump(nt_data, p_filename + '.out-loaded')
dmlp.save_file(p_filename, nt_data)
nt_data = dmlp.load_file(p_filename)
_dump(nt_data, p_filename + '.out-saved')

# SQLite and JSON backends: seed with the pickle data, then round-trip.
for dml, fname in ((dmls, s_filename), (dmlj, j_filename)):
    dml.save_file(fname, nt_data)
    new_data = dml.load_file(fname)
    _dump(new_data, fname + '.out-loaded')
    dml.save_file(fname, new_data)
    new_data = dml.load_file(fname)
    _dump(new_data, fname + '.out-saved')