def _info(self, info: FileInfo):
    """Populate *info* from this document's parsed metadata.

    Title and created come straight from the metadata mapping; tags merge
    lowercased keywords with inline hashtags; links are emitted in sorted
    order of the collected href entries.
    """
    meta = self.meta
    info.title = meta.get('title')
    info.created = meta.get('created')
    keyword_tags = {keyword.lower() for keyword in meta.get('keywords', [])}
    info.tags = keyword_tags.union(self._hashtags)
    links = []
    for entry in sorted(self.hrefs):
        links.append(LinkInfo(self.path, entry.href))
    info.links = links
def test_info_and_referrers(fs):
    """info() reports title/created/tags/links, and backlinks from referrers.

    NOTE(review): the original multi-line YAML document was flattened in the
    source; line breaks reconstructed — verify against the markdown parser's
    expectations.
    """
    doc = """---
title: A Note
created: 2012-01-02 03:04:05
...
I link to [two](two.md) and [three](../otherdir/three.md#heading)
and have #two #tags."""
    path1 = '/notes/dir/one.md'
    path2 = '/notes/dir/two.md'
    path3 = '/notes/otherdir/three.md'
    fs.create_file(path1, contents=doc)
    fs.create_file(path2, contents='---\ntitle: Note 2\n...\n')
    repo = config().instantiate()
    # Fully-populated info for the parsed note, links sorted by href.
    assert repo.info(path1, FileInfoReq.full()) == FileInfo(
        path1,
        title='A Note',
        created=datetime(2012, 1, 2, 3, 4, 5),
        tags={'tags', 'two'},
        links=[LinkInfo(path1, h) for h in ['../otherdir/three.md#heading', 'two.md']])
    # Backlinks are reported for files that exist...
    assert repo.info(path2, FileInfoReq.full()) == FileInfo(
        path2, title='Note 2', backlinks=[LinkInfo(path1, 'two.md')])
    # ...and even for referents that do not exist on disk.
    assert repo.info(path3, FileInfoReq.full()) == FileInfo(
        path3, backlinks=[LinkInfo(path1, '../otherdir/three.md#heading')])
def _info(self, info: FileInfo):
    """Fill *info* with the title, creation time, tags, and outgoing links
    extracted by this accessor's helper methods."""
    info.title = self._title()
    info.created = self._created()
    info.tags = self._tags()
    # One LinkInfo per distinct href, in sorted order; iterating the dict
    # yields its keys, same as .keys().
    collected = []
    for href in sorted(self._link_els):
        collected.append(LinkInfo(self.path, href))
    info.links = collected
def test_change(fs):
    """change() applies edits (title set, href rewrite, move) and the cache
    reflects the results, including after invalidate()."""
    fs.cwd = '/notes'
    path1 = '/notes/one.md'
    path2 = '/notes/two.md'
    path3 = '/notes/moved.md'
    fs.create_file(path1, contents='[1](old)')
    fs.create_file(path2, contents='[2](foo)')
    edits = [
        SetTitleCmd(path1, 'New Title'),
        ReplaceHrefCmd(path1, 'old', 'new'),
        MoveCmd(path1, path3),
        ReplaceHrefCmd(path2, 'foo', 'bar'),
    ]
    repo = config().instantiate()
    repo.change(edits)
    # The move removed the original file and wrote the edited content.
    assert not Path(path1).exists()
    assert Path(path3).read_text() == '---\ntitle: New Title\n...\n[1](new)'
    assert Path(path2).read_text() == '[2](bar)'
    assert repo.info(path1, FileInfoReq.full()) == FileInfo(path1)
    assert repo.info(path3, FileInfoReq.full()) == FileInfo(
        path3, title='New Title', links=[LinkInfo(path3, 'new')])
    assert repo.info(path2, FileInfoReq.full()) == FileInfo(
        path2, links=[LinkInfo(path2, 'bar')])
    # Relative paths resolve against the cwd; stale referents have no backlinks.
    assert repo.info('old', FileInfoReq.full()) == FileInfo('/notes/old')
    assert repo.info('foo', FileInfoReq.full()) == FileInfo('/notes/foo')
    assert repo.info('new', FileInfoReq.full()) == FileInfo(
        '/notes/new', backlinks=[LinkInfo(path3, 'new')])
    assert repo.info('bar', FileInfoReq.full()) == FileInfo(
        '/notes/bar', backlinks=[LinkInfo(path2, 'bar')])
    # regression test for bug where invalidate removed entries for files that were referred to
    # only by files that had not been changed
    repo.invalidate()
    assert repo.info('new', FileInfoReq.full()) == FileInfo(
        '/notes/new', backlinks=[LinkInfo(path3, 'new')])
    assert repo.info('bar', FileInfoReq.full()) == FileInfo(
        '/notes/bar', backlinks=[LinkInfo(path2, 'bar')])
def test_invalidate(fs):
    """The repo only picks up filesystem changes after invalidate()."""
    repo = config().instantiate()
    path = '/notes/one.md'
    assert repo.info(path, FileInfoReq.full()) == FileInfo(path)
    # Creating the file does not change cached results by itself.
    fs.create_file(path, contents='#hello [link](foo.md)')
    assert repo.info(path, FileInfoReq.full()) == FileInfo(path)
    repo.invalidate()
    assert repo.info(path, FileInfoReq.full()) == FileInfo(
        path, tags={'hello'}, links=[LinkInfo(path, 'foo.md')])
    repo.invalidate()
    # Rewrite the file; a further invalidate picks up the new content.
    Path(path).write_text('#goodbye')
    repo.invalidate()
    assert repo.info(path, FileInfoReq.full()) == FileInfo(path, tags={'goodbye'})
def test_skip_parse(fs):
    """Files matched by skip_parse are listed but not parsed for metadata."""
    path1 = '/notes/one.md'
    path2 = '/notes/one.md.resources/two.md'
    path3 = '/notes/skip.md'
    path4 = '/notes/unskip.md'
    fs.create_file(path1, contents='---\ntitle: Note One\n...\n')
    fs.create_file(path2, contents='---\ntitle: Note Two\n...\n')
    fs.create_file(path3, contents='---\ntitle: Note Skip\n...\n')
    fs.create_file(path4, contents='---\ntitle: Note No Skip\n...\n')

    def fn(parentpath, filename):
        # Skip resource directories and one specific filename.
        return filename.endswith('.resources') or filename == 'skip.md'

    conf = config()
    conf.skip_parse = fn
    with conf.instantiate() as repo:
        # Skipped files appear in query results, just without parsed titles.
        assert list(repo.query('sort:path')) == [
            FileInfo(path1, title='Note One'),
            FileInfo(path2),
            FileInfo(path3),
            FileInfo(path4, title='Note No Skip'),
        ]
        assert repo.info(path1) == FileInfo(path1, title='Note One')
        assert repo.info(path2) == FileInfo(path2)
        assert repo.info(path3) == FileInfo(path3)
        assert repo.info(path4) == FileInfo(path4, title='Note No Skip')
def info(self, path: str, fields: FileInfoReqIsh = FileInfoReq.internal(),
         path_resolved=False, skip_parse=None) -> FileInfo:
    """Return a FileInfo for *path*.

    :param path: file to describe; made absolute unless ``path_resolved``.
    :param fields: which attributes to populate (parsed via FileInfoReq.parse).
    :param path_resolved: set True if *path* is already absolute.
    :param skip_parse: force skipping/forcing the parse; defaults to the
        repo's ``_should_skip_parse`` decision.
    :raises IOError: if the accessor fails to parse the file.
    """
    if not path_resolved:
        path = os.path.abspath(path)
    if skip_parse is None:
        skip_parse = self._should_skip_parse(path)
    fields = FileInfoReq.parse(fields)
    if not skip_parse and os.path.exists(path):
        try:
            info = self.accessor_factory(path).info()
        except Exception as ex:
            raise IOError(f'Unable to parse {path}') from ex
    else:
        # Skipped or nonexistent files still get a (mostly empty) FileInfo.
        info = FileInfo(path)
    if fields.backlinks:
        # Scan every file's outgoing links for ones pointing at *path*.
        for candidate in self.query(fields=FileInfoReq(path=True, links=True)):
            for link in candidate.links:
                if link.referent() == path:
                    info.backlinks.append(link)
        info.backlinks.sort(key=attrgetter('referrer', 'href'))
    return info
def test_info_garbage(fs):
    """Malformed HTML yields an empty FileInfo rather than an error."""
    # Ideally this test would trigger the try-except block in the load method,
    # but I don't actually know how to construct a document that does that.
    doc = '<nonsense️'
    path = Path('/fakenotes/test.html')
    fs.create_file(path, contents=doc)
    result = HTMLAccessor(str(path)).info()
    assert result == FileInfo(str(path))
def test_resource_path_fn():
    """resource_path_fn maps paths inside *.resources dirs to their owner."""
    assert resource_path_fn('/foo/bar/baz') is None
    # the next case shouldn't really come up since we don't call path_organizer on directories
    assert resource_path_fn('/foo/bar/baz.resources') is None
    assert resource_path_fn('/foo/bar/baz') is None
    # the next case probably isn't good behavior, but it seems unimportant; leaving this
    # test case as documentation of the current behavior
    assert resource_path_fn('/foo/bar/.resources/baz')

    result = resource_path_fn('/foo/bar.resources/baz')
    assert result.determinant == '/foo/bar'
    assert result.fn(FileInfo('/somewhere/else')) == '/somewhere/else.resources/baz'
    assert result.fn(FileInfo('/foo/bar')) == '/foo/bar.resources/baz'
    assert result.fn(FileInfo('/foo/bar.md')) == '/foo/bar.md.resources/baz'

    # Nested resource paths keep their subdirectory structure.
    result = resource_path_fn('/foo/My File.md.resources/subdir/My Picture.png')
    assert result.determinant == '/foo/My File.md'
    assert result.fn(FileInfo('/foo/My File.md')) == '/foo/My File.md.resources/subdir/My Picture.png'
    assert result.fn(FileInfo('/file.md')) == '/file.md.resources/subdir/My Picture.png'
def test_guess_created(fs):
    """guess_created falls back to filesystem time, preferring an explicit
    created value when present."""
    info = FileInfo('foo')
    # No file and no created attribute: nothing to guess from.
    assert info.guess_created() is None
    # With a file present, the fake filesystem's timestamp is used.
    fs.create_file('foo')
    assert info.guess_created().isoformat() == '2012-02-03T04:05:06+00:00'
    # An explicit created value always wins over the filesystem.
    info.created = datetime(1, 2, 3, 4, 5, 6)
    assert info.guess_created() == datetime(1, 2, 3, 4, 5, 6)
def test_query(fs, capsys):
    """The query CLI command supports JSON, default, and table output plus
    tag filters.

    NOTE(review): the original multi-line YAML document was flattened in the
    source; line breaks reconstructed — verify against the markdown parser's
    expectations.
    """
    nd_setup(fs)
    doc1 = """---
title: A Test File
created: 2012-03-04 05:06:07
keywords:
- has space
- cool
...
This is a test doc."""
    doc2 = 'Another #test doc.'
    path1 = '/notes/cwd/subdir/one.md'
    path2 = '/notes/two.md'
    fs.create_file(path1, contents=doc1)
    fs.create_file(path2, contents=doc2)

    assert cli.main(['query', '-j']) == 0
    out, err = capsys.readouterr()
    expected1 = FileInfo(path=path1,
                         title='A Test File',
                         created=datetime(2012, 3, 4, 5, 6, 7),
                         tags=['cool', 'has space']).as_json()
    assert json.loads(out) == [expected1, FileInfo(path=path2, tags=['test']).as_json()]

    # Default and table output: just check something was printed.
    assert cli.main(['query']) == 0
    out, err = capsys.readouterr()
    assert out
    assert cli.main(['query', '-t']) == 0
    out, err = capsys.readouterr()
    assert out

    # Tag filter with a '+'-encoded space narrows to the matching file.
    assert cli.main(['query', '-j', 'tag:has+space']) == 0
    out, err = capsys.readouterr()
    assert json.loads(out) == [expected1]
    assert cli.main(['query', 'tag:has+space']) == 0
    out, err = capsys.readouterr()
    assert out
def info(self, path: str, fields: FileInfoReqIsh = FileInfoReq.internal(),
         path_resolved=False) -> FileInfo:
    """Return a FileInfo for *path* from the SQLite cache.

    :param path: file to describe; made absolute unless ``path_resolved``.
    :param fields: which attributes to populate (parsed via FileInfoReq.parse).
    :param path_resolved: set True if *path* is already absolute.
    """
    self._refresh_if_needed()
    if not path_resolved:
        path = os.path.abspath(path)
    fields = FileInfoReq.parse(fields)
    cur = self.connection.cursor()
    cur.execute('SELECT id, title, created FROM files WHERE path = ?', (path, ))
    row = cur.fetchone()
    result = FileInfo(path)
    if row:
        file_id, result.title, created_raw = row
        if created_raw:
            if created_raw.isnumeric():
                # Stored as epoch milliseconds.
                result.created = datetime.utcfromtimestamp(int(created_raw) / 1000)
            else:
                result.created = datetime.fromisoformat(created_raw)
        if fields.tags:
            cur.execute('SELECT tag FROM file_tags WHERE file_id = ?', (file_id, ))
            result.tags = {r[0] for r in cur}
        if fields.links:
            cur.execute(
                'SELECT href FROM file_links WHERE referrer_id = ?',
                (file_id, ))
            result.links = [LinkInfo(path, href) for href in sorted(r[0] for r in cur)]
        if fields.backlinks:
            cur.execute(
                'SELECT referrers.path, file_links.href'
                ' FROM files referrers'
                ' INNER JOIN file_links ON referrers.id = file_links.referrer_id'
                ' WHERE file_links.referent_id = ?', (file_id, ))
            result.backlinks = [LinkInfo(referrer, href) for referrer, href in cur]
            result.backlinks.sort(key=attrgetter('referrer', 'href'))
    return result
def process_fn(src: str):
    # Resolve the pending path-function entry for *src* into a concrete move.
    # Closes over move_fns (pending DependentPathFn entries), info_map, moves
    # (accumulated src -> dest), unavailable (reserved destinations), and
    # find_available_name — all defined in the enclosing scope.
    dpfn = move_fns[src]
    determinant = dpfn.determinant
    # Fall back to a bare FileInfo if the determinant hasn't been loaded.
    dinfo = info_map.get(determinant, FileInfo(determinant))
    # Resolve the determinant's own pending move first, so our destination
    # is computed from its final path.
    # NOTE(review): if two entries' determinants form a cycle, this recursion
    # appears unbounded (move_fns[src] is only deleted below) — confirm
    # callers guarantee acyclic dependencies.
    if determinant in move_fns:
        process_fn(determinant)
    if determinant in moves:
        dinfo = replace(dinfo, path=moves[determinant])
    srcdest = dpfn.fn(dinfo)
    del move_fns[src]
    # Avoid collisions with already-claimed destinations.
    srcdest = find_available_name(srcdest, unavailable, src)
    if src == srcdest:
        # Already at its computed destination; no move needed.
        return
    moves[src] = srcdest
    unavailable.add(srcdest)
def info(self) -> FileInfo:
    """Returns details about the file.

    This will not necessarily reload the file from disk if the instance has
    previously loaded it.

    This will only populate the attributes of FileInfo that are supported by
    the particular subclass, and also will not populate any attributes (such
    as backlinks) that cannot be derived from the file in isolation.

    May raise :exc:`ParseError`.
    """
    if not self._loaded:
        self.load()
    result = FileInfo(self.path)
    # Subclasses fill in whichever attributes they support.
    self._info(result)
    return result
def call(p, t):
    # Shorthand: build a FileInfo from a path and title, then rewrite its name.
    info = FileInfo(path=p, title=t)
    return rewrite_name_using_title(info)
def _info(self, info: FileInfo):
    """Populate *info* from the PDF's document-information dictionary."""
    meta = self._meta
    info.title = meta.get('/Title')
    # PDF dates use their own string format; _pdf_strptime handles None too
    # (presumably — confirm against its definition).
    info.created = _pdf_strptime(meta.get('/CreationDate'))
    info.tags.update(self._tags())
def test_apply_sorting():
    """apply_sorting orders by path, filename, title, created, tag/backlink
    counts, and multi-key combinations, honoring ignore_case, missing_first,
    and reverse options."""
    data = [
        FileInfo('/a/one', tags={'baz'},
                 backlinks=[LinkInfo(referrer='whatever', href='whatever')]),
        FileInfo('/b/two', title='Beta', created=datetime(2010, 1, 15)),
        FileInfo('/c/Three', title='Gamma', created=datetime(2012, 1, 9),
                 backlinks=[LinkInfo(referrer='whatever', href='whatever'),
                            LinkInfo(referrer='whatever', href='whatever')]),
        FileInfo('/d/four', title='delta', created=datetime(2012, 1, 9),
                 tags={'foo', 'bar'}),
    ]
    # Path ordering, forward and reversed.
    assert FileQuery.parse('sort:path').apply_sorting(data) == data
    assert FileQuery.parse('sort:-path').apply_sorting(data) == list(reversed(data))
    # Filename ordering is case-insensitive by default.
    assert FileQuery.parse('sort:filename').apply_sorting(data) == [
        data[3], data[0], data[2], data[1]
    ]
    assert FileQuery(sort_by=[
        FileQuerySort(FileQuerySortField.FILENAME, ignore_case=False)
    ]).apply_sorting(data) == [data[2], data[3], data[0], data[1]]
    # Title ordering: case handling, missing-value placement, reversal.
    assert FileQuery.parse('sort:title').apply_sorting(data) == [
        data[1], data[3], data[2], data[0]
    ]
    assert FileQuery(sort_by=[
        FileQuerySort(FileQuerySortField.TITLE, ignore_case=False)
    ]).apply_sorting(data) == [data[1], data[2], data[3], data[0]]
    assert FileQuery(sort_by=[
        FileQuerySort(FileQuerySortField.TITLE, missing_first=True)
    ]).apply_sorting(data) == [data[0], data[1], data[3], data[2]]
    assert FileQuery(sort_by=[
        FileQuerySort(FileQuerySortField.TITLE, missing_first=True, reverse=True)
    ]).apply_sorting(data) == [data[2], data[3], data[1], data[0]]
    # Created-date ordering.
    assert FileQuery.parse('sort:created').apply_sorting(data) == [
        data[1], data[2], data[3], data[0]
    ]
    assert FileQuery.parse('sort:-created').apply_sorting(data) == [
        data[0], data[2], data[3], data[1]
    ]
    assert FileQuery(sort_by=[
        FileQuerySort(FileQuerySortField.CREATED, missing_first=True)
    ]).apply_sorting(data) == [data[0], data[1], data[2], data[3]]
    # Tag and backlink counts, descending.
    assert FileQuery.parse('sort:-tags').apply_sorting(data) == [
        data[3], data[0], data[1], data[2]
    ]
    assert FileQuery.parse('sort:-backlinks').apply_sorting(data) == [
        data[2], data[0], data[1], data[3]
    ]
    # Multi-key sorts: secondary key breaks created-date ties.
    assert FileQuery.parse('sort:created,title').apply_sorting(data) == [
        data[1], data[3], data[2], data[0]
    ]
    assert FileQuery.parse('sort:created,-title').apply_sorting(data) == [
        data[1], data[2], data[3], data[0]
    ]
def test_info_nonexistent(fs):
    """A nonexistent path yields an empty FileInfo, not an error."""
    path = '/notes/foo'
    repo = DirectRepoConf(root_paths={'/notes'}).instantiate()
    assert repo.info(path) == FileInfo(path)
def test_info_directory(fs):
    """Directories yield empty FileInfos rather than being parsed."""
    path = Path('/notes/foo/bar')
    path.mkdir(parents=True, exist_ok=True)
    repo = DirectRepoConf(root_paths={'/notes'}).instantiate()
    for p in (path, path.parent):
        assert repo.info(str(p)) == FileInfo(str(p))
def test_info_unknown(fs):
    """Asking about a file the repo has never seen yields an empty FileInfo."""
    fs.create_file('/notes/one.md', contents='Hello')
    repo = config().instantiate()
    unknown = '/notes/two.md'
    assert repo.info(unknown) == FileInfo(unknown)