Example 1
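This runTest method comes from a TestCase in the zim test suite; it drives the page and link indexers against an in-memory SQLite database. The surrounding test module would need roughly the following imports; the module paths below are guesses based on the zim codebase and may differ between versions:

import sqlite3

import tests  # zim's test helper package (provides MockObject and MaskedObject)

from zim.notebook.page import Path, HRef
from zim.notebook.index.pages import PagesIndexer, PagesViewInternal
from zim.notebook.index.links import LinksIndexer
from zim.utils import natural_sort_key
from zim.formats.wiki import Parser as WikiParser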
    def runTest(self):
        def basename(name):
            if ":" in name:
                return name.split(":")[-1]
            else:
                return name

        db = sqlite3.connect(':memory:')
        db.row_factory = sqlite3.Row
        pi = PagesIndexer(db, None, tests.MockObject())
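        # Populate the pages table with the fixture rows; the parent and
        # source_file columns only get dummy values (1) in this test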
        for i, name, cont in self.PAGES:
            db.execute(
                'INSERT INTO pages(id, name, lowerbasename, sortkey, parent, source_file) VALUES (?, ?, ?, ?, 1, 1)',
                (i, name, basename(name).lower(), natural_sort_key(name)))

        ## Test PagesViewInternal methods
        iview = PagesViewInternal(db)
        i, pn = iview.resolve_pagename(Path(''), ['foo'])
        self.assertEqual((i, pn), (3, Path('Foo')))

        i, pn = iview.resolve_link(Path('Foo'), HRef.new_from_wiki_link('Bar'))
        self.assertEqual((i, pn), (2, Path('Bar')))

        ## Test the actual indexer
        pageindexer = tests.MaskedObject(pi, 'connect')
        indexer = LinksIndexer(db, pageindexer)

        for i, name, cont in self.PAGES:
            row = {
                'id': i,
                'name': name,
                'sortkey': natural_sort_key(name),
                'is_link_placeholder': False
            }
            indexer.on_page_row_inserted(pageindexer, row)

        ## Index links found in the page content; missing targets become placeholders
        pageindexer.setObjectAccess('insert_link_placeholder')
        for i, name, text in self.PAGES:
            tree = WikiParser().parse(text)
            row = {'id': i, 'name': name}
            indexer.on_page_changed(pageindexer, row, tree)

        indexer.update()

        links = sorted((r['source'], r['target'])
                       for r in db.execute('SELECT * FROM links'))
        self.assertEqual(links, [(3, 2), (3, 4)])

        ## Deleting the pages should also drop their links
        pageindexer.setObjectAccess('remove_page')
        for i, name, cont in self.PAGES:
            row = {'id': i, 'name': name, 'is_link_placeholder': False}
            indexer.on_page_row_deleted(pageindexer, row)

        indexer.update()

        rows = db.execute('SELECT * FROM links').fetchall()
        self.assertEqual(rows, [])
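The self.PAGES fixture is not shown above. A hypothetical fixture along these lines (id, page name, wiki text per entry) would satisfy the assertions: Foo (id 3) links to Bar (id 2) and to one page that does not exist yet, which becomes link placeholder id 4 (id 1 is presumably the root row created by PagesIndexer itself):

    PAGES = (
        (2, 'Bar', 'test 123\n'),
        (3, 'Foo', '[[Bar]]\n[[Dus]]\n'),  # 'Dus' stands for any not-yet-indexed page
    )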
Example 2
	def iter_href(self):
		'''Generator for links in the text
		@returns: yields unique L{HRef} objects
		'''
		from zim.notebook.page import HRef # XXX
		seen = set()
		for elt in itertools.chain(
			self._etree.iter(LINK),
			self._etree.iter(IMAGE)
		):
			href = elt.attrib.get('href')
			if href and href not in seen:
				seen.add(href)
				if link_type(href) == 'page':
					try:
						yield HRef.new_from_wiki_link(href)
					except ValueError:
						pass
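A hedged usage sketch, assuming iter_href is a method of zim's ParseTree and that the wiki parser from Example 1 returns such a tree:

from zim.formats.wiki import Parser

tree = Parser().parse('A [[Foo:Bar]] link and a [[http://example.com|web]] link\n')
for href in tree.iter_href():
	print(href)  # only the page link yields an HRef; the URL is filtered out by link_type()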
Example 3
	def lookup_from_user_input(self, name, reference=None):
		'''Lookup a pagename based on user input
		@param name: the user input as string
		@param reference: a L{Path} used to resolve C{name} when it is a
		relative link
		@returns: a L{Path} object for C{name}
		@raises ValueError: when C{name} would reduce to empty string
		after removing all invalid characters, or if C{name} is a
		relative link while no C{reference} page is given.
		@raises IndexNotFoundError: when C{reference} is not indexed
		'''
		# This method re-uses most of resolve_link() but is defined
		# separately because it serves a distinctly different purpose.
		# It is only coincidental that we treat user input as links ... ;)
		href = HRef.new_from_wiki_link(name)
		if reference is None and href.rel == HREF_REL_RELATIVE:
			raise ValueError('Got relative page name without parent: %s' % name)
		else:
			source = reference or ROOT_PATH
			id, pagename = self._pages.resolve_link(
								source, href, ignore_link_placeholders=False)
			return pagename
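A hedged usage sketch, assuming pages is the index view object (for instance a PagesView) that defines lookup_from_user_input:

from zim.notebook.page import Path

path = pages.lookup_from_user_input('Foo:Bar')  # floating name, no reference needed
child = pages.lookup_from_user_input('+Child', reference=Path('Foo'))  # relative name, resolved below Foo
# pages.lookup_from_user_input('+Child')  # would raise ValueError: relative name without a reference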