def test_command(self):
    """The akllt_importzope management command imports every exported page."""
    get_default_site()
    export_path = str(fixture('whole_export'))
    # Only the default root/home pages exist before the import runs.
    self.assertEqual(Page.objects.count(), 2)
    call_command('akllt_importzope', export_path, verbosity=0)
    # The whole-export fixture contributes 35 additional pages.
    self.assertEqual(Page.objects.count(), 37)
def test_import_page_image(self):
    """Page import creates Image objects and rewrites the page's HTML."""
    site_root = get_default_site().root_page
    importer = PagesImporter('apie', 'apie')
    importer.set_up(site_root, fixture('image_fixture'))
    importer.import_all_items()

    # Test if Image objects were created for the referenced pictures.
    titles = (
        'AKL Rumšiškėse',
        'AKL steigiamasis susirinkimas (II)',
    )
    self.assertTrue(Image.objects.filter(title__in=titles).exists())

    # Test if HTML is replaced as intended: local images become <embed>
    # tags pointing at the imported Image pks, remote ones stay as <img>.
    def imgid(title):
        return Image.objects.get(title=title).pk

    imgids = [imgid(title) for title in titles]
    page = StandardPage.objects.get(url_path='/home/apie/apie/')
    tags = [
        match.group(0)
        for match in re.finditer(r'<(embed|img)\b[^>]+>', page.body)
    ]
    self.assertEqual(tags, [
        '<embed alt="AKL Rumšiškėse" embedtype="image" format="left" id="%d"/>' % imgids[0],
        '<embed alt="AKL steigiamasis susirinkimas (II)" embedtype="image" format="right" id="%d"/>' % imgids[1],
        '<img src=\'http://wesnoth.org/start/1.10/images/start-1.jpg\' alt=\'Vesnoto ekranvaizdis\'/>',
    ])
def test_import_page_image(self):
    """Importing a page pulls in its images and rewrites its markup."""
    importer = PagesImporter('apie', 'apie')
    importer.set_up(get_default_site().root_page, fixture('image_fixture'))
    importer.import_all_items()

    first_title = 'AKL Rumšiškėse'
    second_title = 'AKL steigiamasis susirinkimas (II)'

    # Both referenced images must now exist in the image library.
    self.assertTrue(Image.objects.filter(
        title__in=(first_title, second_title)).exists())

    # Local images are replaced by <embed> tags carrying the Image pks;
    # the remote image is left untouched as a plain <img>.
    first_id = Image.objects.get(title=first_title).pk
    second_id = Image.objects.get(title=second_title).pk
    page = StandardPage.objects.get(url_path='/home/apie/apie/')
    found = [
        match.group(0)
        for match in re.finditer(r'<(embed|img)\b[^>]+>', page.body)
    ]
    self.assertEqual(found, [
        '<embed alt="AKL Rumšiškėse" embedtype="image" format="left" id="%d"/>' % first_id,
        '<embed alt="AKL steigiamasis susirinkimas (II)" embedtype="image" format="right" id="%d"/>' % second_id,
        '<img src=\'http://wesnoth.org/start/1.10/images/start-1.jpg\' alt=\'Vesnoto ekranvaizdis\'/>',
    ])
def test_menu_tree(self):
    """sidebar_menu renders siblings and children, skipping hidden pages."""
    home = get_default_site().root_page
    treeutils.create_tree(home, [
        node('p1'), [
            node('p2'),
            node('p3', show_in_menus=False),
            node('p4'), [
                node('p5'),
            ],
        ],
        node('p6'),
    ])
    current = Page.objects.get(title='p4')
    rendered = navtags.sidebar_menu({'self': current})
    # p3 is hidden (show_in_menus=False) and p6 is outside the p1 subtree,
    # so neither appears; p4 is marked active and its child p5 is nested.
    expected = strip([
        '<ul class="depth-1 nav nav-pills nav-stacked">',
        '  <li>',
        '    <a href="/p1/p2/">p2</a>',
        '  </li>',
        '  <li class="active">',
        '    <a href="#">p4</a>',
        '    <ul class="depth-2 nav nav-pills nav-stacked">',
        '      <li>',
        '        <a href="/p1/p4/p5/">p5</a>',
        '      </li>',
        '    </ul>',
        '  </li>',
        '</ul>',
    ])
    self.assertEqual(rendered.splitlines(), expected)
def test_menu_tree(self):
    """The sidebar menu shows the active branch and hides excluded pages."""
    root = get_default_site().root_page
    treeutils.create_tree(root, [
        node('p1'), [
            node('p2'),
            node('p3', show_in_menus=False),
            node('p4'), [
                node('p5'),
            ],
        ],
        node('p6'),
    ])
    active_page = Page.objects.get(title='p4')
    html = navtags.sidebar_menu({'self': active_page})
    # Hidden p3 and sibling-subtree p6 are absent; p4 gets the
    # "active" class and its child p5 is rendered one level deeper.
    self.assertEqual(
        html.splitlines(),
        strip([
            '<ul class="depth-1 nav nav-pills nav-stacked">',
            '  <li>',
            '    <a href="/p1/p2/">p2</a>',
            '  </li>',
            '  <li class="active">',
            '    <a href="#">p4</a>',
            '    <ul class="depth-2 nav nav-pills nav-stacked">',
            '      <li>',
            '        <a href="/p1/p4/p5/">p5</a>',
            '      </li>',
            '    </ul>',
            '  </li>',
            '</ul>',
        ]))
def test_get_total(self):
    """get_total sums the importable items across all registered importers."""
    manager = ImportManager(get_default_site().root_page, fixture('whole_export'))
    manager.add_importers([
        NewsImporter('Naujienos', 'naujienos'),
        PagesImporter('Atviras kodas', 'ak'),
    ])
    self.assertEqual(manager.get_total(), 19)
def test_import_news_image(self):
    """News import creates Image objects for images referenced by items."""
    importer = NewsImporter('Naujienos', 'naujienos')
    importer.set_up(get_default_site().root_page, fixture('image_fixture'))
    importer.import_all_items()
    expected_titles = (
        'MS penguin',
        'Programuokime smagiai su Scratch',
    )
    self.assertTrue(Image.objects.filter(title__in=expected_titles).exists())
def test_import_news_image(self):
    """Images embedded in news items end up in the image library."""
    site_root = get_default_site().root_page
    news_importer = NewsImporter('Naujienos', 'naujienos')
    news_importer.set_up(site_root, fixture('image_fixture'))
    news_importer.import_all_items()
    # Both images referenced by the fixture's news items must exist now.
    imported = Image.objects.filter(title__in=(
        'MS penguin',
        'Programuokime smagiai su Scratch',
    ))
    self.assertTrue(imported.exists())
def test_parse_images(self):
    """parse_images converts a local <img> into a Wagtail <embed> tag."""
    root = get_default_site().root_page
    source_path = fixture('image_fixture/apie/apie.html')
    importer = PagesImporter('apie', 'apie')
    importer.set_up(root, fixture('image_fixture'))
    # The "lphoto" class on the source image maps to format="left".
    markup = '\n'.join([
        '<img src="../images/akl.jpg" alt="AKL Rumšiškėse"',
        '     height="181" width="285" border="0"',
        '     class="lphoto"/>',
    ])
    result = importer.parse_images(source_path, markup)
    self.assertEqual(result, (
        '<embed alt="AKL Rumšiškėse" embedtype="image" format="left" id="1"/>'  # noqa
    ))
def test_endless_recursion(self):
    """grow_tree must terminate even when the input tree references itself."""
    root = get_default_site().root_page
    # Create a tree with a recursive loop: the child list contains itself.
    looped = [Page(slug='recursive')]
    looped.append(looped)
    # Try to create that tree.
    treeutils.create_tree(root, looped)
    # See the results (hopefully without falling into endless recursion).
    result = treeutils.grow_tree(root.get_descendants(), slug_getter)
    self.assertEqual(result, ['recursive'])
def test_manager(self):
    """Iterating the manager imports every item through its importer."""
    manager = ImportManager(get_default_site().root_page, fixture(''))
    manager.add_importers([NewsImporter('Naujienos', 'naujienos')])
    for importer, item in manager.iterate():
        importer.import_item(item)
    # The two default pages plus the news root and both news items exist.
    slugs = sorted(Page.objects.values_list('slug', flat=True))
    self.assertEqual(slugs, [
        'home',
        'naujiena_0001',
        'naujiena_1016',
        'naujienos',
        'root',
    ])
    imported = Page.objects.get(slug='naujiena_0001')
    self.assertEqual(imported.title, 'Konkursas')
def test_parse_images(self):
    """A local image reference is rewritten as an <embed> with the Image pk."""
    importer = PagesImporter('apie', 'apie')
    importer.set_up(get_default_site().root_page, fixture('image_fixture'))
    html_path = fixture('image_fixture/apie/apie.html')
    # class="lphoto" on the original tag selects the left-aligned format.
    converted = importer.parse_images(html_path, '\n'.join([
        '<img src="../images/akl.jpg" alt="AKL Rumšiškėse"',
        '     height="181" width="285" border="0"',
        '     class="lphoto"/>',
    ]))
    expected = (
        '<embed alt="AKL Rumšiškėse" embedtype="image" format="left" id="1"/>'  # noqa
    )
    self.assertEqual(converted, expected)
def test_get_top_menu_page(self):
    """get_top_menu_page walks up to the page directly below the home page."""
    home = get_default_site().root_page
    treeutils.create_tree(home, [
        node('p1'), [
            node('p2'), [
                node('p3', show_in_menus=False), [
                    node('p4', show_in_menus=False),
                ],
            ],
        ],
    ])
    p1, (p2, (p3, (p4,))) = treeutils.grow_tree(home.get_descendants())
    # No page, or the home page itself, has no top menu page.
    for page in (None, home):
        self.assertEqual(nav.get_top_menu_page(page), None)
    # Every page in the p1 subtree resolves to p1, even hidden ones.
    for page in (p1, p2, p3, p4):
        self.assertEqual(nav.get_top_menu_page(page).title, 'p1')
def test_duplicates(self):
    """create_page reuses an existing page instead of duplicating it."""
    data = {
        'date': datetime.date(2002, 10, 15),
        'title': 'Konkursas',
        'blurb': '<p>Vilniuje, dvi dienas ...',
        'body': '<p>Vilniuje, dvi dienas ...',
        'slug': 'naujiena_0001',
    }
    root = get_default_site().root_page
    importer = NewsImporter('Naujienos', 'naujienos')
    importer.root = importer.get_root_page(root)
    item = ImportItem(pathlib.PurePath())

    # First call creates the page.
    first, was_created = importer.create_page(root, item, data)
    self.assertTrue(was_created)

    # Second call with identical data must return the same page.
    second, was_created = importer.create_page(root, item, data)
    self.assertFalse(was_created)
    self.assertEqual(first.pk, second.pk)
def test_get_top_menu_page(self):
    """Any descendant of a top-level page maps back to that top-level page."""
    home = get_default_site().root_page
    treeutils.create_tree(home, [
        node('p1'), [
            node('p2'), [
                node('p3', show_in_menus=False), [
                    node('p4', show_in_menus=False),
                ],
            ],
        ],
    ])
    p1, (p2, (p3, (p4,))) = treeutils.grow_tree(home.get_descendants())
    # Neither a missing page nor the home page has a top menu page.
    self.assertEqual(nav.get_top_menu_page(None), None)
    self.assertEqual(nav.get_top_menu_page(home), None)
    # All four pages resolve to p1 regardless of menu visibility.
    self.assertEqual(nav.get_top_menu_page(p1).title, 'p1')
    self.assertEqual(nav.get_top_menu_page(p2).title, 'p1')
    self.assertEqual(nav.get_top_menu_page(p3).title, 'p1')
    self.assertEqual(nav.get_top_menu_page(p4).title, 'p1')
def test_document_import(self):
    """Document files are imported and document links rewritten in the HTML."""
    importer = PagesImporter('Skaitykla', 'skaitykla')
    importer.set_up(get_default_site().root_page, fixture('image_fixture'))
    importer.import_all_items()

    # Check if Document instances were created.
    titles = (
        'Kazarinas-Technologijos-ir-etika.odt',
        '2007-03-17-Kazarinas-Laisvosios-programos-mokyme.odt',
        '2004-06-02_Seimas_atviri_standartai.pdf',
        'AKP_naudojimo_patirtis_AM.pdf',
    )
    self.assertTrue(Document.objects.filter(title__in=titles).exists())

    # Check if HTML is replaced as intended: local document links become
    # linktype="document" anchors; external URLs are left untouched.
    docids = [Document.objects.get(title=title).pk for title in titles]
    page = StandardPage.objects.get(url_path='/home/skaitykla/pranesimai/')
    self.assertEqual(re.findall(r'<a\b[^>]+>', page.body), [
        '<a href="http://www.vac.lt/seminaras/">',
        '<a href="http://www.mruni.lt/">',
        '<a id="%d" linktype="document">' % docids[0],
        '<a href="http://discovery.ot.lt/linma/">',
        '<a id="%d" linktype="document">' % docids[1],
        '<a href="http://www.lrs.lt/">',
        '<a href="http://www.lrs.lt/ivpk">',
        '<a href="http://www3.lrs.lt/pls/inter/ivpk_print.doc_view?key=234334">',
        '<a id="%d" linktype="document">' % docids[2],
        '<a id="%d" linktype="document">' % docids[3],
    ])
def test(self):
    """After post-processing, internal links point at imported page paths."""
    root = get_default_site().root_page
    importer = PagesImporter('Atviras kodas', 'ak')
    importer.set_up(root, fixture('whole_export'))
    importer.import_all_items()
    importer.post_process()

    page = StandardPage.objects.get(url_path='/home/ak/knygos/')
    hrefs = list(lxml.html.fromstring(page.body).xpath('//a/@href'))
    self.assertEqual(hrefs[0], '/ak/knygos/nuo_win_prie_lin/')
    # Each book is linked twice (cover + title); one external link remains.
    self.assertEqual(hrefs, [
        '/ak/knygos/nuo_win_prie_lin/',
        '/ak/knygos/nuo_win_prie_lin/',
        '/ak/knygos/AKrinkinys/',
        '/ak/knygos/AKrinkinys/',
        '/ak/knygos/linuxatmintine/',
        '/ak/knygos/linuxatmintine/',
        '/ak/knygos/openoffice/',
        '/ak/knygos/openoffice/',
        '/ak/knygos/openoffice_atmintine/',
        '/ak/knygos/openoffice_atmintine/',
        '/ak/knygos/IT_vadovelis/',
        '/ak/knygos/IT_vadovelis/',
        '/ak/knygos/linux_sistemos_administravimas/',
        '/ak/knygos/linux_sistemos_administravimas/',
        '/ak/knygos/php4_vadovas/',
        '/ak/knygos/php4_vadovas/',
        '/ak/knygos/mysql4_vadovas/',
        '/ak/knygos/mysql4_vadovas/',
        '/ak/knygos/kde_atmintine/',
        '/ak/knygos/kde_atmintine/',
        'http://www.kde.org',
        '/ak/knygos/grafine_aplinka_kde/',
        '/ak/knygos/grafine_aplinka_kde/',
        '/ak/knygos/cathedral_and_bazaar/',
        '/ak/knygos/cathedral_and_bazaar/',
    ])
def test_document_import(self):
    """Importing pages also imports attached documents and fixes anchors."""
    site_root = get_default_site().root_page
    pages_importer = PagesImporter('Skaitykla', 'skaitykla')
    pages_importer.set_up(site_root, fixture('image_fixture'))
    pages_importer.import_all_items()

    # All four referenced document files must now exist as Document rows.
    document_titles = [
        'Kazarinas-Technologijos-ir-etika.odt',
        '2007-03-17-Kazarinas-Laisvosios-programos-mokyme.odt',
        '2004-06-02_Seimas_atviri_standartai.pdf',
        'AKP_naudojimo_patirtis_AM.pdf',
    ]
    self.assertTrue(
        Document.objects.filter(title__in=document_titles).exists())

    # Anchors to local documents are replaced with linktype="document"
    # tags carrying the Document pk; external hrefs are kept verbatim.
    pk_of = {
        title: Document.objects.get(title=title).pk
        for title in document_titles
    }
    docids = [pk_of[title] for title in document_titles]
    page = StandardPage.objects.get(url_path='/home/skaitykla/pranesimai/')
    anchors = re.findall(r'<a\b[^>]+>', page.body)
    self.assertEqual(anchors, [
        '<a href="http://www.vac.lt/seminaras/">',
        '<a href="http://www.mruni.lt/">',
        '<a id="%d" linktype="document">' % docids[0],
        '<a href="http://discovery.ot.lt/linma/">',
        '<a id="%d" linktype="document">' % docids[1],
        '<a href="http://www.lrs.lt/">',
        '<a href="http://www.lrs.lt/ivpk">',
        '<a href="http://www3.lrs.lt/pls/inter/ivpk_print.doc_view?key=234334">',
        '<a id="%d" linktype="document">' % docids[2],
        '<a id="%d" linktype="document">' % docids[3],
    ])
def setUp(self):
    """Prepare a PagesImporter pointed at the whole-export fixture."""
    home = get_default_site().root_page
    self.importer = PagesImporter('Atviras kodas', 'ak')
    self.importer.set_up(home, fixture('whole_export'))
def test_create_tree(self):
    """A tree written with create_tree reads back identically via grow_tree."""
    root = get_default_site().root_page
    expected = self.fixtures.tree_of_pages()
    treeutils.create_tree(root, expected)
    self.assertEqual(treeutils.grow_tree(root.get_descendants()), expected)
def setUp(self):
    """Create the news root page plus a regular user and a superuser."""
    super().setUp()
    home = get_default_site().root_page
    self.root = home.add_child(instance=Page(slug='naujienos'))
    User.objects.create_user('user')
    User.objects.create_superuser('admin', '*****@*****.**', 'secret')