def initialize_wiki():
    """Create the QUOREM wiki skeleton if it does not already exist.

    Ensures the wiki root page exists (creating it from
    ``quorem/static/markdown/docs/root.md`` when missing), adds a fresh
    revision of the root article, then creates the four top-level list
    pages (investigation, protocol, pipeline, sample) that are missing.
    Finally delegates to ``initialize_documentation`` for the doc tree.
    """
    # Read the root markdown once; the original read the same file twice.
    root_content = get_content_from_file("quorem/static/markdown/docs/root.md")
    try:
        root = URLPath.root()
    except NoRootURL:
        print("Root URL not found, creating...")
        root = URLPath.create_root(title="QUOREM Wiki", content=root_content)
    # A new revision is always added, even when the root already existed,
    # so the root article is refreshed from root.md on every call.
    article_revision = ArticleRevision(
        title=root.article.current_revision.title,
        content=root_content)
    root.article.add_revision(article_revision)

    # (slug, label for the console message, page title, plural noun used
    # in the boilerplate page content)
    list_pages = [
        ("investigation", "Investigation", "List of Investigations", "investigations"),
        ("protocol", "Protocol", "List of Protocols", "protocols"),
        ("pipeline", "Pipeline", "List of Pipelines", "pipelines"),
        ("sample", "Sample", "List of Samples", "samples"),
    ]
    for slug, label, title, plural in list_pages:
        try:
            URLPath.get_by_path(slug)
        except URLPath.DoesNotExist:
            print("%s page not found, creating..." % label)
            URLPath.create_urlpath(
                root,
                slug=slug,
                title=title,
                content=(
                    "This page lists the %s that are present in your QUOREM "
                    "database. You may edit anything on this page, except the "
                    "Automated Report section.\r\n\r\n" % plural
                ),
            )
    initialize_documentation(root)
def handle(self, *args, **options):
    """Import pages from a remote MediaWiki instance into the local wiki.

    Builds a username -> local-user-pk mapping from
    ``options['user_matching']`` entries of the form ``username:pk``,
    logs in to the remote wiki, lists available images, and imports each
    page under the URL path chosen by ``get_page_parent``.

    Raises:
        CommandError: if the ``wikitools`` package is not installed.
    """
    try:
        import wikitools
    except ImportError:
        raise CommandError(
            'You need to install wikitools to use this command !')

    # Split on the LAST ':' so usernames that themselves contain ':'
    # still parse correctly.  This replaces the original
    # reverse-split-reverse trick (um[::-1] ... [::-1]), which was an
    # obfuscated way of doing exactly rsplit(':', 1).
    user_matching = {}
    for um in options['user_matching']:
        username, pk = um.rsplit(':', 1)
        user_matching[username] = pk

    api_url, api_username, api_password = self.get_params(args)
    site = wikitools.wiki.Wiki(api_url)
    # NOTE(review): logs in with the literal password "dummy" even though
    # api_password was just retrieved above -- confirm this is intentional
    # (the pandoc-based importer in this file uses api_password).
    site.login(api_username, "dummy")
    pages = self.get_all_pages(wikitools.api, site)
    current_site = Site.objects.get_current()
    url_root = URLPath.root()
    print(url_root)

    # Candidate re-anchoring targets: the first urlpath of each existing
    # article, restricted to the top two path levels (fewer than three
    # '/' characters).
    oldpaths = [
        article.urlpath_set.all()[0]
        for article in Article.objects.all()
        if article.urlpath_set.all()[0].path.count("/") < 3
    ]

    images = wikitools.api.APIRequest(site, {
        'action': 'query',
        'list': 'allimages',
        'aiprop': 'url',
    }).query()
    for image in images['query']['allimages']:
        print(image['url'])

    for page in pages:
        root = self.get_page_parent(page, ["f*g", "boker"], "diverse")
        if root != "ignore":
            # Re-anchor the import under a matching existing path, if any.
            for path in oldpaths:
                if path.path == root + "/":
                    url_root = path
                    break
            self.import_page(
                wikitools.api, site, page, current_site, url_root,
                user_matching, options['replace_existing'])
def handle(self, *args, **options):
    """Import pages from a remote MediaWiki instance into the local wiki.

    Builds a username -> local-user-pk mapping from
    ``options['user_matching']`` entries of the form ``username:pk``,
    logs in to the remote wiki, lists available images, and imports each
    page under the URL path chosen by ``get_page_parent``.

    Raises:
        CommandError: if the ``wikitools`` package is not installed.
    """
    try:
        import wikitools
    except ImportError:
        raise CommandError(
            'You need to install wikitools to use this command !')

    # Split on the LAST ':' so usernames that themselves contain ':'
    # still parse correctly.  This replaces the original
    # reverse-split-reverse trick (um[::-1] ... [::-1]), which was an
    # obfuscated way of doing exactly rsplit(':', 1).
    user_matching = {}
    for um in options['user_matching']:
        username, pk = um.rsplit(':', 1)
        user_matching[username] = pk

    api_url, api_username, api_password = self.get_params(args)
    site = wikitools.wiki.Wiki(api_url)
    # NOTE(review): logs in with the literal password "dummy" even though
    # api_password was just retrieved above -- confirm this is intentional
    # (the pandoc-based importer in this file uses api_password).
    site.login(api_username, "dummy")
    pages = self.get_all_pages(wikitools.api, site)
    current_site = Site.objects.get_current()
    url_root = URLPath.root()
    print(url_root)

    # Candidate re-anchoring targets: the first urlpath of each existing
    # article, restricted to the top two path levels (fewer than three
    # '/' characters).
    oldpaths = [
        article.urlpath_set.all()[0]
        for article in Article.objects.all()
        if article.urlpath_set.all()[0].path.count("/") < 3
    ]

    images = wikitools.api.APIRequest(site, {
        'action': 'query',
        'list': 'allimages',
        'aiprop': 'url',
    }).query()
    for image in images['query']['allimages']:
        print(image['url'])

    for page in pages:
        root = self.get_page_parent(page, ["f*g", "boker"], "diverse")
        if root != "ignore":
            # Re-anchor the import under a matching existing path, if any.
            for path in oldpaths:
                if path.path == root + "/":
                    url_root = path
                    break
            self.import_page(
                wikitools.api, site, page, current_site, url_root,
                user_matching, options['replace_existing'])
def handle(self, *args, **options):
    """Import all pages from a remote MediaWiki into the local wiki.

    Unlike the other importer variants in this file, this one requires
    ``pypandoc`` (for markup conversion), authenticates with the real
    API password, imports every page without path filtering, and fixes
    up inter-page links afterwards via ``update_links``.

    Raises:
        CommandError: if ``wikitools`` or ``pypandoc`` is not installed.
    """
    try:
        import wikitools
    except ImportError:
        raise CommandError(
            'You need to install wikitools to use this command !')
    try:
        import pypandoc  # noqa @UnusedImport -- presence check only
    except ImportError:
        raise CommandError('You need to install pypandoc')

    # Split on the LAST ':' so usernames that themselves contain ':'
    # still parse correctly.  This replaces the original
    # reverse-split-reverse trick (um[::-1] ... [::-1]), which was an
    # obfuscated way of doing exactly rsplit(':', 1).
    user_matching = {}
    for um in options['user_matching']:
        username, pk = um.rsplit(':', 1)
        user_matching[username] = pk

    api_url, api_username, api_password = self.get_params(args)
    site = wikitools.wiki.Wiki(api_url)
    site.login(api_username, api_password)
    pages = self.get_all_pages(wikitools.api, site)
    current_site = Site.objects.get_current()
    url_root = URLPath.root()
    for page in pages:
        self.import_page(
            wikitools.api, site, page, current_site, url_root,
            user_matching, options['replace_existing'])
    # Rewrite links between imported pages now that all of them exist.
    self.update_links()