def run(self, serial, tag, tagname, pagename, soup, request, response):
    """Rewrite an interwiki link (href of the form "schema:link").

    Returns True if the tag should be handed to other plugins
    (no href present), False once the tag is fully processed.
    """
    s = Store()
    # Reload the schema map if the backing store changed since we cached it.
    if self.mtime < s.mtime(self.wiki_map):
        self.load()
    try:
        uri = tag['href']
    except KeyError:
        # No href attribute: nothing for us to do, let other plugins try.
        return True
    try:
        (schema, link) = uri.split(':', 1)
    except ValueError:
        # Not in "schema:link" form -- plain link, stop processing.
        return False
    schema = schema.lower()
    tag['rel'] = uri
    if schema in self.schemas:
        target = self.schemas[schema]
        if '%s' in target:
            try:
                uri = target % link
            except (TypeError, ValueError):
                # Malformed format string in the interwiki map; fall back
                # to plain concatenation rather than dropping the link.
                print("Error in processing Interwiki link (%s,%s,%s)" % (schema, link, target))
                uri = target + link
        else:
            uri = target + link
        tag['href'] = uri
    (schema, netloc, path, parameters, query, fragment) = urlparse.urlparse(uri)
    tag['title'] = "link to %s on %s" % (link, netloc)
    tag['class'] = "interwiki"
    # this tag does not need to be re-processed
    return False
def run(self, serial, tag, tagname, pagename, soup, request, response):
    """Rewrite an interwiki link (href of the form "schema:link").

    Returns True if the tag should be handed to other plugins
    (no href present), False once the tag is fully processed.
    """
    s = Store()
    # Reload the schema map if the meta page changed since we cached it.
    if self.mtime < s.mtime(self.meta_page):
        self.load()
    try:
        uri = tag['href']
    except KeyError:
        # No href attribute: nothing for us to do, let other plugins try.
        return True
    try:
        (schema, link) = uri.split(':', 1)
    except ValueError:
        # Not in "schema:link" form -- plain link, stop processing.
        return False
    schema = schema.lower()
    tag['rel'] = uri
    if schema in self.schemas:
        target = self.schemas[schema]
        if '%s' in target:
            try:
                uri = target % link
            except (TypeError, ValueError):
                # Malformed format string in the interwiki map; fall back
                # to plain concatenation rather than dropping the link.
                log.error("Error in processing Interwiki link (%s,%s,%s)" % (schema, link, target))
                uri = target + link
        else:
            uri = target + link
        tag['href'] = uri
    (schema, netloc, path, parameters, query, fragment) = urlparse.urlparse(uri)
    tag['title'] = "link to %s on %s" % (link, netloc)
    tag['class'] = "interwiki"
    # this tag does not need to be re-processed
    return False
def run(self, serial, tag, tagname, pagename, soup, request, response):
    """Attach a title attribute to tags whose text is a known acronym.

    Returns False once a meaning was attached (tag fully processed),
    True otherwise so other plugins may inspect the tag.
    """
    s = Store()
    # Reload the acronym table if the meta page changed since we cached it.
    if self.mtime < s.mtime(self.meta_page):
        self.load()
    try:
        acronym = ''.join(tag.find_all(text=re.compile('.+'))).strip().lower()
    except Exception:
        # Best-effort: tags with no usable text are skipped, not fatal.
        return True
    # Single lookup instead of membership test + second indexing.
    meaning = self.acronyms.get(acronym)
    if meaning is not None:
        tag['title'] = meaning
        # this tag does not need to be re-processed
        return False
    return True
def run(self, serial, tag, tagname, pagename, soup, request, response):
    """Attach a title attribute to tags whose text is a known acronym.

    Returns False once a meaning was attached (tag fully processed),
    True otherwise so other plugins may inspect the tag.
    """
    s = Store()
    # Reload the acronym table if the meta page changed since we cached it.
    if self.mtime < s.mtime(self.meta_page):
        self.load()
    try:
        acronym = ''.join(tag.find_all(text=re.compile('.+'))).strip().lower()
    except Exception:
        # Best-effort: tags with no usable text are skipped, not fatal.
        return True
    # Single lookup instead of membership test + second indexing.
    meaning = self.acronyms.get(acronym)
    if meaning is not None:
        tag['title'] = meaning
        # this tag does not need to be re-processed
        return False
    return True
def run(self, serial, tag, tagname, pagename, soup, request, response):
    """Expand page aliases in a link's href, following alias chains.

    Returns True in every case so other plugins may still process the tag.
    """
    s = Store()
    # Reload the alias table if the meta page changed since we cached it.
    if self.mtime < s.mtime(self.meta_page):
        self.load()
    try:
        link = tag['href']
    except KeyError:
        # No href attribute to expand.
        return True
    # BUGFIX: the seen-list must persist across iterations; previously it
    # was re-created inside the loop, so an alias cycle (a -> b -> a)
    # was never detected and the loop ran forever.
    seen = []
    while True:
        # expand multiple aliases if required
        try:
            alias = self.aliases[tag['href']]
        except KeyError:
            break
        if alias in seen:
            # avoid loops
            break
        seen.append(alias)
        tag['href'] = alias
    # this tag may need to be re-processed by another plugin
    return True