def pygments_markdown(content):
    """Render *content* as Markdown with Pygments-highlighted code blocks.

    A ``<pre><code>`` block whose text starts with ``:::lexer`` selects a
    Pygments lexer by name; unknown names fall back to ``text``.  If the
    rendered HTML contains structural/script tags the raw content is
    returned escaped instead of rendered.
    """
    # Flatten every lexer's alias tuple into one tuple for the lookup below.
    _lexer_names = reduce(lambda a, b: a + b[2], lexers.LEXERS.itervalues(), ())
    _formatter = formatters.HtmlFormatter(cssclass='highlight')
    html = markdown.markdown(content)
    # Using html.parser to prevent bs4 adding <html> tag
    soup = BeautifulSoup(html)
    # Refuse markup that smuggles in structural or executable tags.
    for tag in ("script", "html", "head", "title", "div", "hr",
                "article", "header", "footer"):
        if soup.findAll(tag):
            return escape(content)
    for pre in soup.findAll('pre'):
        if pre.code:
            txt = unicode(pre.code.text)
            lexer_name = "text"
            if txt.startswith(':::'):
                # First line like ":::python" names the lexer.
                lexer_name, txt = txt.split('\n', 1)
                lexer_name = lexer_name.split(':::')[1]
                if lexer_name not in _lexer_names:
                    lexer_name = "text"
            lexer = lexers.get_lexer_by_name(lexer_name, stripnl=True,
                                             encoding='UTF-8')
            # BUGFIX: the original replace() calls were no-ops (replacing a
            # character with itself).  Decode the HTML entities that the
            # markdown renderer produced before handing text to Pygments.
            if txt.find("&lt;") != -1 or txt.find("&gt;") != -1:
                txt = txt.replace("&lt;", "<").replace("&gt;", ">")
            if txt.find("&amp;") != -1:
                txt = txt.replace("&amp;", "&")
            highlighted = highlight(txt, lexer, _formatter)
            div_code = BeautifulSoup(highlighted).div
            if not div_code:
                return content
            pre.replaceWith(div_code)
    return unicode(soup)
def update_addons(quiet=True):
    """Check all addons recorded in ``install_history`` for updates.

    Compares each installed addon's recorded version against the latest
    available from its GitHub source (zip release or repository
    addon.xml) and re-runs the installer for any that are newer.  When
    *quiet* is false a busy dialog is shown and a completion
    notification is raised.
    """
    from distutils.version import LooseVersion
    if not quiet:
        kodi.open_busy_dialog()
    sources = DB.query("SELECT addon_id, source FROM install_history")
    update_count = 0
    for source in sources:
        addon_id = source[0]
        source = json.loads(source[1])
        # Only consider addons still present in this Kodi install.
        if kodi.get_condition_visiblity("System.HasAddon(%s)" % addon_id):
            if source['type'] == SOURCES.ZIP:
                url, filename, full_name, version = github_api.find_zip(
                    source['user'], addon_id)
                if LooseVersion(version) > LooseVersion(source['version']):
                    GitHub_Installer(addon_id, url, full_name,
                                     kodi.vfs.join("special://home", "addons"),
                                     False, quiet)
                    update_count += 1
            elif source['type'] == SOURCES.REPO:
                # BUGFIX: read from the current row's parsed record
                # (`source`), not the result list (`sources`) -- indexing
                # the list with a string raised TypeError.
                full_name = source['user'] + '/' + source['repo']
                xml_str = github_api.find_xml(full_name)
                xml = BeautifulSoup(xml_str)
                addon = xml.find('addon')
                if LooseVersion(addon['version']) > LooseVersion(
                        source['version']):
                    GitHub_Installer(addon_id, source['url'], full_name,
                                     kodi.vfs.join("special://home", "addons"),
                                     True, quiet)
                    update_count += 1
    if not quiet:
        kodi.close_busy_dialog()
        if update_count > 0:
            kodi.notify("Update complete",
                        'Some addons may require restarting kodi.')
        else:
            kodi.notify("Update complete", 'No updates found.')
def update_addons(quiet=True):
    """Scan ``install_history`` and update any out-of-date addons.

    For each recorded addon still installed, compares the stored version
    against the newest version on its GitHub source and reinstalls when
    newer.  Shows a busy dialog and a completion notification unless
    *quiet*.
    """
    from distutils.version import LooseVersion
    if not quiet:
        kodi.open_busy_dialog()
    sources = DB.query("SELECT addon_id, source FROM install_history")
    update_count = 0
    for source in sources:
        addon_id = source[0]
        source = json.loads(source[1])
        # Skip addons that have since been uninstalled.
        if kodi.get_condition_visiblity("System.HasAddon(%s)" % addon_id):
            if source['type'] == SOURCES.ZIP:
                url, filename, full_name, version = github_api.find_zip(
                    source['user'], addon_id)
                if LooseVersion(version) > LooseVersion(source['version']):
                    GitHub_Installer(addon_id, url, full_name,
                                     kodi.vfs.join("special://home", "addons"),
                                     False, quiet)
                    update_count += 1
            elif source['type'] == SOURCES.REPO:
                # BUGFIX: `sources['user']`/`sources['repo']` indexed the
                # query-result list with a string; use the parsed `source`
                # record for this row instead.
                full_name = source['user'] + '/' + source['repo']
                xml_str = github_api.find_xml(full_name)
                xml = BeautifulSoup(xml_str)
                addon = xml.find('addon')
                if LooseVersion(addon['version']) > LooseVersion(
                        source['version']):
                    GitHub_Installer(addon_id, source['url'], full_name,
                                     kodi.vfs.join("special://home", "addons"),
                                     True, quiet)
                    update_count += 1
    if not quiet:
        kodi.close_busy_dialog()
        if update_count > 0:
            kodi.notify("Update complete",
                        'Some addons may require restarting kodi.')
        else:
            kodi.notify("Update complete", 'No updates found.')
def browse_repository(url):
    """Download the repository zip at *url* and resolve its addon.xml.

    Locates the ``addon.xml`` bundled in the archive, follows the URL in
    its ``<info>`` element, and returns that remote document parsed with
    BeautifulSoup.  Returns False if the archive has no addon.xml.
    """
    import requests, zipfile, StringIO
    from libs.BeautifulSoup import BeautifulSoup
    response = requests.get(url, stream=True)
    archive = zipfile.ZipFile(StringIO.StringIO(response.content))
    for member in archive.namelist():
        if member.endswith('addon.xml'):
            manifest = BeautifulSoup(archive.read(member))
            info_url = manifest.find('info').text
            return BeautifulSoup(requests.get(info_url).text)
    return False
def browse_repository(url):
    """Fetch the zip archive at *url* and follow its addon.xml info link.

    Returns the document referenced by the archive's ``addon.xml``
    ``<info>`` element as BeautifulSoup, or False when no addon.xml is
    present in the archive.
    """
    import requests, zipfile, StringIO
    from libs.BeautifulSoup import BeautifulSoup
    payload = requests.get(url, stream=True).content
    bundle = zipfile.ZipFile(StringIO.StringIO(payload))
    for name in bundle.namelist():
        if not name.endswith('addon.xml'):
            continue
        addon_xml = BeautifulSoup(bundle.read(name))
        remote_url = addon_xml.find('info').text
        return BeautifulSoup(requests.get(remote_url).text)
    return False
def parseHTML(self, html):
    """Parse given html.

    Returns dictionary with keys: IM, Forum, Alt Forum, HTML, Direct,
    Forum Thumb, Alt Forum Thumb, HTML Thumb, Twitter Link and basic
    links, plus the raw [imageLink, thumbLink, adLink] list.
    """
    page = BeautifulSoup(html)
    # The useful markup is embedded inside the page's <textarea>.
    embedded = BeautifulSoup(page.find('textarea').contents[0])
    anchor = embedded.find('a')
    imageLink = anchor['href']
    thumbLink = anchor.find('img')['src']
    adLink = anchor['href']
    links = createLinks(adLink, None, imageLink, thumbLink)
    return links, [imageLink, thumbLink, adLink]
def parseHTML(self, html):
    """Parse given html.

    Returns dictionary with keys: IM, Forum, Alt Forum, HTML, Direct,
    Forum Thumb, Alt Forum Thumb, HTML Thumb, Twitter Link, plus the raw
    [imageLink, thumbLink, adLink] list.
    """
    page = BeautifulSoup(html)
    # Walk the fixed nested-table layout down to the anchor holding the ad.
    outer = page.find('table').findNext('table').findNext('table')
    inner = outer.tr.findNext('table').findNext('table')
    anchor = inner.tr.td.a
    adLink = anchor['href']
    thumbLink = anchor.img['src']
    # The full-size image markup is stored in the "directhtml" form field.
    field = page.find(attrs={'name': 'directhtml'})
    snippet = BeautifulSoup(field['value'])
    imageLink = snippet.find('img')['src']
    links = createLinks(adLink, None, imageLink, thumbLink)
    return links, [imageLink, thumbLink, adLink]
def handleDetail(cls, html):
    """Parse a detail page into a {db-key: value} dictionary.

    Each <li> element's text is split on ITEM_SEPARATOR_DETAIL; labels
    present in ITEM_NAME_KV_DETAIL are mapped to their database key
    names.  Also caches the parsed page on ``cls.soup``.
    """
    detail = {}
    cls.soup = BeautifulSoup(html)
    for li in cls.soup.findAll('li'):
        text = ''.join(li.findAll(text=True))
        parts = text.split(cls.ITEM_SEPARATOR_DETAIL)
        label = parts[0].encode('utf8')
        if label in cls.ITEM_NAME_KV_DETAIL:
            dbkey = cls.ITEM_NAME_KV_DETAIL[label]
            detail[dbkey] = parts[1]
    return detail
def get(self):
    """Render the bookmarklet share page for the URL in query param ``u``.

    Fetches the target page, resolves its title (falling back to query
    param ``t``), absolutizes image URLs, and renders the bookmarklet
    template.  Secure (https) URLs are rejected with an exit page.
    """
    shareURL = urllib.unquote(self.request.get('u'))
    shareSelection = urllib.unquote(self.request.get('s'))
    if shareURL.startswith('https'):
        path = os.path.join(os.path.dirname(__file__),
                            '../views/bookmarklet-exit.html')
        self.response.out.write(template.render(
            path, {'message': 'Oops! This is a secure page :('}))
        # BUGFIX: stop here -- the original fell through and fetched the
        # page anyway, writing a second template into the same response.
        return
    # NOTE(review): dropped the unused shareURLDir computation (dead local).
    page = urlfetch.fetch(shareURL)
    pageSoup = BeautifulSoup(page.content)
    try:
        shareTitle = pageSoup.html.head.title.string
    except AttributeError:
        # Page has no <title>; fall back to the 't' query parameter.
        shareTitle = urllib.unquote(self.request.get('t'))
    # Rewrite relative image URLs so they resolve from our page.
    pageImgs = pageSoup.findAll('img')
    for image in pageImgs:
        image['src'] = urlparse.urljoin(shareURL, image['src'])
    template_values = {
        'url': shareURL,
        'title': shareTitle,
        'selection': shareSelection,
        'images': pageImgs,
    }
    # We get the template path then show it
    path = os.path.join(os.path.dirname(__file__),
                        '../views/bookmarklet.html')
    self.response.out.write(template.render(path, template_values))
def parseHTML(self, html):
    """Parse given xml.

    Returns dictionary with keys: IM, Forum, Alt Forum, HTML, Direct,
    Forum Thumb, Alt Forum Thumb, HTML Thumb, Twitter Link, plus the raw
    [imageLink, thumbLink, adLink] list.
    """
    page = BeautifulSoup(html)
    first_table = page.find('table')
    adLink = first_table.tr.td.a["href"]
    # The second table carries the image URL and (HTML-encoded) thumbnail.
    second_table = first_table.findNext('table')
    imageLink = second_table.tr.td.input['value']
    thumb_input = second_table.tr.findNext('tr').td.input
    thumbLink = BeautifulSoup(thumb_input['value']).find('img')['src']
    links = createLinks(adLink, None, imageLink, thumbLink)
    return links, [imageLink, thumbLink, adLink]
def build_dependency_list(self, addon_id, url, full_name, master):
    """Record *addon_id*'s source and resolve its declared dependencies.

    Reads the addon's addon.xml, registers every unmet ``<import>`` in
    ``self.sources`` / ``self.unmet_addons``, then tries to satisfy
    unmet dependencies from the user repo, the tva repo, and a GitHub
    web search (each gated by a setting).  Raises a user-visible error
    for anything still unmet and finally marks *addon_id* completed.
    """
    #if test in ['xbmc.python', 'xbmc.gui'] or kodi.get_condition_visiblity('System.HasAddon(%s)' % addon_id) == 1: return True
    user, repo = full_name.split("/")
    kodi.log('Finding dependencies for: %s' % addon_id)
    if master:
        # Track a repo-type source; version comes from the remote addon.xml.
        self.sources[addon_id] = {"type": SOURCES.REPO, "url": url, "user": user, "repo": repo, "version": ""}
        xml_str = github_api.find_xml(full_name)
        self.sources[addon_id]['version'] = github_api.get_version_by_xml(BeautifulSoup(xml_str))
    else:
        # Zip install: download, then read the installed addon.xml from disk.
        version = downloader.download(url, addon_id, self._destination, True, self.quiet)
        src_file = kodi.vfs.join("special://home/addons", addon_id)
        kodi.vfs.join(src_file, "addon.xml")
        xml = kodi.vfs.read_file(kodi.vfs.join(src_file, "addon.xml"), soup=True)
        self.save_source(addon_id, {"type": SOURCES.ZIP, "url": url, "user": user, "repo": repo, "version": version})
    # NOTE(review): `xml` is only bound in the else branch above -- on the
    # master=True path this loop would hit a NameError; confirm intended
    # nesting against the original file.
    for dep in xml.findAll('import'):
        test = dep['addon']
        try:
            # Optional deps can be skipped outright or confirmed by prompt.
            if dep['optional'].lower() == 'true':
                if kodi.get_setting('install_optional') == 'false':
                    continue
                elif kodi.get_setting('prompt_optional') == "true":
                    c = kodi.dialog_confirm("Install Optional Dependency", dep['name'], dep['addon'])
                    if not c:
                        continue
        except:
            # 'optional' attribute absent: treat the dependency as required.
            pass
        if test in ['xbmc.python', 'xbmc.gui'] or kodi.get_condition_visiblity('System.HasAddon(%s)' % test) == 1:
            kodi.log('Dependency is already installed: %s' % test)
            continue
        self.required_addons += [test]
        if test not in self.available_addons:
            self.unmet_addons += [test]
        else:
            self.sources[test] = {"type": SOURCES.DEFAULT, "url": self.source_table[test]}
            kodi.log("%s dependency met in %s" % (test, self.source_table[test]))

    def user_resolver(user, unmet):
        # Try to satisfy *unmet* from a specific user's GitHub zips.
        dep_url, dep_filename, dep_full_name, version = github_api.find_zip(user, unmet)
        if dep_url:
            kodi.log("%s found in %s repo" % (unmet, user))
            self.met_addons.append(unmet)
            user, repo = dep_full_name.split("/")
            self.sources[unmet] = {"type": SOURCES.ZIP, "url": dep_url, "user": user, "repo": repo, "version": ""}
            kodi.log("%s dependency met in %s" % (unmet, dep_url))
            return True
        return False

    def github_resolver(unmet):
        # Last resort: search GitHub and let the user pick a repository.
        results = github_api.web_search(unmet)
        c = kodi.dialog_select("GitHub Search Results for %s" % unmet, [r['full_name'] for r in results['items']])
        if c is not False:
            dep = results['items'][c]
            dep_url = url = "https://github.com/%s/archive/master.zip" % (dep['full_name'])
            self.met_addons.append(unmet)
            dep_filename = "%s.zip" % unmet
            # NOTE(review): `user`/`repo` here come from the enclosing
            # scope (the addon being installed), not from the selected
            # search result -- looks suspicious; verify before relying on
            # these source records.
            self.sources[unmet] = {"type": SOURCES.REPO, "url": dep_url, "user": user, "repo": repo, "version": ""}
            kodi.log("%s dependency met in %s" % (unmet, dep_url))
            self.install_addon(unmet, dep_url, dep['full_name'], master=True)
            return True
        return False

    for unmet in self.unmet_addons:
        # Now attempt to locate dependencies from available sources
        # The addons that can be found in any enabled repos will be installed at the end.
        # check if this exists in users root repo
        if kodi.get_setting('source_user') == 'true':
            if user_resolver(user, unmet):
                continue
        # check if this exists in tva root repo
        if kodi.get_setting('source_tva') == 'true':
            if user_resolver(tva_user, unmet):
                continue
        # check if this exists on github
        if kodi.get_setting('source_github') == 'true':
            if github_resolver(unmet):
                continue
    # Whatever remains after resolution is a hard failure for this install.
    self.unmet_addons = list(set(self.unmet_addons) - set(self.met_addons))
    if len(self.unmet_addons):
        self.install_error = True
        if not self.quiet:
            kodi.close_busy_dialog()
        kodi.raise_error("", "Unmet Dependencies:", "See log or install manually", ','.join(self.unmet_addons))
        kodi.log("Unmet Dependencies for addon install: %s" % self.addon_id)
        kodi.log(','.join(self.unmet_addons))
    self.completed.append(addon_id)
if __name__ == '__main__': if len(sys.argv) != 2: print "should pass the inputfilename" sys.exit() for url in file(sys.argv[1]): # ignore comments url = url.strip() if url[0] == '#': continue #print "Processing: " + url html = urllib2.urlopen(url.strip()) soup = BeautifulSoup(html) res = soup.findAll('link', rel='alternate', attrs={'type': re.compile("^application/(atom|rss)\+xml")}) if len(res) == 0: #print "Couldn't find the Feed!" continue href = res[0]['href'] # relative link? if not href.startswith("http"): link = urljoin(url, href) else: link = href print link