def browse(self):
    """Build a directory of authors from the browse/search page.

    Fetches the page for the current sort/search/page state, adds one
    directory entry per author, then appends a next-page item.
    Removed a leftover debug ``print next_url``.
    """
    url = self.browse_url % (
        utils.url_root, urllib.quote_plus(self.sort), self.current_page,
        urllib.quote_plus(self.search_term), utils.selected_languages())
    html_data = http_request.get(url)
    # Parse only <main> to keep the soup small.
    soup_strainer = SoupStrainer("main")
    beautiful_soup = BeautifulSoup(
        html_data, soup_strainer,
        convertEntities=BeautifulSoup.HTML_ENTITIES)
    ol_entries = beautiful_soup.find("ol", {"class": "authorsByLetter"})
    if ol_entries is None:
        # Empty result page (or markup changed): close the listing cleanly.
        control.directory_end()
        return
    for li_entry in ol_entries.findAll("li"):
        self.add_author_directory(li_entry)
    if self.search_term == "":
        # NOTE(review): the fetch URL above uses self.sort but paging uses
        # self.sort_method -- confirm the two are meant to differ.
        next_url = "%s?action=browse-authors&page=%i&sort=%s" % (
            sys.argv[0], self.current_page + 1,
            urllib.quote_plus(self.sort_method))
    else:
        next_url = "%s?action=search-authors&page=%i&query=%s" % (
            sys.argv[0], self.current_page + 1,
            urllib.quote_plus(self.search_term))
    utils.add_next_page(beautiful_soup, next_url, self.current_page + 1)
    control.directory_end()
    return
def live(self):
    """Build a directory of currently live shows.

    Adds one entry per <li> inside ul.entries; ends the directory early
    when nothing is live. Removed a leftover debug ``print url``.
    """
    url = self.browse_url % (
        utils.url_root, urllib.quote_plus(self.sort), self.current_page,
        utils.selected_languages())
    html_data = http_request.get(url)
    soup_strainer = SoupStrainer("div", {"class": "tab-content"})
    beautiful_soup = BeautifulSoup(
        html_data, soup_strainer,
        convertEntities=BeautifulSoup.HTML_ENTITIES)
    ul_entries = beautiful_soup.find("ul", {"class": "entries"})
    if ul_entries is None:
        # nothing is live
        control.directory_end()
        return
    for li_entry in ul_entries.findAll("li"):
        # Trailing "%s" is a placeholder utils fills with the event URL.
        action_url = ("%s?action=list-event&event-url=" % (sys.argv[0])) + "%s"
        utils.add_show_directory(li_entry, action_url)
    # NOTE(review): fetch uses self.sort, paging uses self.sort_method --
    # confirm the mismatch is intentional.
    next_url = "%s?action=browse-shows&page=%i&sort=%s" % (
        sys.argv[0], self.current_page + 1,
        urllib.quote_plus(self.sort_method))
    utils.add_next_page(beautiful_soup, next_url, self.current_page + 1)
    control.directory_end()
    return
def list(self):
    """List an author's posts (article-based markup) with paging.

    Fix: BeautifulSoup's ``findAll`` returns an empty list -- never None --
    so the previous ``if articles is None`` guard could never fire and empty
    result pages fell through to the loop. Test emptiness instead.
    """
    url = "%s/%s/posts?page=%u&%s" % (
        utils.url_root, self.author_url, self.current_page,
        utils.selected_languages())
    html_data = http_request.get(url)
    soup_strainer = SoupStrainer("main")
    beautiful_soup = BeautifulSoup(
        html_data, soup_strainer,
        convertEntities=BeautifulSoup.HTML_ENTITIES)
    articles = beautiful_soup.findAll("article")
    if not articles:
        # No posts on this page: close the listing without a next-page item.
        control.directory_end()
        return
    for article in articles:
        utils.add_entry_video(article)
    next_url = "%s?action=list-author&page=%i&author-url=%s" % (
        sys.argv[0], self.current_page + 1,
        urllib.quote_plus(self.author_url))
    utils.add_next_page(beautiful_soup, next_url, self.current_page + 1)
    control.directory_end()
    return
def browse(self):
    """Build a directory of board threads, skipping image-less threads.

    Each entry links to the thread view; a next-page item is appended
    while more pages remain.
    """
    threads = utils.get_board(self.board, self.page)["threads"]
    for thread in threads:
        first_post = thread["posts"][0]
        if int(first_post["images"]) <= 0:
            continue  # nothing to show for threads without images
        # Prefer the subject, then the comment, then the filename as label.
        if "sub" in first_post:
            label = first_post["sub"]
        elif "com" in first_post:
            label = first_post["com"]
        else:
            label = first_post["filename"]
        title = "%s | images: %s" % (label, first_post["images"] + 1)
        icon = utils.get_thumb_url(self.board, first_post["tim"])
        utils.add_directory(
            title, icon, icon,
            "%s?action=thread&board=%s&id=%s" % (
                sys.argv[0], urllib.quote_plus(self.board),
                first_post["no"]))
    if self.page < self.totalPages:
        next_page = self.page + 1
        utils.add_next_page(
            "%s?action=%s&page=%s&total_pages=%s&board=%s" % (
                sys.argv[0], self.action, next_page, self.totalPages,
                self.board),
            next_page)
    control.directory_end()
    return
def browse(self):
    """Build a directory of authors from the tab-content author list.

    Adds one entry per <li> inside ul.authors, then a next-page item whose
    action depends on whether a search is active.
    Removed a leftover debug ``print next_url``.
    """
    url = self.browse_url % (
        utils.url_root, urllib.quote_plus(self.sort), self.current_page,
        urllib.quote_plus(self.search_term), utils.selected_languages())
    html_data = http_request.get(url)
    soup_strainer = SoupStrainer("div", {"class": "tab-content"})
    beautiful_soup = BeautifulSoup(
        html_data, soup_strainer,
        convertEntities=BeautifulSoup.HTML_ENTITIES)
    ul_authors = beautiful_soup.find("ul", {"class": "authors"})
    if ul_authors is None:
        # Empty result page: close the listing cleanly.
        control.directory_end()
        return
    for li_entry in ul_authors.findAll("li"):
        self.add_author_directory(li_entry)
    if self.search_term == "":
        # NOTE(review): fetch uses self.sort, paging uses self.sort_method --
        # confirm the two are meant to differ.
        next_url = "%s?action=browse-authors&page=%i&sort=%s" % (
            sys.argv[0], self.current_page + 1,
            urllib.quote_plus(self.sort_method))
    else:
        next_url = "%s?action=search-authors&page=%i&query=%s" % (
            sys.argv[0], self.current_page + 1,
            urllib.quote_plus(self.search_term))
    utils.add_next_page(beautiful_soup, next_url, self.current_page + 1)
    control.directory_end()
    return
def browse_group(self):
    """List the courses of a group/section page, preceded by a search entry.

    Fix: both URL templates contained "§ion=" -- an HTML-entity corruption
    where "&sect" in "&section=" was decoded to the section sign -- which
    broke the "section" query parameter. Restored "&section=".
    """
    skip = (self.current_page - 1) * self.per_page
    take = self.per_page
    select_filter = utils.create_filter_criteria(self.section, self.group)
    results_count = utils.get_course_meta(select_filter)["totalResultCount"]
    courses = utils.get_course_data(select_filter, skip, take)
    utils.add_directory(
        utils.text_green % control.lang(30501),
        utils.icon_search, utils.icon_search,
        "%s?action=search&section=%s&group=%s" % (
            sys.argv[0], self.section, self.group))
    for course in courses:
        name = course["courseName"].encode('utf-8')
        thumb = course["courseImage"]
        if thumb is None or thumb == '':
            thumb = self.icon  # fall back to the addon icon
        cid = course["id"]
        utils.add_directory(
            name, thumb, thumb,
            "%s?action=view-course&id=%s&url=%s" % (
                sys.argv[0], cid, utils.url_course % cid))
    # More results remain beyond the slice we just displayed?
    has_more = (results_count - (skip + self.per_page)) > 0
    if has_more:
        next_url = "%s?action=browse-group&group=%s&section=%s&page=%i" % (
            sys.argv[0], urllib.quote_plus(self.group, safe=':/'),
            urllib.quote_plus(self.section, safe=':/'),
            self.current_page + 1)
        utils.add_next_page(next_url, self.current_page + 1)
    control.directory_end(False)
    return
def browse_group(self):
    """List the courses of a group/section page, preceded by a search entry.

    Fix: both URL templates contained "§ion=" -- an HTML-entity corruption
    where "&sect" in "&section=" was decoded to the section sign -- which
    broke the "section" query parameter. Restored "&section=".
    """
    skip = (self.current_page - 1) * self.per_page
    take = self.per_page
    select_filter = utils.create_filter_criteria(self.section, self.group)
    results_count = utils.get_course_meta(select_filter)["totalResultCount"]
    courses = utils.get_course_data(select_filter, skip, take)
    utils.add_directory(
        utils.text_green % control.lang(30501),
        utils.icon_search, utils.icon_search,
        "%s?action=search&section=%s&group=%s" % (
            sys.argv[0], self.section, self.group))
    for course in courses:
        name = course["courseName"].encode('utf-8')
        thumb = course["courseImage"]
        if thumb is None or thumb == '':
            thumb = self.icon  # fall back to the addon icon
        cid = course["id"]
        utils.add_directory(
            name, thumb, thumb,
            "%s?action=view-course&id=%s&url=%s" % (
                sys.argv[0], cid, utils.url_course % cid))
    # More results remain beyond the slice we just displayed?
    has_more = (results_count - (skip + self.per_page)) > 0
    if has_more:
        next_url = "%s?action=browse-group&group=%s&section=%s&page=%i" % (
            sys.argv[0], urllib.quote_plus(self.group, safe=':/'),
            urllib.quote_plus(self.section, safe=':/'),
            self.current_page + 1)
        utils.add_next_page(next_url, self.current_page + 1)
    control.directory_end(False)
    return
def search(self):
    """Prompt for a query when none is set, then list matching courses.

    Fixes:
    - "§ion=" entity corruption restored to "&section=" in the paging URL.
    - the user-typed search term is now quote_plus-encoded in the paging
      URL so spaces and special characters survive the round trip (it was
      interpolated raw, unlike every other parameter here).
    """
    if self.search_term is None or self.search_term == '':
        t = control.lang(30201).encode('utf-8')
        k = control.keyboard('', t)
        k.doModal()
        self.search_term = k.getText() if k.isConfirmed() else None
        if self.search_term is None or self.search_term == '':
            # User cancelled or entered nothing: no listing to build.
            return
    skip = (self.current_page - 1) * self.per_page
    take = self.per_page
    select_filter = utils.create_filter_criteria(
        self.section, self.group, self.search_term)
    results_count = utils.get_course_meta(select_filter)["totalResultCount"]
    courses = utils.get_course_data(select_filter, skip, take)
    for course in courses:
        name = course["courseName"].encode('utf-8')
        thumb = course["courseImage"]
        if thumb is None or thumb == '':
            thumb = self.icon  # fall back to the addon icon
        cid = course["id"]
        utils.add_directory(
            name, thumb, thumb,
            "%s?action=view-course&id=%s&url=%s" % (
                sys.argv[0], cid, utils.url_course % cid))
    has_more = (results_count - (skip + self.per_page)) > 0
    if has_more:
        next_url = "%s?action=search&group=%s&section=%s&page=%i&query=%s" % (
            sys.argv[0], urllib.quote_plus(self.group, safe=':/'),
            urllib.quote_plus(self.section, safe=':/'),
            self.current_page + 1,
            urllib.quote_plus(self.search_term))
        utils.add_next_page(next_url, self.current_page + 1)
    control.directory_end(False)
    return
def browse(self):
    """Build a directory of shows from the browse page.

    Fix: ``findAll`` returns an empty list, never None, so the previous
    ``if articles is None`` guard was dead code; test emptiness instead so
    empty pages end the directory without a bogus next-page item.
    """
    url = self.browse_url % (
        utils.url_root, urllib.quote_plus(self.sort), self.current_page,
        utils.selected_languages())
    html_data = http_request.get(url)
    soup_strainer = SoupStrainer("main")
    beautiful_soup = BeautifulSoup(
        html_data, soup_strainer,
        convertEntities=BeautifulSoup.HTML_ENTITIES)
    articles = beautiful_soup.findAll("article")
    if not articles:
        control.directory_end()
        return
    for article in articles:
        # Trailing "%s" is a placeholder utils fills with the show URL.
        action_url = ("%s?action=list-show&show-url=" % (sys.argv[0])) + "%s"
        utils.add_show_directory(article, action_url)
    # NOTE(review): fetch uses self.sort, paging uses self.sort_method --
    # confirm the mismatch is intentional.
    next_url = "%s?action=browse-shows&page=%i&sort=%s" % (
        sys.argv[0], self.current_page + 1,
        urllib.quote_plus(self.sort_method))
    utils.add_next_page(beautiful_soup, next_url, self.current_page + 1)
    control.directory_end()
    return
def list(self):
    """List the videos of an event page (relative or absolute URL).

    Fix: ``findAll`` returns an empty list, never None, so the previous
    ``if articles is None`` guard was dead code; test emptiness instead.
    """
    # Event URLs may be site-relative; prefix the site root when needed.
    if not re.match("^https?:", self.event_url):
        url = "%s%s" % (utils.url_root, self.event_url)
    else:
        url = self.event_url
    url = "%s?sort=%s&page=%i&direction=asc&%s" % (
        url, self.sort, self.current_page, utils.selected_languages())
    html_data = http_request.get(url)
    soup_strainer = SoupStrainer("main")
    beautiful_soup = BeautifulSoup(
        html_data, soup_strainer,
        convertEntities=BeautifulSoup.HTML_ENTITIES)
    articles = beautiful_soup.findAll("article")
    if not articles:
        control.directory_end()
        return
    for article in articles:
        utils.add_entry_video(article)
    next_url = "%s?action=list-event&page=%i&sort=%s&event-url=%s" % (
        sys.argv[0], self.current_page + 1,
        urllib.quote_plus(self.sort_method),
        urllib.quote_plus(self.event_url))
    utils.add_next_page(beautiful_soup, next_url, self.current_page + 1)
    control.directory_end()
    return
def browse_gallery(self):
    """Show a page of random galleries and append a next-page item."""
    # Timestamp embedded in the plugin URL (appears to act as a
    # cache-buster for the random listing).
    stamp = int(time.time())
    client = imgur.Api()
    payload = client.get_random_gallery(self.page)
    for gallery in payload["galleries"]:
        utils.add_gallery_item(gallery)
    upcoming = self.page + 1
    # NOTE(review): the URL carries `upcoming` while the label argument is
    # `upcoming + 1` -- presumably 0-based pages with 1-based labels;
    # confirm intent.
    utils.add_next_page(
        "%s%s?action=random&page=%s" % (sys.argv[0], stamp, upcoming),
        upcoming + 1)
    control.directory_end(force_thumb=True)
def list(self):
    """List the videos of a series page (article-based markup) with paging.

    Removed a leftover debug ``print url``.
    """
    url = "%s%s?sort=%s&page=%i&%s" % (
        utils.url_root, self.series_url, self.sort, self.current_page,
        utils.selected_languages())
    html_data = http_request.get(url)
    soup_strainer = SoupStrainer("main")
    beautiful_soup = BeautifulSoup(
        html_data, soup_strainer,
        convertEntities=BeautifulSoup.HTML_ENTITIES)
    for article in beautiful_soup.findAll("article"):
        utils.add_entry_video(article)
    # NOTE(review): fetch uses self.sort, paging uses self.sort_method --
    # confirm the mismatch is intentional.
    next_url = "%s?action=list-series&page=%i&sort=%s&series-url=%s" % (
        sys.argv[0], self.current_page + 1,
        urllib.quote_plus(self.sort_method),
        urllib.quote_plus(self.series_url))
    utils.add_next_page(beautiful_soup, next_url, self.current_page + 1)
    control.directory_end()
    return
def list(self):
    """List the videos of a series page (tab-content markup) with paging.

    Fixes:
    - removed a leftover debug ``print url``;
    - guard against ``find(...)`` returning None (empty page or markup
      change) before calling ``findAll`` on it -- the sibling listing
      methods in this file already do this; here it raised AttributeError.
    """
    url = "%s%s?sort=%s&page=%i&%s" % (
        utils.url_root, self.series_url, self.sort, self.current_page,
        utils.selected_languages())
    html_data = http_request.get(url)
    soup_strainer = SoupStrainer("div", {"class": "tab-content"})
    beautiful_soup = BeautifulSoup(
        html_data, soup_strainer,
        convertEntities=BeautifulSoup.HTML_ENTITIES)
    ul_entries = beautiful_soup.find("ul", {"class": "entries"})
    if ul_entries is None:
        # No entries on this page: close the listing cleanly.
        control.directory_end()
        return
    for li_entry in ul_entries.findAll("li"):
        utils.add_entry_video(li_entry)
    # NOTE(review): fetch uses self.sort, paging uses self.sort_method --
    # confirm the mismatch is intentional.
    next_url = "%s?action=list-series&page=%i&sort=%s&series-url=%s" % (
        sys.argv[0], self.current_page + 1,
        urllib.quote_plus(self.sort_method),
        urllib.quote_plus(self.series_url))
    utils.add_next_page(beautiful_soup, next_url, self.current_page + 1)
    control.directory_end()
    return
def browse_gallery(self, gallery_type=None):
    """Show a gallery page (imgur main or reddit-sourced) with paging.

    Fix: the paging URL contained "§ion=" -- an HTML-entity corruption
    where "&sect" in "&section=" was decoded to the section sign -- which
    broke the "section" query parameter. Restored "&section=".
    """
    # Timestamp embedded in the plugin URL (appears to act as a
    # cache-buster for the listing).
    epoch_time = int(time.time())
    api = imgur.Api()
    if gallery_type == 'reddit':
        data = api.get_reddit_gallery(
            self.section, self.sort, "all", self.page)
    else:
        data = api.get_gallery(
            self.section, self.sort, "all", self.page, False)
    for g in data["galleries"]:
        utils.add_gallery_item(g)
    next_page = self.page + 1
    # NOTE(review): the URL carries `next_page` while the label argument is
    # `next_page + 1` -- presumably 0-based pages with 1-based labels;
    # confirm intent. Also note the URL uses self.gallery_type, not the
    # gallery_type parameter -- confirm that asymmetry is deliberate.
    utils.add_next_page(
        "%s%s?action=gallery&page=%s&section=%s&sort=%s&type=%s" % (
            sys.argv[0], epoch_time, next_page, self.section, self.sort,
            self.gallery_type),
        next_page + 1)
    control.directory_end(force_thumb=True)
def list(self):
    """List an author's posts (user-content markup) and append paging."""
    page_url = "%s/%s/posts?page=%u&%s" % (
        utils.url_root, self.author_url, self.current_page,
        utils.selected_languages())
    markup = http_request.get(page_url)
    # Restrict parsing to the user-content container.
    strainer = SoupStrainer("div", {"class": "user-content"})
    soup = BeautifulSoup(
        markup, strainer, convertEntities=BeautifulSoup.HTML_ENTITIES)
    entry_list = soup.find("ul", {"class": "entries"})
    if entry_list is None:
        # Nothing on this page: close the listing without a next-page item.
        control.directory_end()
        return
    for entry in entry_list.findAll("li"):
        utils.add_entry_video(entry)
    follow_up = "%s?action=list-author&page=%i&author-url=%s" % (
        sys.argv[0], self.current_page + 1,
        urllib.quote_plus(self.author_url))
    utils.add_next_page(soup, follow_up, self.current_page + 1)
    control.directory_end()
    return