    def get(self, *args):
        self.response.headers["Content-Type"] = "application/json; charset=UTF-8"
        url_str = "http://www.mangareader.net" + self.request.path

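        # Serve the cached JSON if it was stored less than an hour ago; otherwise re-scrape.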
        saved, age = JsonDump.get_page(url_str)
        if saved:
            age = (datetime.utcnow() - saved.last_modified).total_seconds()

        if saved and age < 60 * 60:
            self.response.out.write(saved.content)
            return

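        # Cache miss or stale entry: fetch the page and scrape its chapter list.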
        mangareader = urllib.urlopen(url_str)
        HTML = mangareader.read()
        soup = BeautifulSoup.BeautifulSoup(HTML)
        array = []
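        # "chico_manga" icons appear in several tables; keep only those nested inside the #chapterlist table.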
        for chicodiv in soup.findAll(attrs={"class": "chico_manga"}):
            tableId = chicodiv.parent.parent.parent.parent["id"]
            if tableId != "chapterlist":
                continue

            anchor = chicodiv.parent.a
            name = str(anchor.string)
            path = anchor["href"]
            entry = {"name": name, "path": path}
            array.append(entry)

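        # Serialize the chapter list and create or update the cached JsonDump entry.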
        json_text = json.dumps(array)
        if not saved:
            saved = JsonDump.JsonDump(url=url_str, content=json_text)
        else:
            saved.content = json_text

        JsonDump.age_set(url_str, saved)
        self.response.out.write(json_text)
Example #2
    def get(self):
        self.response.headers['Content-Type'] = 'application/json; charset=UTF-8'

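        # Front-page scrape: serve the cached copy if it is under an hour old.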
        url_str = "http://www.mangareader.net"
        saved, age = JsonDump.get_page(url_str)
        if saved:
            age = (datetime.utcnow() - saved.last_modified).total_seconds()

        if saved and age < 60 * 60:
            self.response.out.write(saved.content)
            return

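        # Cache miss: collect the first five "popularitemcaption" links as name/path pairs.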
        mangareader = urllib.urlopen(url_str)
        HTML = mangareader.read()
        soup = BeautifulSoup.BeautifulSoup(HTML)
        array = []
        for popular in soup.findAll(attrs={"class":"popularitemcaption"}):
            path = str(popular['href'])
            name = str(popular.string)
            entry = {"name":name, "path":path}
            array.append(entry)
            if len(array) == 5:
                break

        json_text = json.dumps(array)
        if not saved:
            saved = JsonDump.JsonDump(url=url_str, content=json_text)
        else:
            saved.content = json_text

        JsonDump.age_set(url_str, saved)
        self.response.out.write(json_text)
Example #3
    def get(self, *args):
        self.response.headers['Content-Type'] = 'application/json; charset=UTF-8'
        url_str = "http://www.mangareader.net" + self.request.path

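        # Cached chapters are served without a freshness check, unlike the listing handlers.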
        saved, age = JsonDump.get_page(url_str)
        if saved:
            self.response.out.write(saved.content)
            return

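        # Fetch the chapter page; the #pageMenu element lists one option per page of the chapter.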
        mangareader = urllib.urlopen(url_str)
        HTML = mangareader.read()
        soup = BeautifulSoup.BeautifulSoup(HTML)
        pages = []
        menu = soup.find(attrs={"id":"pageMenu"})
        error = ""
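        # Each option's page is fetched to pull the #img src; whitespace text nodes in
        # menu.contents raise TypeError when indexed and are recorded and skipped.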
        for option in menu.contents:
            try:
                page_url = "http://www.mangareader.net" + option['value']
                page = urllib.urlopen(page_url)
                page_html = page.read()
                page_soup = BeautifulSoup.BeautifulSoup(page_html)
                img = page_soup.find(attrs={"id":"img"})['src']
                entry = {"index":int(option.string), "image_url":img}
                pages.append(entry)
            except TypeError:
                error = "TypeError"


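        # Bundle the page count with the page list and cache the serialized chapter under its URL.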
        chapter = {"page_count":len(pages), "pages":pages}

        json_text = json.dumps(chapter)
        saved = JsonDump.JsonDump(url=url_str, content=json_text)
        JsonDump.age_set(url_str, saved)

        self.response.out.write(saved.content)
Example #4
    def get(self):
        JsonDump.flush()
        self.response.headers['Content-Type'] = 'application/json; charset=UTF-8'
        url_str = "http://www.mangareader.net/alphabetical"
        saved, age = JsonDump.get_page(url_str)
        if saved:
            age = (datetime.utcnow() - saved.last_modified).total_seconds()

        if saved and age < 60 * 60:
            self.response.out.write(saved.content)
            return

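        # Cache miss or stale entry: collect every series link from the "series_alpha" lists.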
        mangareader = urllib.urlopen(url_str)
        HTML = mangareader.read()
        soup = BeautifulSoup.BeautifulSoup(HTML)
        array = []
        for letter in soup.findAll("ul",attrs={"class":"series_alpha"}):
            for series in letter.contents:
                try:
                    path = str(series.a['href'])
                    name = str(series.a.string)
                    entry = {"name":name, "path":path}
                    array.append(entry)
                except KeyError:
                    continue
                except AttributeError:
                    continue

        json_text = json.dumps(array)
        if not saved:
            saved = JsonDump.JsonDump(url=url_str, content=json_text)
        else:
            saved.content = json_text

        JsonDump.age_set(url_str, saved)
        self.response.out.write(json_text)