コード例 #1
0
 def clear(self, domain=None, path=None, name=None):
     """Remove matching cookies from the in-memory jar and the moz_cookies table.

     Filters nest the same way CookieJar.clear's argument rules do: `path`
     is only honoured together with `domain`, and `name` only together
     with `path`.  With no arguments, every cookie row is deleted.
     """
     CookieJar.clear(self, domain, path, name)
     conditions = []
     values = []
     if domain is not None:
         conditions.append("host = ?")
         values.append(domain)
         if path is not None:
             conditions.append("path = ?")
             values.append(path)
             if name is not None:
                 conditions.append("name = ?")
                 values.append(name)
     clause = " AND ".join(conditions)
     if clause:
         clause = " WHERE " + clause

     def delete_rows(cur):
         # Only the fixed WHERE text is interpolated; user values stay
         # parameterized.
         cur.execute("DELETE FROM moz_cookies%s" % clause,
                     tuple(values))

     self._transaction(delete_rows)
コード例 #2
0
ファイル: browser.py プロジェクト: fakegit/webpy
class Browser(object):
    """A minimal scriptable browser built on urllib/urllib2.

    Maintains cookies across requests and records the last response's
    ``url``, ``path``, ``status`` and raw body (``data``) so links and
    forms on the current page can be inspected, followed and submitted.
    """

    def __init__(self):
        self.cookiejar = CookieJar()
        self._cookie_processor = HTTPCookieProcessor(self.cookiejar)
        self.form = None

        self.url = "http://0.0.0.0:8080/"
        self.path = "/"

        self.status = None
        self.data = None
        self._response = None
        self._forms = None

    @property
    def text(self):
        """Body of the current response decoded as UTF-8."""
        return self.data.decode('utf-8')

    def reset(self):
        """Clears all cookies and history."""
        self.cookiejar.clear()

    def build_opener(self):
        """Builds the opener using (urllib2/urllib.request).build_opener.
        Subclasses can override this function to provide custom openers.
        """
        return urllib_build_opener()

    def do_request(self, req):
        """Performs `req` and records url/path/status/data from the response.

        HTTP error responses are not raised; they are recorded like any
        other response so callers can inspect error pages too.
        """
        if DEBUG:
            print('requesting', req.get_method(), req.get_full_url())

        opener = self.build_opener()
        opener.add_handler(self._cookie_processor)
        try:
            self._response = opener.open(req)
        except HTTPError as e:
            # An HTTPError is itself a response-like object; keep it so
            # status and body of the error page remain available.
            self._response = e

        self.url = self._response.geturl()
        self.path = get_selector(Request(self.url))
        self.data = self._response.read()
        self.status = self._response.code
        # Invalidate caches tied to the previous page.
        self._forms = None
        self.form = None

        return self.get_response()

    def open(self, url, data=None, headers=None):
        """Opens the specified url, resolved relative to the current page."""
        # FIX: `headers` previously defaulted to a shared mutable dict ({}).
        url = urljoin(self.url, url)
        req = Request(url, data, headers or {})

        return self.do_request(req)

    def show(self):
        """Opens the current page in real web browser."""
        # FIX: `data` holds the raw (byte) response body, so write in
        # binary mode; the context manager guarantees the file is closed.
        with open('page.html', 'wb') as f:
            f.write(self.data)

        url = 'file://' + os.path.abspath('page.html')
        webbrowser.open(url)

    def get_response(self):
        """Returns a copy of the current response."""
        return addinfourl(BytesIO(self.data), self._response.info(), self._response.geturl())

    def get_soup(self):
        """Returns beautiful soup of the current document."""
        import BeautifulSoup
        return BeautifulSoup.BeautifulSoup(self.data)

    def get_text(self, e=None):
        """Returns content of e or the current document as plain text."""
        e = e or self.get_soup()
        return ''.join([htmlunquote(c) for c in e.recursiveChildGenerator()
                       if isinstance(c, text_type)])

    def _get_links(self):
        # All <a> tags of the current page.
        soup = self.get_soup()
        return [a for a in soup.findAll(name='a')]

    def get_links(self, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
        """Returns all links in the document matching the given filters."""
        return self._filter_links(self._get_links(),
            text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)

    def follow_link(self, link=None, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
        """Follows `link`, or the first link matching the filters.

        Raises BrowserError when nothing matches.
        """
        if link is None:
            links = self._filter_links(self.get_links(),
                text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)
            link = links and links[0]

        if link:
            return self.open(link['href'])
        else:
            raise BrowserError("No link found")

    def find_link(self, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
        """Returns the first link matching the filters, or None."""
        links = self._filter_links(self.get_links(),
            text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)
        # FIX: `links and links[0] or None` would return None for a falsy
        # first element; the conditional expression is exact.
        return links[0] if links else None

    def _filter_links(self, links,
            text=None, text_regex=None,
            url=None, url_regex=None,
            predicate=None):
        """Filters `links`; every supplied condition must hold to keep a link."""
        predicates = []
        if text is not None:
            predicates.append(lambda link: link.string == text)
        if text_regex is not None:
            predicates.append(lambda link: re_compile(text_regex).search(link.string or ''))
        if url is not None:
            predicates.append(lambda link: link.get('href') == url)
        if url_regex is not None:
            predicates.append(lambda link: re_compile(url_regex).search(link.get('href', '')))
        if predicate:
            # BUG FIX: was `predicate.append(predicate)`, which raised
            # AttributeError and never applied the custom predicate.
            predicates.append(predicate)

        def f(link):
            for p in predicates:
                if not p(link):
                    return False
            return True

        return [link for link in links if f(link)]

    def get_forms(self):
        """Returns all forms in the current document.
        The returned form objects implement the ClientForm.HTMLForm interface.
        """
        if self._forms is None:
            import ClientForm
            self._forms = ClientForm.ParseResponse(self.get_response(), backwards_compat=False)
        return self._forms

    def select_form(self, name=None, predicate=None, index=0):
        """Selects the specified form."""
        forms = self.get_forms()

        if name is not None:
            forms = [f for f in forms if f.name == name]
        if predicate:
            forms = [f for f in forms if predicate(f)]

        if forms:
            self.form = forms[index]
            return self.form
        else:
            raise BrowserError("No form selected.")

    def submit(self, **kw):
        """submits the currently selected form."""
        if self.form is None:
            raise BrowserError("No form selected.")
        req = self.form.click(**kw)
        return self.do_request(req)

    def __getitem__(self, key):
        return self.form[key]

    def __setitem__(self, key, value):
        self.form[key] = value
コード例 #3
0
ファイル: _models.py プロジェクト: stjordanis/httpx
class Cookies(MutableMapping):
    """
    HTTP Cookies, as a mutable mapping over an underlying `CookieJar`.
    """

    def __init__(self, cookies: typing.Optional[CookieTypes] = None) -> None:
        """Build from nothing, a plain dict, another `Cookies`, or a raw jar."""
        if cookies is None or isinstance(cookies, dict):
            self.jar = CookieJar()
            if isinstance(cookies, dict):
                for key, value in cookies.items():
                    self.set(key, value)
        elif isinstance(cookies, Cookies):
            # Copy the cookies, not the jar, so the instances stay independent.
            self.jar = CookieJar()
            for cookie in cookies.jar:
                self.jar.set_cookie(cookie)
        else:
            # Assume an existing CookieJar and adopt it as-is.
            self.jar = cookies

    def extract_cookies(self, response: Response) -> None:
        """
        Loads any cookies based on the response `Set-Cookie` headers.
        """
        # FIX: local was misspelled `urlib_response`.
        urllib_response = self._CookieCompatResponse(response)
        urllib_request = self._CookieCompatRequest(response.request)

        self.jar.extract_cookies(urllib_response, urllib_request)  # type: ignore

    def set_cookie_header(self, request: Request) -> None:
        """
        Sets an appropriate 'Cookie:' HTTP header on the `Request`.
        """
        urllib_request = self._CookieCompatRequest(request)
        self.jar.add_cookie_header(urllib_request)

    def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None:
        """
        Set a cookie value by name. May optionally include domain and path.
        """
        # Mirror http.cookiejar.Cookie's full constructor signature.
        kwargs = {
            "version": 0,
            "name": name,
            "value": value,
            "port": None,
            "port_specified": False,
            "domain": domain,
            "domain_specified": bool(domain),
            "domain_initial_dot": domain.startswith("."),
            "path": path,
            "path_specified": bool(path),
            "secure": False,
            "expires": None,
            "discard": True,
            "comment": None,
            "comment_url": None,
            "rest": {"HttpOnly": None},
            "rfc2109": False,
        }
        cookie = Cookie(**kwargs)  # type: ignore
        self.jar.set_cookie(cookie)

    def get(  # type: ignore
        self,
        name: str,
        default: typing.Optional[str] = None,
        domain: typing.Optional[str] = None,
        path: typing.Optional[str] = None,
    ) -> typing.Optional[str]:
        """
        Get a cookie by name. May optionally include domain and path
        in order to specify exactly which cookie to retrieve.

        Raises `CookieConflict` if more than one cookie matches.
        """
        value = None
        for cookie in self.jar:
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if value is not None:
                            message = f"Multiple cookies exist with name={name}"
                            raise CookieConflict(message)
                        value = cookie.value

        if value is None:
            return default
        return value

    def delete(
        self,
        name: str,
        domain: typing.Optional[str] = None,
        path: typing.Optional[str] = None,
    ) -> None:
        """
        Delete a cookie by name. May optionally include domain and path
        in order to specify exactly which cookie to delete.
        """
        if domain is not None and path is not None:
            # Fully qualified: the jar can remove it directly.
            return self.jar.clear(domain, path, name)

        # Partially qualified: collect matches first, then clear each one
        # (mutating the jar while iterating it is unsafe).
        remove = []
        for cookie in self.jar:
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        remove.append(cookie)

        for cookie in remove:
            self.jar.clear(cookie.domain, cookie.path, cookie.name)

    def clear(
        self,
        domain: typing.Optional[str] = None,
        path: typing.Optional[str] = None,
    ) -> None:
        """
        Delete all cookies. Optionally include a domain and path in
        order to only delete a subset of all the cookies.

        Raises `ValueError` if `path` is given without `domain`
        (CookieJar.clear requires the arguments in that order).
        """
        args = []
        if domain is not None:
            args.append(domain)
        if path is not None:
            if domain is None:
                # FIX: was a bare `assert`, which is stripped under -O.
                raise ValueError("Cannot specify a cookie path without a domain.")
            args.append(path)
        self.jar.clear(*args)

    def update(self, cookies: typing.Optional[CookieTypes] = None) -> None:  # type: ignore
        """Merge cookies from any accepted source into this jar."""
        cookies = Cookies(cookies)
        for cookie in cookies.jar:
            self.jar.set_cookie(cookie)

    def __setitem__(self, name: str, value: str) -> None:
        return self.set(name, value)

    def __getitem__(self, name: str) -> str:
        value = self.get(name)
        if value is None:
            raise KeyError(name)
        return value

    def __delitem__(self, name: str) -> None:
        return self.delete(name)

    def __len__(self) -> int:
        return len(self.jar)

    def __iter__(self) -> typing.Iterator[str]:
        return (cookie.name for cookie in self.jar)

    def __bool__(self) -> bool:
        # Cheaper than len(): stop at the first cookie.
        for _ in self.jar:
            return True
        return False

    class _CookieCompatRequest(urllib.request.Request):
        """
        Wraps a `Request` instance up in a compatibility interface suitable
        for use with `CookieJar` operations.
        """

        def __init__(self, request: Request) -> None:
            super().__init__(
                url=str(request.url),
                headers=dict(request.headers),
                method=request.method,
            )
            self.request = request

        def add_unredirected_header(self, key: str, value: str) -> None:
            # Propagate headers the jar adds back onto the real request.
            super().add_unredirected_header(key, value)
            self.request.headers[key] = value

    class _CookieCompatResponse:
        """
        Wraps a `Response` instance up in a compatibility interface suitable
        for use with `CookieJar` operations.
        """

        def __init__(self, response: Response):
            self.response = response

        def info(self) -> email.message.Message:
            # CookieJar expects an email.message.Message-style header view.
            info = email.message.Message()
            for key, value in self.response.headers.items():
                info[key] = value
            return info
コード例 #4
0
class Browser(object):
    """A minimal scriptable browser built on urllib.

    Maintains cookies across requests and records the last response's
    ``url``, ``path``, ``status`` and raw body (``data``) so links and
    forms on the current page can be inspected, followed and submitted.
    """

    def __init__(self):
        self.cookiejar = CookieJar()
        self._cookie_processor = HTTPCookieProcessor(self.cookiejar)
        self.form = None

        self.url = "http://0.0.0.0:8080/"
        self.path = "/"

        self.status = None
        self.data = None
        self._response = None
        self._forms = None

    @property
    def text(self):
        """Body of the current response decoded as UTF-8."""
        return self.data.decode("utf-8")

    def reset(self):
        """Clears all cookies and history."""
        self.cookiejar.clear()

    def build_opener(self):
        """Builds the opener using (urllib2/urllib.request).build_opener.
        Subclasses can override this function to provide custom openers.
        """
        return urllib_build_opener()

    def do_request(self, req):
        """Performs `req` and records url/path/status/data from the response.

        HTTP error responses are not raised; they are recorded like any
        other response so callers can inspect error pages too.
        """
        if DEBUG:
            print("requesting", req.get_method(), req.get_full_url())

        opener = self.build_opener()
        opener.add_handler(self._cookie_processor)
        try:
            self._response = opener.open(req)
        except HTTPError as e:
            # An HTTPError is itself a response-like object; keep it so
            # status and body of the error page remain available.
            self._response = e

        self.url = self._response.geturl()
        self.path = Request(self.url).selector
        self.data = self._response.read()
        self.status = self._response.code
        # Invalidate caches tied to the previous page.
        self._forms = None
        self.form = None

        return self.get_response()

    def open(self, url, data=None, headers=None):
        """Opens the specified url, resolved relative to the current page."""
        # FIX: `headers` previously defaulted to a shared mutable dict ({}).
        url = urljoin(self.url, url)
        req = Request(url, data, headers or {})

        return self.do_request(req)

    def show(self):
        """Opens the current page in real web browser."""
        # FIX: `data` is the raw byte body, so text mode would raise a
        # TypeError; the context manager guarantees the file is closed.
        with open("page.html", "wb") as f:
            f.write(self.data)

        url = "file://" + os.path.abspath("page.html")
        webbrowser.open(url)

    def get_response(self):
        """Returns a copy of the current response."""
        return addinfourl(BytesIO(self.data), self._response.info(),
                          self._response.geturl())

    def get_soup(self):
        """Returns beautiful soup of the current document."""
        import BeautifulSoup

        return BeautifulSoup.BeautifulSoup(self.data)

    def get_text(self, e=None):
        """Returns content of e or the current document as plain text."""
        e = e or self.get_soup()
        return "".join([
            htmlunquote(c) for c in e.recursiveChildGenerator()
            if isinstance(c, str)
        ])

    def _get_links(self):
        # All <a> tags of the current page.
        soup = self.get_soup()
        return [a for a in soup.findAll(name="a")]

    def get_links(self,
                  text=None,
                  text_regex=None,
                  url=None,
                  url_regex=None,
                  predicate=None):
        """Returns all links in the document matching the given filters."""
        return self._filter_links(
            self._get_links(),
            text=text,
            text_regex=text_regex,
            url=url,
            url_regex=url_regex,
            predicate=predicate,
        )

    def follow_link(
        self,
        link=None,
        text=None,
        text_regex=None,
        url=None,
        url_regex=None,
        predicate=None,
    ):
        """Follows `link`, or the first link matching the filters.

        Raises BrowserError when nothing matches.
        """
        if link is None:
            links = self._filter_links(
                self.get_links(),
                text=text,
                text_regex=text_regex,
                url=url,
                url_regex=url_regex,
                predicate=predicate,
            )
            link = links and links[0]

        if link:
            return self.open(link["href"])
        else:
            raise BrowserError("No link found")

    def find_link(self,
                  text=None,
                  text_regex=None,
                  url=None,
                  url_regex=None,
                  predicate=None):
        """Returns the first link matching the filters, or None."""
        links = self._filter_links(
            self.get_links(),
            text=text,
            text_regex=text_regex,
            url=url,
            url_regex=url_regex,
            predicate=predicate,
        )
        # FIX: `links and links[0] or None` would return None for a falsy
        # first element; the conditional expression is exact.
        return links[0] if links else None

    def _filter_links(
        self,
        links,
        text=None,
        text_regex=None,
        url=None,
        url_regex=None,
        predicate=None,
    ):
        """Filters `links`; every supplied condition must hold to keep a link."""
        predicates = []
        if text is not None:
            predicates.append(lambda link: link.string == text)
        if text_regex is not None:
            predicates.append(
                lambda link: re_compile(text_regex).search(link.string or ""))
        if url is not None:
            predicates.append(lambda link: link.get("href") == url)
        if url_regex is not None:
            predicates.append(lambda link: re_compile(url_regex).search(
                link.get("href", "")))
        if predicate:
            # BUG FIX: was `predicate.append(predicate)`, which raised
            # AttributeError and never applied the custom predicate.
            predicates.append(predicate)

        def f(link):
            for p in predicates:
                if not p(link):
                    return False
            return True

        return [link for link in links if f(link)]

    def get_forms(self):
        """Returns all forms in the current document.
        The returned form objects implement the ClientForm.HTMLForm interface.
        """
        if self._forms is None:
            import ClientForm

            self._forms = ClientForm.ParseResponse(self.get_response(),
                                                   backwards_compat=False)
        return self._forms

    def select_form(self, name=None, predicate=None, index=0):
        """Selects the specified form."""
        forms = self.get_forms()

        if name is not None:
            forms = [f for f in forms if f.name == name]
        if predicate:
            forms = [f for f in forms if predicate(f)]

        if forms:
            self.form = forms[index]
            return self.form
        else:
            raise BrowserError("No form selected.")

    def submit(self, **kw):
        """submits the currently selected form."""
        if self.form is None:
            raise BrowserError("No form selected.")
        req = self.form.click(**kw)
        return self.do_request(req)

    def __getitem__(self, key):
        return self.form[key]

    def __setitem__(self, key, value):
        self.form[key] = value
コード例 #5
0
ファイル: myshowsru.py プロジェクト: dimasg/webtools
class MyShowsRu(object):
    """ work with api.myshows.ru """
    def __init__(self, config_name_name):
        """Read the config file and set up a cookie-aware opener plus caches."""
        cfg = config.Config(file(config_name_name))
        self.config = cfg
        logging.info('Config file %s loaded!', config_name_name)

        self.cookie_jar = CookieJar()
        self.opener = build_opener(HTTPCookieProcessor(self.cookie_jar))

        self.logged_ = False
        self.list_loaded_ = False

        self.api_url = 'http://' + cfg.api_domain

        # Per-show response caches keyed by show id.
        self.shows_data = {}
        self.episodes_data = {}
        self.watched_data = {}

    def do_login(self):
        """Authenticate against the API once; later calls are no-ops.

        On HTTP/network failure a message is written to stderr and the
        process exits with status 1 (403 is reported as bad credentials).
        """
        if self.logged_:
            return
        try:
            # Python 2 API: urllib.urlencode (urllib.parse.urlencode on Py3).
            req_data = urllib.urlencode({
                'login': self.config.login.name,
                'password': self.config.login.md5pass
            })
            logging.debug(
                'Login, url: %s%s, data: %s', self.api_url, self.config.url.login, req_data
            )
            request = Request(
                self.api_url + self.config.url.login, req_data
            )
            handle = self.opener.open(request)
            logging.debug('Login result: %s/%s', handle.headers, handle.read())
            # Drop the credential cookies the site sets back, presumably so
            # only the session cookie is retained — confirm against the API.
            self.cookie_jar.clear(
                self.config.api_domain, '/', 'SiteUser[login]'
            )
            self.cookie_jar.clear(
                self.config.api_domain, '/', 'SiteUser[password]'
            )
        except HTTPError as ex:
            if ex.code == 403:
                stderr.write('Bad login name or password!\n')
            else:
                stderr.write('Login error!\n')
            logging.debug('HTTP error #%s: %s\n', ex.code, ex.read())
            exit(1)
        except URLError as ex:
            stderr.write('Login error!\n')
            logging.debug('URLError - %s\n', ex.reason)
            exit(1)

        self.logged_ = True

    def load_shows(self):
        """Fetch the user's show list once and cache it in ``shows_data``."""
        if self.list_loaded_:
            return
        if not self.logged_:
            self.do_login()
        logging.debug('Login: %s%s', self.api_url, self.config.url.list_shows)
        list_url = self.api_url + self.config.url.list_shows
        response = self.opener.open(Request(list_url))
        self.shows_data = json.loads(response.read())
        self.list_loaded_ = True

    def list_all_shows(self):
        """Print every show (sorted by title) with progress, rating and alias.

        Leading marker: '-' nothing watched, '+' partially watched,
        ' ' fully watched.
        """
        self.load_shows()
        print()
        for show_id in sorted(
            self.shows_data, key=lambda show_id: self.shows_data[show_id]['title']
        ):
            next_show = self.shows_data[show_id]
            if next_show['watchedEpisodes'] <= 0:
                show_sign = '-'
            elif next_show['watchedEpisodes'] < next_show['totalEpisodes']:
                show_sign = '+'
            else:
                show_sign = ' '

            alias = self.alias_by_title(next_show['title'])
            if not alias:
                alias = '-'

            # NOTE(review): under Python 2 (this file relies on `file()` and
            # `iteritems()`) the percentage below is integer division.
            print('{0}{1}({7}): {2}/{3} ({4}%), rating = {5}({6})'.format(
                show_sign,
                tr_out(next_show['title']),
                # next_show['ruTitle'],
                next_show['watchedEpisodes'], next_show['totalEpisodes'],
                100 * next_show['watchedEpisodes'] / next_show['totalEpisodes'],
                next_show['rating'],
                next_show['watchStatus'][0],
                alias
            ))
        print()

    def list_show(self, alias):
        """Print the episodes of one show, addressed as '<alias>[<season>]'.

        An optional trailing 1-2 digit number limits output to that season;
        watched episodes are annotated with their watch date.
        """
        # Group 1 = alias text (ends in a non-digit); optional group 2 = season.
        re_m = re.match(r'^(.*\D)(\d{1,2}){0,1}$', alias)
        if not re_m:
            print('Bad format for list - "{0}"'.format(alias))
        else:
            season = -1  # -1 means "all seasons"
            if re_m.lastindex == 2:
                season = int(re_m.group(2))
            show_id = self.id_by_title(
                self.title_by_alias(re_m.group(1), no_exit=True)
            )
            epis = self.load_episodes(show_id)
            episodes = epis['episodes']
            list_map = {}
            for epi_id in episodes:
                next_episode = episodes[epi_id]
                if season == -1 or next_episode['seasonNumber'] == season:
                    # Composite key orders by season, then episode
                    # (assumes episodeNumber < 1000 — TODO confirm).
                    list_map[
                        next_episode['seasonNumber'] * 1000
                        + next_episode['episodeNumber']
                    ] = next_episode

            watched = self.load_watched(show_id)
            current_season = -1
            for epi_num in sorted(list_map.keys()):
                next_episode = list_map[epi_num]
                next_season = next_episode['seasonNumber']
                if current_season != next_season:
                    # Print a heading when entering a new season.
                    current_season = next_season
                    print('{0} Season {1}:'.format(
                        tr_out(epis['title']), current_season
                    ))
                comment = ''
                epi_id = str(next_episode['id'])
                if epi_id in watched:
                    comment = 'watched ' + watched[epi_id]['watchDate']
                print('  "{0}" (s{1:02d}e{2:02d}) {3}'.format(
                    tr_out(next_episode['title']),
                    next_episode['seasonNumber'],
                    next_episode['episodeNumber'],
                    comment
                ))

    def list_shows(self, alias):
        """List every show ('all') or a single one addressed by alias."""
        if alias == 'all':
            self.list_all_shows()
            return
        self.list_show(alias)

    def title_by_alias(self, query, no_exit=False):
        """Resolve a show title from a configured alias.

        For unknown aliases: with ``no_exit`` the query itself is returned
        (to be tried as a title); otherwise the process exits with status 1.
        """
        logging.debug('title_by_alias(%s)', query)
        alias = query.lower()
        if alias in self.config.alias:
            logging.debug('title_by_alias(%s) = %s', query, self.config.alias[alias])
            return self.config.alias[alias]

        logging.debug('Unknown alias - "%s"', alias)
        if no_exit:
            print('Cannot find alias "{0}", will try it as title!'.format(query))
            return query
        print('Unknown alias - {0}'.format(query))
        exit(1)

    def alias_by_title(self, title):
        """Reverse lookup: the configured alias for *title*, or '' if none."""
        logging.debug('alias_by_title(%s)', title)
        matches = (
            key for key, value in self.config.alias.iteritems()
            if value == title
        )
        return next(matches, '')

    def id_by_title(self, title):
        """Return the id of the show whose title matches exactly; exit(1) otherwise."""
        logging.debug('id_by_title(%s)', title)
        if not self.list_loaded_:
            self.load_shows()

        for show_id, show in self.shows_data.items():
            logging.debug('id_by_title(%s) = %s', show['title'], show_id)
            if show['title'] == title:
                logging.debug('Found id_by_title(%s) = %s', title, show_id)
                return show_id

        print('Unknown title - {0}'.format(title))
        exit(1)

    def load_episodes(self, show_id):
        """Fetch (and cache) the episode data for *show_id*."""
        if not self.logged_:
            self.do_login()
        if show_id not in self.episodes_data:
            epi_path = self.config.url.list_episodes.format(show_id)
            logging.debug('Load episodes: %s%s', self.api_url, epi_path)
            handle = self.opener.open(Request(self.api_url + epi_path))
            self.episodes_data[show_id] = json.loads(handle.read())

        return self.episodes_data[show_id]

    def load_watched(self, show_id):
        """Fetch (and cache) the watched-episode data for *show_id*."""
        if not self.logged_:
            self.do_login()
        if show_id not in self.watched_data:
            watched_path = self.config.url.list_watched.format(show_id)
            logging.debug('Load watched: %s%s', self.api_url, watched_path)
            handle = self.opener.open(Request(self.api_url + watched_path))
            self.watched_data[show_id] = json.loads(handle.read())

        return self.watched_data[show_id]

    def get_last_watched(self, show_id):
        """Return the watched episode id with the highest sequenceNumber, or None."""
        logging.debug('Searching last watched for show %s', show_id)
        episodes = self.load_episodes(show_id)['episodes']
        watched = self.load_watched(show_id)

        best_seq = 0
        best_id = None
        for epi_id in watched:
            logging.debug('Next watched id: %s', epi_id)
            episode = episodes[epi_id]
            logging.debug(
                'Trying next episode: %s - %s (id: %s/seq: %s)',
                episode['shortName'], episode['title'],
                episode['id'], episode['sequenceNumber']
            )
            if episode['sequenceNumber'] > best_seq:
                best_seq = episode['sequenceNumber']
                best_id = epi_id
                logging.debug(
                    'Saved next last episode: %s - %s (id: %s)',
                    episode['shortName'], episode['title'], best_id
                )

        logging.debug('Found last watched %s', best_id)

        return best_id

    def show_last_watched_by_alias(self, alias):
        """Print the most recently watched episode of the aliased show."""
        show_id = self.id_by_title(self.title_by_alias(alias, no_exit=True))
        epis = self.load_episodes(show_id)
        watched = self.load_watched(show_id)
        last_id = self.get_last_watched(show_id)

        print()
        if last_id is None:
            print('{0} is unwatched'.format(tr_out(epis['title'])))
        else:
            last = epis['episodes'][last_id]
            print('Last for {0} is s{1:02d}e{2:02d} ("{3}") at {4}'.format(
                tr_out(epis['title']),
                last['seasonNumber'],
                last['episodeNumber'],
                tr_out(last['title']),
                watched[last_id]['watchDate']
            ))
        print()

    def show_last_watched_by_date(self, alias):
        """Print every episode watched in a period: 'day', 'week' or 'month'.

        Unknown period names exit the process with status 1.
        """
        date_to = datetime.date.today()
        if alias == 'day':
            date_from = date_to + datetime.timedelta(days=-1)
        elif alias == 'week':
            date_from = date_to + datetime.timedelta(days=-7)
        elif alias == 'month':
            # Step back by the length of the previous calendar month.
            prev_month = date_to.replace(day=1) + datetime.timedelta(days=-1)
            date_from = date_to + datetime.timedelta(days=-prev_month.day)
        else:
            print('Unknown alias - {0}'.format(alias))
            exit(1)

        self.load_shows()
        print()
        print('Watched from {0} to {1}'.format(
            date_from.strftime('%Y-%m-%d'),
            date_to.strftime('%Y-%m-%d')
        ))
        print()
        # Watch dates arrive formatted as DD.MM.YYYY.
        re_c = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{4})')
        count = 0
        for show_id in self.shows_data:
            next_show = self.shows_data[show_id]
            if next_show['watchedEpisodes'] <= 0:
                continue
            watched = self.load_watched(next_show['showId'])
            epis = None  # episode data is fetched lazily, only when needed
            last_map = {}
            for epi_id in watched:
                next_episode = watched[epi_id]
                re_m = re_c.match(next_episode['watchDate'])
                if not re_m:
                    print('Warning: unknown date format - {0}'.format(
                        next_episode['watchDate']))
                    continue
                dtv = [int(s) for s in re_m.group(3, 2, 1)]
                epi_date = datetime.date(dtv[0], dtv[1], dtv[2])
                if date_from <= epi_date and epi_date <= date_to:
                    if not epis:
                        epis = self.load_episodes(show_id)
                    count += 1
                    if epi_id not in epis['episodes']:
                        print('Episode not found: {0}'.format(epi_id))
                        logging.debug('Episodes:')
                        logging.debug(epis)
                        continue
                    else:
                        episode = epis['episodes'][epi_id]
                        # BUG FIX: the old scalar key
                        # (ordinal*1000 + season*10 + episode) collided for
                        # episode numbers >= 10, silently dropping episodes
                        # from the report. A tuple gives the same
                        # date/season/episode ordering without collisions.
                        date_key = (
                            epi_date.toordinal(),
                            episode['seasonNumber'],
                            episode['episodeNumber'],
                        )
                        last_map[date_key] = episode

            for date_key in sorted(last_map.keys()):
                episode = last_map[date_key]
                print('{0} s{1:02d}e{2:02d} "{3}" at {4}'.format(
                    tr_out(epis['title']),
                    episode['seasonNumber'], episode['episodeNumber'],
                    tr_out(episode['title']),
                    watched[str(episode['id'])]['watchDate']
                ))
        print()
        print('Total count: {0}'.format(count))
        print()

    def show_last_watched(self, query):
        """Show last watched episode(s), by period name or by show alias."""
        period = query.lower()
        if period in ('day', 'week', 'month'):
            self.show_last_watched_by_date(period)
        else:
            self.show_last_watched_by_alias(query)

    def get_first_unwatched(self, show_id):
        """Return the id of the first unwatched episode, or None.

        "First" means the smallest sequenceNumber strictly greater than
        that of the last watched episode (0 when nothing is watched yet).
        """
        logging.debug('Searching first unwatched for show %s', show_id)
        episodes = self.load_episodes(show_id)['episodes']
        last_watched = self.get_last_watched(show_id)
        if last_watched is None:
            last_watched = 0
        else:
            logging.debug(
                'Last watched is: %s - %s (%s)',
                episodes[last_watched]['shortName'], episodes[last_watched]['title'],
                episodes[last_watched]['sequenceNumber']
            )
            last_watched = episodes[last_watched]['sequenceNumber']

        logging.debug('Last watched: %s', last_watched)

        episode_id = None
        first_unwatched = None
        for epi_id in episodes:
            next_episode = episodes[epi_id]
            logging.debug(
                'Trying next episode: %s - %s (%s)',
                next_episode['shortName'], next_episode['title'],
                next_episode['sequenceNumber']
            )
            seq = next_episode['sequenceNumber']
            # BUG FIX: the old condition evaluated `first_unwatched > seq`
            # with first_unwatched == None (a TypeError on Python 3) and
            # used `not first_unwatched`, which also treats a legitimate
            # sequence number 0 as "unset". Check for None explicitly, first.
            if last_watched < seq and (first_unwatched is None
                                       or seq < first_unwatched):
                first_unwatched = seq
                episode_id = epi_id
                logging.debug(
                    'Saved next last unwatched: %s - %s (%s)',
                    next_episode['shortName'], next_episode['title'],
                    next_episode['sequenceNumber']
                )

        return episode_id

    def show_next_for_watch(self, alias):
        """Print the next episode to watch for the show behind *alias*."""
        show_id = self.id_by_title(self.title_by_alias(alias, no_exit=True))
        epis = self.load_episodes(show_id)
        episode_id = self.get_first_unwatched(show_id)
        if episode_id is not None:
            episode = epis['episodes'][episode_id]
            print('\nFirst watch for {0} is s{1:02d}e{2:02d} ("{3}")\n'.format(
                tr_out(epis['title']),
                episode['seasonNumber'], episode['episodeNumber'],
                tr_out(episode['title']),
            ))
        else:
            print("\nCannot find first watch for {0}\n".format(tr_out(epis['title'])))

    def set_episode_check(self, alias, epi, check):
        """ set epi episode as watched

        alias -- show alias to resolve to a show id
        epi   -- episode spec in 'sXXeYY' form (e.g. 's01e05')
        check -- True to mark the episode watched, False to unmark it

        Finds the episode by season/episode number, calls the remote
        check/uncheck endpoint only when the state actually changes, and
        prints the outcome either way.
        """
        re_m = re.match(r's(\d{1,2})e(\d{1,2})', epi.lower())
        if not re_m:
            print('Bad format for check - "{0}"'.format(epi))
        else:
            season = int(re_m.group(1))
            episode = int(re_m.group(2))
            show_id = self.id_by_title(self.title_by_alias(alias, no_exit=True))
            epis = self.load_episodes(show_id)
            watched = self.load_watched(show_id)
            episodes = epis['episodes']
            for epi_id in episodes:
                next_episode = episodes[epi_id]
                if (
                    next_episode['seasonNumber'] == season
                    and next_episode['episodeNumber'] == episode
                ):
                    # valid_op stays False when the episode is already in
                    # the requested state (no remote call needed then)
                    valid_op = False
                    old_date = ''
                    if check:
                        msg = 'checked'
                        if epi_id in watched:
                            # already watched: remember when, for the message
                            old_date = watched[epi_id]['watchDate']
                        else:
                            url = self.config.url.check_episode.format(epi_id)
                            valid_op = True
                    else:
                        msg = 'unchecked'
                        if epi_id in watched:
                            url = self.config.url.uncheck_episode.format(epi_id)
                            valid_op = True

                    if not valid_op:
                        print()
                        print('Episode "{0}" (s{1:02d}e{2:02d}) of "{3}" already {4} {5}'\
                             .format(
                                 tr_out(next_episode['title']),
                                 next_episode['seasonNumber'],
                                 next_episode['episodeNumber'],
                                 tr_out(epis['title']),
                                 msg,
                                 old_date
                             ))
                    else:
                        # state change confirmed - hit the remote endpoint
                        logging.debug('Set checked: %s%s', self.api_url, url)
                        request = Request(self.api_url + url)
                        self.opener.open(request)
                        print()
                        print(
                            'Episode "{0}" (s{1:02d}e{2:02d}) of "{3}" set {4}'\
                            .format(
                                tr_out(next_episode['title']),
                                next_episode['seasonNumber'],
                                next_episode['episodeNumber'],
                                tr_out(epis['title']),
                                msg
                            ))
                    break

    def search_show(self, query):
        """ search show

        Ensures we are logged in, POSTs the query to the search endpoint
        and returns the decoded JSON search result.
        """
        if not self.logged_:
            self.do_login()
        # NOTE(review): urllib.urlencode is the Python 2 spelling; on
        # Python 3 this would be urllib.parse.urlencode - confirm the
        # target interpreter version for this module.
        req_data = urllib.urlencode({
            'q': query,
        })
        logging.debug(
            'Search url/data: %s%s%s',
            self.api_url, self.config.url.search, req_data
        )
        # passing a data argument makes this a POST request
        request = Request(
            self.api_url + self.config.url.search, req_data
        )
        handle = self.opener.open(request)
        search_result = json.loads(handle.read())
        logging.debug('Search result: %s', search_result)
        return search_result

    def show_search_result(self, query):
        """Print every show returned for the given search query."""
        found = self.search_show(query)
        print()
        for show_id, show in found.items():
            print('"{1}", started: {2} (id={0})'.format(
                show_id, tr_out(show['title']), show['started']
            ))
        print()

    def set_show_status(self, alias, status, accurate):
        """ set show status

        Resolves the alias to a show title, searches for it, and sets the
        given status on each matching show.  With accurate=True only shows
        whose title equals the resolved title exactly are updated.
        """
        title = self.title_by_alias(alias, no_exit=True)

        search_result = self.search_show(title)
        for show_id in search_result:
            show = search_result[show_id]
            # BUG FIX: accurate mode used to compare against the raw
            # alias; it must compare against the resolved title that was
            # actually used as the search query.
            if accurate and show['title'] != title:
                continue
            url = self.config.url.status.format(show['id'], status)
            logging.debug('Set show status: %s%s', self.api_url, url)
            request = Request(self.api_url + url)
            self.opener.open(request)
            print('Show "{0}" status set to {1}'.format(
                tr_out(show['title']), status
            ))
            print()
コード例 #6
0
ファイル: mediawiki.py プロジェクト: pune-lug/mediawiki
class Site():
    def __init__(self,
            host=None,
            apiurl='/w/api.php',
            timeout=100,
            srlimit=500,
            apfrom=None,
            aplimit=5000,
            bllimit=5000,
            aulimit=5000,
            aclimit=5000,
            rclimit=5000,
            lelimit=5000,
        ):
        """Create a MediaWiki API client for *host*.

        Builds a cookie-aware opener and records the per-list query
        limits; the ``defaults`` mapping keeps the initial limits so
        continuation queries can reset them later.
        """
        if not host:
            raise Exception("host not defined")
        self.host = host
        self.apiurl = apiurl
        self.url = '{0}{1}'.format(self.host, self.apiurl)
        self.format = 'json'
        self.cj = CookieJar()
        cookie_handler = urllib.request.HTTPCookieProcessor(self.cj)
        self.opener = urllib.request.build_opener(cookie_handler)
        self.token = None
        self.defaults = {
            'srlimit': srlimit,
            'aplimit': aplimit,
            'aclimit': aclimit,
            'bllimit': bllimit,
            'rclimit': rclimit,
            'lelimit': lelimit,
        }
        self.srlimit = srlimit
        self.apfrom = apfrom
        self.aplimit = aplimit
        self.bllimit = bllimit
        self.aulimit = aulimit
        self.aclimit = aclimit
        self.rclimit = rclimit
        self.lelimit = lelimit
        self.search_info = {}
        self.aufinished = False

    def return_json(self, data):
        """Decode *data* (UTF-8 bytes) and parse it as JSON."""
        text = data.decode('utf-8')
        return json.loads(text)

    def sitematrix(self):
        """Fetch the wiki farm's sitematrix and return the parsed JSON."""
        query = {
            'action': 'sitematrix',
            'format': self.format,
        }
        qs = urllib.parse.urlencode(query)
        response = self.opener.open('%s?%s' % (self.url, qs))
        return self.return_json(response.read())

    def login(self, username=None, password=None):
        """Log in to the wiki and store the session token.

        NOTE(review): this block was reconstructed - the original source
        was corrupted (censored '******' fragments fused the login and
        logout methods together and left syntax errors).  The flow
        follows the classic MediaWiki two-step login: POST credentials,
        and when the API answers 'NeedToken', repeat the POST with the
        returned lgtoken.
        """
        self.username = username
        t = {}
        t['action'] = 'login'
        t['lgname'] = username
        t['lgpassword'] = password
        t['format'] = self.format
        # start from a clean cookie state for the new session
        self.cj.clear()
        params = urllib.parse.urlencode(t)
        if username:
            f = self.opener.open(self.url, params.encode('utf-8'))
            d = f.read()
            try:
                d = self.return_json(d)
                self.token = d['login']['token']
            except Exception as e:
                raise(Exception('Unable to login:', e))
            if d['login']['result'] == 'NeedToken':
                # repeat the request with the token we were just given
                t['lgtoken'] = d['login']['token']
                params = urllib.parse.urlencode(t)
                f = self.opener.open(self.url, params.encode('utf-8'))
                d = f.read()
                try:
                    d = self.return_json(d)
                    self.token = d['login']['lgtoken']
                except Exception as e:
                    raise(Exception('Unable to login:', e))

    def logout(self):
        """Log out of the wiki; raises when the API reply is not JSON."""
        t = {}
        t['action'] = 'logout'
        t['format'] = self.format
        params = urllib.parse.urlencode(t)
        f = self.opener.open('%s?%s' % (self.url, params))
        d = f.read()
        try:
            d = self.return_json(d)
        except Exception as e:
            raise(Exception('Already logged out'))
    def list_backlinks(self, title=None, blcontinue=False, blfilterredir='all', blredirect=False):
        """Return titles of pages that link to *title*.

        Best-effort: on any parse or lookup error the titles collected
        so far (possibly an empty list) are returned.
        """
        query = {
            'format': self.format,
            'action': 'query',
            'list': 'backlinks',
            'bllimit': self.bllimit,
            'blfilterredir': blfilterredir,
            'bltitle': title,
        }
        if blredirect:
            query['blredirect'] = ''
        qs = urllib.parse.urlencode(query)
        raw = self.opener.open('%s?%s' % (self.url, qs)).read()
        try:
            parsed = self.return_json(raw)
        except:
            # keep best-effort behaviour: fall through with the raw bytes
            parsed = raw
        titles = []
        try:
            for entry in parsed['query']['backlinks']:
                titles.append(entry['title'])
        except:
            pass
        return titles

    def list_allcategories(self, **kargs):
        """List categories (list=allcategories).

        Pass accontinue=True to fetch the next page of a previous query
        (reuses the stored 'acfrom' continuation); otherwise the limit is
        reset from the defaults.  Returns a list of category names;
        self.aclimit is set to -1 once the listing is exhausted.
        """
        acfrom = kargs.get('acfrom', None)
        acto = kargs.get('acto', None)
        accontinue = kargs.get('accontinue', None)
        acprefix = kargs.get('acprefix', None)
        t = {}
        t['format'] = self.format
        t['action'] = 'query'
        t['list'] = 'allcategories'
        t['aclimit'] = kargs.get('aclimit', self.aclimit)
        t['acdir'] = kargs.get('acdir', 'ascending')
        if acfrom:
            t['acfrom'] = acfrom
        if acto:
            t['acto'] = acto
        if acprefix:
            t['acprefix'] = acprefix
        if not accontinue:
            self.search_info = {}
            self.aclimit = self.defaults['aclimit']
        else:
            if self.aclimit < 0:
                # previous query was exhausted - nothing more to fetch
                return []
            t['acfrom'] = self.search_info['acfrom']
        params = urllib.parse.urlencode(t)
        f = self.opener.open('%s?%s' % (self.url, params))
        d = f.read()
        try:
            d = self.return_json(d)
            self.search_info = {}
            try:
                self.search_info['acfrom'] = \
                    d['query-continue']['allcategories']['acfrom']
            except:
                pass
            retval = []
            try:
                for x in d['query']['allcategories']:
                    retval.append(x['*'])
            except:
                pass
            # BUG FIX: the exhaustion marker must be aclimit (which the
            # continuation branch above checks), not srlimit as in the
            # original copy-paste.
            if len(retval) < self.aclimit:
                self.aclimit = -1
            return retval
        except Exception as e:
            raise(Exception('Data not found', e))

    def list_all(self, **kargs):
        """List pages (list=allpages).

        apcontinue=True resumes from the stored 'apfrom' continuation;
        otherwise the limit is reset from the defaults.  Returns a list
        of page titles; self.aplimit is set to -1 once exhausted.
        """
        apcontinue = kargs.get('apcontinue', False)
        t = {}
        t['format'] = self.format
        t['action'] = 'query'
        t['list'] = 'allpages'
        t['aplimit'] = self.aplimit
        t['apdir'] = kargs.get('apdir', 'ascending')
        t['apnamespace'] = kargs.get('apnamespace', '0')
        t['apfilterredir'] = kargs.get('apfilterredir', 'all')
        apfrom = kargs.get('apfrom', None)
        apto = kargs.get('apto', None)
        if apfrom:
            t['apfrom'] = apfrom
        if apto:
            t['apto'] = apto
        if not apcontinue:
            self.search_info = {}
            self.aplimit = self.defaults['aplimit']
        else:
            if self.aplimit < 0:
                # previous query was exhausted - nothing more to fetch
                return []
            t['apfrom'] = self.search_info['apfrom']
        params = urllib.parse.urlencode(t)
        f = self.opener.open('%s?%s' % (self.url, params))
        d = f.read()
        try:
            d = self.return_json(d)
            self.search_info = {}
            try:
                self.search_info['apfrom'] = \
                    d['query-continue']['allpages']['apfrom']
            except:
                pass
            retval = []
            try:
                for x in d['query']['allpages']:
                    retval.append(x['title'])
            except:
                pass
            # BUG FIX: exhaustion must be tracked on aplimit (checked in
            # the continuation branch above), not srlimit as the original
            # copy-paste did.
            if len(retval) < self.aplimit:
                self.aplimit = -1
            return retval
        except Exception as e:
            raise(Exception('Data not found', e))

    def search(self, s, srcontinue=False):
        """Full-text search (list=search); returns a list of page titles.

        srcontinue=True resumes a previous search for the same string
        using the stored sroffset.  self.srlimit is set to -1 when the
        result set is exhausted.
        """
        t = {}
        t['format'] = self.format
        t['action'] = 'query'
        t['list'] = 'search'
        t['srsearch'] = s
        if not srcontinue:
            # BUG FIX: the original wrote to a misspelled attribute
            # ('serach_info'), so a fresh search never cleared the stored
            # continuation state.
            self.search_info = {}
            self.srlimit = self.defaults['srlimit']
        # BUG FIX: assign after the reset above, so a fresh search does
        # not send a stale (possibly -1 'exhausted') limit to the API.
        t['srlimit'] = self.srlimit
        if srcontinue and self.srlimit < 0:
            return []
        if srcontinue and s == self.search_info.get('string', ''):
            t['sroffset'] = self.search_info['offset']
        params = urllib.parse.urlencode(t)
        f = self.opener.open('%s?%s' % (self.url, params))
        d = f.read()
        try:
            d = self.return_json(d)
            self.search_info = {}
            self.search_info['string'] = s
            try:
                self.search_info['offset'] = \
                    d['query-continue']['search']['sroffset']
            except:
                pass
            retval = []
            try:
                for x in d['query']['search']:
                    retval.append(x['title'])
            except:
                pass
            if len(retval) < self.srlimit:
                self.srlimit = -1
            return retval
        except Exception as e:
            raise(Exception('Data not found', e))

    def listall(self, srcontinue=False):
        """Return page titles from list=allpages.

        srcontinue=True resumes from the stored apfrom continuation;
        raises 'Data not found' when the reply cannot be interpreted.
        """
        query = {
            'format': self.format,
            'action': 'query',
            'list': 'allpages',
            'aplimit': self.aplimit,
        }
        if not srcontinue:
            self.apfrom = None
        elif self.apfrom:
            query['apfrom'] = self.apfrom
        qs = urllib.parse.urlencode(query)
        raw = self.opener.open('%s?%s' % (self.url, qs)).read()
        try:
            data = self.return_json(raw)
            self.apfrom = data['query-continue']['allpages']['apfrom']
            return [page['title'] for page in data['query']['allpages']]
        except Exception as e:
            raise(Exception('Data not found', e))

    def userdailycontribs(self, username=None, daysago=0):
        """Query the userdailycontribs action and return the parsed JSON.

        Falls back to self.username when no username is given; returns
        None implicitly for a missing username or a negative daysago.
        """
        if not username and self.username:
            username = self.username
        if not username:
            return
        if daysago < 0:
            return
        query = {
            'action': 'userdailycontribs',
            'format': self.format,
            'user': username,
            'daysago': daysago,
        }
        qs = urllib.parse.urlencode(query)
        raw = self.opener.open('%s?%s' % (self.url, qs)).read()
        try:
            return self.return_json(raw)
        except Exception as e:
            raise(Exception('Data not found', e))

    def list_allusers(self, **kargs):
        """List registered users (list=allusers).

        Optional au* keyword arguments are forwarded to the API;
        auwitheditsonly/auactiveusers act as presence flags.  With
        aucontinue=True the stored 'aufrom' continuation is reused;
        self.aufinished is set once the server stops returning a
        continuation value.  Returns a list of user names.
        """
        t ={}
        t['format'] = self.format
        t['action'] = 'query'
        t['list'] = 'allusers'
        # plain-valued parameters: forward only when supplied
        for x in ['aufrom', 'auto', 'audir', 'augroup', 'auexcludegroup', 'aurights', 'auprop', 'aulimit']:
            if kargs.get(x, None):
                t[x] = kargs[x]
        # flag parameters: presence matters, the value is empty
        for x in ['auwitheditsonly', 'auactiveusers']:
            if kargs.get(x, None):
                t[x] = ''
        aucontinue = kargs.get('aucontinue', None)
        t['aulimit'] = t.get('aulimit', self.aulimit)
        if not aucontinue:
            self.aufrom = None
            self.aufinished = False
        if aucontinue and self.aufrom:
            t['aufrom'] = self.aufrom
        params = urllib.parse.urlencode(t)
        f = self.opener.open('%s?%s' % (self.url, params))
        d = f.read()
        try:
            d = self.return_json(d)
            try:
                self.aufrom = d['query-continue']['allusers']['aufrom']
            except:
                # no continuation block -> the listing is complete
                self.aufinished = True
            retval = []
            for x in d['query']['allusers']:
                retval.append(x['name'])
            return retval
        except Exception as e:
            raise(Exception('Data not found', e))

    def list_recentchanges(self, **kargs):
        """List recent changes (list=recentchanges).

        rcprop defaults to title/ids/type/user; each returned dict holds
        the requested properties ('ids' expands to rcid/pageid/revid/
        old_revid).  rccontinue=True resumes from the stored rcstart;
        self.rcfinished is set when no continuation remains.
        """
        t = {}
        t['format'] = self.format
        t['action'] = 'query'
        t['list'] = 'recentchanges'
        t['rcprop'] = '|'.join(kargs.get('rcprop', ['title', 'ids', 'type', 'user']))
        t['rclimit'] = self.rclimit
        rctype = kargs.get('rctype', None)
        if rctype:
            t['rctype'] = rctype
        rcstart = kargs.get('rcstart', None)
        rcstop = kargs.get('rcstop', None)
        rccontinue = kargs.get('rccontinue', None)
        if not rccontinue:
            self.rcstart= None
            self.rcfinished = False
        if rccontinue and self.rcstart:
            t['rcstart'] = self.rcstart
        rccontinue = kargs.get('rccontinue', None)
        if rccontinue:
            t['rccontinue'] = rccontinue
        params = urllib.parse.urlencode(t)
        # NOTE(review): rcstart/rcstop are appended without URL-encoding -
        # confirm the timestamp values can never contain reserved chars.
        if rcstart:
            params = '%s&rcstart=%s' % (params, rcstart)
        if rcstop:
            params = '%s&rcstop=%s' % (params, rcstop)
        f = self.opener.open('%s?%s' % (self.url, params))
        d = f.read()
        try:
            d = self.return_json(d)
            try:
                self.rcstart = d['query-continue']['recentchanges']['rcstart']
            except:
                # no continuation block -> the listing is complete
                self.rcfinished = True
            retval = []
            for x in d['query']['recentchanges']:
                tmp_retval = {}
                for y in t['rcprop'].split('|'):
                    if y == 'ids':
                        # 'ids' expands into four concrete id fields
                        for z in ['rcid', 'pageid', 'revid', 'old_revid']:
                            tmp_retval[z] = x[z]
                    else:
                        tmp_retval[y] = x[y]
                retval.append(tmp_retval)
            return retval
        except Exception as e:
            raise(Exception('Data not found', e))

    def list_logevents(self, **kargs):
        """List log events (list=logevents).

        Optional le* keyword arguments are forwarded to the API; leprop
        defaults to ids/title/type/user/timestamp/comment/details/action.
        lecontinue=True resumes from the stored lestart; self.lefinished
        is set when no continuation remains.  Returns one dict per event
        holding the requested properties ('ids' expands to logid/pageid;
        'details' is skipped).
        """
        t = {}
        t['format'] = self.format
        t['action'] = 'query'
        t['list'] = 'logevents'
        letype = kargs.get('letype', None)
        if letype:
            t['letype'] = letype
        t['leprop'] = '|'.join(kargs.get('leprop', ['ids', 'title', 'type', 'user', 'timestamp', 'comment', 'details', 'action']))
        leaction = kargs.get('leaction', None)
        if leaction:
            t['leaction'] = leaction
        lestart = kargs.get('lestart', None)
        if lestart:
            t['lestart'] = lestart
        leend = kargs.get('leend', None)
        if leend:
            t['leend'] = leend
        ledir = kargs.get('ledir', None)
        if ledir:
            t['ledir'] = ledir
        leuser = kargs.get('leuser', None)
        if leuser:
            t['leuser'] = leuser
        letitle = kargs.get('letitle', None)
        if letitle:
            t['letitle'] = letitle
        leprefix = kargs.get('leprefix', None)
        if leprefix:
            t['leprefix'] = leprefix
        letag = kargs.get('letag', None)
        if letag:
            t['letag'] = letag
        t['lelimit'] = kargs.get('lelimit', self.lelimit)
        lecontinue = kargs.get('lecontinue', None)
        if not lecontinue:
            self.lestart= None
            self.lefinished = False
        if lecontinue and self.lestart:
            t['lestart'] = self.lestart
        lecontinue = kargs.get('lecontinue', None)
        if lecontinue:
            t['lecontinue'] = lecontinue
        params = urllib.parse.urlencode(t)
        f = self.opener.open('%s?%s' % (self.url, params))
        d = f.read()
        try:
            d = self.return_json(d)
            try:
                self.lestart = d['query-continue']['logevents']['lestart']
            except:
                # no continuation block -> the listing is complete
                self.lefinished = True
            retval = []
            for x in d['query']['logevents']:
                tmp_retval = {}
                for y in t['leprop'].split('|'):
                    if y == 'ids':
                        # 'ids' expands into two concrete id fields
                        for z in ['logid', 'pageid']:
                            tmp_retval[z] = x[z]
                    elif y == 'details':
                        # details have no flat representation; skipped
                        pass
                    else:
                        tmp_retval[y] = x[y]
                retval.append(tmp_retval)
            return retval
        except Exception as e:
            raise(Exception('Data not found', e))

    def close(self):
        """Close the underlying connection."""
        # NOTE(review): self.conn is never assigned anywhere in the
        # visible code (requests go through self.opener) - confirm where
        # conn is supposed to come from, or whether this is dead code.
        self.conn.close()