Example #1
0
    def mark_all_as_read(self):
        """Fetch the feed and register every current entry as seen."""
        f = feedparser.parse(self.url)

        # A failed status check is reported but not fatal: we still walk
        # whatever entries the parser managed to extract.
        if check_status(f) == -1:
            print_error("in mark_all_as_read()")

        for entry in f.entries:
            # add() records the entry as seen; its return value is not
            # needed here (the old code bound it to an unused local).
            self.add(entry)
Example #2
0
  def mark_all_as_read(self):
    """Fetch the feed and register every current entry as seen."""
    f = feedparser.parse(self.url)

    # A failed status check is reported but not fatal: we still walk
    # whatever entries the parser managed to extract.
    if check_status(f, self.feedid) == -1:
      print_error("in mark_all_as_read()")

    for entry in f.entries:
      # add() records the entry as seen; its return value is not needed
      # here (the old code bound it to an unused local).
      self.add(entry)
Example #3
0
    def save(self):
        """Pickle self.entries to self.feedfile under an exclusive file lock.

        Errors are reported via print_console()/print_error() and never
        raised to the caller.
        """
        try:
            with FileLock(self.feedfile, timeout=5):
                # "wb": pickle needs a binary stream (load() already uses
                # "rb"); "with" closes the file even if dump() raises.
                with open(self.feedfile, "wb") as f:
                    pickle.dump(self.entries, f)

        # Most specific handler first: if FileLockException subclasses
        # Exception, listing it after "except Exception" makes it
        # unreachable.
        except FileLockException:
            print_error("Lock Timeout")

        except Exception as e:
            print_console(e)
Example #4
0
  def save(self):
    """Pickle self.entries to self.feedfile under an exclusive file lock.

    Errors are reported via print_console()/print_error() and never raised
    to the caller.
    """
    try:
      with FileLock(self.feedfile, timeout=5):
        # "wb": pickle needs a binary stream (load() already uses "rb");
        # "with" closes the file even if dump() raises.
        with open(self.feedfile, "wb") as f:
          pickle.dump(self.entries, f)

    # Most specific handler first: if FileLockException subclasses
    # Exception, listing it after "except Exception" makes it unreachable.
    except FileLockException:
      print_error("Lock Timeout")

    except Exception as e:
      print_console(e)
Example #5
0
  def get_recent(self, n, mark_all_as_read=False):
    """Fetch the feed and print up to n previously-unseen entries.

    mark_all_as_read is accepted for interface compatibility; entries are
    registered as seen by add() regardless of its value.
    """
    f = feedparser.parse(self.url)

    # A failed status check is reported but not fatal.
    if check_status(f, self.feedid) == -1:
      print_error("in get_recent()")

    printed = 0
    for entry in f.entries:
      e = self.add(entry)
      # add() returns None for entries that were already seen.
      if e is not None:
        e.print_me()
        printed += 1

      if printed >= n:
        break
Example #6
0
  def load(self):
    """Unpickle self.entries from self.feedfile under an exclusive file lock.

    A missing feed file is ignored (nothing has been saved yet); any other
    error is reported via print_error() and never raised to the caller.
    """
    try:
      with FileLock(self.feedfile, timeout=5):
        # "with" guarantees the file is closed even if load() raises.
        with open(self.feedfile, "rb") as f:
          self.entries = pickle.load(f)

    # Most specific handlers first: if FileLockException subclasses
    # Exception, listing it after "except Exception" makes it unreachable.
    except FileLockException:
      print_error("Lock Timeout")

    except IOError as e:
      if e.errno == 2:  # ENOENT: first run, feed file does not exist yet
        pass
      else:
        # previously swallowed silently; report like any other failure
        print_error(e)

    except Exception as e:
      print_error(e)
Example #7
0
    def get_recent(self, n, mark_all_as_read=False):
        """Fetch the feed and print up to n previously-unseen entries.

        mark_all_as_read is accepted for interface compatibility; entries
        are registered as seen by add() regardless of its value.
        """
        f = feedparser.parse(self.url)

        # A failed status check is reported but not fatal.
        if check_status(f) == -1:
            print_error("in get_recent()")

        printed = 0
        for entry in f.entries:
            e = self.add(entry)
            # add() returns None for entries that were already seen.
            if e is not None:
                e.print_me()
                printed += 1

            if printed >= n:
                break
Example #8
0
    def load(self):
        """Unpickle self.entries from self.feedfile under an exclusive lock.

        A missing feed file is ignored (nothing has been saved yet); any
        other error is reported via print_error() and never raised to the
        caller.
        """
        try:
            with FileLock(self.feedfile, timeout=5):
                # "with" guarantees the file is closed even if load() raises.
                with open(self.feedfile, "rb") as f:
                    self.entries = pickle.load(f)

        # Most specific handlers first: if FileLockException subclasses
        # Exception, listing it after "except Exception" makes it
        # unreachable.
        except FileLockException:
            print_error("Lock Timeout")

        except IOError as e:
            if e.errno == 2:  # ENOENT: first run, file does not exist yet
                pass
            else:
                # previously swallowed silently; report like other failures
                print_error(e)

        except Exception as e:
            print_error(e)
Example #9
0
def init(mark_all_as_read=False):
  """Register the built-in feed list with the global registry r and save it.

  When mark_all_as_read is True, every feed is fetched once and all of its
  current entries are marked as seen.
  """
  list_a = [
    {"id":"b4chan", "logo":"-4chan /b/-", "url":"http://boards.4chan.org/b/index.rss"},
    {"id":"a4chan", "logo":"-4chan /a/-", "url":"http://boards.4chan.org/a/index.rss"},
    {"id":"g4chan", "logo":"-4chan /g/-", "url":"http://boards.4chan.org/g/index.rss"},
    {"id":"v4chan", "logo":"-4chan /v/-", "url":"http://boards.4chan.org/v/index.rss"},
    {"id":"gif4chan", "logo":"-4chan /gif/-", "url":"http://boards.4chan.org/gif/index.rss"},
    {"id":"pplware", "logo": "11,2PPLWARE", "url":"http://pplware.sapo.pt/feed/"},
    {"id":"apod", "logo": "1,15APOD", "url":"http://apod.nasa.gov/apod.rss"},
    {"id":"tugaleaks", "logo": "14,01TUGALEAKS", "url":"http://feeds.feedburner.com/tugaleaks"},
    {"id":"gunshow", "logo": "0,1Gun Show", "url":"http://www.rsspect.com/rss/gunshowcomic.xml"},
    {"id":"qc", "logo": "10,12QC", "url":"http://www.questionablecontent.net/QCRSS.xml"},
    {"id":"xkcd", "logo": "1,0xkcd", "url":"http://xkcd.com/rss.xml"},
    {"id":"mojang", "logo":"Mojang", "url":"http://mojang.com/feed"},
    {"id":"bukkit", "logo":"bukkit", "url":"http://forums.bukkit.org/forums/bukkit-news.2/index.rss"},
    {"id":"wotd", "logo":"-palavra do dia-", "url":"http://priberam.pt/dlpo/DoDiaRSS.aspx"},
    {"id":"blitz", "logo":"BLITZ.pt", "url":"http://blitz.aeiou.pt/gen.pl?p=rss"},
    {"id":"smbc", "logo":"smbc", "url":"http://www.smbc-comics.com/rss.php"},
    {"id":"ptsec", "logo":"ptsec", "url":"https://ptsec.info/wp/feed/"},
    {"id":"kritzkast", "logo":"kritzkast", "url":"http://www.kritzkast.com/feed?cat=-14"},
    {"id":"tf2", "logo":"TF2 Official Blog", "url":"http://www.teamfortress.com/rss.xml"},
    {"id":"universetoday", "logo":"Universe Today", "url":"http://www.universetoday.com/feed/"},
    {"id":"hackernews", "logo":"Hacker News", "url":"http://news.ycombinator.com/rss"},
    {"id":"sceper", "logo":"Sceper", "url":"http://sceper.eu/feed"},
    {"id":"thepiratebay", "logo":"ThePirateBay", "url":"https://rss.thepiratebay.se/0"},
    {"id":"hackaday", "logo":"Hack A Day", "url":"http://www.hackaday.com/rss.xml"},
    {"id":"astronomycast", "logo":"Astronomy Cast", "url":"http://feeds.feedburner.com/astronomycast"},
    {"id":"yt_jamesnintendonerd", "logo":"1,00,4 JamesNintendoNerd", "url":"http://www.youtube.com/rss/user/JamesNintendoNerd/videos.rss"},
    {"id":"blol", "logo":"0,13BLOL", "url":"http://blol.org/feed"},
    ##{"id":"", "logo":"", "url":""},
  ]

  for a in list_a:
    r.add(a["id"], a["logo"], a["url"])

  if mark_all_as_read:
    for f in r.feeds:
      # Every other print_error() call site passes a single formatted
      # string; the old two-positional-argument call looked like a stray
      # print()-style usage.
      print_error("Init %s" % r.feeds[f].feedid)
      r.feeds[f].mark_all_as_read()
    print_console("All unseen items marked as read.")

  r.save()
Example #10
0
  def get_item(self, n=0):
    """Fetch the feed and print the entry at index n (default: newest)."""
    parsed = feedparser.parse(self.url)

    if check_status(parsed, self.feedid) == -1:
      print_error("in get_item()")

    try:
      picked = parsed.entries[n]
    except IndexError:
      print_console("%s Entry %s not available" % (self.logo, n))
      exit(-1)

    added = self.add(picked)
    if added is not None:
      # previously unseen: highlight it as new
      added.print_me(print_summary=True, seen_as_new=True)
    else:
      # already known: print via a throwaway Entry wrapper
      Entry(picked, self).print_me(print_summary=True)
Example #11
0
    def get_item(self, n=0):
        """Fetch the feed and print the entry at index n (default: newest)."""
        parsed = feedparser.parse(self.url)

        if check_status(parsed) == -1:
            print_error("in get_item()")

        try:
            picked = parsed.entries[n]
        except IndexError:
            print_console("%s Entry %s not available" % (self.logo, n))
            exit(-1)

        added = self.add(picked)
        if added is not None:
            # previously unseen: highlight it as new
            added.print_me(print_summary=True, seen_as_new=True)
        else:
            # already known: print via a throwaway Entry wrapper
            Entry(picked, self).print_me(print_summary=True)
Example #12
0
def check_status(f, feedid):
  """Validate a feedparser result; return 0 when usable, -1 on error.

  feedid is used only to label the error message when the status cannot
  be determined at all.
  """
  try:
    # feedparser reports the HTTP status as an int; the old comparison
    # against the string '404' could never match.
    if f.status == 404:
      print_error("404 Not Found")
      return -1

    if f.bozo == 1:
      # these parser complaints do not warrant an error
      if type(f.bozo_exception) not in [feedparser.NonXMLContentType, feedparser.CharacterEncodingOverride, SAXParseException]:
        print_error("omg :( %s %s " % (type(f.bozo_exception), f.bozo_exception))
        return -1

  except (IndexError, AttributeError):
    # e.g. parsing a local file, where no HTTP status attribute exists
    print_error("%s: Unable to determine feed status" % feedid)
    return -1

  return 0
Example #13
0
def check_status(f):
    """Validate a feedparser result; return 0 when usable, -1 on error."""
    try:
        # feedparser reports the HTTP status as an int; the old comparison
        # against the string '404' could never match.
        if f.status == 404:
            print_error("404 Not Found")
            return -1

        if f.bozo == 1:
            # these do not warrant an error
            if (type(f.bozo_exception) not in [
                    feedparser.NonXMLContentType,
                    feedparser.CharacterEncodingOverride
            ]):
                print_error("omg :( %s" % (f.bozo_exception))
                return -1

    except (IndexError, AttributeError):
        # e.g. parsing a local file, where no HTTP status attribute exists
        print_error("Unable to determine feed status")
        return -1

    return 0
Example #14
0
import logging
import mylib

# Configure the root logger before anything emits records: level name,
# timestamp, then the message.
logging.basicConfig(format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO)
# mylib.print_error() is expected to emit through the same root logger
# configured above -- TODO confirm against mylib.
mylib.print_error()
logging.info("This is an info message in the main program")
logging.error("This is an error in the main program!")
Example #15
0
            # Optional third CLI argument: a numeric index/count.
            # Defaults to 0 when absent or not an integer.
            n = 0
            if l >= 3:
                try:
                    n = int(sys.argv[3])
                except ValueError:
                    n = 0
                except IndexError:
                    n = 0

            # 'feed' prints a single item at index n; 'new' prints up to n
            # recent items for each feed id on the command line.
            if arg1 == 'feed':
                if n < 0:
                    n = 0
                item(feedid, n)
            elif arg1 == 'new':
                if n == 0:
                    n = 1
                feedids = sys.argv[2:]
                for feedid in feedids:
                    recent(feedid, n)

        elif arg1 == 'init':
            init(mark_all_as_read=True)

        else:
            usage()

except FileLockException:
    print_error("Lock Timeout")

#print_error("%s pid stop " % os.getpid())
Example #16
0
      # Optional third CLI argument: a numeric index/count.
      # Defaults to 0 when absent or not an integer.
      n = 0
      if l >= 3:
        try: 
          n = int(sys.argv[3])
        except ValueError:
          n = 0
        except IndexError:
          n = 0

      # 'feed' prints a single item at index n; 'new' prints up to n
      # recent items for each feed id on the command line.
      if arg1 == 'feed':  
        if n < 0:
          n = 0
        item(feedid, n)
      elif arg1 == 'new':
        if n == 0:
          n = 1
        feedids = sys.argv[2:]
        for feedid in feedids:
          recent(feedid, n)

    elif arg1 == 'init':
      init(mark_all_as_read = True)

    else:
      usage()

except FileLockException:
  print_error("Lock Timeout")

#print_error("%s pid stop " % os.getpid())
Example #17
0
def init(mark_all_as_read=False):
    """Register the built-in feed list with the global registry r and save it.

    When mark_all_as_read is True, every feed is fetched once and all of
    its current entries are marked as seen.
    """
    list_a = [
        {"id": "b4chan", "logo": "-4chan /b/-", "url": "http://boards.4chan.org/b/index.rss"},
        {"id": "a4chan", "logo": "-4chan /a/-", "url": "http://boards.4chan.org/a/index.rss"},
        {"id": "g4chan", "logo": "-4chan /g/-", "url": "http://boards.4chan.org/g/index.rss"},
        {"id": "v4chan", "logo": "-4chan /v/-", "url": "http://boards.4chan.org/v/index.rss"},
        {"id": "gif4chan", "logo": "-4chan /gif/-", "url": "http://boards.4chan.org/gif/index.rss"},
        {"id": "pplware", "logo": "11,2PPLWARE", "url": "http://pplware.sapo.pt/feed/"},
        {"id": "apod", "logo": "1,15APOD", "url": "http://apod.nasa.gov/apod.rss"},
        {"id": "tugaleaks", "logo": "14,01TUGALEAKS", "url": "http://feeds.feedburner.com/tugaleaks"},
        {"id": "gunshow", "logo": "0,1Gun Show", "url": "http://www.rsspect.com/rss/gunshowcomic.xml"},
        {"id": "qc", "logo": "10,12QC", "url": "http://www.questionablecontent.net/QCRSS.xml"},
        {"id": "xkcd", "logo": "1,0xkcd", "url": "http://xkcd.com/rss.xml"},
        {"id": "mojang", "logo": "Mojang", "url": "http://mojang.com/feed"},
        {"id": "bukkit", "logo": "bukkit", "url": "http://forums.bukkit.org/forums/bukkit-news.2/index.rss"},
        {"id": "wotd", "logo": "-palavra do dia-", "url": "http://priberam.pt/dlpo/DoDiaRSS.aspx"},
        {"id": "blitz", "logo": "BLITZ.pt", "url": "http://blitz.aeiou.pt/gen.pl?p=rss"},
        {"id": "smbc", "logo": "smbc", "url": "http://www.smbc-comics.com/rss.php"},
        {"id": "ptsec", "logo": "ptsec", "url": "https://ptsec.info/wp/feed/"},
        {"id": "kritzkast", "logo": "kritzkast", "url": "http://www.kritzkast.com/feed?cat=-14"},
        {"id": "tf2", "logo": "TF2 Official Blog", "url": "http://www.teamfortress.com/rss.xml"},
        {"id": "universetoday", "logo": "Universe Today", "url": "http://www.universetoday.com/feed/"},
        {"id": "hackernews", "logo": "Hacker News", "url": "http://news.ycombinator.com/rss"},
        {"id": "sceper", "logo": "Sceper", "url": "http://sceper.eu/feed"},
        {"id": "thepiratebay", "logo": "ThePirateBay", "url": "https://rss.thepiratebay.se/0"},
        {"id": "hackaday", "logo": "Hack A Day", "url": "http://www.hackaday.com/rss.xml"},
        {"id": "astronomycast", "logo": "Astronomy Cast", "url": "http://feeds.feedburner.com/astronomycast"},
        {"id": "yt_jamesnintendonerd", "logo": "1,00,4 JamesNintendoNerd", "url": "http://www.youtube.com/rss/user/JamesNintendoNerd/videos.rss"},
        {"id": "blol", "logo": "0,13BLOL", "url": "http://blol.org/feed"},
        ##{"id":"", "logo":"", "url":""},
    ]

    for a in list_a:
        r.add(a["id"], a["logo"], a["url"])

    if mark_all_as_read:
        for f in r.feeds:
            # Every other print_error() call site passes a single formatted
            # string; the old two-positional-argument call looked like a
            # stray print()-style usage.
            print_error("Init %s" % r.feeds[f].feedid)
            r.feeds[f].mark_all_as_read()
        print_console("All unseen items marked as read.")

    r.save()