Beispiel #1
0
    def __init__(self, config=None):
        """Build a Feed object from an optional configuration mapping.

        Recognized config keys: 'name', 'url', 'title-key', 'body-key'
        and 'timestamp-key'; a sensible default is used for any key that
        is missing.  The feed is fetched and its entry list built
        eagerly at construction time.

        :param config: optional dict of feed settings (may be omitted)
        """
        # The original default was a mutable dict literal (`config={}`),
        # which is shared across calls; use None as the sentinel instead.
        if config is None:
            config = {}

        self.name = config.get('name')  # display name; None means "use the url"
        self.url = config.get('url', ARCH_NEWS)  # default to the Arch news feed
        self.title_key = config.get('title-key', 'title')
        self.body_key = config.get('body-key', 'summary')
        self.timestamp_key = config.get('timestamp-key', 'published')

        ui.debug_print('building feed for: {}'.format(
            self.name if self.name is not None else self.url))

        self.feed = self.fetch()  # the complete feed as returned by feedparser
        self.entries = self.build_feed(
        )  # the list of entries informant will use
def main():
    """ The main function.
    Check given arguments get feed and run given command. """
    argv = docopt.docopt(__doc__, version='informant v{}'.format(__version__))
    InformantConfig().set_argv(argv)
    InformantConfig().debug_print = ui.debug_print
    InformantConfig().readlist = fs.read_datfile()
    config = InformantConfig().get_config()
    ui.debug_print('cli args: {}'.format(argv))

    # Collect entries from every configured feed, or from the default feed
    # when the config declares none.
    if 'feeds' in config:
        feed = [entry
                for feed_conf in config['feeds']
                for entry in Feed(feed_conf).entries]
    else:
        feed = Feed().entries

    if not feed:
        ui.warn_print('no news feed items, informant is performing no action')
        sys.exit()

    # Newest entries first.
    feed = sorted(feed, key=lambda entry: entry.timestamp, reverse=True)

    # Dispatch to the first command present on the command line.
    for command, handler in ((CHECK_CMD, check_cmd),
                             (LIST_CMD, list_cmd),
                             (READ_CMD, read_cmd)):
        if argv.get(command):
            handler(feed)
            break
    sys.exit()
Beispiel #3
0
def clear_cachefile():
    """ Empty the cachefile directory """
    pattern = os.path.join(InformantConfig().get_cachefile(), '*')
    ui.debug_print('Removing based on pattern: {}'.format(pattern))
    # Remove every entry under the cache dir, recursing into directories.
    for entry in glob.glob(pattern):
        remove = shutil.rmtree if os.path.isdir(entry) else os.remove
        remove(entry)
Beispiel #4
0
 def has_been_read(self):
     """ Check if this entry has been read and return True or False. """
     debug = InformantConfig().get_argv_debug()
     readlist = InformantConfig().readlist
     if debug:
         ui.debug_print(readlist)
     # readlist entries are keyed as "<unix timestamp>|<title>"
     entry_key = '{}|{}'.format(self.timestamp.timestamp(), self.title)
     return entry_key in readlist
Beispiel #5
0
    def fetch(self):
        """Fetch and parse this feed, optionally through the on-disk cache.

        Returns the feedparser result.  On any fetch/parse error an empty
        FeedParserDict (with no entries) is returned instead of raising,
        so a failure in one of several feeds does not abort the run (and
        the pacman hook is never blocked).
        """
        feed = None
        if InformantConfig().get_argv_clear_cache():
            ui.debug_print('Clearing cache')
            fs.clear_cachefile()
        if InformantConfig().get_argv_use_cache():
            ui.debug_print('Checking cache in {}'.format(
                InformantConfig().get_cachefile()))
            cachefile = InformantConfig().get_cachefile()
            # Unrestrict the umask so cached files get group-writable
            # permissions, but restore the previous value afterwards:
            # os.umask changes process-wide state and the original code
            # leaked the modified umask to the rest of the program.
            old_umask = os.umask(0o0002)
            try:
                session = CacheControl(requests.Session(),
                                       cache=FileCache(cachefile,
                                                       filemode=0o0664,
                                                       dirmode=0o0775))
                feed = feedparser.parse(session.get(self.url).content)
            except Exception as e:
                # Best-effort: any cache problem falls back to a direct fetch.
                ui.err_print('Unable to read cache information: {}'.format(e))
                ui.debug_print('Falling back to fetching feed')
                feed = feedparser.parse(self.url)
            finally:
                os.umask(old_umask)
        else:
            feed = feedparser.parse(self.url)

        # feedparser sets `bozo` when the feed could not be fetched or parsed.
        if feed.bozo:
            e = feed.bozo_exception
            if isinstance(e, URLError):
                # most likely this is an internet issue (no connection)
                ui.warn_print('News could not be fetched for {}'.format(
                    self.name if self.name is not None else self.url))
                ui.debug_print('URLError: {}'.format(e.reason))
            else:
                # I think this is most likely to be a malformed feed
                ui.err_print('Encountered feed error: {}'.format(
                    feed.bozo_exception))
                ui.debug_print('bozo message: {}'.format(
                    feed.bozo_exception.getMessage()))
            # In either of these error cases we probably shouldn't return error
            # so the pacman hook won't hold up an operation.
            # Here return an empty set of entries in case only one of multiple
            # feeds failed to fetch
            try:
                feed = feedparser.util.FeedParserDict()
                feed.update({'entries': []})
            except Exception as e:
                ui.err_print('Unexpected error: {}'.format(e))
                sys.exit()

        return feed
Beispiel #6
0
def save_datfile():
    """ Save the readlist to the datfile.

    In debug mode nothing is written, so debug runs never mark entries
    as read.  Exits with status 255 on a permission error.
    """
    debug = InformantConfig().get_argv_debug()
    readlist = InformantConfig().readlist
    if debug:
        ui.debug_print('running in debug mode, will not update readlist')
        return
    filename = InformantConfig().get_savefile()
    try:
        # then open as write to save updated list; the `with` block closes
        # the file, so no explicit close() is needed
        with open(filename, 'wb') as pickle_file:
            pickle.dump(readlist, pickle_file)
    except PermissionError:
        ui.err_print('Unable to save read information, please re-run with \
correct permissions to access "{}".'.format(filename))
        sys.exit(
            255
        )  # this should never block pacman because the hook should run with root/sudo
Beispiel #7
0
def read_datfile():
    """ Return the saved readlist from the datfile.

    Any missing, unreadable, empty or corrupt datfile yields an empty
    readlist rather than an error.
    """
    debug = InformantConfig().get_argv_debug()
    filename = InformantConfig().get_savefile()
    if debug:
        ui.debug_print('Getting datfile from "{}"'.format(filename))

    try:
        with open(filename, 'rb') as pickle_file:
            try:
                # NOTE: pickle is only acceptable here because the datfile
                # is written by informant itself (save_datfile); never use
                # pickle.load on untrusted data.
                readlist = pickle.load(pickle_file)
                if isinstance(readlist, tuple):
                    # backwards compatibility with informant < 0.4.0 save data
                    readlist = readlist[1]
            except (EOFError, ValueError):
                # empty or corrupt datfile: start over with nothing read
                readlist = []
    except (FileNotFoundError, PermissionError):
        # no datfile yet (e.g. first run) or unreadable: treat as nothing read
        readlist = []
    return readlist
Beispiel #8
0
def run():
    """ The main function.
    Check given arguments get feed and run given command. """
    argv = InformantConfig().get_argv()
    config = InformantConfig().get_config()
    if argv.get(DEBUG_OPT):
        ui.debug_print('cli args: {}'.format(argv))

    # Combine entries from all configured feeds, or use the default feed.
    if 'feeds' in config:
        feed = []
        for config_feed in config['feeds']:
            feed += Feed(config_feed).entries
    else:
        feed = Feed().entries

    # Consistent with main(): bail out early when there is nothing to do,
    # instead of running a command against an empty feed.
    if not feed:
        ui.warn_print('no news feed items, informant is performing no action')
        sys.exit()

    # Newest entries first.
    feed = sorted(feed, key=lambda k: k.timestamp, reverse=True)

    if argv.get(CHECK_CMD):
        check_cmd(feed)
    elif argv.get(LIST_CMD):
        list_cmd(feed)
    elif argv.get(READ_CMD):
        read_cmd(feed)