def fetch_feed(self, subscription_data, rssfeed_data, fetch_data):
    """Search a feed with config 'subscription_data'.

    Fetches and parses the RSS feed if not already cached in
    fetch_data["rssfeed_items"], matches the items against the
    subscription's filters, and appends matching torrents to
    fetch_data["matching_torrents"].
    """
    self.log.info("Fetching subscription '%s'." % subscription_data["name"])
    # Feed has not yet been fetched.
    if fetch_data["rssfeed_items"] is None:
        rssfeed_parsed = self.get_rssfeed_parsed(
            rssfeed_data,
            site_cookies_dict=fetch_data["site_cookies_dict"],
            user_agent=fetch_data["user_agent"])
        if rssfeed_parsed is None:
            return
        if "bozo_exception" in rssfeed_parsed:
            self.log.warning("bozo_exception when parsing rssfeed: %s" %
                             str(rssfeed_parsed["bozo_exception"]))
        if "items" in rssfeed_parsed:
            fetch_data["rssfeed_items"] = rssfeed_parsed["items"]
            self.handle_ttl(rssfeed_data, rssfeed_parsed, fetch_data)
        else:
            self.log.warning("No items retrieved")
            return
    # Remove the custom text lines before matching (not strictly necessary though,
    # but they are only for testing in the DialogSubscription)
    options = subscription_data.copy()
    # pop with a default instead of del: avoids a KeyError when the
    # subscription dict has no "custom_text_lines" entry.
    options.pop("custom_text_lines", None)
    matches, message = self.update_rssfeeds_dict_matching(fetch_data["rssfeed_items"],
                                                          options=options)
    self.log.info("%d items in feed, %d matches the filter." %
                  (len(fetch_data["rssfeed_items"]), len(matches)))
    last_match_dt = common.isodate_to_datetime(subscription_data["last_match"])
    # Iterate over a snapshot of the keys because entries may be deleted
    # from 'matches' inside the loop.
    for key in list(matches.keys()):
        # Discard match only if timestamp is available,
        # and the timestamp is older or equal to the last matching timestamp.
        # Guard last_match_dt against None (no previous match recorded) to
        # avoid a TypeError on the datetime comparison.
        matched_updated = common.isodate_to_datetime(matches[key]["updated"])
        if matched_updated and last_match_dt is not None and last_match_dt >= matched_updated:
            if subscription_data["ignore_timestamp"] is True:
                self.log.info("Old timestamp: '%s', but ignore option is enabled so add torrent anyways."
                              % matches[key]["title"])
            else:
                self.log.info("Not adding because of old timestamp: '%s'" % matches[key]["title"])
                del matches[key]
                continue
        fetch_data["matching_torrents"].append({
            "title": matches[key]["title"],
            "link": matches[key]["link"],
            "updated_datetime": matched_updated,
            "site_cookies_dict": fetch_data["site_cookies_dict"],
            "user_agent": fetch_data["user_agent"],
            "referrer": rssfeed_data["url"],
            "subscription_data": subscription_data})
def add_torrents(self, save_subscription_func, torrent_list, config):
    """Add the torrents in 'torrent_list' and send email notifications.

    For each successfully added torrent the subscription's 'last_match'
    timestamp is advanced (and persisted via save_subscription_func), and
    the torrent title is collected per email-message key so one email is
    sent per active message after all torrents are processed.
    """
    torrent_names = {}
    for torrent_match in torrent_list:
        torrent_download = self.add_torrent(torrent_match)
        if not torrent_download.success:
            self.log.warning(
                "Failed to add torrent '%s' from url '%s'" %
                (torrent_match["title"], torrent_match["link"]))
        else:
            self.log.info("Succesfully added torrent '%s'." % torrent_match["title"])
            # Update subscription with date
            torrent_time = torrent_match["updated_datetime"]
            last_subscription_update = common.isodate_to_datetime(
                torrent_match["subscription_data"]["last_match"])
            last_subscription_update = common.datetime_ensure_timezone(
                last_subscription_update)
            # Update subscription time if this is newer
            # The order of the torrents are in ordered from newest to oldest
            if torrent_time and last_subscription_update <= torrent_time:
                torrent_match["subscription_data"][
                    "last_match"] = torrent_time.isoformat()
                # Save subsription with updated timestamp
                save_subscription_func(
                    subscription_data=torrent_match["subscription_data"])
            # Handle email notification
            # key is the dictionary key used in the email_messages config.
            for key in torrent_match["subscription_data"]["email_notifications"]:
                # Must be enabled in the subscription
                if not torrent_match["subscription_data"][
                        "email_notifications"][key]["on_torrent_added"]:
                    continue
                if key not in torrent_names:
                    torrent_names[key] = (
                        torrent_match["subscription_data"], [])
                # Add the torrent file to the list of files for this notification.
                torrent_names[key][1].append(torrent_match["title"])
    if config["email_configurations"][
            "send_email_on_torrent_events"] is False:
        return
    for email_key in torrent_names:
        # Check that the message is active
        if not config["email_messages"][email_key]["active"]:
            continue
        # BUG FIX: index with 'email_key' (this loop's variable) instead of
        # the stale 'key' left over from the notification loop above, which
        # sent the wrong subscription/torrent list (or crashed if 'key' was
        # never bound).
        send_torrent_email(config["email_configurations"],
                           config["email_messages"][email_key],
                           subscription_data=torrent_names[email_key][0],
                           torrent_name_list=torrent_names[email_key][1],
                           deferred=True)
def add_torrents(self, save_subscription_func, torrent_list, config):
    """Add the torrents in 'torrent_list' and send email notifications.

    Successfully added torrents advance the subscription's 'last_match'
    timestamp (persisted via save_subscription_func). Torrent titles are
    grouped per email-message key; one email is sent per active message.
    """
    torrent_names = {}
    for torrent_match in torrent_list:
        torrent_download = self.add_torrent(torrent_match)
        if not torrent_download.success:
            # log.warning instead of the deprecated log.warn alias
            self.log.warning("Failed to add torrent '%s' from url '%s'" %
                             (torrent_match["title"], torrent_match["link"]))
        else:
            self.log.info("Succesfully added torrent '%s'." % torrent_match["title"])
            # Update subscription with date
            torrent_time = torrent_match["updated_datetime"]
            last_subscription_update = common.isodate_to_datetime(
                torrent_match["subscription_data"]["last_match"])
            # Update subscription time if this is newer
            # The order of the torrents are in ordered from newest to oldest
            if torrent_time and last_subscription_update < torrent_time:
                torrent_match["subscription_data"]["last_match"] = torrent_time.isoformat()
                # Save subsription with updated timestamp
                save_subscription_func(subscription_data=torrent_match["subscription_data"])
            # Handle email notification
            # key is the dictionary key used in the email_messages config.
            for key in torrent_match["subscription_data"]["email_notifications"]:
                # Must be enabled in the subscription
                if not torrent_match["subscription_data"]["email_notifications"][key]["on_torrent_added"]:
                    continue
                # 'in' replaces dict.has_key, which was removed in Python 3
                if key not in torrent_names:
                    torrent_names[key] = (torrent_match["subscription_data"], [])
                # Add the torrent file to the list of files for this notification.
                torrent_names[key][1].append(torrent_match["title"])
    if config["email_configurations"]["send_email_on_torrent_events"] is False:
        return
    for email_key in torrent_names:
        # Check that the message is active
        if not config["email_messages"][email_key]["active"]:
            continue
        # BUG FIX: index with 'email_key' rather than the stale 'key' left
        # over from the notification loop above.
        # NOTE(review): 'defered' (sic) matches this version's
        # send_torrent_email signature — verify before renaming.
        send_torrent_email(config["email_configurations"],
                           config["email_messages"][email_key],
                           subscription_data=torrent_names[email_key][0],
                           torrent_name_list=torrent_names[email_key][1],
                           defered=True)
def fetch_feed(self, subscription_data, rssfeed_data, fetch_data):
    """Search a feed with config 'subscription_data'.

    Fetches and parses the RSS feed if not already cached in
    fetch_data["rssfeed_items"], matches the items against the
    subscription's filters, and appends new matches to
    fetch_data["matching_torrents"].
    """
    self.log.info("Fetching subscription '%s'." % subscription_data["name"])
    # Feed has not yet been fetched.
    if fetch_data["rssfeed_items"] is None:
        rssfeed_parsed = self.get_rssfeed_parsed(
            rssfeed_data, site_cookies_dict=fetch_data["site_cookies_dict"])
        if rssfeed_parsed is None:
            return
        # 'in' replaces dict.has_key, which was removed in Python 3;
        # log.warning replaces the deprecated log.warn alias.
        if "bozo_exception" in rssfeed_parsed:
            self.log.warning("bozo_exception when parsing rssfeed: %s" %
                             str(rssfeed_parsed["bozo_exception"]))
        if "items" in rssfeed_parsed:
            fetch_data["rssfeed_items"] = rssfeed_parsed["items"]
            self.handle_ttl(rssfeed_data, rssfeed_parsed, fetch_data)
        else:
            self.log.warning("No items retrieved")
            return
    # Remove the custom text lines before matching (not strictly necessary though,
    # but they are only for testing in the DialogSubscription)
    options = subscription_data.copy()
    del options["custom_text_lines"]
    matches, message = self.update_rssfeeds_dict_matching(fetch_data["rssfeed_items"],
                                                          options=options)
    self.log.info("%d items in feed, %d matches the filter." %
                  (len(fetch_data["rssfeed_items"]), len(matches)))
    last_match_dt = common.isodate_to_datetime(subscription_data["last_match"])
    # Iterate a snapshot of the keys: entries are deleted from 'matches'
    # inside the loop, which raises RuntimeError in Python 3 when iterating
    # the live keys view.
    for key in list(matches.keys()):
        # Discard match only if timestamp is available,
        # and the timestamp is older or equal to the last matching timestamp
        if matches[key]["updated_datetime"] and last_match_dt >= matches[key]["updated_datetime"]:
            self.log.info("Not adding because of old timestamp: '%s'" % matches[key]["title"])
            del matches[key]
            continue
        fetch_data["matching_torrents"].append({
            "title": matches[key]["title"],
            "link": matches[key]["link"],
            "updated_datetime": matches[key]["updated_datetime"],
            "site_cookies_dict": fetch_data["site_cookies_dict"],
            "subscription_data": subscription_data})