def txn():
    """Transactionally fetches or creates the Subscription for key_name.

    Relies on the enclosing scope for cls, key_name, callback, topic,
    secret, hash_func, verify_token, lease_seconds and now.

    Returns:
        Tuple (sub_is_new, sub) where sub_is_new is True if a new
        Subscription entity was created by this call.
    """
    sub = cls.get_by_key_name(key_name)
    created = sub is None
    if created:
        # New subscription: expiration is lease_seconds from "now".
        expiration = now() + datetime.timedelta(seconds=lease_seconds)
        sub = cls(key_name=key_name,
                  callback=callback,
                  callback_hash=utils.sha1_hash(callback),
                  topic=topic,
                  topic_hash=utils.sha1_hash(topic),
                  secret=secret,
                  hash_func=hash_func,
                  verify_token=verify_token,
                  lease_seconds=lease_seconds,
                  expiration_time=expiration)
    sub.put()
    return (created, sub)
def insert(cls, name):
    """Stores a Domain entity for name if one does not already exist.

    Args:
        name: The domain name to record.

    Returns:
        True if a new Domain entity was created, False if one already
        existed for this name.
    """
    key_name = utils.sha1_hash(name)
    # Guard clause: nothing to do when the domain is already recorded.
    if Domain.get_by_key_name(key_name) is not None:
        return False
    Domain(key_name=key_name, name=name).put()
    return True
def txn():
    """Marks the subscription verified inside a transaction.

    Creates the Subscription entity if it does not yet exist, then flips
    it to STATE_VERIFIED and extends its expiration by lease_seconds.
    Relies on the enclosing scope for cls, key_name, callback, topic,
    verify_token, secret, hash_func, lease_seconds and now_time.

    Returns:
        True if a new Subscription entity was created, False otherwise.
    """
    sub = cls.get_by_key_name(key_name)
    is_new = sub is None
    if is_new:
        sub = cls(key_name=key_name,
                  callback=callback,
                  callback_hash=utils.sha1_hash(callback),
                  topic=topic,
                  topic_hash=utils.sha1_hash(topic),
                  verify_token=verify_token,
                  secret=secret,
                  hash_func=hash_func,
                  lease_seconds=lease_seconds,
                  expiration_time=now_time)
    # Applies to pre-existing subscriptions as well: re-verification
    # refreshes both the state and the expiration time.
    sub.subscription_state = cls.STATE_VERIFIED
    sub.expiration_time = now_time + datetime.timedelta(
        seconds=lease_seconds)
    sub.put()
    return is_new
def get_subscribers(cls, topic, count, starting_at_callback=None):
    """Gets the list of subscribers starting at an offset.

    Args:
        topic: The topic URL to retrieve subscribers for.
        count: How many subscribers to retrieve.
        starting_at_callback: A string containing the callback hash to
            offset to when retrieving more subscribers. The callback at
            the given offset *will* be included in the results. If None,
            then subscribers will be retrieved from the beginning.

    Returns:
        List of Subscription objects that were found, or an empty list
        if none were found.
    """
    query = cls.all()
    query.filter('topic_hash =', utils.sha1_hash(topic))
    query.filter('subscription_state = ', cls.STATE_VERIFIED)
    if starting_at_callback:
        # Inclusive offset: the subscriber at this callback hash is
        # part of the returned page.
        offset_hash = utils.sha1_hash(starting_at_callback)
        query.filter('callback_hash >=', offset_hash)
    query.order('callback_hash')
    return query.fetch(count)
def has_subscribers(cls, topic):
    """Check if a topic URL has verified subscribers.

    Args:
        topic: The topic URL to check for subscribers.

    Returns:
        True if it has verified subscribers, False otherwise.
    """
    # Fetch at most one matching entity; its presence answers the
    # question. Returning the boolean expression directly replaces the
    # redundant `if ...: return True else: return False` of the
    # original.
    query = (cls.all()
             .filter('topic_hash =', utils.sha1_hash(topic))
             .filter('subscription_state =', cls.STATE_VERIFIED))
    return query.get() is not None
def insert(cls, html, link, type, article):
    """Stores a Link entity for link unless one already exists.

    For 'headline' links the link's domain is also recorded via
    Domain.insert. The stored link is normalized by a parse/unparse
    round trip through urlparse.

    Args:
        html: HTML snippet associated with the link.
        link: The link URL; its sha1 hash is used as the key name.
        type: Link type string (e.g. 'headline'). NOTE: shadows the
            builtin `type`, but renaming would break keyword callers.
        article: The article entity this link belongs to.

    Returns:
        True if a new Link entity was created, False if one already
        existed for this URL.
    """
    key_name = utils.sha1_hash(link)
    # Guard clause: already stored, nothing to do.
    if Link.get_by_key_name(key_name) is not None:
        return False
    parts = urlparse.urlparse(link)
    if type == 'headline':
        Domain.insert(parts.netloc)
    new_link = Link(key_name=key_name,
                    html=html,
                    link=urlparse.urlunparse(parts),
                    type=type,
                    article=article)
    new_link.put()
    return True
def get(self):
    """Serves a JSON array containing the requested article, if found.

    Looks up an article by the "hash" query parameter, or, when no hash
    is supplied, by the sha1 hash of the "url" parameter. The response
    body is a JSON list with zero or one article dicts.
    """
    requested_hash = self.request.get("hash")
    requested_url = self.request.get("url")
    # Fall back to hashing the page URL when only "url" was given.
    if not requested_hash and requested_url:
        requested_hash = utils.sha1_hash(requested_url)

    payload = []
    if requested_hash:
        article = models.Article.from_hash(requested_hash)
        if article:
            payload.append(article.as_dict())

    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(simplejson.dumps(payload))
def create_event_for_topic(cls, topic, format, header_footer,
                           entry_payloads, now=datetime.datetime.utcnow):
    """Creates an event to deliver for a topic and set of published entries.

    Args:
        topic: The topic that had the event.
        format: Format of the feed, either 'atom' or 'rss'.
        header_footer: The header and footer of the published feed into
            which the entry list will be spliced.
        entry_payloads: List of strings containing entry payloads (i.e.,
            all XML data for each entry, including surrounding tags) in
            order of newest to oldest.
        now: Returns the current time as a UTC datetime. Used in tests.

    Returns:
        A new EventToDeliver instance that has not been stored.

    Raises:
        AssertionError: If format is not a known feed format, or if the
            feed envelope does not contain the expected closing tag.
    """
    if format == ATOM:
        close_tag = '</feed>'
        content_type = 'application/atom+xml'
    elif format == RSS:
        close_tag = '</channel>'
        content_type = 'application/rss+xml'
    else:
        # Raise explicitly instead of `assert False`: asserts are
        # stripped under `python -O`, which would let an invalid format
        # fall through to an undefined close_tag (NameError).
        raise AssertionError('Invalid format "%s"' % format)

    close_index = header_footer.rfind(close_tag)
    if close_index == -1:
        # Same reasoning: without this explicit check, -O would slice at
        # index -1 and silently corrupt the spliced payload.
        raise AssertionError(
            'Could not find %s in feed envelope' % close_tag)

    # Splice the entries just before the envelope's closing tag.
    payload_list = ['<?xml version="1.0" encoding="utf-8"?>',
                    header_footer[:close_index]]
    payload_list.extend(entry_payloads)
    payload_list.append(header_footer[close_index:])
    payload = '\n'.join(payload_list)

    return cls(
        parent=db.Key.from_path(
            FeedRecord.kind(), FeedRecord.create_key_name(topic)),
        topic=topic,
        topic_hash=utils.sha1_hash(topic),
        payload=payload,
        last_modified=now(),
        content_type=content_type)
def create_entry_for_topic(cls, topic, entry_id, content_hash):
    """Creates a single FeedEntryRecord entity for a topic entry.

    Does not actually insert the entity into the Datastore. This is left
    to the caller so they can do it as part of a larger batch put().

    Args:
        topic: The topic URL to insert the entity for.
        entry_id: String containing the ID of the entry.
        content_hash: Sha1 hash of the entry's entire XML content. For
            example, with Atom this would apply to everything from
            <entry> to </entry> with the surrounding tags included. With
            RSS it would be everything from <item> to </item>.

    Returns:
        A new FeedEntryRecord that should be inserted into the Datastore.
    """
    # Key encodes both topic and entry so record shares an entity group
    # with (is a child of) its topic's record — see create_key.
    key = cls.create_key(topic, entry_id)
    return cls(key_name=key.name(),
               parent=key.parent(),
               entry_id=entry_id,
               entry_id_hash=utils.sha1_hash(entry_id),
               entry_content_hash=content_hash)