示例#1
0
class Comment(db.Model):
    """A comment on an Article, stored as a child entity of that article.

    Relies on module-level helpers and constants defined elsewhere in this
    file (CONTENT_FORMAT_FLAG, COMMENTS_PER_PAGE, memcached,
    query_with_cursor, get_fetch_result_with_valid_cursor, ...).
    """
    # parent: Article
    email = db.StringProperty(required=True)  # commenter's email address
    content = db.TextProperty()  # comment body
    format = db.IntegerProperty(default=CONTENT_FORMAT_FLAG['plain'],
                                indexed=False)  # content parse format (bit flags)
    ua = db.StringListProperty(indexed=False)  # user-agent information
    time = db.DateTimeProperty(auto_now_add=True)  # time the comment was posted

    @staticmethod
    # get_comments_with_user_by_article_key is used more often; no need to
    # cache twice.
    def get_comments_by_article_key(article_key,
                                    order=True,
                                    cursor=None,
                                    fetch_limit=COMMENTS_PER_PAGE):
        """Fetch one page of comments for an article.

        order=True sorts oldest first.  Returns whatever
        get_fetch_result_with_valid_cursor yields (presumably the entity
        list plus a continuation cursor — confirm against that helper).
        """
        # Ancestor query: comments live under their article's entity group.
        query = Comment.all().ancestor(article_key).order(
            'time' if order else '-time')
        return get_fetch_result_with_valid_cursor(
            query_with_cursor(query, cursor),
            fetch_limit,
            config=EVENTUAL_CONSISTENCY_CONFIG)

    @staticmethod
    @memcached('get_comments_with_user_by_article_key',
               COMMENTS_CACHE_TIME,
               lambda article_key, order=True, cursor=None, fetch_limit=
               COMMENTS_PER_PAGE: ('%s_%s_%s' %
                                   (article_key.id(), order, cursor)))
    def get_comments_with_user_by_article_key(article_key,
                                              order=True,
                                              cursor=None,
                                              fetch_limit=COMMENTS_PER_PAGE):
        """Fetch a page of comments plus the User entity for each commenter.

        Memcached; the cache key is built from article id, sort order and
        cursor (note: fetch_limit is not part of the key).
        """
        comments = Comment.get_comments_by_article_key(article_key, order,
                                                       cursor, fetch_limit)
        # comments[0] is the entity list; User keys are built from commenter
        # emails (assumes User key_name is the email — TODO confirm).
        user_keys = [
            db.Key.from_path('User', comment.email) for comment in comments[0]
        ]
        return comments, db.get(user_keys, config=EVENTUAL_CONSISTENCY_CONFIG)

    @staticmethod
    def get_comment_by_id(comment_id, article_id):
        """Look up a comment by numeric id under its parent article."""
        return Comment.get_by_id(comment_id,
                                 db.Key.from_path('Article', article_id))

    def html_content(self):
        """Render `content` to HTML according to the `format` bit flags."""
        format = self.format
        content = self.content
        if format & CONTENT_FORMAT_FLAG['bbcode']:
            # When the html flag is also set, bbcode conversion skips escaping.
            return convert_bbcode_to_html(
                content, escape=not (format & CONTENT_FORMAT_FLAG['html']))
        if format & CONTENT_FORMAT_FLAG['html']:
            return content
        else:
            return parse_plain_text(content)

    @staticmethod
    @memcached('latest_comments', 0, lambda limit: limit)
    def latest_comments(limit):
        """Most recent `limit` comments with their articles and commenters.

        Cache time 0 — presumably "no expiry"; confirm memcached() semantics.
        """
        comments = Comment.all().order('-time').fetch(limit)
        articles = db.get([comment.parent_key() for comment in comments])
        users = db.get(
            [db.Key.from_path('User', comment.email) for comment in comments])
        return comments, articles, users
示例#2
0
class Story(storyengine.model.TransModel):
    """A purchasable story with visible pages, per-language caching and an
    expiration window.

    The expiration datetime was previously computed inline (and duplicated)
    in to_dict() and api_read(); it is factored into _expiration_datetime().
    """

    # basic info
    main_categoryid = db.StringProperty(default=None)
    language = db.StringListProperty(default=None)

    def _expiration_datetime(self):
        """Absolute datetime at which this story's data expires.

        Uses the story's own expiration_days/expiration_mins when either is
        positive, otherwise the module-wide STORY_EXPIRATION_* defaults.
        """
        if self.expiration_days > 0 or self.expiration_mins > 0:
            days, mins = self.expiration_days, self.expiration_mins
        else:
            days, mins = STORY_EXPIRATION_DAYS, STORY_EXPIRATION_MINUTES
        return datetime.datetime.now() + datetime.timedelta(days=days,
                                                            minutes=mins)

    def setter(self, req):
        """Populate fields from a request object, then defer to the base."""
        self.main_categoryid = req.get('main_categoryid')
        super(Story, self).setter(req)

    def pages_get(self):
        """Return ids (as strings) of visible pages, ordered by page index."""
        visible = [page for page in self.pages if 1 == page.visible]
        ret = []
        for page in sorted(visible, key=lambda p: p.index):
            try:
                ret.append(str(page.key().id()))
            except Exception:
                # A page may not have a usable id (e.g. unsaved entity);
                # skip it rather than fail the whole listing.  (Was a bare
                # except:, which also swallowed KeyboardInterrupt/SystemExit.)
                pass
        return ret

    def to_dict(self, keyid, lang):
        """Serialize to a dict, adding expiration and category fields."""
        d = super(Story, self).to_dict(keyid, lang)
        d['expiration_date'] = int(
            time.mktime(self._expiration_datetime().timetuple()))
        d['main_categoryid'] = str(self.main_categoryid)
        return d

    def to_formdict(self):
        """Serialize for form editing, adding the category field."""
        f_data = super(Story, self).to_formdict()
        f_data['main_categoryid'] = self.main_categoryid
        return f_data

    @staticmethod
    def new(creator_):
        """Create (but do not persist) a new story entity."""
        return Story(creator=creator_, content_type='story')

    @staticmethod
    def api_index(lang):
        """List all story ids.  `lang` is unused but kept for API symmetry."""
        stories = [str(story.key().id()) for story in Story.all()]
        return {"stories": stories, "success": True}

    @staticmethod
    def api_read(keyid, lang):
        """Return the story as a dict, using the per-language cache when warm."""
        story = Story.get_by_id(int(keyid))
        if story is None:
            return {'success': False, 'id': keyid, 'error': 'invalid key_id'}
        cdata = super(Story, story).cache_get(lang)
        if cdata is not None:
            return cdata
        ret = story.to_dict(keyid, lang)
        ret["pages"] = story.pages_get()
        ret["success"] = True
        # Cache until the story's expiration moment (unix timestamp).
        expires = int(time.mktime(story._expiration_datetime().timetuple()))
        super(Story, story).cache_set(ret, expires, lang)
        return ret

    @staticmethod
    def api_purchase(storyid, account):
        """Buy a story for `account` via a Receipt; record ownership on success."""
        story = Story.get_by_id(int(storyid))
        if story is None:
            # Mirror api_read's handling instead of raising AttributeError
            # below on a bad id.
            return {'success': False, 'id': str(storyid),
                    'error': 'invalid key_id'}
        params = {
            'id': storyid,
            'cost': story.price,
            'itemcost': 0,
            'receipt_type': 'data',
        }
        ret = Receipt.new(params, account)
        if ret['success'] == True:
            account.storyIDs.append(storyid)
            account.put()
        ret["id"] = str(storyid)
        return ret
示例#3
0
class User(db.Model):
    """CHIRP radio's canonical user class.

    This object is designed to be contrib.auth-like but not
    necessarily compatible.  We only keep basic information about the
    user here; essentially only things we need for "sysadmin-y" tasks.
    More detailed information should go in the volunteer tracker or
    other systems.
    """
    email = db.EmailProperty(required=True)
    first_name = db.StringProperty()
    last_name = db.StringProperty()
    # ID assigned by chirpradio.org via XML sync.
    external_id = db.IntegerProperty()

    dj_name = db.StringProperty(required=False)

    # This is an index of searchable terms for the user. Used by autocomplete.
    index = db.StringListProperty()

    # Salted SHA1 hash of the user's password: 4 hex chars of salt followed
    # by sha1(salt + plaintext).
    password = db.StringProperty()
    # We omit Django's is_staff property.
    is_active = db.BooleanProperty(default=True, required=True)
    # Superusers are given unfettered access to the site, and are
    # considered to be in every role.
    is_superuser = db.BooleanProperty(default=False, required=True)
    last_login = db.DateTimeProperty(auto_now_add=True, required=True)
    date_joined = db.DateTimeProperty(auto_now_add=True, required=True)

    # We omit Django's groups property, and replace it with 'roles'.
    # A role is just a constant string identifier.  For a list of
    # the possible roles, see the auth.roles module.
    #
    # Properties for checking if a user has a particular role are
    # automatically patched into the User class.  The following two
    # expressions are equivalent:
    #   roles.ROLE_NAME in user.roles
    #   user.is_role_name
    #
    # TODO(trow): Add validation that all roles are valid.
    roles = db.StringListProperty()

    def __unicode__(self):
        """Display name: first/last name when available, else the email."""
        name_parts = []
        if self.first_name:
            name_parts.append(self.first_name)
        if self.last_name:
            name_parts.append(self.last_name)
        if not name_parts:
            name_parts.append(self.email)
        return u' '.join(name_parts)

    def __str__(self):
        return unicode(self).encode('utf-8')

    @classmethod
    def _hash_password(cls, plaintext):
        """SHA1 hex digest of `plaintext`.

        NOTE(review): unsalted SHA-1 with a 16-bit time-derived salt (see
        set_password) is weak by modern standards; consider a real KDF.
        """
        return hashlib.sha1(plaintext).hexdigest()

    def set_password(self, plaintext):
        """Store the salted SHA1 hash in the password property."""
        # Salt is 4 hex digits derived from the sub-second clock fraction.
        salt = '%04x' % int(0xffff * (time.time() % 1))
        self.password = salt + User._hash_password(salt + plaintext)

    def check_password(self, plaintext):
        """True when `plaintext` matches the stored salted hash."""
        if not self.password or len(self.password) < 4:
            return False
        salt = self.password[:4]
        hashed = self.password[4:]
        return hashed == User._hash_password(salt + plaintext)

    @classmethod
    def get_by_email(cls, email):
        """Return the unique user with `email`, or None.

        Raises:
          LookupError: if more than one user shares the address.
        """
        query = db.Query(cls)
        query.filter('email =', email)
        # One fetch(2) round trip instead of the original's up-to-two
        # count() RPCs plus a get().  Assumes AutoRetry proxies fetch()
        # the same way it proxies count()/get() — TODO confirm.
        matches = AutoRetry(query).fetch(2)
        if not matches:
            return None
        if len(matches) == 1:
            return matches[0]
        raise LookupError('User email collision for %s' % email)

    @property
    def effective_dj_name(self):
        """The DJ name if set, otherwise 'First Last'."""
        if self.dj_name:
            return self.dj_name
        else:
            return u"%s %s" % (self.first_name, self.last_name)
示例#4
0
class User(db.Model):
    """A community member: profile, social circle, subscriptions, favorites
    and reputation.

    Relationships are stored as lists of username strings and resolved with
    get_user().  `self.works`, `self.mycomments`, `self.streamMessages` and
    `self.ratings` are reverse reference sets declared on other models
    elsewhere in the codebase.
    """

    circle = db.StringListProperty(default=[])
    circlepermissions = db.StringListProperty(default=[])
    date = db.DateTimeProperty(auto_now_add=True)
    displayname = db.StringProperty()
    email_log = db.ListProperty(db.Key, default=[])
    favorites = db.ListProperty(db.Key, default=[])
    google = db.UserProperty()
    invitations = db.StringListProperty(default=[])
    invitees = db.StringListProperty(default=[])
    reputation = db.IntegerProperty(default=1)

    # people who subscribe to me in some way
    subscribers = db.StringListProperty(default=[])
    # people who subscribe to my documents by email
    subscribers_document = db.StringListProperty(default=[])
    # people who subscribe to my comments by email
    subscribers_comment = db.StringListProperty(default=[])
    # people I subscribe to
    subscriptions_user = db.StringListProperty(default=[])
    # people whose comments go in my stream
    subscriptions_comment = db.StringListProperty(default=[])
    # people whose documents go in my stream
    subscriptions_document = db.StringListProperty(default=[])
    # tags I subscribe to
    subscriptions_tag = db.StringListProperty(default=[])

    minimizeThreshold = db.IntegerProperty(default=3)
    object_type = db.StringProperty(default='User')
    username = db.StringProperty()

    def _send_stream_message(self, recipient, content):
        """Create and persist a StreamMessage for `recipient`."""
        message = StreamMessage()
        message.recipient = recipient
        message.content = content
        message.put()

    def is_admin(self):
        """True when this user's Google account email is on the admin list."""
        return self.google.email() in hank['adminlist']

    def get_age(self):
        """Account age in whole days."""
        age = datetime.datetime.now() - self.date
        return age.days

    def get_commentary(self):
        """Commentary object for this user's page."""
        return Commentary(self.username)

    def acceptInvitation(self, username):
        """Accept a circle invitation from `username` and notify them."""
        self.circlepermissions.append(username)
        self.invitations.remove(username)
        inviter = get_user(username)
        inviter.invitees.remove(self.username)
        inviter.circle.append(self.username)
        self.put()
        inviter.put()
        self._send_stream_message(
            inviter,
            self.get_url(html=True) + ' has accepted your circle invitation.')

    def add_favorite(self, document):
        """Mark `document` as a favorite on both sides of the relation.

        BUG FIX: the original tested `document.key` (the bound method, which
        is never present in a list of Keys), so every call appended a
        duplicate key; compare the actual Key value instead.
        """
        if document.key() not in self.favorites:
            self.favorites.append(document.key())
        if self.username not in document.favorites:
            document.favorites.append(self.username)
        document.put()
        self.put()

    def declineInvitation(self, username):
        """Decline a circle invitation from `username` and notify them."""
        self.invitations.remove(username)
        inviter = get_user(username)
        inviter.invitees.remove(self.username)
        self.put()
        inviter.put()
        self._send_stream_message(
            inviter,
            self.get_url(html=True) + ' has declined your circle invitation.')

    def fetch_favorites(self):
        """Resolve the favorite Keys into Document entities."""
        return [Document.get(key) for key in self.favorites]

    def get_url(self, includeDomain=False, html=False):
        """This user's page URL: absolute, as an HTML anchor, or relative."""
        if includeDomain:
            return hank['domainstring'] + 'user/' + self.username + '/'
        elif html:
            return '<a href="/user/' + self.username + '/" class="username">' + self.username + '</a>'
        else:
            return '/user/' + self.username + '/'

    def drafts(self):
        """Query over this user's draft works.

        BUG FIX: the original built the filter but never returned it, so the
        method always yielded None.
        """
        return self.works.filter('draft ==', True)

    def publications(self):
        """Query over this user's published works.

        BUG FIX: the original built the filter but never returned it, so the
        method always yielded None.
        """
        return self.works.filter('draft ==', False)

    def invite(self, username):
        """Invite `username` into this user's circle (idempotent)."""
        if username not in self.invitees:
            self.invitees.append(username)
            invited = get_user(username)
            invited.invitations.append(self.username)
            self.put()
            invited.put()
            self._send_stream_message(
                invited,
                'You\'ve been invited to join ' + self.get_url(html=True) +
                '\'s Writer\'s Circle')

    def leaveCircle(self, username):
        """Leave `username`'s circle and notify them."""
        self.circlepermissions.remove(username)
        other = get_user(username)
        other.circle.remove(self.username)
        self.put()
        other.put()
        self._send_stream_message(
            other,
            self.get_url(html=True) + ' has left your Writer\'s Circle')

    def fetch_stream(self, number=100):
        """Recent activity for this user's stream, newest first.

        Collects last-week comments and documents from subscribed users,
        documents under subscribed tags, plus this user's stream messages,
        capped at `number` items.
        """
        documents = []
        horizon = datetime.datetime.now() - datetime.timedelta(weeks=1)

        for name in self.subscriptions_comment:
            subscribee = get_user(name)
            documents.extend(Comment.all().filter(
                'author ==',
                subscribee).filter('date >=',
                                   horizon).order('-date').fetch(number))

        for name in self.subscriptions_document:
            subscribee = get_user(name)
            documents.extend(Document.all().filter(
                'author ==',
                subscribee).filter('date >=',
                                   horizon).order('-date').fetch(number))

        for tag in self.subscriptions_tag:
            documents.extend(get_documents([tag], number=number))

        documents.extend(self.streamMessages)

        ordered = sorted(documents,
                         key=lambda document: document.date,
                         reverse=True)
        return ordered[:number]

    def remove(self):
        """Delete this user, scrubbing every reference other users hold."""
        affected = []

        for username in self.circle:
            other = get_user(username)
            other.circlepermissions.remove(self.username)
            other.put()
            affected.append(other)

        for username in self.circlepermissions:
            other = get_user(username)
            other.circle.remove(self.username)
            other.put()
            affected.append(other)

        for username in self.invitations:
            other = get_user(username)
            other.invitees.remove(self.username)
            other.put()
            affected.append(other)

        for username in self.invitees:
            other = get_user(username)
            other.invitations.remove(self.username)
            other.put()
            affected.append(other)

        for username in self.subscribers:
            other = get_user(username)
            other.set_subscription([], self.username)
            affected.append(other)

        for username in self.subscriptions_user:
            self.set_subscription([], username)
            # BUG FIX: the original appended the stale `other` left over from
            # the previous loop; track the user actually affected here.
            affected.append(get_user(username))

        for comment in self.mycomments:
            # potential ERROR if some comments are descendants of others
            comment.remove()

        for document in self.works:
            document.remove()

        for message in self.streamMessages:
            message.remove()

        for rating in self.ratings:
            rating.delete()

        self.delete()

    def removeCircle(self, username):
        """Ejects username from this user's circle"""
        self.circle.remove(username)
        other = get_user(username)
        other.circlepermissions.remove(self.username)
        self.put()
        other.put()
        self._send_stream_message(
            other,
            'You have been removed from ' + self.get_url(html=True) +
            '\'s Writer\'s Circle')

    def set_reputation(self):
        """Recompute and store this user's reputation score.

        Mixes document ratings (weighted 4x), comment ratings, an age
        damping factor for young accounts, a square-root squash, and small
        adjustments for how prolific (and how read) the author is.
        """
        reputation = 1
        myworks = self.works.fetch(1000)
        mycomments = self.mycomments
        total_views = 0

        for document in myworks:
            reputation = reputation + 4 * document.rating
            total_views = total_views + document.views

        for comment in mycomments:
            reputation = reputation + comment.rating

        # Damp accounts younger than 100 days.
        if self.get_age() < 100:
            reputation = reputation * math.sqrt(self.get_age()) / 10

        # Square-root squash, preserving sign.
        if reputation < 0:
            reputation = -(math.sqrt(math.fabs(reputation)) / 100) * 97
        else:
            reputation = (math.sqrt(reputation) / 100) * 97

        # Bonuses for prolific authors; penalties for low views-per-work.
        prolificity = len(myworks)
        if prolificity >= 1:
            reputation = reputation + 1
            if prolificity >= 10:
                reputation = reputation + 1
                if total_views / prolificity <= 20:
                    reputation = reputation - 2
                if prolificity >= 30:
                    reputation = reputation + 1
                    if total_views / prolificity <= 30:
                        reputation = reputation - 2

        self.reputation = int(reputation)
        self.put()

    def set_subscription(self, subscriptions, subscribee):
        """Update mutual subscription state with `subscribee`.

        `subscriptions` is a list of option strings ('subscribe_publish',
        'email_publish', 'subscribe_comment', 'email_comment'); an empty
        list unsubscribes entirely.  `subscribee` is a username string.
        Returns a human-readable status message.

        BUG FIX: the original computed the removal message and then
        unconditionally overwrote it with the flag-based message; the
        flag-based message now applies only on the add/update path.
        """
        subscribee = get_user(subscribee)
        if not subscriptions:
            if self.username in subscribee.subscribers:
                subscribee.subscribers.remove(self.username)

            if subscribee.username in self.subscriptions_user:
                self.subscriptions_user.remove(subscribee.username)

            message = 'This user has been removed from your subscriptions.'
        else:
            flag = None
            if self.username not in subscribee.subscribers:
                subscribee.subscribers.append(self.username)
                flag = 1

            if subscribee.username not in self.subscriptions_user:
                self.subscriptions_user.append(subscribee.username)
                flag = 1

            if flag:
                message = 'This user has been added to your subscriptions.'
            else:
                message = 'Your settings have been saved.'

        if 'subscribe_publish' in subscriptions:
            if subscribee.username not in self.subscriptions_document:
                self.subscriptions_document.append(subscribee.username)
        else:
            if subscribee.username in self.subscriptions_document:
                self.subscriptions_document.remove(subscribee.username)

        if 'email_publish' in subscriptions:
            if self.username not in subscribee.subscribers_document:
                subscribee.subscribers_document.append(self.username)
        else:
            if self.username in subscribee.subscribers_document:
                subscribee.subscribers_document.remove(self.username)

        if 'subscribe_comment' in subscriptions:
            if subscribee.username not in self.subscriptions_comment:
                self.subscriptions_comment.append(subscribee.username)
        else:
            if subscribee.username in self.subscriptions_comment:
                self.subscriptions_comment.remove(subscribee.username)

        if 'email_comment' in subscriptions:
            if self.username not in subscribee.subscribers_comment:
                subscribee.subscribers_comment.append(self.username)
        else:
            if self.username in subscribee.subscribers_comment:
                subscribee.subscribers_comment.remove(self.username)

        subscribee.put()
        self.put()
        return message

    def withdrawCircle(self, username):
        """Withdraw a pending circle invitation to `username`."""
        self.invitees.remove(username)
        other = get_user(username)
        other.invitations.remove(self.username)
        self.put()
        other.put()
        self._send_stream_message(
            other,
            self.get_url(html=True) +
            '\'s Writer\'s Circle invitation has been withdrawn.')
示例#5
0
class Comment(db.Model):
    """A threaded comment attached to a Document or a user page.

    Depth is implied by following the `above` self-reference chain; it might
    also be more elegant to manage depth with an explicit property here.
    """
    author = db.ReferenceProperty(User, collection_name='mycomments')
    raters = db.StringListProperty()
    commentType = db.TextProperty()
    content = db.TextProperty()
    date = db.DateTimeProperty(auto_now_add=True)
    draft = db.BooleanProperty(default=False)
    above = db.SelfReferenceProperty(collection_name='replies')
    article = db.ReferenceProperty(Document, collection_name='comments')
    object_type = db.StringProperty(default='Comment')
    user_page = db.ReferenceProperty(User, collection_name='mypagecomments')
    rating = db.IntegerProperty(default=0)
    stripped_content = db.TextProperty()
    subject = db.StringProperty()
    subscribers = db.StringListProperty(default=[])

    def get_stripped(self):
        """Compute, cache (in memory only) and return tag-stripped content."""
        self.stripped_content = strip_tags(self.content)
        return self.stripped_content

    def get_page_object(self):
        """Walk up the reply chain to the page this thread hangs off.

        Returns the owning Document or user page; None if neither is set.
        """
        if self.above:
            return self.above.get_page_object()
        if self.article:
            return self.article
        if self.user_page:
            return self.user_page

    def get_url(self):
        """URL of the page object hosting this comment thread."""
        return self.get_page_object().get_url()

    def parse(self):
        """Sanitize `content`, keeping only whitelisted tags and attributes.

        Re-parses until the document stops changing, because removing a tag
        can expose new markup that an attacker could otherwise smuggle in.

        BUG FIX (1): the original referenced undefined names
        `acceptable_elements` / `acceptable_attributes` (the lists are named
        acceptableElements / acceptableAttributes), raising NameError on
        every call.
        BUG FIX (2): when no tag was removed, the re-serialized fragment was
        discarded, so attribute-only sanitization was never persisted; the
        fragment is now always written back to `content`.
        """
        acceptableElements = [
            'a', 'blockquote', 'br', 'em', 'i', 'ol', 'ul', 'li', 'p', 'b'
        ]
        acceptableAttributes = ['href']
        while True:
            soup = BeautifulSoup(self.content)
            removed = False
            for tag in soup.findAll(True):  # find all tags
                if tag.name not in acceptableElements:
                    tag.extract()  # remove the bad ones
                    removed = True
                else:  # it might have bad attributes
                    # a better way to get all attributes?
                    for attr in tag._getAttrMap().keys():
                        if attr not in acceptableAttributes:
                            del tag[attr]

            # turn it back to html
            self.content = unicode(soup)

            if removed:
                # we removed tags and tricky markup could exploit that!
                # we need to reparse the html until it stops changing
                continue  # next round
            break
        self.put()

    def set_rating(self):
        """Recompute the reputation-weighted rating and persist it."""
        votes = self.ratings
        rating = 40
        tally = 10
        for vote in votes:
            # NOTE(review): `vote == -1` compares the rating entity itself
            # to an int and so is presumably always False (tenBase is always
            # 10); this likely meant to test the vote's stored value —
            # confirm against the rating model before changing.
            if vote == -1:
                tenBase = 0
            else:
                tenBase = 10
            rating = rating + tenBase * (vote.user.reputation)
            tally = tally + vote.user.reputation
        rating = rating / tally
        logging.info('rating = ' + str(rating))
        self.rating = rating
        self.put()

    def remove(self):
        """Delete this comment, its ratings, and its reply subtree."""
        for rating in self.ratings:
            rating.delete()

        for child in self.replies:
            child.remove()

        self.delete()

    def subscribe(self, user):
        """Add `user` to this comment's subscriber list (idempotent)."""
        if user.username not in self.subscribers:
            self.subscribers.append(user.username)

    def unsubscribe(self, user):
        """Remove `user` from this comment's subscriber list if present."""
        if user.username in self.subscribers:
            self.subscribers.remove(user.username)
示例#6
0
class Article(search.SearchableModel, models.SerializableModel):
    """A blog article or entry: content, rendering metadata, tags and a
    cached comment count."""

    @classmethod
    def SearchableProperties(cls):
        # Full-text index covers the title and the raw body.
        return [['title', 'body']]

    json_does_not_include = ['assoc_dict']

    permalink = db.StringProperty(required=True)
    # Kept around so old URLs can be aliased to the new permalink.
    legacy_id = db.StringProperty()
    title = db.StringProperty(required=True)
    article_type = db.StringProperty(required=True,
                                     choices=set(["article", "blog entry"]))
    # Body may be in any format Bloog supports (e.g. textile).
    body = db.TextProperty(required=True)
    # When present, 'excerpt' is used for summaries instead of taking the
    # first 68 words of 'body'.
    excerpt = db.TextProperty()
    # Rendered from body.
    html = db.TextProperty()
    published = db.DateTimeProperty(auto_now_add=True)
    updated = db.DateTimeProperty(auto_now_add=True)
    format = db.StringProperty(required=True,
                               choices=set(["html", "textile",
                                            "markdown", "text"]))
    # Pickled dict for sidelinks, associated Amazon items, etc.
    assoc_dict = db.BlobProperty()
    # Cached so headline listings don't need a full comment query.
    num_comments = db.IntegerProperty(default=0)
    # Plain strings plus keys, rather than db.Category, so tag names stay
    # consolidated.
    tags = db.StringListProperty(default=[])
    tag_keys = db.ListProperty(db.Key, default=[])
    two_columns = db.BooleanProperty()
    allow_comments = db.BooleanProperty()
    # Languages of code samples embedded in the article, so the right
    # syntax-highlighting javascript can be chosen.
    embedded_code = db.StringListProperty()

    def get_comments(self):
        """Return comments lexicographically sorted on thread string."""
        query = db.GqlQuery("SELECT * FROM Comment "
                            "WHERE article = :1 "
                            "ORDER BY thread ASC", self.key())
        return list(query)

    comments = property(get_comments)  # read-only for now

    def set_associated_data(self, data):
        """Serialize auxiliary per-article data (links, Amazon items, ...)
        into the assoc_dict blob."""
        import pickle
        self.assoc_dict = pickle.dumps(data)

    def get_associated_data(self):
        """Deserialize and return the assoc_dict blob."""
        import pickle
        return pickle.loads(self.assoc_dict)

    def full_permalink(self):
        """Absolute URL of this article."""
        return '/'.join((config.BLOG['root_url'], self.permalink))

    def rfc3339_published(self):
        """Publication timestamp formatted for Atom feeds."""
        return self.published.strftime('%Y-%m-%dT%H:%M:%SZ')

    def rfc3339_updated(self):
        """Last-update timestamp formatted for Atom feeds."""
        return self.updated.strftime('%Y-%m-%dT%H:%M:%SZ')

    def is_big(self):
        """Heuristic deciding whether the article renders as a 'big' page."""
        guess_chars = len(self.html) + self.num_comments * 80
        heavy_markup = any(
            marker in self.html for marker in ('<img', '<code>', '<pre>'))
        return bool(guess_chars > 2000 or self.embedded_code or heavy_markup)

    def next_comment_thread_string(self):
        'Returns thread string for next comment for this article'
        return get_thread_string(self, '')

    def to_atom_xml(self):
        """Returns a string suitable for inclusion in Atom XML feed

        Internal html property should already have XHTML entities converted
        into unicode.  However, ampersands are valid ASCII and will cause
        issues with XML, so named entities are swapped for their numeric
        equivalents (double-escaping would leave a literal '&nbsp;' in the
        feed).
        TODO: scrub this on post/save rather than when rendering the feed?
        """
        import re
        return re.sub('&nbsp;', '&#160;', self.html)
示例#7
0
class Catalog(BaseMunkiModel):
  """Munki catalog.

  These will be automatically generated on App Engine whenever an admin uploads
  a pkginfo file.

  Note: There is also an "all" catalog that includes all packages.
  """

  # Names of every package included in this catalog.
  package_names = db.StringListProperty()

  PLIST_LIB_CLASS = plist_lib.MunkiPlist

  @classmethod
  def Generate(cls, name, delay=0):
    """Generates a Catalog plist and entity from matching PackageInfo entities.

    Args:
      name: str, catalog name. all PackageInfo entities with this name in the
          "catalogs" property will be included in the generated catalog.
      delay: int, if > 0, Generate call is deferred this many seconds.
    """
    if delay:
      # Re-enqueue this call as a deferred task; the timestamped task name
      # keeps concurrently requested regenerations distinct.
      now = datetime.datetime.utcnow()
      now_str = '%s-%d' % (now.strftime('%Y-%m-%d-%H-%M-%S'), now.microsecond)
      deferred_name = 'create-catalog-%s-%s' % (name, now_str)
      deferred.defer(cls.Generate, name, _name=deferred_name, _countdown=delay)
      return

    lock = 'catalog_lock_%s' % name
    # Obtain a lock on the catalog name.
    if not gae_util.ObtainLock(lock):
      # If catalog creation for this name is already in progress then delay.
      logging.debug('Catalog creation for %s is locked. Delaying....', name)
      cls.Generate(name, delay=10)
      return

    package_names = []
    try:
      pkgsinfo_dicts = []
      package_infos = PackageInfo.all().filter('catalogs =', name).fetch(None)
      if not package_infos:
        logging.warning('No PackageInfo entities with catalog: %s', name)
      for p in package_infos:
        package_names.append(p.name)
        pkgsinfo_dicts.append(p.plist.GetXmlContent(indent_num=1))

      # Stitch the package plists into the catalog XML template.
      catalog = constants.CATALOG_PLIST_XML % '\n'.join(pkgsinfo_dicts)

      c = cls.get_or_insert(name)
      c.package_names = package_names
      c.name = name
      c.plist = catalog
      c.put()
      # Drop the stale cached copy so readers see the fresh plist.
      cls.DeleteMemcacheWrap(name, prop_name='plist_xml')
      # Generate manifest for newly generated catalog.
      Manifest.Generate(name, delay=1)
    except (db.Error, plist_lib.Error):
      logging.exception('Catalog.Generate failure for catalog: %s', name)
      raise
    finally:
      # Always release the lock, even when generation failed.
      gae_util.ReleaseLock(lock)
 class Pizza(db.Model):
     topping = db.StringListProperty()
示例#9
0
class ClientConfig(db.Model):
  """Client configuration allowing customization of the UI.

  "Client" is meant in the Maps API Premier sense, not a browser or a user.
  A ClientConfig is allocated to a particular partner so that they can adjust
  parts of the UI when embedding the map on their site.  The entity's key
  name is the client ID passed in the 'client' URL parameter.
  """
  # Referer domains that are permitted to use this client configuration.
  allowed_referer_domains = db.StringListProperty()

  # Hide the footer in the UI?
  hide_footer = db.BooleanProperty(default=False)

  # Hide the share button in the UI?
  hide_share_button = db.BooleanProperty(default=False)

  # Hide the "My Location" button in the UI?
  hide_my_location_button = db.BooleanProperty(default=False)

  # Allow a callback parameter so the embedding page can receive and control
  # the map.
  allow_embed_map_callback = db.BooleanProperty(default=False)

  # Show the login state along with sign-in and sign-out links?
  show_login = db.BooleanProperty(default=False)

  # Google Analytics web property ID for tracking.  When left empty, the
  # default Crisis Map Analytics ID is assigned downstream.
  analytics_id = db.StringProperty(default='')

  # Raw HTML to be inserted into the page head.
  custom_head_html = db.StringProperty(default='')

  # Activate the editing UI?
  enable_editing = db.BooleanProperty(default=False)

  # Side on which the layers panel appears ('left' or 'right').
  panel_side = db.StringProperty(default='right')

  # Float the panel over the map instead of docking it to the side?
  panel_float = db.BooleanProperty(default=False)

  # Hide the Google+ sharing button in the Share box?
  hide_google_plus_button = db.BooleanProperty(default=False)

  # Hide the Facebook Like button in the Share box?
  hide_facebook_button = db.BooleanProperty(default=False)

  # Hide the Twitter sharing button in the Share box?
  hide_twitter_button = db.BooleanProperty(default=False)

  # Use minimal map controls (small zoom control, no scale control,
  # no pegman)?
  minimal_map_controls = db.BooleanProperty(default=False)

  # Hide the map title and description from the panel?
  hide_panel_header = db.BooleanProperty(default=False)

  # Offer OpenStreetMap as a base map option to all users?
  enable_osm_map_type = db.BooleanProperty(default=False)

  # Allow OpenStreetMap as a base map option in the editor?
  enable_osm_map_type_editing = db.BooleanProperty(default=False)

  # Enable the layer filter in the panel?
  enable_layer_filter = db.BooleanProperty(default=False)

  # API key to use for Google APIs (from the Google APIs Console).
  google_api_key = db.StringProperty(default='')

  # Enable the tabbed UI?
  use_tab_panel = db.BooleanProperty(default=False)

  # Show feature details in the tabbed panel?
  use_details_tab = db.BooleanProperty(default=False)

  # URL endpoint for the UrlShortener API.
  urlshortener_api_url = db.StringProperty(default='')

  # Note: when adding future settings, the default value should reflect the
  # behavior prior to the introduction of the new setting.  To avoid
  # confusion with None, Boolean settings should always default to False.

  @classmethod
  def Create(cls, client_id, **kwargs):
    """Builds a ClientConfig entity keyed by the given client ID.

    Args:
      client_id: A string, used as the key name for the entity.
      **kwargs: Property values for the new entity (see the class definition
          for the list of available properties).
    Returns:
      A new ClientConfig entity.
    """
    return cls(key_name=client_id, **kwargs)

  def AsDict(self):
    """Converts this entity to a dict suitable for sending to the UI as JSON."""
    return {name: getattr(self, name)
            for name in self.properties()
            if name != 'allowed_referer_domains'}
Example #10
0
class LiteralIndex(SearchIndex):
    """Index model for non-inflected search phrases."""
    # Kind name of the entity that this index entry refers to.
    parent_kind = db.StringProperty(required=True)
    # Literal (non-stemmed) phrases indexed for searching.
    phrases = db.StringListProperty(required=True)
Example #11
0
class StemmedIndex(SearchIndex):
    """Index model for stemmed (inflected) search phrases."""
    # Kind name of the entity that this index entry refers to.
    parent_kind = db.StringProperty(required=True)
    # Stemmed phrases indexed for searching.
    phrases = db.StringListProperty(required=True)
Example #12
0
class MyKey(db.Model):
    """A key/coupon record associated with an App and a set of devices."""
    # Device identifiers associated with this key -- presumably the devices
    # that have redeemed/used it; TODO confirm against callers.
    deviceIds = db.StringListProperty()
    # Coupon code string.
    coupon = db.StringProperty()
    # Whether this key is currently valid.
    isValid = db.BooleanProperty()
    # Owning App entity; reverse query is available as App.mykeys.
    appref = db.ReferenceProperty(App, collection_name="mykeys")
Example #13
0
File: Person.py  Project: rod-meaney/mt
class Person(df.DomainFramework):
    """Datastore model for an application user, keyed off their Google account.

    Changes from the original:
      * updateField() previously constructed a KnownError for an invalid field
        but never raised it, silently ignoring the bad call -- it now raises.
      * Python-2-only `<>` operator replaced with `!=` (same behavior).
    """
    google_id = db.StringProperty()     # Google account user_id()
    google_email = db.StringProperty()  # Google account email address
    name = db.StringProperty()          # display name, settable once via addName()
    timesheet_tasks = db.StringListProperty()  # kept sorted by the updaters

    def __init__(self, *args, **kwargs):
        super(Person, self).__init__(*args, **kwargs)

    def getPersonByGoogleId(self, googleId):
        '''
        Retrieves the person based on their google ID.

        If no matching person exists, a new Person is created for the
        *currently signed-in* user (note: not for googleId) and returned.
        Raises KnownError when multiple entries share the same google_id.
        '''
        query = Person.all()
        query.filter("google_id = ", googleId)
        result = query.fetch(10)
        if len(result) > 1:
            raise KnownError(["User %s has multiple entries..." % users.get_current_user().email()])
        elif len(result) == 1:
            return result[0]
        else:
            person = Person()
            person.name = ""
            person.google_id = users.get_current_user().user_id()
            person.google_email = users.get_current_user().email()
            person.createNew()
            return person

    def getCurrentPerson(self):
        '''Returns (creating if necessary) the Person for the signed-in user.'''
        return self.getPersonByGoogleId(users.get_current_user().user_id())

    def getPeopleByGoogleId(self, googleIds):
        '''Returns up to 1000 Person entities whose google_id is in googleIds.'''
        query = self.all()
        query.filter("google_id in ", googleIds)
        return query.fetch(1000)

    def toDict(self):
        '''Returns a JSON-friendly dict representation of this person.'''
        return {"person": {"name": self.name,
                           "id": self.google_id,
                           "email": self.google_email,
                           "timesheet_tasks": self.timesheet_tasks}}

    def addName(self, new_name):
        '''
        Sets the current user's name.

        The premise for this code logic is:
        1. A user cannot change their name once set.
        2. The name must be unique among all existing users.
        Raises KnownError on any violation.
        '''
        person = self.getCurrentPerson()
        if person.name != "":
            raise KnownError(["Already has a name (%s)" % person.name])
        if new_name == '':
            raise KnownError(["You must enter the persons name"])

        # Check for duplicate names.
        query = Person.all()
        query.filter("name = ", new_name)
        result = query.fetch(10)
        if len(result) > 0:
            raise KnownError(["Name already exists, please chose another"])

        # Do the update.
        person.name = new_name
        person.update()

    def updateField(self, field_name, field_value):
        '''Updates a single updatable field; only 'name' is supported.'''
        if field_name == 'name':
            self.addName(field_value)
        else:
            # BUG FIX: the error was previously built but never raised, so
            # invalid updates were silently dropped.
            raise KnownError(["Attempting to update invalid field (%s)" % field_name])

    def updateTimesheetTasks(self, tasks):
        '''Replaces the current user's timesheet task list (kept sorted).'''
        person = self.getCurrentPerson()
        person.timesheet_tasks = tasks
        person.timesheet_tasks.sort()
        person.update()

    def updateTimesheetTask(self, task):
        '''Adds one task to the current user's timesheet list if not present.'''
        person = self.getCurrentPerson()
        if not (task in person.timesheet_tasks):
            person.timesheet_tasks.append(task)
            person.timesheet_tasks.sort()
            person.update()
Example #14
0
class _AppEngineUtilities_Session(db.Model):
    """
    Model for the sessions in the datastore. This contains the identifier and
    validation information for the session.

    Changes from the original:
      * get_session() read `self.session_expire_time` inside a @classmethod,
        where `self` is undefined (NameError on the datastore path) -- it now
        reads the setting from the passed session_obj.
      * Bare `except:` clauses narrowed to `except Exception:` so that
        SystemExit/KeyboardInterrupt are no longer swallowed.
    """

    sid = db.StringListProperty()        # all session ids issued for this session
    session_key = db.FloatProperty()     # unique key, derived from a timestamp
    ip = db.StringProperty()             # client IP recorded for the session
    ua = db.StringProperty()             # client user-agent recorded for the session
    last_activity = db.DateTimeProperty()
    dirty = db.BooleanProperty(default=False)    # memcache copy newer than datastore
    working = db.BooleanProperty(default=False)  # a write-back is in progress
    deleted = db.BooleanProperty(default=False)  # used for cases where
    # datastore delete doesn't
    # work

    def put(self):
        """
        Extend put so that it writes values to memcache as well as the
        datastore, and keeps them in sync, even when the datastore write fails.

        Returns:
            self, so calls can be chained.
        """
        if self.session_key:
            memcache.set(
                "_AppEngineUtilities_Session_" + str(self.session_key), self)
        else:
            # new session, generate a new key, which will handle the put and set the memcache
            self.create_key()

        self.last_activity = datetime.datetime.now()

        try:
            self.dirty = False
            logging.info("doing a put")
            db.put(self)
            memcache.set(
                "_AppEngineUtilities_Session_" + str(self.session_key), self)
        except Exception:
            # Datastore write failed; keep serving from memcache and mark the
            # entity dirty so a later put() retries the write-back.
            self.dirty = True
            memcache.set(
                "_AppEngineUtilities_Session_" + str(self.session_key), self)

        return self

    @classmethod
    def get_session(cls, session_obj=None):
        """
        Uses the passed sid to get a session object from memcache, or datastore
        if a valid one exists.

        Args:
            session_obj: wrapper object carrying the sid to look up and the
                session_expire_time setting (in seconds).

        Returns:
            The matching session entity, or None if absent, deleted or expired.
        """
        if session_obj.sid == None:
            return None
        session_key = session_obj.sid.split('_')[0]
        session = memcache.get("_AppEngineUtilities_Session_" +
                               str(session_key))
        if session:
            if session.deleted == True:
                session.delete()
                return None
            if session.dirty == True and session.working != False:
                # the working bit is used to make sure multiple requests, which can happen
                # with ajax oriented sites, don't try to put at the same time
                # NOTE(review): this condition only retries the write-back when
                # another request is already marked as working, which looks
                # inverted -- confirm the intended semantics before changing.
                session.working = True
                memcache.set("_AppEngineUtilities_Session_" + str(session_key),
                             session)
                session.put()
            if session_obj.sid in session.sid:
                logging.info('grabbed session from memcache')
                sessionAge = datetime.datetime.now() - session.last_activity
                if sessionAge.seconds > session_obj.session_expire_time:
                    session.delete()
                    return None
                return session
            else:
                return None

        # Not in memcache, check datastore
        query = _AppEngineUtilities_Session.all()
        query.filter("sid = ", session_obj.sid)
        results = query.fetch(1)
        if len(results) > 0:
            sessionAge = datetime.datetime.now() - results[0].last_activity
            # BUG FIX: this previously read self.session_expire_time, but
            # `self` does not exist in a classmethod; the expiry setting lives
            # on the session_obj argument (as used on the memcache path above).
            if sessionAge.seconds > session_obj.session_expire_time:
                results[0].delete()
                return None
            memcache.set("_AppEngineUtilities_Session_" + str(session_key),
                         results[0])
            memcache.set("_AppEngineUtilities_SessionData_" + str(session_key),
                         results[0].get_items_ds())
            logging.info('grabbed session from datastore')
            return results[0]
        else:
            return None

    def get_items(self):
        """
        Returns all the items stored in a session, preferring the memcache
        copy and pruning any entries flagged as deleted.
        """
        items = memcache.get("_AppEngineUtilities_SessionData_" +
                             str(self.session_key))
        if items:
            for item in items:
                if item.deleted == True:
                    item.delete()
                    items.remove(item)
            return items

        query = _AppEngineUtilities_SessionData.all()
        query.filter('session_key', self.session_key)
        results = query.fetch(1000)
        return results

    def get_item(self, keyname=None):
        """
        Returns a single item from the memcache or datastore, or None.
        A datastore hit refreshes the memcache item list as a side effect.
        """
        mc = memcache.get("_AppEngineUtilities_SessionData_" +
                          str(self.session_key))
        if mc:
            for item in mc:
                if item.keyname == keyname:
                    if item.deleted == True:
                        item.delete()
                        return None
                    return item
        query = _AppEngineUtilities_SessionData.all()
        query.filter("session_key = ", self.session_key)
        query.filter("keyname = ", keyname)
        results = query.fetch(1)
        if len(results) > 0:
            memcache.set(
                "_AppEngineUtilities_SessionData_" + str(self.session_key),
                self.get_items_ds())
            return results[0]
        return None

    def get_items_ds(self):
        """
        This gets all the items straight from the datastore, does not
        interact with the memcache.
        """
        query = _AppEngineUtilities_SessionData.all()
        query.filter('session_key', self.session_key)
        results = query.fetch(1000)
        return results

    def delete(self):
        """
        Deletes this session and its data from datastore and memcache.
        If the datastore delete fails, the memcache copy is flagged deleted
        so later reads treat it as gone.
        """
        try:
            query = _AppEngineUtilities_SessionData.all()
            query.filter("session_key = ", self.session_key)
            results = query.fetch(1000)
            db.delete(results)
            db.delete(self)
            memcache.delete_multi([
                "_AppEngineUtilities_Session_" + str(self.session_key),
                "_AppEngineUtilities_SessionData_" + str(self.session_key)
            ])
        except Exception:
            mc = memcache.get("_AppEngineUtilities_Session_" +
                              str(self.session_key))
            mc.deleted = True
            memcache.set(
                "_AppEngineUtilities_Session_" + str(self.session_key), mc)

    def create_key(self):
        """
        Creates a unique key for the session, bumping the timestamp-derived
        value until it collides with neither memcache nor the datastore, then
        persists the session.
        """
        self.session_key = time.time()
        valid = False
        while valid == False:
            # verify session_key is unique
            if memcache.get("_AppEngineUtilities_Session_" +
                            str(self.session_key)):
                self.session_key = self.session_key + 0.001
            else:
                query = _AppEngineUtilities_Session.all()
                query.filter("session_key = ", self.session_key)
                results = query.fetch(1)
                if len(results) > 0:
                    self.session_key = self.session_key + 0.001
                else:
                    try:
                        self.put()
                        memcache.set(
                            "_AppEngineUtilities_Session_" +
                            str(self.session_key), self)
                    except Exception:
                        # Best effort: keep the session alive in memcache and
                        # mark it for a later datastore retry.
                        self.dirty = True
                        memcache.set(
                            "_AppEngineUtilities_Session_" +
                            str(self.session_key), self)
                    valid = True
Example #15
0
class StaticSiteMapInfo(db.Model):
    """Holds static sitemaps file info."""
    # Names of the generated static sitemap files -- presumably file names or
    # paths; confirm against the sitemap generator.
    static_sitemaps = db.StringListProperty()
    # When the static sitemaps were generated.
    static_sitemaps_generation_time = db.DateTimeProperty(required=True)
    # Size of each sitemap shard, in seconds (default 90).
    shard_size_seconds = db.IntegerProperty(default=90)
Example #16
0
class Feature(DictModel):
    """Container for a feature."""

    DEFAULT_MEMCACHE_KEY = '%s|features' % (settings.MEMCACHE_KEY_PREFIX)
    MAX_CHUNK_SIZE = 500  # max num features to save for each memcache chunk.

    @classmethod
    def get_feature_chunk_memcache_keys(self, key_prefix):
        """Returns the ordered list of memcache chunk keys covering all features."""
        feature_count = len(Feature.all().fetch(limit=None, keys_only=True))
        chunks = list_to_chunks(range(0, feature_count), self.MAX_CHUNK_SIZE)
        return ['%s|chunk%s' % (key_prefix, index)
                for index in range(len(chunks))]

    @classmethod
    def set_feature_chunk_memcache_keys(self, key_prefix, feature_list):
        """Splits feature_list into chunks and maps each memcache key to its chunk."""
        chunks = list_to_chunks(feature_list, self.MAX_CHUNK_SIZE)
        return {'%s|chunk%s' % (key_prefix, index): chunk
                for index, chunk in enumerate(chunks)}

    @classmethod
    def _first_of_milestone(self, feature_list, milestone, start=0):
        for i in xrange(start, len(feature_list)):
            f = feature_list[i]
            if (str(f['shipped_milestone']) == str(milestone)
                    or f['impl_status_chrome'] == str(milestone)):
                return i
            elif (f['shipped_milestone'] == None
                  and str(f['shipped_android_milestone']) == str(milestone)):
                return i

        return -1

    @classmethod
    def _first_of_milestone_v2(self, feature_list, milestone, start=0):
        for i in xrange(start, len(feature_list)):
            f = feature_list[i]
            desktop_milestone = f['browsers']['chrome'].get('desktop', None)
            android_milestone = f['browsers']['chrome'].get('android', None)
            status = f['browsers']['chrome']['status'].get('text', None)

            if (str(desktop_milestone) == str(milestone)
                    or status == str(milestone)):
                return i
            elif (desktop_milestone == None
                  and str(android_milestone) == str(milestone)):
                return i

        return -1

    @classmethod
    def _annotate_first_of_milestones(self, feature_list, version=None):
        """Marks the first feature of each milestone with 'first_of_milestone'.

        Assumes feature_list is ordered by status/milestone as produced by
        get_chronological -- TODO confirm with callers.  Any failure (e.g.
        omaha data unavailable) is logged and swallowed so annotation never
        breaks rendering.
        """
        try:
            omaha_data = util.get_omaha_data()

            win_versions = omaha_data[0]['versions']

            # Find the latest canary major version from the list of windows versions.
            canary_versions = [
                x for x in win_versions
                if x.get('channel') and x.get('channel').startswith('canary')
            ]
            LATEST_VERSION = int(
                canary_versions[0].get('version').split('.')[0])

            # Build the scan order: pre-release statuses first, then milestones
            # newest-first, then the no-longer-pursuing bucket.
            milestones = range(1, LATEST_VERSION + 1)
            milestones.reverse()
            versions = [
                IMPLEMENTATION_STATUS[NO_ACTIVE_DEV],
                IMPLEMENTATION_STATUS[PROPOSED],
                IMPLEMENTATION_STATUS[IN_DEVELOPMENT],
            ]
            versions.extend(milestones)
            versions.append(IMPLEMENTATION_STATUS[NO_LONGER_PURSUING])

            # v2-formatted feature dicts have a different shape.
            first_of_milestone_func = Feature._first_of_milestone
            if version == 2:
                first_of_milestone_func = Feature._first_of_milestone_v2

            last_good_idx = 0
            for i, ver in enumerate(versions):
                # Resume scanning from the last hit; relies on the ordering
                # assumption above.
                idx = first_of_milestone_func(feature_list,
                                              ver,
                                              start=last_good_idx)
                if idx != -1:
                    feature_list[idx]['first_of_milestone'] = True
                    last_good_idx = idx
        except Exception as e:
            logging.error(e)

    def format_for_template(self, version=None):
        """Returns this feature as a template-ready dict.

        Args:
            version: when 2, produces the nested v2 layout (grouped under
                'standards', 'resources', 'browsers', ...); otherwise the flat
                legacy layout.
        Returns:
            dict built from to_dict(), with enum ints replaced by their display
            strings and (for v2) keys popped into nested sub-dicts.
        """
        d = self.to_dict()

        if version == 2:
            if self.is_saved():
                d['id'] = self.key().id()
            else:
                d['id'] = None
            d['category'] = FEATURE_CATEGORIES[self.category]
            # NOTE(review): 'by' and 'when' appear swapped here ('by' gets the
            # 'created' timestamp, 'when' gets 'created_by'), but the whole
            # 'created' entry is deleted below, so this has no visible effect.
            d['created'] = {
                'by': d.pop('created', None),
                'when': d.pop('created_by', None),
            }
            d['updated'] = {
                'by': d.pop('updated_by', None),
                'when': d.pop('updated', None),
            }
            d['standards'] = {
                'spec': d.pop('spec_link', None),
                'status': {
                    'text': STANDARDIZATION[self.standardization],
                    'val': d.pop('standardization', None),
                },
                'visibility': {
                    'text': VISIBILITY_CHOICES[self.visibility],
                    'val': d.pop('visibility', None),
                },
                'footprint': {
                    'val': d.pop('footprint', None),
                    #'text': FOOTPRINT_CHOICES[self.footprint]
                }
            }
            d['resources'] = {
                'samples': d.pop('sample_links', []),
                'docs': d.pop('doc_links', []),
            }
            d['tags'] = d.pop('search_tags', [])
            d['browsers'] = {
                'chrome': {
                    'bug': d.pop('bug_url', None),
                    'blink_components': d.pop('blink_components', []),
                    'owners': d.pop('owner', []),
                    'origintrial': self.impl_status_chrome == ORIGIN_TRIAL,
                    'intervention': self.impl_status_chrome == INTERVENTION,
                    'prefixed': d.pop('prefixed', False),
                    'flag': self.impl_status_chrome == BEHIND_A_FLAG,
                    'status': {
                        'text': IMPLEMENTATION_STATUS[self.impl_status_chrome],
                        'val': d.pop('impl_status_chrome', None)
                    },
                    'desktop': d.pop('shipped_milestone', None),
                    'android': d.pop('shipped_android_milestone', None),
                    'webview': d.pop('shipped_webview_milestone', None),
                    'ios': d.pop('shipped_ios_milestone', None),
                },
                'opera': {
                    'desktop': d.pop('shipped_opera_milestone', None),
                    'android': d.pop('shipped_opera_android_milestone', None),
                },
                'ff': {
                    'view': {
                        'text': VENDOR_VIEWS[self.ff_views],
                        'val': d.pop('ff_views', None),
                        'url': d.pop('ff_views_link', None),
                    }
                },
                'edge': {
                    'view': {
                        'text': VENDOR_VIEWS[self.ie_views],
                        'val': d.pop('ie_views', None),
                        'url': d.pop('ie_views_link', None),
                    }
                },
                'safari': {
                    'view': {
                        'text': VENDOR_VIEWS[self.safari_views],
                        'val': d.pop('safari_views', None),
                        'url': d.pop('safari_views_link', None),
                    }
                },
                'webdev': {
                    'view': {
                        'text': WEB_DEV_VIEWS[self.web_dev_views],
                        'val': d.pop('web_dev_views', None),
                    }
                }
            }

            # Desktop milestone wins; fall back to android, then status text.
            if self.shipped_milestone:
                d['browsers']['chrome']['status'][
                    'milestone_str'] = self.shipped_milestone
            elif self.shipped_milestone is None and self.shipped_android_milestone:
                d['browsers']['chrome']['status'][
                    'milestone_str'] = self.shipped_android_milestone
            else:
                d['browsers']['chrome']['status']['milestone_str'] = d[
                    'browsers']['chrome']['status']['text']

            # Drop the 'created' sub-dict built above from the v2 payload.
            del d['created']

            del_none(d)  # Further prune response by removing null/[] values.

        else:
            if self.is_saved():
                d['id'] = self.key().id()
            else:
                d['id'] = None
            d['category'] = FEATURE_CATEGORIES[self.category]
            d['visibility'] = VISIBILITY_CHOICES[self.visibility]
            d['impl_status_chrome'] = IMPLEMENTATION_STATUS[
                self.impl_status_chrome]
            d['meta'] = {
                'origintrial': self.impl_status_chrome == ORIGIN_TRIAL,
                'intervention': self.impl_status_chrome == INTERVENTION,
                'needsflag': self.impl_status_chrome == BEHIND_A_FLAG,
            }
            if self.shipped_milestone:
                d['meta']['milestone_str'] = self.shipped_milestone
            elif self.shipped_milestone is None and self.shipped_android_milestone:
                d['meta']['milestone_str'] = self.shipped_android_milestone
            else:
                d['meta']['milestone_str'] = d['impl_status_chrome']
            d['ff_views'] = {
                'value': self.ff_views,
                'text': VENDOR_VIEWS[self.ff_views]
            }
            d['ie_views'] = {
                'value': self.ie_views,
                'text': VENDOR_VIEWS[self.ie_views]
            }
            d['safari_views'] = {
                'value': self.safari_views,
                'text': VENDOR_VIEWS[self.safari_views]
            }
            d['standardization'] = {
                'value': self.standardization,
                'text': STANDARDIZATION[self.standardization]
            }
            d['web_dev_views'] = {
                'value': self.web_dev_views,
                'text': WEB_DEV_VIEWS[self.web_dev_views]
            }

        return d

    def format_for_edit(self):
        """Returns this feature as a dict with list fields flattened to the
        delimited strings expected by the edit form."""
        data = self.to_dict()
        data['owner'] = ', '.join(self.owner)
        data['doc_links'] = '\r\n'.join(self.doc_links)
        data['sample_links'] = '\r\n'.join(self.sample_links)
        data['search_tags'] = ', '.join(self.search_tags)
        #TODO: support more than one component.
        data['blink_components'] = self.blink_components[0]
        return data

    @classmethod
    def get_all(self,
                limit=None,
                order='-updated',
                filterby=None,
                update_cache=False):
        """Returns all features formatted for templates, via memcache when fresh.

        Args:
            limit: max number of features to fetch (None for all).
            order: datastore sort order.
            filterby: optional (property, value) pair applied as a filter.
            update_cache: when True, bypass and refresh the memcache entry.
        """
        cache_key = '%s|%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, order, limit)

        # TODO(ericbidelman): Support more than one filter.
        if filterby is not None:
            cache_key += '|%s' % ('%s%s' %
                                  (filterby[0], filterby[1])).replace(' ', '')

        cached = memcache.get(cache_key)
        if cached is not None and not update_cache:
            return cached

        query = Feature.all().order(order)  #.order('name')
        # TODO(ericbidelman): Support more than one filter.
        if filterby:
            query.filter(filterby[0], filterby[1])

        feature_list = [f.format_for_template() for f in query.fetch(limit)]
        memcache.set(cache_key, feature_list)
        return feature_list

    @classmethod
    def get_all_with_statuses(self, statuses, update_cache=False):
        """Returns all features whose impl_status_chrome text is in statuses."""
        if not statuses:
            return []

        cache_key = '%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, sorted(statuses))
        cached = memcache.get(cache_key)
        if cached is not None and not update_cache:
            return cached

        # The datastore can't OR several equality filters in one query, and
        # get_all()'s result is very likely cached already, so filter its
        # output in memory instead of issuing per-status queries.
        feature_list = [
            feature for feature in self.get_all(update_cache=update_cache)
            if feature['impl_status_chrome'] in statuses
        ]
        memcache.set(cache_key, feature_list)
        return feature_list

    @classmethod
    def get_feature(self, feature_id, update_cache=False):
        """Returns one feature formatted for templates, using memcache when
        possible; None if the id is unknown and nothing is cached."""
        cache_key = '%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, feature_id)
        feature = memcache.get(cache_key)
        if feature is not None and not update_cache:
            return feature

        entity = Feature.get_by_id(feature_id)
        if entity is not None:
            feature = entity.format_for_template()
            feature['updated_display'] = entity.updated.strftime("%Y-%m-%d")
            feature['new_crbug_url'] = entity.new_crbug_url()
            memcache.set(cache_key, feature)
        # On a failed refresh (entity gone) the previously cached value, if
        # any, is returned unchanged -- same as the original behavior.
        return feature

    @classmethod
    def get_chronological(self, limit=None, update_cache=False, version=None):
        """Returns all features ordered for the chronological listing.

        Order: pre-release features (by status), then shipped features by
        milestone (latest first), then no-longer-pursuing features.  Results
        are cached in memcache in multiple chunks to stay under the 1MB
        per-value limit.

        Args:
            limit: unused in the query paths below (all rows are fetched);
                only participates in the cache key.
            update_cache: when True, bypass and rebuild the cache.
            version: template format version passed to format_for_template.
        """
        KEY = '%s|%s|%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, 'cronorder',
                               limit, version)

        keys = Feature.get_feature_chunk_memcache_keys(KEY)
        feature_list = memcache.get_multi(keys)

        # If we didn't get the expected number of chunks back (or a cache update
        # was requested), do a db query.
        if len(feature_list.keys()) != len(keys) or update_cache:
            # Features with no active, in dev, proposed features.
            q = Feature.all()
            q.order('impl_status_chrome')
            q.order('name')
            q.filter('impl_status_chrome <=', IN_DEVELOPMENT)
            pre_release = q.fetch(None)

            # Shipping features. Exclude features that do not have a desktop
            # shipping milestone.
            q = Feature.all()
            q.order('-shipped_milestone')
            q.order('name')
            q.filter('shipped_milestone !=', None)
            shipping_features = q.fetch(None)

            # Features with an android shipping milestone but no desktop milestone.
            q = Feature.all()
            q.order('-shipped_android_milestone')
            q.order('name')
            q.filter('shipped_milestone =', None)
            android_only_shipping_features = q.fetch(None)

            # No longer pursuing features.
            q = Feature.all()
            q.order('impl_status_chrome')
            q.order('name')
            q.filter('impl_status_chrome =', NO_LONGER_PURSUING)
            no_longer_pursuing_features = q.fetch(None)

            shipping_features.extend(android_only_shipping_features)

            # Keep only genuinely shipping statuses (between the pre-release
            # and no-longer-pursuing buckets fetched above).
            shipping_features = [
                f for f in shipping_features
                if (IN_DEVELOPMENT < f.impl_status_chrome < NO_LONGER_PURSUING)
            ]

            def getSortingMilestone(feature):
                feature._sort_by_milestone = (
                    feature.shipped_milestone
                    or feature.shipped_android_milestone)
                return feature

            # Sort the feature list on either Android shipping milestone or desktop
            # shipping milestone, depending on which is specified. If a desktop
            # milestone is defined, that will take default.
            # (Py2: map() returns a list here.)
            shipping_features = map(getSortingMilestone, shipping_features)

            # First sort by name, then sort by feature milestone (latest first).
            shipping_features.sort(key=lambda f: f.name, reverse=False)
            shipping_features.sort(key=lambda f: f._sort_by_milestone,
                                   reverse=True)

            # Constructor the proper ordering.
            pre_release.extend(shipping_features)
            pre_release.extend(no_longer_pursuing_features)

            feature_list = [
                f.format_for_template(version) for f in pre_release
            ]

            self._annotate_first_of_milestones(feature_list, version=version)

            # Memcache doesn't support saving values > 1MB. Break up features list into
            # chunks so we don't hit the limit.
            memcache.set_multi(
                Feature.set_feature_chunk_memcache_keys(KEY, feature_list))
        else:
            temp_feature_list = []
            # Reconstruct feature list by ordering chunks.
            for key in sorted(feature_list.keys()):
                temp_feature_list.extend(feature_list[key])
            feature_list = temp_feature_list

        return feature_list

    @classmethod
    def get_shipping_samples(self, limit=None, update_cache=False):
        """Returns template-formatted features that have sample links.

        Shipping features come first (latest milestone first), followed by
        non-shipping ones; features without sample links are excluded.

        Args:
            limit: unused in the queries below; only part of the cache key.
            update_cache: when True, bypass and refresh the memcache entry.
        """
        KEY = '%s|%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, 'samples', limit)

        feature_list = memcache.get(KEY)

        if feature_list is None or update_cache:
            # Get all shipping features. Ordered by shipping milestone (latest first).
            q = Feature.all()
            q.filter('impl_status_chrome IN',
                     [ENABLED_BY_DEFAULT, ORIGIN_TRIAL, INTERVENTION])
            q.order('-impl_status_chrome')
            q.order('-shipped_milestone')
            q.order('name')
            features = q.fetch(None)

            # Get non-shipping features (sans removed or deprecated ones) and
            # append to bottom of list.
            q = Feature.all()
            q.filter('impl_status_chrome <', ENABLED_BY_DEFAULT)
            q.order('-impl_status_chrome')
            q.order('-shipped_milestone')
            q.order('name')
            others = q.fetch(None)
            features.extend(others)

            # Filter out features without sample links.
            feature_list = [
                f.format_for_template() for f in features
                if len(f.sample_links)
            ]

            memcache.set(KEY, feature_list)

        return feature_list

    def crbug_number(self):
        """Extracts the numeric issue id from bug_url, or None.

        Handles path-style URLs (".../issues/123") and query-style URLs
        (".../detail?id=123").
        """
        if not self.bug_url:
            return None
        # BUG FIX: the previous pattern, r'[\/|?id=]([0-9]+)$', used a
        # character class, which matches ANY single one of the characters
        # / | ? i d = (e.g. it would accept "...i123").  Use a real
        # alternation for the intended "/", "?" or "id=" prefixes.
        m = re.search(r'(?:/|\?|id=)([0-9]+)$', self.bug_url)
        if m:
            return m.group(1)
        return None

    def new_crbug_url(self):
        """Builds a URL for filing a new crbug for this feature.

        Pre-populates the Blink component, a "blocking" reference to the
        feature's existing bug (while the feature is still in progress),
        and the feature owners as CCs.
        """
        url = 'https://bugs.chromium.org/p/chromium/issues/entry'
        # BUG FIX: "+" binds tighter than "or", so the original expression
        # ('components=' + self.blink_components[0] or DEFAULT) could never
        # fall back to the default component (the concatenated string is
        # always truthy).  Parenthesize the fallback so an empty component
        # value is replaced by BlinkComponent.DEFAULT_COMPONENT.
        params = [
            'components=' + (self.blink_components[0]
                             or BlinkComponent.DEFAULT_COMPONENT)
        ]
        crbug_number = self.crbug_number()
        if crbug_number and self.impl_status_chrome in (
                NO_ACTIVE_DEV, PROPOSED, IN_DEVELOPMENT, BEHIND_A_FLAG,
                ORIGIN_TRIAL, INTERVENTION):
            params.append('blocking=' + crbug_number)
        if self.owner:
            params.append('cc=' + ','.join(self.owner))
        return url + '?' + '&'.join(params)

    def __init__(self, *args, **kwargs):
        """Initializes the feature and snapshots its current property values.

        The snapshot (stored as ``_old_<prop>``) lets put() diff old vs. new
        values to know exactly what changed.
        https://stackoverflow.com/a/41344898
        """
        super(Feature, self).__init__(*args, **kwargs)

        # Only property names are needed here; the previous code iterated
        # .iteritems() and never used the Property objects.
        for prop_name in self.properties():
            setattr(self, '_old_' + prop_name,
                    getattr(self, prop_name, None))

    def __notify_feature_subscribers_of_changes(self, is_update):
        """Async notifies subscribers of new features and property changes
        to features by posting to a task queue.

        Args:
          is_update: bool, True when this save updated an existing entity
              rather than creating a new one.
        """
        # Diff the _old_* snapshot captured in __init__ against the current
        # values to see what properties have changed.  Only the names are
        # needed; the previous code iterated .iteritems() and never used
        # the Property objects.
        changed_props = []
        for prop_name in self.properties():
            new_val = getattr(self, prop_name, None)
            old_val = getattr(self, '_old_' + prop_name, None)
            if new_val != old_val:
                changed_props.append({
                    'prop_name': prop_name,
                    'old_val': old_val,
                    'new_val': new_val
                })

        payload = json.dumps({
            'changes': changed_props,
            'is_update': is_update,
            'feature': self.format_for_template(version=2)
        })

        # One task to email subscribers, one to send push notifications;
        # both carry the same payload to the notifier backend.
        for task_url in ('/tasks/email-subscribers',
                         '/tasks/send_notifications'):
            task = taskqueue.Task(method='POST',
                                  url=task_url,
                                  target='notifier',
                                  payload=payload)
            taskqueue.Queue().add(task)

    def put(self, **kwargs):
        """Saves the feature, then notifies subscribers of what changed."""
        existed_before_save = self.is_saved()
        saved_key = super(Feature, self).put(**kwargs)
        self.__notify_feature_subscribers_of_changes(existed_before_save)
        return saved_key

    # Metadata.  created/updated and the acting users are maintained
    # automatically by the datastore (auto_now_add / auto_now /
    # auto_current_user*).
    created = db.DateTimeProperty(auto_now_add=True)
    updated = db.DateTimeProperty(auto_now=True)
    updated_by = db.UserProperty(auto_current_user=True)
    created_by = db.UserProperty(auto_current_user_add=True)

    # General info.
    category = db.IntegerProperty(required=True)
    name = db.StringProperty(required=True)
    summary = db.StringProperty(required=True, multiline=True)

    # Chromium details.
    # bug_url is parsed by crbug_number() to extract the numeric issue id.
    bug_url = db.LinkProperty()
    blink_components = db.StringListProperty(
        required=True, default=[BlinkComponent.DEFAULT_COMPONENT])

    impl_status_chrome = db.IntegerProperty(required=True)
    # Shipping milestone per platform.
    shipped_milestone = db.IntegerProperty()
    shipped_android_milestone = db.IntegerProperty()
    shipped_ios_milestone = db.IntegerProperty()
    shipped_webview_milestone = db.IntegerProperty()
    shipped_opera_milestone = db.IntegerProperty()
    shipped_opera_android_milestone = db.IntegerProperty()

    # owner emails are joined into the "cc" param by new_crbug_url().
    owner = db.ListProperty(db.Email)
    footprint = db.IntegerProperty()
    visibility = db.IntegerProperty(required=True)

    #webbiness = db.IntegerProperty() # TODO: figure out what this is

    # Standards details.
    standardization = db.IntegerProperty(required=True)
    spec_link = db.LinkProperty()
    prefixed = db.BooleanProperty()

    # Vendor signals; default to NO_PUBLIC_SIGNALS until a view is recorded.
    ff_views = db.IntegerProperty(required=True, default=NO_PUBLIC_SIGNALS)
    ie_views = db.IntegerProperty(required=True, default=NO_PUBLIC_SIGNALS)
    safari_views = db.IntegerProperty(required=True, default=NO_PUBLIC_SIGNALS)

    ff_views_link = db.LinkProperty()
    ie_views_link = db.LinkProperty()
    safari_views_link = db.LinkProperty()

    # Web dev details.
    web_dev_views = db.IntegerProperty(required=True)
    doc_links = db.StringListProperty()
    # Features with non-empty sample_links are surfaced by
    # get_shipping_samples().
    sample_links = db.StringListProperty()
    #tests = db.StringProperty()

    search_tags = db.StringListProperty()

    comments = db.StringProperty(multiline=True)
示例#17
0
class Person(Base):
    """The datastore entity kind for storing a PFIF person record.  Never call
    Person() directly; use Person.create_clone() or Person.create_original().

    Methods that start with "get_" return actual values or lists of values;
    other methods return queries or generators for values.
    """
    # If you add any new fields, be sure they are handled in wipe_contents().

    # entry_date should update every time a record is created or re-imported.
    entry_date = db.DateTimeProperty(required=True)
    expiry_date = db.DateTimeProperty(required=False)

    author_name = db.StringProperty(default='', multiline=True)
    author_email = db.StringProperty(default='')
    author_phone = db.StringProperty(default='')

    # the original date we saw this record; it should not change.
    original_creation_date = db.DateTimeProperty(auto_now_add=True)

    # source_date is the date that the original repository last changed
    # any of the fields in the pfif record.
    source_date = db.DateTimeProperty()

    source_name = db.StringProperty(default='')
    source_url = db.StringProperty(default='')

    # TODO(ryok): consider marking this required.
    full_name = db.StringProperty(multiline=True)
    given_name = db.StringProperty()
    family_name = db.StringProperty()
    alternate_names = db.StringProperty(default='', multiline=True)
    description = db.TextProperty(default='')
    sex = db.StringProperty(default='', choices=pfif.PERSON_SEX_VALUES)
    date_of_birth = db.StringProperty(default='')  # YYYY, YYYY-MM, YYYY-MM-DD
    age = db.StringProperty(default='')  # NN or NN-MM
    home_street = db.StringProperty(default='')
    home_neighborhood = db.StringProperty(default='')
    home_city = db.StringProperty(default='')
    home_state = db.StringProperty(default='')
    home_postal_code = db.StringProperty(default='')
    home_country = db.StringProperty(default='')
    photo_url = db.TextProperty(default='')
    profile_urls = db.TextProperty(default='')

    # This reference points to a locally stored Photo entity.  ONLY set this
    # property when storing a new Photo object that is owned by this Person
    # record and can be safely deleted when the Person is deleted.
    photo = db.ReferenceProperty(default=None)

    # The following properties are not part of the PFIF data model; they are
    # cached on the Person for efficiency.

    # Value of the 'status' and 'source_date' properties on the Note
    # with the latest source_date with the 'status' field present.
    latest_status = db.StringProperty(default='')
    latest_status_source_date = db.DateTimeProperty()
    # Value of the 'author_made_contact' and 'source_date' properties on the
    # Note with the latest source_date with the 'author_made_contact' field
    # present.
    latest_found = db.BooleanProperty()
    latest_found_source_date = db.DateTimeProperty()

    # Last write time of this Person or any Notes on this Person.
    # This reflects any change to the Person page.
    last_modified = db.DateTimeProperty(auto_now=True)

    # This flag is set to true only when the record author disabled
    # adding new notes to a record.
    notes_disabled = db.BooleanProperty(default=False)

    # attributes used by indexing.py
    names_prefixes = db.StringListProperty()
    # TODO(ryok): index address components.
    _fields_to_index_properties = ['given_name', 'family_name', 'full_name']
    _fields_to_index_by_prefix_properties = [
        'given_name', 'family_name', 'full_name'
    ]

    @staticmethod
    def past_due_records(repo):
        """Returns a query for all Person records with expiry_date in the past,
        or None, regardless of their is_expired flags."""
        import utils
        return Person.all(filter_expired=False).filter(
            'expiry_date <=', utils.get_utcnow()).filter('repo =', repo)

    @staticmethod
    def potentially_expired_records(repo,
                                    days_to_expire=DEFAULT_EXPIRATION_DAYS):
        """Returns a query for all Person records with source date
        older than days_to_expire (or empty source_date), regardless of
        is_expired flags value."""
        import utils
        cutoff_date = utils.get_utcnow() - timedelta(days_to_expire)
        return Person.all(filter_expired=False).filter('source_date <=',
                                                       cutoff_date).filter(
                                                           'repo =', repo)

    @property
    def person_record_id(self):
        """The PFIF person_record_id; an alias for record_id."""
        return self.record_id

    @property
    def primary_full_name(self):
        """The first line of full_name, or '' when full_name is empty."""
        return self.full_name.splitlines()[0] if self.full_name else ''

    @property
    def full_name_list(self):
        return self.full_name.splitlines() if self.full_name else []

    @property
    def alternate_names_list(self):
        return self.alternate_names.splitlines(
        ) if self.alternate_names else []

    @property
    def profile_urls_list(self):
        return self.profile_urls.splitlines() if self.profile_urls else []

    @property
    def photo_url_no_scheme(self):
        import utils
        return utils.strip_url_scheme(self.photo_url)

    def get_notes(self, filter_expired=True):
        """Returns a list of all the Notes on this Person, omitting expired
        Notes by default."""
        return Note.get_by_person_record_id(self.repo,
                                            self.record_id,
                                            filter_expired=filter_expired)

    def get_subscriptions(self, subscription_limit=200):
        """Retrieves a list of all the Subscriptions for this Person."""
        return Subscription.get_by_person_record_id(self.repo,
                                                    self.record_id,
                                                    limit=subscription_limit)

    def get_linked_person_ids(self, note_limit=200):
        """Retrieves IDs of Persons marked as duplicates of this Person.

        Args:
          note_limit: kept for backward compatibility.  NOTE(review): it is
              currently unused -- get_notes() takes no limit argument, and
              the old call passed note_limit positionally into its
              filter_expired parameter (200 is truthy, so the effective
              behavior was the default anyway).  Thread a real limit through
              get_notes() if limiting is needed.
        """
        # BUG FIX: was self.get_notes(note_limit), which set
        # filter_expired=200 instead of applying any limit.
        return [
            note.linked_person_record_id for note in self.get_notes()
            if note.linked_person_record_id
        ]

    def get_linked_persons(self, note_limit=200):
        """Retrieves Persons marked as duplicates of this Person."""
        return Person.get_all(self.repo,
                              self.get_linked_person_ids(note_limit))

    def get_all_linked_persons(self):
        """Retrieves all Persons transitively linked to this Person."""
        linked_person_ids = set([self.record_id])
        linked_persons = []
        # Maintain a list of ids of duplicate persons that have not
        # yet been processed.
        new_person_ids = set(self.get_linked_person_ids())
        # Iteratively process all new_person_ids by retrieving linked
        # duplicates and storing those not yet processed.
        # Processed ids are stored in the linked_person_ids set, and
        # their corresponding records are in the linked_persons list.
        while new_person_ids:
            linked_person_ids.update(new_person_ids)
            new_persons = Person.get_all(self.repo, list(new_person_ids))
            for person in new_persons:
                new_person_ids.update(person.get_linked_person_ids())
            linked_persons += new_persons
            new_person_ids -= linked_person_ids
        return linked_persons

    def get_associated_emails(self):
        """Gets a set of all the e-mail addresses to notify when this record
        is changed."""
        email_addresses = set([
            note.author_email for note in self.get_notes() if note.author_email
        ])
        if self.author_email:
            email_addresses.add(self.author_email)
        return email_addresses

    def get_effective_expiry_date(self):
        """Gets the expiry_date, or if no expiry_date is present, returns the
        source_date plus the configurable default_expiration_days interval.

        If there's no source_date, we use original_creation_date.
        Returns:
          A datetime date (not None).
        """
        # BUG FIX: utils was referenced below without the local import that
        # every other method in this class uses; add it for consistency and
        # to avoid a NameError when no module-level import exists.
        import utils
        if self.expiry_date:
            return self.expiry_date
        else:
            expiration_days = config.get_for_repo(
                self.repo,
                'default_expiration_days') or (DEFAULT_EXPIRATION_DAYS)
            # in theory, we should always have original_creation_date, but since
            # it was only added recently, we might have legacy
            # records without it.
            start_date = (self.source_date or self.original_creation_date
                          or utils.get_utcnow())
            return start_date + timedelta(expiration_days)

    def put_expiry_flags(self):
        """Updates the is_expired flags on this Person and related Notes to
        make them consistent with the effective_expiry_date() on this Person,
        and commits the changes to the datastore."""
        import utils
        now = utils.get_utcnow()
        expired = self.get_effective_expiry_date() <= now

        if self.is_expired != expired:
            # NOTE: This should be the ONLY code that modifies is_expired.
            self.is_expired = expired

            # if we neglected to capture the original_creation_date,
            # make a best effort to grab it now, for posterity.
            if not self.original_creation_date:
                self.original_creation_date = self.source_date

            # If the record is expiring (being replaced with a placeholder,
            # see http://zesty.ca/pfif/1.3/#data-expiry) or un-expiring (being
            # restored from deletion), we want the source_date and entry_date
            # updated so downstream clients will see this as the newest state.
            self.source_date = now
            self.entry_date = now

            # All the Notes on the Person also expire or unexpire, to match.
            notes = self.get_notes(filter_expired=False)
            for note in notes:
                note.is_expired = expired

            # Store these changes in the datastore.
            db.put(notes + [self])
            # TODO(lschumacher): photos don't have expiration currently.

    def wipe_contents(self):
        """Sets all the content fields to None (leaving timestamps and the
        expiry flag untouched), stores the empty record, and permanently
        deletes any related Notes and Photos.  Call this method ONLY on records
        that have already expired."""
        # We rely on put_expiry_flags to have properly set the source_date,
        # entry_date, and is_expired flags on Notes, as necessary.
        assert self.is_expired

        # Permanently delete all related Photos and Notes, but not self.
        self.delete_related_entities()

        # Renamed loop variable so the builtin property() is not shadowed.
        for name, prop in self.properties().items():
            # Leave the repo, is_expired flag, and timestamps untouched.
            if name not in [
                    'repo', 'is_expired', 'original_creation_date',
                    'source_date', 'entry_date', 'expiry_date'
            ]:
                setattr(self, name, prop.default)
        self.put()  # Store the empty placeholder record.

    def delete_related_entities(self, delete_self=False):
        """Permanently delete all related Photos and Notes, and also self if
        delete_self is True."""
        # Delete all related Notes.
        notes = self.get_notes(filter_expired=False)
        # Delete the locally stored Photos.  We use get_value_for_datastore to
        # get just the keys and prevent auto-fetching the Photo data.
        photo = Person.photo.get_value_for_datastore(self)
        note_photos = [Note.photo.get_value_for_datastore(n) for n in notes]

        entities_to_delete = filter(None, notes + [photo] + note_photos)
        if delete_self:
            entities_to_delete.append(self)
            if config.get('enable_fulltext_search'):
                full_text_search.delete_record_from_index(self)
        db.delete(entities_to_delete)

    def update_from_note(self, note):
        """Updates any necessary fields on the Person to reflect a new Note."""
        # We want to transfer only the *non-empty, newer* values to the Person.
        # For booleans, None means unspecified.
        if note.author_made_contact is not None:
            # datetime stupidly refuses to compare to None, so check for None.
            if (self.latest_found_source_date is None
                    or note.source_date >= self.latest_found_source_date):
                self.latest_found = note.author_made_contact
                self.latest_found_source_date = note.source_date
        if note.status:  # for string, '' means unspecified
            if (self.latest_status_source_date is None
                    or note.source_date >= self.latest_status_source_date):
                self.latest_status = note.status
                self.latest_status_source_date = note.source_date

    def update_index(self, which_indexing):
        """Refreshes search-index properties; which_indexing selects 'new'
        and/or 'old' indexing schemes."""
        # setup new indexing
        if 'new' in which_indexing:
            indexing.update_index_properties(self)
            if config.get('enable_fulltext_search'):
                full_text_search.add_record_to_index(self)
        # setup old indexing
        if 'old' in which_indexing:
            prefix.update_prefix_properties(self)

    def update_latest_status(self):
        """Scans all notes on this Person and fixes latest_status if needed."""
        status = None
        status_source_date = None
        # The last non-hidden note with a status wins (notes are iterated in
        # the order get_notes() returns them).
        for note in self.get_notes():
            if note.status and not note.hidden:
                status = note.status
                status_source_date = note.source_date
        if status != self.latest_status:
            self.latest_status = status
            self.latest_status_source_date = status_source_date
            self.put()
示例#18
0
class GCIProgram(soc_program_model.Program):
  """GCI Program model extends the basic Program model.

  Adds GCI-specific contest settings (simultaneous task count, winner
  selection, task types) and links to program documents.
  """

  _messages_model = GCIProgramMessages

  #: string used as a prefix of various key names for other models
  #TODO(daniel): eliminate this
  prefix = 'gci_program'

  homepage_url_name = "gci_homepage"

  #: Required property containing the number of Tasks Students can work
  #: on simultaneously. For GCI it is 1
  nr_simultaneous_tasks = db.IntegerProperty(
      required=True, default=1,
      verbose_name=translation.ugettext('Simultaneous tasks'))
  nr_simultaneous_tasks.group = translation.ugettext('Contest')
  nr_simultaneous_tasks.help_text = translation.ugettext(
      'Number of tasks students can work on simultaneously in the program.')

  #: Determines what winner selection model is used for the program
  winner_selection_type = db.StringProperty(required=True,
      verbose_name=translation.ugettext('Winner selection type'),
      choices=WINNER_SELECTION_TYPES,
      default=WinnerSelectionType.ORG_NOMINATED)

  #: Required property containing the number of winners to be selected in
  #: the program. Defaults to 10
  nr_winners = db.IntegerProperty(
      required=True, default=10,
      verbose_name=translation.ugettext('Number of winners'))
  nr_winners.group = translation.ugettext('Contest')
  nr_winners.help_text = translation.ugettext(
      'Number of winners to be selected at the end of the program.')

  #: A list of task types that a Task can belong to
  task_types = db.StringListProperty(
      required=True, default=['Any'],
      verbose_name=translation.ugettext('Task Types'))
  task_types.group = translation.ugettext('Task Types')
  task_types.help_text = translation.ugettext(
      'List all the types a task can be in.')

  #: Document reference property used for the Student Agreement
  terms_and_conditions = db.ReferenceProperty(
      reference_class=soc_document_model.Document,
      verbose_name=translation.ugettext('Terms and Conditions'),
      collection_name='terms_and_conditions')
  terms_and_conditions.group = soc_program_model.PROGRAM_DOCUMENTS_GROUP
  terms_and_conditions.help_text = translation.ugettext(
      'Document containing Terms and Conditions for participants.')

  #: An URL to a page with example tasks so that students can get
  #: some intuition about the types of tasks in the program
  example_tasks = db.LinkProperty(
      required=False, verbose_name=translation.ugettext('Example tasks'))
  example_tasks.help_text = translation.ugettext(
      'URL to a page with example tasks.')

  #: URL to a page that contains the form translations.
  form_translations_url = db.LinkProperty(
      required=False, verbose_name=translation.ugettext(
          'Form translation URL'))
  form_translations_url.help_text = translation.ugettext(
      'URL to the page containing translations of the forms students '
      'should upload.')
示例#19
0
class PackageInfo(BaseMunkiModel):
  """Munki pkginfo file, Blobstore key, etc., for the corresponding package.

  _plist contents are generated offline by Munki tools and uploaded by admins.

  name is something like: Adobe Flash, Mozilla Firefox, MS Office, etc.
  """

  PLIST_LIB_CLASS = plist_lib.MunkiPackageInfoPlist
  # Template/matcher pair for the auto-appended install-duration sentence;
  # _GetDescription/_SetDescription use the regex to strip/preserve it.
  AVG_DURATION_TEXT = (
      '%d users have installed this with an average duration of %d seconds.')
  AVG_DURATION_REGEX = re.compile(
      r'\d+ users have installed this with an average duration of '
      r'\d+ seconds\.')

  # catalog names this pkginfo belongs to; unstable, testing, stable.
  catalogs = db.StringListProperty()
  # manifest names this pkginfo belongs to; unstable, testing, stable.
  # put() enforces that every manifest has a same-named catalog.
  manifests = db.StringListProperty()
  # install types for this pkg; managed_installs, managed_uninstalls,
  #   managed_updates, etc.
  install_types = db.StringListProperty()
  # admin username that uploaded pkginfo.
  user = db.StringProperty()
  # filename for the package data
  filename = db.StringProperty()
  # key to Blobstore for package data; see the blob_info property below.
  blobstore_key = db.StringProperty()
  # sha256 hash of package data
  pkgdata_sha256 = db.StringProperty()
  # munki name in the form of pkginfo '%s-%s' % (display_name, version)
  # this property is automatically updated on put()
  munki_name = db.StringProperty()
  # datetime when the PackageInfo was initially created.
  created = db.DateTimeProperty(auto_now_add=True)
  # str group name(s) in common.MANIFEST_MOD_GROUPS that have access to inject
  # this package into manifests.
  manifest_mod_access = db.StringListProperty()

  def _GetDescription(self):
    """Returns only admin portion of the desc, omitting avg duration text."""
    desc = self.plist.get('description', None)
    if not desc:
      return desc
    duration_match = self.AVG_DURATION_REGEX.search(desc)
    if not duration_match:
      return desc
    # Strip the auto-appended average-duration sentence, leaving only the
    # admin-authored portion.
    return desc.replace(duration_match.group(0), '').strip()

  def _SetDescription(self, desc):
    """Sets the description to the plist, preserving any avg duration text."""
    if self.AVG_DURATION_REGEX.search(desc):
      # The new description already carries avg duration text; keep it all.
      new_desc = desc
    else:
      # Re-append any previously stored avg duration text to the new
      # admin-authored description.
      old_match = self.AVG_DURATION_REGEX.search(
          self.plist.get('description', ''))
      new_desc = '%s\n\n%s' % (desc, old_match.group(0)) if old_match else desc
    self.plist['description'] = new_desc

    # Update the plist property with the new description.
    self.plist = self.plist.GetXml()

  description = property(_GetDescription, _SetDescription)

  def _GetBlobInfo(self):
    """Returns the blobstore.BlobInfo object for the PackageInfo."""
    key = self.blobstore_key
    if not key:
      return None
    return blobstore.BlobInfo.get(key)

  def _SetBlobInfo(self, blob_info):
    """Sets the blobstore_key property from a given blobstore.BlobInfo object.

    This mimics the new blobstore.BlobReferenceProperty() without requiring
    a schema change, which isn't fun for external Simian customers.

    Args:
      blob_info: blobstore.BlobInfo instance.
    """
    self.blobstore_key = str(blob_info.key())

  blob_info = property(_GetBlobInfo, _SetBlobInfo)

  @property
  def approval_required(self):
    """Whether pkginfo changes must go through the proposal workflow."""
    if not hasattr(self, '_is_approval_required'):
      # Lazily fetch and memoize the setting on first access.
      self._is_approval_required, _ = settings.Settings.GetItem(
          'approval_required')
    return self._is_approval_required

  @property
  def proposal(self):
    """The PackageInfoProposal for this pkginfo, created on first access."""
    try:
      return self._proposal
    except AttributeError:
      self._proposal = PackageInfoProposal.FindOrCreatePackageInfoProposal(
          self)
      return self._proposal

  @property
  def catalog_matrix(self):
    """Track matrix combining current and proposed catalogs."""
    return common.util.MakeTrackMatrix(self.catalogs, self.proposal.catalogs)

  @property
  def manifest_matrix(self):
    """Track matrix combining current and proposed manifests."""
    return common.util.MakeTrackMatrix(self.manifests, self.proposal.manifests)

  def IsSafeToModify(self):
    """Returns True if the pkginfo is modifiable, False otherwise."""
    if self.approval_required:
      return self.proposal.IsPackageInfoSafeToModify()
    # Without the proposal workflow, packages already promoted to the
    # stable or testing tracks are locked against direct modification.
    if common.STABLE in self.manifests or common.TESTING in self.manifests:
      return False
    return True

  def MakeSafeToModify(self):
    """Modifies a PackageInfo such that it is safe to modify."""
    if not self.approval_required:
      # No proposal workflow; simply pull the package from all tracks.
      self.Update(catalogs=[], manifests=[])
      return
    self.proposal.MakePackageInfoSafeToModify()

  def put(self, *args, **kwargs):
    """Put to Datastore, generating and setting the "munki_name" property.

    Args:
      *args: list, optional, args to superclass put()
      **kwargs: dict, optional, keyword args to superclass put()
    Returns:
      return value from superclass put()
    Raises:
      PackageInfoUpdateError: pkginfo property validation failed.
    """
    # Every manifest this pkginfo belongs to must have a same-named catalog.
    unmatched = [m for m in self.manifests if m not in self.catalogs]
    if unmatched:
      raise PackageInfoUpdateError(
          'manifest did not have matching catalog: %s' % unmatched[0])

    # Keep the legacy munki_name ("Pkg-<Version>") in sync with the plist
    # on every write, for backwards compatibility.
    try:
      self.munki_name = self.plist.GetMunkiName()
    except plist_lib.PlistNotParsedError:
      self.munki_name = None
    return super(PackageInfo, self).put(*args, **kwargs)

  def delete(self, *args, **kwargs):
    """Deletes a PackageInfo and cleans up associated data in other models.

    Any Blobstore blob associated with the PackageInfo is deleted, and all
    Catalogs the PackageInfo was a member of are regenerated.

    Args:
      *args: list, optional, args to superclass delete()
      **kwargs: dict, optional, keyword args to superclass delete()
    Returns:
      return value from superclass delete()
    """
    result = super(PackageInfo, self).delete(*args, **kwargs)
    # Rebuild every catalog that previously referenced this package.
    for track in self.catalogs:
      Catalog.Generate(track)
    if self.blobstore_key:
      gae_util.SafeBlobDel(self.blobstore_key)
    return result

  def VerifyPackageIsEligibleForNewCatalogs(self, new_catalogs):
    """Ensure a package with the same name does not exist in the new catalogs.

    Args:
      new_catalogs: list of str catalogs to verify the package name is not in.
    Raises:
      PackageInfoUpdateError: a new catalog contains a pkg with the same name.
    """
    for catalog_name in new_catalogs:
      existing_catalog = Catalog.get_by_key_name(catalog_name)
      if not existing_catalog:
        continue
      if self.name in existing_catalog.package_names:
        raise PackageInfoUpdateError(
            '%r already exists in %r catalog' % (self.name, catalog_name))

  @classmethod
  def _PutAndLogPackageInfoUpdate(
      cls, pkginfo, original_plist, original_catalogs):
    """Helper method called by Update or UpdateFromPlist to put/log the update.

    Args:
      pkginfo: a PackageInfo entity ready to be put to Datastore.
      original_plist: str XML of the original pkginfo plist, before updates.
      original_catalogs: list of catalog names the pkg was previously in.
    Returns:
      return AdminPackageLog record.
    Raises:
      PackageInfoUpdateError: there were validation problems with the pkginfo.
    """
    added_catalogs = [
        c for c in pkginfo.catalogs if c not in original_catalogs]
    pkginfo.VerifyPackageIsEligibleForNewCatalogs(added_catalogs)
    pkginfo.put()

    # Regenerate every catalog the package was added to or removed from.
    for track in sorted(set(original_catalogs + pkginfo.catalogs),
                        reverse=True):
      Catalog.Generate(track)

    # Log admin pkginfo put to Datastore.
    admin_user = users.get_current_user().email()
    log = base.AdminPackageLog(
        user=admin_user, action='pkginfo', filename=pkginfo.filename,
        catalogs=pkginfo.catalogs,
        manifests=pkginfo.manifests,
        original_plist=original_plist, install_types=pkginfo.install_types,
        manifest_mod_access=pkginfo.manifest_mod_access)
    # The plist property is a Python property backed by _plist, so it cannot
    # be passed to the constructor; set it after construction instead.
    log.plist = pkginfo.plist
    log.put()

    return log

  def PutAndLogFromProposal(self, original_plist, original_catalogs):
    """Puts and logs this pkginfo update once its proposal is approved."""
    if self.proposal.status != 'approved':
      return
    self._PutAndLogPackageInfoUpdate(self, original_plist, original_catalogs)

  @classmethod
  def _New(cls, key_name):
    """Returns a new PackageInfo entity with a given key name.

    Only needed for unit test stubbing purposes.

    Args:
      key_name: str, key name for the entity.
    Returns:
      PackageInfo object instance.
    """
    return cls(key_name=key_name)

  @classmethod
  def UpdateFromPlist(cls, plist, create_new=False):
    """Updates a PackageInfo entity from a plist_lib.ApplePlist object or str.

    Args:
      plist: str or plist_lib.ApplePlist object.
      create_new: bool, optional, default False. If True, create a new
          PackageInfo entity, only otherwise update an existing one.
    Returns:
      pkginfo: Returns updated PackageInfo object.
      log: Returns AdminPackageLog record.
    Raises:
      PackageInfoLockError: if the package is already locked in the datastore.
      PackageInfoNotFoundError: if the filename is not a key in the datastore.
      PackageInfoUpdateError: there were validation problems with the pkginfo.
    """
    # unicode is a subclass of basestring, so the previous second
    # isinstance(plist, unicode) check was redundant.
    if isinstance(plist, basestring):
      plist = plist_lib.MunkiPackageInfoPlist(plist)
      plist.EncodeXml()
      try:
        plist.Parse()
      except plist_lib.PlistError as e:
        raise PackageInfoUpdateError(
            'plist_lib.PlistError parsing plist XML: %s' % str(e))

    filename = plist['installer_item_location']

    lock = 'pkgsinfo_%s' % filename
    if not gae_util.ObtainLock(lock, timeout=5.0):
      raise PackageInfoLockError('This PackageInfo is locked.')

    # ROBUSTNESS: previously the lock was released by hand on each error
    # path, so an unexpected exception would leave it held forever.  A
    # try/finally guarantees release on every path, including the
    # explicitly raised errors below.
    try:
      if create_new:
        if cls.get_by_key_name(filename):
          raise PackageInfoUpdateError(
              'An existing pkginfo exists for: %s' % filename)
        pkginfo = cls._New(filename)
        pkginfo.filename = filename
        # If we're uploading a new pkginfo plist, wipe out catalogs.
        plist['catalogs'] = []
        original_plist = None
      else:
        pkginfo = cls.get_by_key_name(filename)
        if not pkginfo:
          raise PackageInfoNotFoundError('pkginfo not found: %s' % filename)
        original_plist = pkginfo.plist.GetXml()

      if not pkginfo.IsSafeToModify():
        raise PackageInfoUpdateError(
            'PackageInfo is not safe to modify; move to unstable first.')

      pkginfo.plist = plist
      pkginfo.name = plist['name']
      original_catalogs = pkginfo.catalogs
      pkginfo.catalogs = plist['catalogs']
      pkginfo.pkgdata_sha256 = plist['installer_item_hash']
      log = cls._PutAndLogPackageInfoUpdate(
          pkginfo, original_plist, original_catalogs)
    finally:
      gae_util.ReleaseLock(lock)

    return pkginfo, log

  def Update(self, **kwargs):
    """Updates properties and/or plist of an existing PackageInfo entity.

    Omitted properties are left unmodified on the PackageInfo entity.

    Args:
      **kwargs: many, below:
          catalogs: list, optional, a subset of common.TRACKS.
          manifests: list, optional, a subset of common.TRACKS.
          install_types: list, optional, a subset of common.INSTALL_TYPES.
          manifest_mod_access: list, optional, subset of
            common.MANIFEST_MOD_GROUPS.
          name: str, optional, pkginfo name value.
          display_name: str, optional, pkginfo display_name value.
          unattended_install: boolean, optional, True to set unattended_install.
          unattended_uninstall: boolean, optional, True to set
            unattended_uninstall.
          description: str, optional, pkginfo description.
          version: str, optional, pkginfo version.
          minimum_os_version: str, optional, pkginfo minimum_os_version value.
          maximum_os_version: str, optional, pkginfo maximum_os_version value.
          category: str, optional, pkginfo category value.
          developer: str, optional, pkginfo developer value.
          force_install_after_date: datetime, optional, pkginfo
              force_install_after_date value.

    Raises:
      PackageInfoLockError: if the package is already locked in the datastore.
      PackageInfoUpdateError: there were validation problems with the pkginfo.
    """
    catalogs = kwargs.get('catalogs')
    manifests = kwargs.get('manifests')
    install_types = kwargs.get('install_types')
    manifest_mod_access = kwargs.get('manifest_mod_access')
    name = kwargs.get('name')
    display_name = kwargs.get('display_name')
    unattended_install = kwargs.get('unattended_install')
    unattended_uninstall = kwargs.get('unattended_uninstall')
    description = kwargs.get('description')
    version = kwargs.get('version')
    minimum_os_version = kwargs.get('minimum_os_version')
    maximum_os_version = kwargs.get('maximum_os_version')
    category = kwargs.get('category')
    developer = kwargs.get('developer')
    force_install_after_date = kwargs.get('force_install_after_date')

    # Snapshot the plist XML before any mutation, for the change log.
    original_plist = self.plist.GetXml()

    lock = 'pkgsinfo_%s' % self.filename
    if not gae_util.ObtainLock(lock, timeout=5.0):
      raise PackageInfoLockError

    if self.IsSafeToModify():
      if name is not None:
        self.plist['name'] = name
        self.name = name

      if description is not None:
        self.description = description

      # Empty string means "remove the display name"; any other non-None
      # value sets it.
      if 'display_name' in self.plist and display_name == '':
        self.plist.RemoveDisplayName()
      elif display_name != '' and display_name is not None:
        self.plist.SetDisplayName(display_name)

      if install_types is not None:
        self.install_types = install_types

      if manifest_mod_access is not None:
        self.manifest_mod_access = manifest_mod_access

      if version is not None:
        self.plist['version'] = version

      # For the OS version bounds, a falsy (empty) value deletes the key,
      # a truthy value replaces it.
      if minimum_os_version is not None:
        if not minimum_os_version and 'minimum_os_version' in self.plist:
          del self.plist['minimum_os_version']
        elif minimum_os_version:
          self.plist['minimum_os_version'] = minimum_os_version

      if maximum_os_version is not None:
        if not maximum_os_version and 'maximum_os_version' in self.plist:
          del self.plist['maximum_os_version']
        elif maximum_os_version:
          self.plist['maximum_os_version'] = maximum_os_version

      if force_install_after_date is not None:
        if force_install_after_date:
          self.plist['force_install_after_date'] = force_install_after_date
        else:
          if 'force_install_after_date' in self.plist:
            del self.plist['force_install_after_date']

      # NOTE(review): these four are applied unconditionally, so omitting
      # them from kwargs passes None through — confirm the plist setters
      # treat None as "unset" rather than storing it.
      self.plist.SetUnattendedInstall(unattended_install)
      self.plist.SetUnattendedUninstall(unattended_uninstall)
      self.plist['category'] = category
      self.plist['developer'] = developer
    else:
      # If not safe to modify, only catalogs/manifests can be changed.
      for k, v in kwargs.iteritems():
        if v and k not in ['catalogs', 'manifests']:
          if self.approval_required:
            failure_message = ('PackageInfo is not safe to modify;'
                               ' please remove from catalogs first.')
          else:
            failure_message = ('PackageInfo is not safe to modify;'
                               ' please move to unstable first.')
          gae_util.ReleaseLock(lock)
          raise PackageInfoUpdateError(failure_message)

    original_catalogs = self.catalogs

    # When approval is required, catalog/manifest changes are routed through
    # a proposal instead of being applied directly.
    if self.approval_required and (
        catalogs != self.catalogs or manifests != self.manifests):
      self.proposal.Propose(catalogs=catalogs, manifests=manifests)
    else:
      if catalogs is not None:
        self.catalogs = catalogs
        self.plist['catalogs'] = catalogs
      if manifests is not None:
        self.manifests = manifests

    try:
      self._PutAndLogPackageInfoUpdate(self, original_plist, original_catalogs)
    except PackageInfoUpdateError:
      gae_util.ReleaseLock(lock)
      raise

    gae_util.ReleaseLock(lock)

  @classmethod
  def GetManifestModPkgNames(
      cls, group=common.MANIFEST_MOD_ADMIN_GROUP, only_names=False):
    """Returns a list of package names that a particular group can inject."""
    if group == common.MANIFEST_MOD_ADMIN_GROUP:
      # Admins may inject any package.
      query = cls.all()
    elif group in common.MANIFEST_MOD_GROUPS:
      query = cls.all().filter('manifest_mod_access =', group)
    else:
      # Unknown groups may not inject anything.
      return []

    if only_names:
      return [e.name for e in query]
    pkgs = [{'name': e.name, 'munki_name': e.munki_name} for e in query]
    return sorted(pkgs, key=lambda d: unicode.lower(d.get('munki_name')))
示例#20
0
class Function(polymodel.PolyModel):
    creator = db.UserProperty()
    created = db.DateTimeProperty(auto_now_add=True)
    lastupdatedby = db.UserProperty()
    lastupdated = db.DateTimeProperty(auto_now=True)
    name = db.StringProperty(required=True)
    searchname = db.StringProperty(required=True)
    code = db.TextProperty()
    tests = db.TextProperty()
    dependson = db.StringListProperty()
    dependedonby = db.StringListProperty()

    def AddSelfToDependsOns(self):
        """Records this function's name in the dependedonby list of every
        function it depends on, saving each changed dependency."""
        for ltarget in self.GetDependsOn:
            if not ltarget.dependedonby:
                ltarget.dependedonby = []
            if self.name in ltarget.dependedonby:
                continue  # already registered
            ltarget.dependedonby.append(self.name)
            if self.key() == ltarget.key():
                # Self-dependency: keep our in-memory copy in sync too.
                self.dependedonby = ltarget.dependedonby
            ltarget.put()

    def RemoveSelfFromDependsOns(self):
        """Removes this function's name from the dependedonby list of every
        function it depends on, saving each changed dependency."""
        for ltarget in self.GetDependsOn:
            if not ltarget.dependedonby:
                continue
            if self.name not in ltarget.dependedonby:
                continue
            ltarget.dependedonby.remove(self.name)
            if self.key() == ltarget.key():
                # Self-dependency: keep our in-memory copy in sync too.
                self.dependedonby = ltarget.dependedonby
            ltarget.put()

    def calcput(self):
        """Recomputes the search name and dependency links, then saves."""
        # should do something smarter with just removing what needs removing
        self.RemoveSelfFromDependsOns()

        if self.name:
            self.searchname = self.name.upper()

        # Dependencies are derived from both the tests and the code bodies.
        ldeps = self.GetDependsOnFromInput(self.tests)
        ldeps.extend(self.GetDependsOnFromInput(self.code))
        self.dependson = ldeps

        self.put()

        self.AddSelfToDependsOns()

    def calcdelete(self):
        """Deletes this function, its dependency links, and all of its runs."""
        self.RemoveSelfFromDependsOns()

        # Cascade the delete to every recorded run.
        for lrun in self.Runs():
            lrun.calcdelete()

        self.delete()

    @classmethod
    def GetOrCreate(cls, aName, aUser):
        """Returns the function named aName, creating and saving it if absent."""
        lexisting = cls.GetByName(aName)
        if lexisting:
            return lexisting
        lnew = cls(name=aName, creator=aUser, lastupdatedby=aUser)
        lnew.calcput()
        return lnew

    @classmethod
    def GetByName(cls, aName):
        """Returns the function whose searchname matches aName, or None."""
        if not aName:
            return None
        # Lookups are case-insensitive via the uppercased searchname field.
        return cls.all().filter("searchname =", aName.upper()).get()

    @classmethod
    def CreateNew(cls, aName, aUser):
        """Creates and saves a brand-new function named aName.

        Raises:
            Exception: if a function with that name already exists.
        """
        if cls.NameExists(aName):
            raise Exception("Name already exists")
        lnew = cls(name=aName,
                   creator=aUser,
                   lastupdatedby=aUser,
                   searchname=aName.upper())
        lnew.calcput()
        return lnew

    def Clone(self, aName, aUser):
        """Returns a saved copy of this function's code and tests under a
        new name.

        Raises:
            Exception: if a function with that name already exists.
        """
        if self.NameExists(aName):
            raise Exception("Name already exists")
        lcopy = self.__class__(name=aName,
                               creator=aUser,
                               lastupdatedby=aUser,
                               searchname=aName.upper())
        lcopy.code = self.code
        lcopy.tests = self.tests
        lcopy.calcput()
        return lcopy

    @classmethod
    def GetFunctions(cls, aSearch):
        """Returns a query over functions, optionally prefix-filtered by
        aSearch (case-insensitive), ordered by searchname then newest."""
        lquery = cls.all()

        if aSearch:
            lprefix = aSearch.upper()
            # Emulate a prefix match with a half-open range filter.
            lquery = lquery.filter("searchname >=", lprefix)
            lquery = lquery.filter(
                "searchname <", lprefix + "zzzzzzzzzzzzzzzzzzzzzzzzzzz")

        return lquery.order("searchname").order("-created")

    @classmethod
    def GenerateName(cls, aUser):
        """Generates an unused function name from the user's nickname.

        Appends a random 0-999 suffix and retries until the resulting name
        is not taken.
        """
        retval = None
        lnameExists = None
        while not retval or lnameExists:
            # random.randrange(1000) yields the same 0..999 range as the old
            # int(math.trunc(random.random() * 1000)) expression, directly.
            retval = aUser.nickname() + str(random.randrange(1000))
            # Nicknames are e-mail-like; strip characters unwanted in a name.
            retval = retval.replace("@", "").replace(".", "")
            lnameExists = cls.NameExists(retval)
        return retval

    @classmethod
    def NameExists(cls, aName):
        """Returns True if a function with this (case-insensitive) name
        exists; a falsy aName is treated as the empty string."""
        lname = (aName or "").upper()
        # `x is not None` replaces the old `not x is None` anti-idiom.
        return cls.all().filter("searchname =", lname).get() is not None

    @property
    def LatestRun(self):
        """The most recently initiated run of this function, or None."""
        lquery = FunctionRun.all().filter("function =", self)
        return lquery.order("-initiated").get()

    def Runs(self):
        """Returns a query over all runs of this function, newest first."""
        lquery = FunctionRun.all().filter("function =", self)
        return lquery.order("-initiated")

    def CheckDependencies(self):
        """Raises if any dependency has never run or failed its last run."""
        for ldep in self.GetDependsOn:
            if ldep.key() == self.key():
                continue  # a function may depend on itself; skip it
            llatest = ldep.LatestRun
            if llatest is None:
                raise Exception("Import '%s' has never been run." %
                                (ldep.name))
            if not llatest.success:
                raise Exception(
                    "Import '%s' fails: %s" %
                    (ldep.name, llatest.errormessage))

    @property
    def GetDependsOn(self):
        """Resolves the stored dependson names into Function entities;
        names with no matching entity are silently skipped."""
        lfound = []
        for lname in self.dependson:
            lfunction = Function.all().filter(
                "searchname =", lname.upper()).get()
            if lfunction:
                lfound.append(lfunction)
        return lfound

    @property
    def GetDependedOnBy(self):
        """Resolves the stored dependedonby names into Function entities;
        names with no matching entity are silently skipped."""
        lfound = []
        for lname in self.dependedonby:
            lfunction = Function.all().filter(
                "searchname =", lname.upper()).get()
            if lfunction:
                lfound.append(lfunction)
        return lfound

    def AddDependsOnToDictionary(self, aDictionary):
        """Recursively collects transitive dependencies into aDictionary,
        keyed by searchname, and returns the dictionary."""
        for ldep in self.GetDependsOn:
            if ldep.key() == self.key():
                continue  # ignore self-dependencies
            if ldep.searchname in aDictionary:
                continue  # already visited; prevents infinite recursion
            aDictionary[ldep.searchname] = ldep
            aDictionary = ldep.AddDependsOnToDictionary(aDictionary)
        return aDictionary

    def GetDependsOnCode(self, aDictionary):
        """Concatenates the code of every dependency in aDictionary.

        Each non-empty code body is prefixed with a blank-line separator.
        Uses "".join instead of the old quadratic += loop.
        """
        return "".join(
            "\n\n" + aDictionary[lkey].code
            for lkey in aDictionary
            if aDictionary[lkey].code)

    def RunTests(self, aInitiator):
        """Executes this function's code and tests inside a restricted scope.

        A FunctionRun is saved up front so log items can reference it.
        Imports (dependency code), the implementation, and the tests are
        exec'd in turn; each failure is re-raised with a stage prefix.

        NOTE(review): exec with "__builtins__": None is not a real sandbox;
        the stored code must still be treated as trusted input.
        """
        lfunctionrun = FunctionRun()
        lfunctionrun.function = self
        lfunctionrun.initiator = aInitiator
        lfunctionrun.put()  # temporary, so we can save logitems

        try:

            def xlog(aMessage):
                # Exposed to the executed code as log(); persists a LogItem.
                logitem = LogItem()
                logitem.functionrun = lfunctionrun
                logitem.message = aMessage
                logitem.put()

            def xcheck(aBool, aMessage=None):
                # Exposed to the executed code as check(); assert-like helper.
                if not aBool:
                    if aMessage:
                        raise Exception("Assert failed: %s" % aMessage)
                    else:
                        raise Exception("Assert failed.")

            # "__builtins__":None,
            # Minimal execution scope shared by imports, code, and tests.
            lscope = {
                "__builtins__": None,
                "log": xlog,
                "check": xcheck,
                "str": str,
                "False": False,
                "True": True
            }

            self.CheckDependencies()

            limportsDict = self.AddDependsOnToDictionary({})

            if limportsDict:
                limportsCode = self.GetDependsOnCode(limportsDict)

                try:
                    exec limportsCode in lscope
                except Exception, ex:
                    # Prefix so the caller can tell which stage failed.
                    raise ex.__class__("Imports: %s" % unicode(ex))

            lcode = self.code
            if not lcode:
                lcode = ""

            if limportsDict:
                lcode += limportsCode

            try:
                exec lcode in lscope
            except Exception, ex:
                raise ex.__class__("Implementation: %s" % unicode(ex))

            if self.tests:
                lcode += "\n\n" + self.tests

            try:
                exec lcode in lscope
            except Exception, ex:
                raise ex.__class__("Tests: %s" % unicode(ex))
示例#21
0
class Game(db.Model):
    """A correspondence chess game between two players identified by e-mail.

    The live board state lives in `board`/`last_board` (plain attributes)
    and is serialized to the pickled blob properties on put().
    """
    black_email = db.StringProperty(required=True)
    white_email = db.StringProperty(required=True)
    whose_go = db.StringProperty(required=True)  # 'white' or 'black'
    pickled_board = db.BlobProperty()  # pickled current board
    pickled_last_board = db.BlobProperty()  # pickled board before last move
    history = db.StringListProperty()  # human-readable move log
    processed = db.StringListProperty()

    # Maps the side to move to the side that moves next.
    next_go = {'white' : 'black', 'black': 'white'}

    def whose_go_email(self):
        # E.g. whose_go == 'white' -> self.white_email.
        return getattr(self, self.whose_go + '_email')

    def other_email(self):
        # E-mail of the player who is NOT to move.
        return getattr(self, self.__class__.next_go[self.whose_go] + '_email')

    def __init__(self, *args, **kwargs):
        db.Model.__init__(self, *args, **kwargs)

        # Rehydrate board state from the stored blobs, or start a new game.
        if self.pickled_board:
            self.board = pickle.loads(self.pickled_board)
            self.last_board = pickle.loads(self.pickled_last_board)
        else:
            self.board = Board()
            self.board.setup_initial_board()
            self.last_board = self.board

    def put(self, *args, **kwargs):
        # Serialize the in-memory boards before persisting.
        self.pickled_board = pickle.dumps(self.board)
        self.pickled_last_board = pickle.dumps(self.last_board)

        db.Model.put(self, *args, **kwargs)

    def do_move(self, move):
        """Applies a textual move like 'e2 to e4', or an 'undo' request.

        Raises:
            errors.Undo: after reverting to the previous board.
            errors.InvalidMove: if the text does not parse as a move.
        """
        move = move.lower()
        logging.debug(move)

        if 'undo' in move:
            self.history.append(move)
            self.board = self.last_board
            self.whose_go = Game.next_go[self.whose_go]

            raise errors.Undo

        # Optional second from/to pair supports two-piece moves (castling).
        match = re.search(r'([a-h][1-8]) ?to ?([a-h][1-8])(?: ?and ?([a-h][1-8]) ?to ?([a-h][1-8]))?', move)

        if match:
            matches = match.groups()
            _from = matches[0]
            to = matches[1]

            piece_moving = self.piece_at(_from)
            move = "%s %s" % (unicode(piece_moving), move)

            piece_taken = self.piece_at(to)
            if piece_taken:
                move = move + " (takes %s)" % unicode(piece_taken)
            self.history.append(move)

            # Snapshot the board so a later 'undo' can restore it.
            self.last_board = copy.deepcopy(self.board)
            self.move_from(_from, to)
            if len(matches) >= 4 and matches[2] and matches[3]:
                self.move_from(matches[2], matches[3])

            self.whose_go = Game.next_go[self.whose_go]
        else:
            raise errors.InvalidMove

    def piece_at(self, at):
        """Returns the piece at an algebraic square like 'e4' (or None)."""
        x, y = self.coords_from_alphanumeric(at)

        return self.board[x][y]

    def move_from(self, _from, to):
        """Moves a piece between squares, overwriting any captured piece."""
        from_x, from_y = self.coords_from_alphanumeric(_from)
        to_x, to_y = self.coords_from_alphanumeric(to)

        self.board[to_x][to_y] = self.board[from_x][from_y]
        self.board[from_x][from_y] = None

    def coords_from_alphanumeric(self, alpha):
        # 'a1' -> (0, 0): rank digit -> row index, file letter -> column.
        return (int(alpha[1]) - 1, ord(alpha[0]) - 97)
示例#22
0
class Result(Model):
    """Answers to a survey and access permissions."""
    keys = db.StringListProperty(required=True)  # public access keys
    metric = db.StringProperty(required=True)
    answers_json = db.TextProperty(default='')  # preferred

    @classmethod
    def get_results(cls, private_keys, group=None):
        """Fetches all answers reachable from the given private keys.

        Args:
            private_keys: list of private key strings; each is converted to
                its public counterpart for the datastore query.
            group: optional group identifier; when it resolves to a Group
                entity, results are restricted to that ancestor.
        Returns:
            dict with 'metric' (str) and 'answers' (list of dicts).
        Raises:
            Exception: if the matching results span more than one metric.
        """
        # Renamed first parameter self -> cls: this is a classmethod.
        public_keys = [util.Keys().get_public(k) for k in private_keys]
        ancestor = Group.get_group(group)

        if ancestor:
            results = cls.gql(
                """ WHERE keys IN :1
                                     AND ANCESTOR IS :2
                                     ORDER BY created DESC""", public_keys,
                ancestor)
        else:
            results = cls.gql(
                """ WHERE keys IN :1
                                     ORDER BY created DESC""", public_keys)

        metrics = set()
        answers = []
        for r in results:
            metrics.add(r.metric)
            answers.append(r.get_answers())

        if len(metrics) > 1:
            raise Exception(
                "Keys were not all from the same metric: {} {}".format(
                    public_keys, metrics))

        if answers:
            return {'metric': metrics.pop(), 'answers': answers}
        else:
            # No results
            logging.info('No answers found')
            return {'metric': 'no responses yet', 'answers': []}

    @classmethod
    def put_result(cls, keys, metric, answers, group):
        """Stores a new Result, parented under the group when one is given."""
        if group:
            parent = Group.get_group(group)
            result = cls(keys=keys,
                         metric=metric,
                         answers_json=answers,
                         parent=parent)
        else:
            result = cls(keys=keys, metric=metric, answers_json=answers)

        return result.put()

    def get_answers(self):
        """Returns the parsed answers dict, with participant ids hashed."""
        # Some old entities don't have json-based results. Treat them as if
        # they are empty. This is easier than deleting them all.
        if self.answers_json:
            answers = json.loads(self.answers_json)
        else:
            answers = {}

        # Always take the precaution of hashing participant ids, if present.
        if 'pid' in answers:
            answers['pid'] = util.hash_participant_id(answers['pid'])

        return answers
示例#23
0
class Document(db.Model):
    """A user-authored work with tags, ratings, views and subscriptions."""

    author = db.ReferenceProperty(User, collection_name='works')
    authorname = db.StringProperty()
    content = db.TextProperty()  # raw HTML body
    date = db.DateTimeProperty(auto_now_add=True)
    _description = db.StringProperty(default='')
    draft = db.BooleanProperty(default=True)
    favorites = db.StringListProperty(default=[])
    filename = db.StringProperty()
    leaftags = db.StringListProperty(default=[])  # most specific tags only
    object_type = db.StringProperty(default='Document')
    raters = db.StringListProperty()
    rating = db.IntegerProperty(default=0)
    subscribers = db.StringListProperty(default=[])
    subtitle = db.StringProperty(default='')
    tags = db.StringListProperty(default=[])  # leaf tags plus all ancestors
    title = db.StringProperty()
    views = db.IntegerProperty(default=0)
    viewers = db.StringListProperty(default=[])
    special = db.BooleanProperty(default=False)
    type = db.StringListProperty(default=["not_meta"])

    def get_commentary(self):
        """Returns the Commentary object for this document."""
        commentary = Commentary(self.author.username, self.filename)
        return commentary

    def add_tag(self, tag):
        """Adds one tag, enforcing the three-tag limit."""
        # NOTE(review): the limit is checked against self.tags although the
        # message speaks of leaf tags — confirm which was intended.
        if len(self.tags) < 3:
            tags = self.tags
            tags.append(tag)
            self.add_tags(tags)
            return 'Tag added successfully.'
        else:
            return 'A document may only have three leaf tags.'

    def add_tags(self, taglist):
        """Recomputes leaftags and the ancestor-closed tag set from taglist.

        Unknown tag names are skipped. Saves the document when done.
        """
        self.leaftags = []
        self.tags = []
        for tag in taglist:

            tagObject = Tag.get_by_key_name(tag)

            # Fix: the old code set a `real` flag inside try/except, which
            # could leave `real` unbound (NameError) and let a missing tag
            # reach tagObject.ancestors below. Skip unknown tags instead.
            if tagObject is None:
                continue
            if tag in self.tags:
                continue

            # Fix: iterate a copy — the old loop removed from self.leaftags
            # while iterating it, which can skip elements.
            for leaftag in list(self.leaftags):
                # Drop any current leaf that the new tag descends from.
                if tag in Tag.get_by_key_name(leaftag).descendants:
                    self.leaftags.remove(leaftag)
            self.leaftags.append(tag)
            ancestry = tagObject.ancestors
            ancestry.append(tag)
            self.tags.extend(ancestry)
            self.tags = remove_duplicates(self.tags)
            if 'Meta' in self.tags:
                # NOTE(review): tags carry 'Meta' but type carries 'meta';
                # the casing difference is preserved as found.
                if 'meta' not in self.type:
                    try:
                        self.type.remove('not_meta')
                    except ValueError:
                        # 'not_meta' was already absent.
                        pass
                    self.type.append('meta')
        self.put()

    def get_url(self, includeDomain=False):
        """Returns the document URL, optionally with the site domain."""
        if includeDomain:
            return hank[
                'domainstring'] + self.author.username + '/document/' + self.filename + '/'
        else:
            return '/' + self.author.username + '/document/' + self.filename + '/'

    def get_stripped(self):
        """Returns the content with all HTML tags removed."""
        stripped = strip_tags(self.content)
        return stripped

    def set_description(self, words):
        """Sets the description to the first 150 chars of tag-stripped text."""
        words = strip_tags(words)
        words = words[:150]
        self._description = words

    def get_description(self):
        return self._description

    def get_leaftags(self):
        # Resolve stored titles into Tag entities.
        return [Tag.get_by_key_name(title) for title in self.leaftags]

    def parse(self):
        """Sanitizes content, keeping only whitelisted tags/attributes.

        Re-parses until nothing is removed, since nested tricks can survive
        a single sanitization pass. Saves the document when done.
        """
        acceptableElements = [
            'a', 'blockquote', 'br', 'em', 'i', 'ol', 'ul', 'li', 'p', 'b'
        ]
        acceptableAttributes = ['href']
        counter = 0
        contentTemp = self.content
        while True:
            counter += 1
            soup = BeautifulSoup(contentTemp)
            removed = False
            for tag in soup.findAll(True):  # find all tags
                if tag.name not in acceptableElements:
                    tag.extract()  # remove the bad ones
                    removed = True
                else:  # it might have bad attributes
                    for attr in tag._getAttrMap().keys():
                        if attr not in acceptableAttributes:
                            del tag[attr]

            # turn it back to html
            fragment = unicode(soup)
            if removed:
                # tricks can exploit a single pass
                # we need to reparse the html until it stops changing
                contentTemp = fragment
                continue  # next round
            break
        self.content = contentTemp
        self.put()

    def set_rating(self):
        """Recomputes the reputation-weighted rating and saves."""
        votes = self.ratings
        rating = 0
        view_mass = 0
        for viewer in self.viewers:
            user_view = get_user(viewer)
            view_mass = view_mass + (1 + user_view.reputation)
        for vote in votes:
            rating = rating + vote.value * (1 + vote.user.reputation)
        # Fix: a document with no viewers used to raise ZeroDivisionError.
        if view_mass:
            rating = rating / view_mass
        else:
            rating = 0
        self.rating = rating
        self.put()

    def get_tags(self):
        # Resolve stored titles into Tag entities.
        return [Tag.get_by_key_name(title) for title in self.tags]

    def set_subscriber(self, subscriber, add=True):
        "Subscriber should be a username."
        if add == True and not subscriber in self.subscribers:
            self.subscribers.append(subscriber)
        if add == False and subscriber in self.subscribers:
            self.subscribers.remove(subscriber)

    def remove(self):
        """Deletes this document with its ratings and comment threads."""
        ratings = self.ratings
        for rating in ratings:
            rating.delete()

        replies = self.comments
        for reply in replies:
            reply.remove()

        self.delete()

    def remove_tag(self, tagName):
        """Removes one leaf tag and recomputes the tag sets."""
        current = self.leaftags
        if tagName in current:
            current.remove(tagName)
        self.add_tags(current)
        return tagName + ' was removed from tags.'

    def set_view(self):
        """Counts a view (non-drafts only) and records the viewer if known."""
        if not self.draft:
            self.views += 1
            user = get_user()
            try:
                user.username
                if not user.username in self.viewers:
                    self.viewers.append(user.username)
            except AttributeError:
                # Anonymous visitors have no username; count the view only.
                pass
            self.put()

    def get_tag_number(self):
        return len(self.tags)
示例#24
0
class Post(db.Model):
    """A user post with a simple like counter."""
    content = db.TextProperty(required=True)
    user = db.UserProperty(required=True)  # the author
    created = db.DateTimeProperty(auto_now_add=True)
    likes = db.IntegerProperty(default=0)
    # Usernames that liked the post; kept alongside the counter.
    liked_by = db.StringListProperty(default=[])
示例#25
0
class Tag(db.Model):
    """ Tags should be instantiated with their title as a key_name """
    parent_tag = db.SelfReferenceProperty(collection_name='children')
    title = db.StringProperty()
    ancestors = db.StringListProperty()  # parent titles, nearest first
    descendants = db.StringListProperty()  # titles of the whole subtree

    def set_descendants(self, passive=False):
        """Recomputes descendants from the children subtree.

        With passive=True (used by the recursive calls) the subtree titles
        including self are returned instead of being saved.
        """
        descendants = []
        for child in self.children:
            family = child.set_descendants(True)
            descendants.extend(family)
        if passive:
            descendants.append(self.title)
            return descendants
        else:
            self.descendants = descendants
            self.put()

    def get_ancestors(self):
        # Resolve stored titles into Tag entities.
        return [Tag.get_by_key_name(title) for title in self.ancestors]

    def set_ancestors(self, ancestry=None):
        """Rebuilds the ancestors list by walking parent links upward."""
        if not ancestry:
            if self.parent_tag:
                ancestry = [self.parent_tag.title]
                return self.set_ancestors(ancestry)
            else:
                # Root tag: no ancestors.
                self.ancestors = []
                self.put()
        else:
            if Tag.get_by_key_name(ancestry[-1]).parent_tag:
                # Keep climbing: append the next parent and recurse.
                ancestry.append(
                    Tag.get_by_key_name(ancestry[-1]).parent_tag.title)
                return self.set_ancestors(ancestry)
            else:
                self.ancestors = ancestry
                self.put()

    def get_children(self):
        # NOTE(review): db filters conventionally use 'parent_tag =' (single
        # '='); confirm '==' is accepted by this SDK version.
        children = Tag.all().filter('parent_tag ==', self).fetch(1000)
        return children

    def get_childNames(self):
        """Returns the titles of the direct children."""
        children = Tag.all().filter('parent_tag ==', self).fetch(1000)
        returnChildren = [child.title for child in children]
        return returnChildren

    def populate_descendants(self, descendants=None):
        # NOTE(review): `populate_descendants(child)` references an undefined
        # module-level name; presumably child.populate_descendants() was
        # intended. Also self.put() is unreachable on the descendants=None
        # path (both branches return first). Left as-is pending a verified
        # fix — set_descendants() appears to be the working variant.
        if not descendants:
            descendants = [self.title]
            if self.children:
                for child in self.children:
                    descendants.extend(populate_descendants(child))
                return descendants
            else:
                return descendants
        self.put()

    def get_documents(self, own=False):
        """Returns published documents under this tag (and its subtree)."""
        if self.title == 'Root':
            # Root shows everything that is not a draft.
            return Document.all().filter("draft ==",
                                         False).order('-date').fetch(1000)
        else:
            if own:
                # NOTE(review): calls a module-level get_documents() helper,
                # not this method — confirm it exists in this module.
                return get_documents([self.title])

            else:
                # Collect documents from every descendant tag plus this one,
                # de-duplicate, and order newest first.
                docs = []
                for descendant in self.descendants:
                    tag = Tag.get_by_key_name(descendant)
                    docs.extend(tag.get_documents(True))
                docs.extend(self.get_documents(True))
                unique = list(set(docs))
                ordered = sorted(unique,
                                 key=lambda document: document.date,
                                 reverse=True)
                return ordered

    def exterminate(self):
        """Deletes this tag and its subtree, scrubbing it from documents."""
        references = Document.all().filter('tags =', self.title).fetch(1000)
        for reference in references:
            #next two line ensure no duplicate version of tag will remain
            purge = remove_duplicates(reference.tags)
            reference.tags = purge
            reference.tags.remove(self.title)
            reference.put()
        children = self.children
        for child in children:
            child.exterminate()
        self.delete()

    def get_url(self):
        return '/tag/' + self.title + '/'
示例#26
0
文件: user.py 项目: PEZ/MooTalk
class User(db.Model):
    """A chat user keyed by a primary address plus e-mail aliases."""
    nickname = db.StringProperty(required=True)
    address = db.StringProperty(required=True)  # primary IM address
    _email_addresses = db.StringListProperty()  # alias e-mail addresses
    avatar = db.LinkProperty()

    @classmethod
    def create(cls, nickname, address):
        """Creates a user unless the address is taken; returns None then."""
        address = address.strip()
        # Fix: query for the address directly instead of loading every user
        # entity into memory to test membership.
        if cls.user(address) is not None:
            return None
        user = cls(nickname=nickname, address=address)
        user.avatar = db.Link(gravatar(address, size=48))
        user.put()
        return user

    @classmethod
    def user(cls, address):
        """Returns the user with this primary address, or None."""
        user = cls.all()
        user.filter("address =", address)
        return user.get()

    @classmethod
    def user_from_nick(cls, nick):
        """Returns the user with this nickname, or None."""
        user = cls.all()
        user.filter("nickname =", nick)
        return user.get()

    @classmethod
    def user_from_email_address(cls, address):
        """Returns the user owning this alias e-mail address, or None."""
        user = cls.all()
        user.filter("_email_addresses =", address)
        return user.get()

    @classmethod
    def user_from_any_address(cls, address):
        """Looks up by primary address first, then by e-mail alias."""
        user = cls.user(address)
        if user is not None:
            return user
        # Delegate instead of duplicating the alias query.
        return cls.user_from_email_address(address)

    @classmethod
    def murder(cls, murder_key):
        """Removes a user from all their chats, then deletes the entity."""
        import logging

        user = User.get(murder_key)
        logging.info("REMOVING User %s" % user.nickname)
        for chat in user.chats:
            logging.info("REMOVING %s from chat %s" %
                         (user.nickname, chat.title))
            chat.remove_participant(user.address)
            chat.clean_chat_from_non_users()

        user.delete()

    def email_addresses_get(self):
        return self._email_addresses

    def email_addresses_set(self, addresses):
        """Accepts either a newline-separated string or a list; saves."""
        # isinstance(basestring) replaces the old exact-type check and also
        # accepts str/unicode subclasses.
        if isinstance(addresses, basestring):
            self._email_addresses = textlines_to_list(addresses)
        else:
            self._email_addresses = addresses
        self.put()

    email_addresses = property(email_addresses_get, email_addresses_set)

    @property
    def email_addresses_as_text(self):
        return '\n'.join(self._email_addresses)

    @property
    def chats(self):
        """All Chat entities in which this user participates."""
        from moo.chat import Chat
        return db.get(
            Chat.all(keys_only=True).filter("participants =", self.address))
示例#27
0
class Department(db.Model):
    """A hospital department and the doctors assigned to it."""
    id = db.IntegerProperty(required=True)  # numeric department id (property name shadows the builtin by schema design)
    name = db.StringProperty(required=True)  # human-readable department name
    SectNO = db.StringProperty()  # section number, defined by the hospital
    doctors = db.StringListProperty()  # identifiers of doctors in this department
示例#28
0
class Avatars(db.Model):
    """Unindexed string lists of Twitter followers and Disqus commenters.

    NOTE(review): presumably the raw inputs for avatar rendering — confirm
    against the callers; this chunk shows only the declarations.
    """

    twitter_followers = db.StringListProperty(indexed=False)  # Twitter follower entries
    disqus_commenters = db.StringListProperty(indexed=False)  # Disqus commenter entries
示例#29
0
class Role(db.Model):
    """A named role carrying a list of permission strings."""
    name = db.StringProperty(required=True)
    permissions = db.StringListProperty(default=[])
示例#30
0
class Article(db.Model):
    # key id
    title = db.StringProperty(required=True)  # article title
    url = db.StringProperty(required=True)  # relative URL
    content = db.TextProperty()  # body content
    format = db.IntegerProperty(default=CONTENT_FORMAT_FLAG['plain'],
                                indexed=False)  # content parse-format flags
    published = db.BooleanProperty(default=True)  # whether publicly visible
    time = db.DateTimeProperty(auto_now_add=True)  # publication time
    mod_time = db.DateTimeProperty(auto_now_add=True)  # last modification time
    keywords = db.StringListProperty()  # search keywords, stored lowercase
    tags = db.StringListProperty()  # tags
    category = db.StringProperty()  # category path
    hits = db.IntegerProperty(default=0)  # view count
    replies = db.IntegerProperty(default=0)  # comment count
    like = db.IntegerProperty(default=0)  # number of "like" votes
    hate = db.IntegerProperty(default=0)  # number of "hate" votes
    # derived score: likes minus hates
    rating = db.ComputedProperty(lambda self: (self.like or 0) -
                                 (self.hate or 0))
    # password = db.StringProperty(indexed=False)
    # view_level = db.IntegerProperty(default=0, indexed=False)
    # closed = db.BooleanProperty(default=False, indexed=False) #  whether comments are closed

    # matches date-prefixed URLs of the form YYYY/MM/DD/<slug>
    _PATTERN = re.compile(r'\d{4}/\d{2}/\d{2}/.+')

    @staticmethod
    @memcached('get_articles_for_homepage',
               ARTICLES_CACHE_TIME,
               lambda cursor=None, fetch_limit=ARTICLES_PER_PAGE: hash(cursor)
               if cursor else None)
    def get_articles_for_homepage(cursor=None, fetch_limit=ARTICLES_PER_PAGE):
        """Fetch a page of published articles, newest first, and cache them."""
        base_query = Article.all().filter('published =', True).order('-time')
        page, next_cursor = get_fetch_result_with_valid_cursor(
            query_with_cursor(base_query, cursor),
            fetch_limit,
            config=EVENTUAL_CONSISTENCY_CONFIG)
        _cache_articles(page, True)
        return page, next_cursor

    @staticmethod
    def get_unpublished_articles(cursor=None, fetch_limit=ARTICLES_PER_PAGE):
        """Fetch a page of unpublished articles, most recently edited first."""
        drafts_query = Article.all().filter('published =',
                                            False).order('-mod_time')
        drafts, next_cursor = get_fetch_result_with_valid_cursor(
            query_with_cursor(drafts_query, cursor), fetch_limit)
        _cache_articles(drafts, False)
        return drafts, next_cursor

    @staticmethod
    @memcached('get_articles_for_feed', FEED_CACHE_TIME)
    def get_articles_for_feed(fetch_limit=ARTICLES_PER_PAGE):
        """Return recently modified published articles for the feed."""
        feed_query = Article.all().filter('published =', True)
        return feed_query.order('-mod_time').fetch(fetch_limit)

    @staticmethod
    @memcached('get_article_by_url', ARTICLE_CACHE_TIME, lambda url: hash(url))
    def get_article_by_url(url):
        """Look up an article by URL, priming the by-id cache on a hit.

        Returns ENTITY_NOT_FOUND for overlong URLs or a missed lookup.
        """
        # A StringProperty value never exceeds 500 characters, so a longer
        # URL cannot match anything.
        if len(url) > 500:
            return ENTITY_NOT_FOUND
        article = Article.all().filter(
            'url =', url).get(config=EVENTUAL_CONSISTENCY_CONFIG)
        if not article:
            return ENTITY_NOT_FOUND
        memcache.set('get_article_by_id:%s' % article.key().id(), article,
                     ARTICLE_CACHE_TIME)
        return article

    @staticmethod
    @memcached('get_article_by_id', ARTICLE_CACHE_TIME, lambda id: id)
    def get_article_by_id(id):
        """Look up an article by datastore id, priming the by-url cache.

        Returns ENTITY_NOT_FOUND for non-positive ids or a missed lookup.
        """
        if id <= 0:
            return ENTITY_NOT_FOUND
        article = Article.get_by_id(id)
        if not article:
            return ENTITY_NOT_FOUND
        memcache.set('get_article_by_url:%s' % hash(article.url), article,
                     ARTICLE_CACHE_TIME)
        return article

    def category_name(self):
        """Return the display name for this article's category path."""
        return Category.path_to_name(self.category)

    def html_summary(self):
        """Render as HTML the part of the content before the summary mark."""
        fmt = self.format
        body = self.content
        # Keep only the text before the first summary delimiter (either form).
        if SUMMARY_DELIMETER.search(body):
            summary = SUMMARY_DELIMETER.split(body, 1)[0]
        elif SUMMARY_DELIMETER2.search(body):
            summary = SUMMARY_DELIMETER2.split(body, 1)[0]
        else:
            summary = body
        if fmt & CONTENT_FORMAT_FLAG['bbcode']:
            # Escape raw HTML unless the html flag is also set.
            escape_html = not (fmt & CONTENT_FORMAT_FLAG['html'])
            return convert_bbcode_to_html(summary, escape=escape_html)
        if fmt & CONTENT_FORMAT_FLAG['html']:
            return summary
        return parse_plain_text(summary)

    def html_content(self):
        """Render the full content as HTML with the summary mark removed."""
        fmt = self.format
        body = self.content
        # The first delimiter form is deleted in place; the second form keeps
        # only the text after it.
        if SUMMARY_DELIMETER.search(body):
            body = SUMMARY_DELIMETER.sub('', body, 1)
        elif SUMMARY_DELIMETER2.search(body):
            body = SUMMARY_DELIMETER2.split(body, 1)[1]

        if fmt & CONTENT_FORMAT_FLAG['bbcode']:
            # Escape raw HTML unless the html flag is also set.
            escape_html = not (fmt & CONTENT_FORMAT_FLAG['html'])
            return convert_bbcode_to_html(body, escape=escape_html)
        if fmt & CONTENT_FORMAT_FLAG['html']:
            return body
        return parse_plain_text(body)

    def previous_article(self, published):
        """Return the newest article older than this one, caching the result.

        Returns ENTITY_NOT_FOUND when no previous article exists; on a
        datastore error, returns whatever was fetched so far (usually None).
        """
        previous_article = None
        try:
            previous_article = Article.all().filter(
                'published =',
                published).filter('time <', self.time).order('-time').get(
                    config=QUICK_LIMITED_EVENTUAL_CONSISTENCY_CONFIG)
            if previous_article:
                # Prime every cache entry this lookup can satisfy.
                memcache.set_multi(
                    {
                        'get_previous_article:%s_%s' % (self.key().id(), published):
                        previous_article,
                        'get_next_article:%s_%s' % (previous_article.key().id(
                                                    ), published):
                        self,
                        'get_article_by_url:%s' % hash(previous_article.url):
                        previous_article,
                        'get_article_by_id:%s' % previous_article.key().id():
                        previous_article
                    }, ARTICLE_CACHE_TIME)
                return previous_article
            memcache.set(
                'get_previous_article:%s_%s' % (self.key().id(), published),
                ENTITY_NOT_FOUND, ARTICLE_CACHE_TIME)
            return ENTITY_NOT_FOUND
        except Exception:
            # Best-effort lookup; the original bare except also swallowed
            # SystemExit/KeyboardInterrupt, which should propagate.
            return previous_article

    def next_article(self, published):
        """Return the oldest article newer than this one, caching the result.

        Returns ENTITY_NOT_FOUND when no next article exists; on a datastore
        error, returns whatever was fetched so far (usually None).
        """
        next_article = None
        try:
            next_article = Article.all().filter(
                'published =',
                published).filter('time >', self.time).order('time').get(
                    config=QUICK_LIMITED_EVENTUAL_CONSISTENCY_CONFIG)
            if next_article:
                # Prime every cache entry this lookup can satisfy.
                memcache.set_multi(
                    {
                        'get_next_article:%s_%s' % (self.key().id(), published):
                        next_article,
                        'get_previous_article:%s_%s' % (next_article.key().id(
                                                        ), published):
                        self,
                        'get_article_by_url:%s' % hash(next_article.url):
                        next_article,
                        'get_article_by_id:%s' % next_article.key().id():
                        next_article
                    }, ARTICLE_CACHE_TIME)
                return next_article
            memcache.set(
                'get_next_article:%s_%s' % (self.key().id(), published),
                ENTITY_NOT_FOUND, ARTICLE_CACHE_TIME)
            return ENTITY_NOT_FOUND
        except Exception:
            # Best-effort lookup; the original bare except also swallowed
            # SystemExit/KeyboardInterrupt, which should propagate.
            return next_article

    def nearby_articles(self, published=True):
        """Return (previous, next) neighbors, consulting memcache first."""
        suffix = '%s_%s' % (self.key().id(), published)
        previous_key = 'get_previous_article:' + suffix
        next_key = 'get_next_article:' + suffix
        cached = memcache.get_multi((next_key, previous_key))

        previous_article = cached.get(previous_key)
        if previous_article is None:
            previous_article = self.previous_article(published)

        next_article = cached.get(next_key)
        if next_article is None:
            next_article = self.next_article(published)
        return previous_article, next_article

    @staticmethod
    @memcached('relative_articles', ARTICLES_CACHE_TIME, lambda id, limit: id)
    def relative_articles(id, limit):
        """Find up to *limit* published articles related to article *id*.

        Relatedness is tried in order: shared keywords, then shared tags,
        then the same category subtree; earlier sources fill the quota first.
        Returns [] for a non-positive id or a missing article.
        """
        if id <= 0:
            return []
        article = Article.get_article_by_id(id)
        if not article:
            return []
        article_key = article.key()
        keywords = article.keywords
        relative_article_keys = set()
        left = limit
        total = 0
        if keywords:
            # Keys-only queries do not support IN filters, hence one query
            # per keyword.
            for keyword in keywords:
                relative_article_keys |= set(
                    Article.all(keys_only=True).filter(
                        'keywords =', keyword).filter('published =',
                                                      True).fetch(left + 1))
                relative_article_keys.discard(article_key)
                total = len(relative_article_keys)
                if total >= limit:
                    return db.get(list(relative_article_keys)[:limit])
            left -= total
        tags = article.tags
        if tags:
            for tag in tags:
                new_article_keys = set(
                    Article.all(keys_only=True).filter('tags =', tag).filter(
                        'published =',
                        True).fetch(left + 1)) - relative_article_keys
                new_article_keys.discard(article_key)
                if len(new_article_keys) >= left:
                    return db.get(
                        list(relative_article_keys) +
                        list(new_article_keys)[:left])
                relative_article_keys |= new_article_keys
            left = limit - len(relative_article_keys)
        category = article.category
        if category:
            new_article_keys = set(
                Article.all(keys_only=True).filter(
                    'category >=', category).filter(
                        'category <', category + u'\ufffd').filter(
                            'published =',
                            True).fetch(left + 1)) - relative_article_keys
            new_article_keys.discard(article_key)
            if len(new_article_keys) >= left:
                return db.get(
                    list(relative_article_keys) +
                    list(new_article_keys)[:left])
            relative_article_keys |= new_article_keys
        if relative_article_keys:
            # Pass a list: db.get takes an ordered key sequence, and every
            # other call site here converts explicitly.
            return db.get(list(relative_article_keys))
        return []

    @staticmethod
    def calc_hits(article_id):
        """Flush a memcached hit counter for an article into the datastore.

        NOTE(review): *article_id* is forwarded unchanged to ``calc`` as the
        memcache key that gets ``key.split(':')[1]``-parsed, so it is
        presumably a string like ``'...:<id>'`` rather than a bare int —
        the ``article_id < 0`` guard suggests otherwise; confirm with callers.
        """
        def calc(key):
            # Apply the cached hit delta to the entity, then decrement the
            # cached counter by the amount that was persisted.
            try:
                hits = memcache.get(key)
                if hits:
                    hits = int(hits)
                    if hits:
                        article = Article.get_by_id(int(key.split(':')[1]))
                        if article:
                            article.hits += hits
                            article.put()
                            memcache.decr(key, hits)
                return True
            except:
                # Best-effort: on any failure the counter stays in memcache.
                return False

        if article_id < 0:
            return False

        return db.run_in_transaction(calc, article_id)

    @staticmethod
    def calc_rating(article_id):
        """Recount like/hate votes for an article inside a transaction.

        Returns the ``(like, hate)`` tuple, or None when the article does
        not exist (or disappears inside the transaction).
        """
        if article_id < 0:
            return None
        article = Article.get_article_by_id(article_id)
        if not article:
            return None

        def calc(article_key):
            article = Article.get(article_key)
            if not article:
                return None
            article.like = Point.all().ancestor(article_key).filter(
                'value =', True).count(None)
            article.hate = Point.all().ancestor(article_key).filter(
                'value =', False).count(None)
            # Persist the recount; the original never saved it and returned
            # undefined names ('like'/'hate' -> NameError), which aborted
            # the transaction.
            article.put()
            return article.like, article.hate

        return db.run_in_transaction(calc, article.key())

    @staticmethod
    def search(keywords,
               published,
               cursor=None,
               fetch_limit=ARTICLES_PER_PAGE):
        """Search articles: exact title match first, then keyword terms."""
        exact_matches = []
        if not cursor:
            title_query = Article.all().filter('title =', keywords)
            if published is not None:
                title_query.filter('published =', published)
            # Articles rarely share a title; raise the 1 if you need more.
            exact_matches = title_query.fetch(1)
            if exact_matches:
                fetch_limit -= 1
        if not keywords:
            return exact_matches, None
        keyword_query = Article.all()
        for term in set(keywords.split()):
            keyword_query.filter('keywords =', term.lower())
        if published is not None:
            keyword_query.filter('published =', published)
        matches, next_cursor = get_fetch_result_with_valid_cursor(
            query_with_cursor(keyword_query, cursor), fetch_limit)
        return (exact_matches + matches, next_cursor)

    def quoted_url(self):
        """Return the article URL, percent-quoted and HTML-escaped."""
        return escape(quoted_string(self.url))

    def quoted_not_escaped_url(self):
        """Return the article URL percent-quoted, without HTML escaping."""
        return quoted_string(self.url)