class Comment(Visibility):
    """A comment attached to a model object.

    (Original docstring: "Se puede comentar cualquier objeto del modelo" --
    any model object can be commented on.)
    """
    user = db.ReferenceProperty(User, collection_name='comments')
    instance = db.ReferenceProperty(None)  # target may be of any kind
    created = db.DateTimeProperty(auto_now_add=True)
    modified = db.DateTimeProperty(auto_now=True)
    msg = db.TextProperty(required=True)
    deleted = db.BooleanProperty(default=False)  # soft-delete flag
    votes = db.IntegerProperty(default=0)
    objects = CommentHelper()

    @property
    def id(self):
        return int(self.key().id())

    def set_votes(self, count):
        """Atomically add `count` to votes (clamped at 0); returns new total."""
        def _tx(count):
            obj = Comment.get(self.key())
            obj.votes += count
            if obj.votes < 0:
                obj.votes = 0
            obj.put()
            return obj.votes
        return db.run_in_transaction(_tx, count)

    @classmethod
    def do_comment(cls, user, instance, msg):
        """Create and persist a comment by `user` on `instance`.

        Raises:
            TypeError: if msg is None or empty.
        """
        if msg is None or msg == '':
            raise TypeError('msg is empty')
        comment = Comment(
            user=user,
            instance=instance,
            msg=msg,
            # Inherit the target's visibility; default to private.
            _vis=instance._vis if hasattr(instance, '_vis') else 'private')
        comment.put()
        if getattr(instance, 'counter', None) is not None:
            instance.counter.set_comments()
        from signals import comment_new
        comment_new.send(sender=comment)
        return comment

    def to_dict(self):
        return {
            'id': self.id if self.is_saved() else -1,
            'instance': self.instance.id,
            'created': self.created,
            'modified': self.modified,
            'msg': self.msg,
            'user': self.user,
        }

    def delete(self, force=False):
        """Soft-delete by default; remove from the datastore when force=True.

        BUGFIX: the force branch called ``super(Comment, self).deleted()``.
        ``deleted`` is the BooleanProperty above, not a method, so that line
        raised TypeError and the entity was never removed; the parent's
        ``delete()`` is what must be invoked.
        """
        from signals import comment_deleted
        if force:
            comment_deleted.send(self)
            super(Comment, self).delete()
        else:
            self.deleted = True
            self.put()
            comment_deleted.send(self)

    def __str__(self):
        return unicode(self.msg).encode('utf-8')

    def __unicode__(self):
        return self.msg
class BasePassphrase(db.Model):
    """Base model for various types of passphrases."""

    def __init__(self, owner=None, **kwds):
        # Convenience: accept a single `owner` instead of an `owners` list.
        super(BasePassphrase, self).__init__(**kwds)
        if owner:
            assert 'owners' not in kwds
            self.owners = [owner]

    # Class-level configuration; concrete passphrase types override these.
    AUDIT_LOG_MODEL = None
    ESCROW_TYPE_NAME = 'base_target'
    TARGET_PROPERTY_NAME = None
    SECRET_PROPERTY_NAME = 'undefined'
    ALLOW_OWNER_CHANGE = False
    MUTABLE_PROPERTIES = [
        'force_rekeying',
        'hostname',
        'owners',
    ]

    # True for only the most recently escrowed, unique target_id.
    active = db.BooleanProperty(default=True)
    created = db.DateTimeProperty(auto_now_add=True)
    created_by = AutoUpdatingUserProperty()  # user that created the object.
    force_rekeying = db.BooleanProperty(default=False)
    hostname = db.StringProperty()
    owners = OwnersProperty()
    tag = db.StringProperty(default='default')  # Key Slot

    def ChangeOwners(self, new_owners, request=None):
        """Changes owner.

        Args:
            new_owners: list New owners.
            request: a webapp Request object to fetch obtain details from.

        Returns:
            bool whether change was made.
        """
        if self.owners == sorted(new_owners):
            return False
        logging.info('changes owners of %s from %s to %s',
                     self.target_id, self.owners, new_owners)
        self.AUDIT_LOG_MODEL.Log(
            entity=self, request=request,
            message='changes owners of %s from %s to %s' % (
                self.target_id, self.owners, new_owners))
        # Mutation goes through the transactional helper; force_rekeying is
        # raised so the secret gets rotated for the new owner set.
        self._UpdateMutableProperties(self.key(), {
            'owners': new_owners,
            'force_rekeying': True,
        })
        return True

    def __eq__(self, other):
        # NOTE(review): compares every datastore property and assumes `other`
        # exposes the same names -- raises AttributeError otherwise. No
        # matching __hash__ is defined (Python 2 default identity hash kept).
        for p in self.properties():
            if getattr(self, p) != getattr(other, p):
                return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def ToDict(self, skip_secret=False):
        """Serializes all properties to unicode; optionally omits the secret."""
        passphrase = {
            p: unicode(getattr(self, p)) for p in self.properties()
            if not skip_secret or p != self.SECRET_PROPERTY_NAME
        }
        passphrase['id'] = str(self.key())
        passphrase['active'] = self.active  # store the bool, not string, value
        passphrase['target_id'] = self.target_id
        passphrase['owners'] = self.owners
        return passphrase

    @classmethod
    def GetLatestForTarget(cls, target_id, tag='default'):
        """Returns the most recently created entity for target/tag, or None."""
        entity = cls.all().filter('tag =', tag).filter(
            '%s =' % cls.TARGET_PROPERTY_NAME,
            target_id).order('-created').fetch(1)
        if not entity:
            return None
        return entity[0]

    def Clone(self):
        """Returns an unsaved copy; created/created_by will regenerate."""
        items = {
            p.name: getattr(self, p.name)
            for p in self.properties().values()
            if not isinstance(p, db.ComputedProperty)
        }
        del items['created_by']
        del items['created']
        return self.__class__(**items)

    @db.transactional(xg=True)
    def _PutNew(self, ancestor_key, *args, **kwargs):
        # Atomically deactivate the previous passphrase and store this one,
        # preserving the invariant of one active entity per target_id.
        ancestor = self.get(ancestor_key)
        if not ancestor.active:
            raise self.ACCESS_ERR_CLS(
                'parent entity is inactive: %s.' % self.target_id)
        ancestor.active = False
        super(BasePassphrase, ancestor).put(*args, **kwargs)
        return super(BasePassphrase, self).put(*args, **kwargs)

    def put(self, parent=None, *args, **kwargs):  # pylint: disable=g-bad-name
        """Disallow updating an existing entity, and enforce key_name.

        Args:
            parent: Optional. A Passphrase of the same type as the current
                instance. If passed then it is used as the parent entity for
                this instance.
            *args: Positional arguments to be passed to parent class' put
                method.
            **kwargs: Keyword arguments to be passed to parent class' put
                method.

        Returns:
            The key of the instance (either the existing key or a new key).

        Raises:
            errors.DuplicateEntity: Entity is a duplicate of active passphrase
                with same target_id.
            AccessError: required property was empty or not set.
        """
        if self.hostname:
            self.hostname = self.NormalizeHostname(self.hostname)
        model_name = self.__class__.__name__
        for prop_name in self.REQUIRED_PROPERTIES:
            if not getattr(self, prop_name, None):
                raise self.ACCESS_ERR_CLS(
                    'Required property empty: %s' % prop_name)
        if not self.active:
            raise self.ACCESS_ERR_CLS(
                'New entity is not active: %s' % self.target_id)
        if self.has_key():
            # NOTE(review): 'genenrated' typo is present in the original
            # message; kept byte-identical here.
            raise self.ACCESS_ERR_CLS(
                'Key should be auto genenrated for %s.' % model_name)
        existing_entity = parent
        if not existing_entity:
            existing_entity = self.__class__.GetLatestForTarget(
                self.target_id, tag=self.tag)
        if existing_entity:
            if not existing_entity.active:
                raise self.ACCESS_ERR_CLS(
                    'parent entity is inactive: %s.' % self.target_id)
            different_properties = []
            for prop in self.properties():
                if getattr(self, prop) != getattr(existing_entity, prop):
                    different_properties.append(prop)
            # Identical except (at most) creation time => duplicate escrow.
            if not different_properties or different_properties == ['created']:
                raise errors.DuplicateEntity()
            if self.created > existing_entity.created:
                return self._PutNew(existing_entity.key())
            else:
                # Out-of-order arrival: keep it, but never as the active one.
                logging.warning('entity from past')
                self.active = False
        return super(BasePassphrase, self).put(*args, **kwargs)

    @classmethod
    @db.transactional()
    def _UpdateMutableProperties(cls, key, changes):
        # Transactional re-fetch + update; refuses to touch inactive entities.
        entity = cls.get(key)
        if not entity.active:
            raise cls.ACCESS_ERR_CLS('entity is inactive: %s.'
                                     % entity.target_id)
        for property_name, value in changes.iteritems():
            if property_name == 'hostname':
                value = cls.NormalizeHostname(value)
            setattr(entity, property_name, value)
        return super(BasePassphrase, entity).put()

    def UpdateMutableProperty(self, property_name, value):
        """Updates one whitelisted property both in the datastore and on self.

        Raises:
            AccessError: the entity has not been saved yet.
            ValueError: property_name is not in MUTABLE_PROPERTIES.
        """
        if not self.has_key():
            raise self.ACCESS_ERR_CLS('Volume should be in the db.')
        if property_name not in self.MUTABLE_PROPERTIES:
            raise ValueError
        self._UpdateMutableProperties(self.key(), {property_name: value})
        setattr(self, property_name, value)

    @property
    def target_id(self):
        return getattr(self, self.TARGET_PROPERTY_NAME)

    @target_id.setter
    def _set_target_id(self, value):
        # NOTE(review): binding the setter to the name `_set_target_id`
        # (instead of `target_id`) leaves `target_id` itself read-only;
        # `obj.target_id = x` does NOT route through here -- confirm whether
        # that is intended.
        return setattr(self, self.TARGET_PROPERTY_NAME, value)

    @property
    def secret(self):
        return getattr(self, self.SECRET_PROPERTY_NAME)

    @property
    def checksum(self):
        # MD5 here serves as a non-cryptographic integrity checksum only.
        return hashlib.md5(self.secret).hexdigest()

    @classmethod
    def NormalizeHostname(cls, hostname, strip_fqdn=False):
        """Sanitizes a hostname for consistent search functionality.

        Args:
            hostname: str hostname to sanitize.
            strip_fqdn: boolean, if True removes fully qualified portion of
                hostname.

        Returns:
            str hostname.
        """
        # call this during escrow create, to sanitize before storage.
        if strip_fqdn:
            hostname = hostname.partition('.')[0]
        return hostname.lower()
class Proxy(db.Model):
    """A named proxy URL entry subject to approval before use."""
    name = db.StringProperty(required=True)
    url = db.LinkProperty(required=True)
    approved = db.BooleanProperty(default=False, required=True)  # moderation flag
class Room(db.Model):
    """All the data we store for a room: up to two users plus their
    connection state."""

    user1 = db.StringProperty()
    user2 = db.StringProperty()
    user1_connected = db.BooleanProperty(default=False)
    user2_connected = db.BooleanProperty(default=False)

    def __str__(self):
        # Render as "[u1-True, u2-False]"; either slot may be empty.
        pieces = ['[']
        if self.user1:
            pieces.append('%s-%r' % (self.user1, self.user1_connected))
        if self.user2:
            pieces.append(', %s-%r' % (self.user2, self.user2_connected))
        pieces.append(']')
        return ''.join(pieces)

    def get_occupancy(self):
        """Number of occupied user slots (0, 1 or 2)."""
        return sum(1 for occupant in (self.user1, self.user2) if occupant)

    def get_other_user(self, user):
        """The peer of `user`, or None if `user` is not in this room."""
        if user == self.user1:
            return self.user2
        if user == self.user2:
            return self.user1
        return None

    def has_user(self, user):
        return user and (user in (self.user1, self.user2))

    def add_user(self, user):
        """Place `user` in the first free slot and persist; error if full."""
        if not self.user1:
            self.user1 = user
        elif not self.user2:
            self.user2 = user
        else:
            raise RuntimeError('room is full')
        self.put()

    def remove_user(self, user):
        """Evict `user`, promote a remaining peer into slot 1, and persist
        (or delete the room when it becomes empty)."""
        delete_saved_messages(make_client_id(self, user))
        if user == self.user2:
            self.user2 = None
            self.user2_connected = False
        if user == self.user1:
            if self.user2:
                # Slide user2 down into the first slot.
                self.user1 = self.user2
                self.user1_connected = self.user2_connected
                self.user2 = None
                self.user2_connected = False
            else:
                self.user1 = None
                self.user1_connected = False
        if self.get_occupancy() > 0:
            self.put()
        else:
            self.delete()

    def set_connected(self, user):
        """Mark `user` as connected and persist."""
        if user == self.user1:
            self.user1_connected = True
        if user == self.user2:
            self.user2_connected = True
        self.put()

    def is_connected(self, user):
        # Returns None implicitly when `user` occupies neither slot.
        if user == self.user1:
            return self.user1_connected
        if user == self.user2:
            return self.user2_connected
class QueryView(FolderishMixin, ContentishMixin):
    """Folderish content object whose children are datastore query results."""
    zope.interface.implements(interfaces.IQueryView)
    _template = "query"
    body = db.TextProperty(default=u'')
    hidden = db.BooleanProperty(default=False)
    reparent = db.BooleanProperty(default=False)  # adopt results as children
    find_kind = db.StringProperty()   # datastore kind to query
    filters = db.TextProperty(default=u'')   # one "lhs, rhs" filter per line
    order_by = db.TextProperty(default=u'')  # one order clause per line
    group_by = db.TextProperty(default=u'')  # one grouping key per line
    custom_view = db.StringProperty(default=u'')

    def template(self):
        """Template name; a custom view overrides the default."""
        if self.custom_view:
            return self.custom_view
        return super(QueryView, self).template()

    def delContent(self, key, request):
        """Delete the entity at `key` and invalidate its path cache entry."""
        obj = db.get(key)
        path = obj.getPath()
        self.getRoot().setcached(path, None)
        obj.delete()

    def reparent_absolute_url(self, obj, request):
        """URL for `obj` addressed as a child of this view."""
        url = "%s%s/" % (self.absolute_url(request), str(obj.key()))
        return url

    def __getitem__(self, name):
        """Traverse to a result entity by its datastore key string.

        Raises:
            KeyError: key is malformed, of the wrong kind, or not found.
        """
        find_kind = self.find_kind.strip()
        root = self.getRoot()
        kind = root.get_model(find_kind)
        try:
            key = db.Key(name)
        except db.BadKeyError:
            raise KeyError
        if (key.kind() != find_kind):
            # PolyModel children share the root kind; check the class name.
            if issubclass(kind, polymodel.PolyModel):
                if kind.class_name() != find_kind:
                    raise KeyError(
                        'Entity key not valid for query (%s)' % find_kind)
            else:
                raise KeyError(
                    'Entity key not valid for query (%s)' % find_kind)
        cache_key = str(key)
        obj = root.getcached(cache_key)
        if not obj:
            obj = db.get(name)
            root.setcached(cache_key, obj)
        if obj:
            if isinstance(obj, NonContentishMixin) or self.reparent:
                try:
                    obj.__parent__ = self
                    obj.__name__ = str(obj.key())
                except AttributeError:
                    pass
            return obj
        else:
            raise KeyError('Object not found')

    def content_summary(self, request, limit=None):
        """Build (and cache) summary dicts for the query's results.

        NOTE(security): the right-hand side of each filter line is passed
        through eval(); `filters` must only ever be editable by trusted
        administrators.
        """
        results = []
        if not request:
            request = self._request()
        root = self.getRoot()
        cache_key = str(self.absolute_url().rstrip()) + ":summary"
        cached_result = root.getcached(cache_key)
        # Admins bypass the cache so they see hidden/fresh content.
        if cached_result and not getattr(request.principal, 'ADMIN', False):
            if limit:
                cached_result = cached_result[0:min(len(cached_result), limit)]
            return cached_result
        kind = root.get_model(self.find_kind)
        query = kind.all()
        if self.filters:
            for i in self.filters.split('\n'):
                if i:
                    lhs, rhs = i.split(',', 1)
                    query = query.filter(lhs.strip(), eval(rhs.strip()))
        if self.order_by:
            for i in self.order_by.split('\n'):
                if i:
                    query = query.order(i.strip())
        for i in query:
            summary = {}
            if isinstance(i, NonContentishMixin) or self.reparent:
                i.__name__ = str(i.key())
                i.__parent__ = self
            summary['key'] = str(i.key())
            if not getattr(i, 'hidden', False):
                url = ""
                if self.reparent:
                    url = self.reparent_absolute_url(i, request)
                else:
                    url = i.absolute_url(request)
                if hasattr(i, 'item_summary'):
                    summary = i.item_summary()
                    summary['url'] = url
                else:
                    try:
                        title = i.title_or_id()
                    except AttributeError:
                        title = i.key()
                    description = getattr(i, 'description', '')
                    name = getattr(i, 'name', i.key())
                    summary = {
                        'name': name,
                        'url': url,
                        'title': title,
                        'description': description,
                        'kind': i.kind(),
                        'key': i.key()
                    }
                if hasattr(i, 'image_thumbnail'):
                    summary['thumbnail'] = url + 'thumbnail'
                if hasattr(i, 'image'):
                    summary['thumbnail'] = url + 'mini'
            results.append(summary)
        # BUGFIX: the cache write was duplicated -- a second, unconditional
        # setcached immediately followed this guarded one, so admin-built
        # results (which include hidden items) leaked into the shared cache
        # despite the ADMIN check. Cache exactly once, non-admin only.
        if not getattr(request.principal, 'ADMIN', False):
            root.setcached(cache_key, results)
        if limit:
            results = results[0:min(len(results), limit)]
        return results

    def groupby(self, results):
        """Group summary dicts by the configured group_by keys.

        Delegates to the module-level (itertools-style) groupby, so
        `results` must already be ordered by the same keys for groups to be
        contiguous. Defaults to grouping by 'Kind'.
        """
        groupby_keys = [
            i.strip() for i in self.group_by.split('\n') if i.strip()
        ]
        if not groupby_keys:
            groupby_keys = ['Kind', ]

        def makekeyfunc(groupkeys):
            def groupkey(item):
                return tuple([item[i] for i in groupkeys])
            return groupkey
        return groupby(results, makekeyfunc(groupby_keys))

    def groupit(self, results):
        """Materialize groupby() output into [{'name', 'item', 'group'}]."""
        gresults = []
        for name, i in self.groupby(results):
            gresults.append({'name': name, 'group': []})
            n = 0
            for i1 in i:
                if n == 0:
                    # First member doubles as the group's representative item.
                    gresults[-1]['item'] = i1
                gresults[-1]['group'].append(i1)
                n = n + 1
        return gresults
class Folder(db.Model): """A collection of programs created by a user""" # Parent is a User # key is the folder's name (unique for a user) isPublic = db.BooleanProperty()
class Student(BaseEntity):
    """Student profile, write-through cached in memcache keyed by email."""

    enrolled_on = db.DateTimeProperty(auto_now_add=True, indexed=True)
    user_id = db.StringProperty(indexed=False)
    name = db.StringProperty(indexed=False)
    is_enrolled = db.BooleanProperty(indexed=False)

    # Each of the following is a string representation of a JSON dict.
    scores = db.TextProperty(indexed=False)

    @classmethod
    def _memcache_key(cls, key):
        """Makes a memcache key from primary key."""
        return 'entity:student:%s' % key

    def put(self):
        """Do the normal put() and also add the object to memcache."""
        saved_key = super(Student, self).put()
        MemcacheManager.set(self._memcache_key(self.key().name()), self)
        return saved_key

    def delete(self):
        """Do the normal delete() and also remove the object from memcache."""
        super(Student, self).delete()
        MemcacheManager.delete(self._memcache_key(self.key().name()))

    @classmethod
    def get_by_email(cls, email):
        return Student.get_by_key_name(email.encode('utf8'))

    @classmethod
    def get_enrolled_student_by_email(cls, email):
        """Returns enrolled student or None."""
        cache_key = cls._memcache_key(email)
        student = MemcacheManager.get(cache_key)
        if NO_OBJECT == student:
            # Negative-cache hit: we already know there is no such student.
            return None
        if not student:
            student = Student.get_by_email(email)
            # Cache the entity, or the NO_OBJECT sentinel on a miss.
            MemcacheManager.set(cache_key,
                                student if student else NO_OBJECT)
        return student if (student and student.is_enrolled) else None

    @classmethod
    def rename_current(cls, new_name):
        """Gives student a new name."""
        user = users.get_current_user()
        if not user:
            raise Exception('No current user.')
        if not new_name:
            return
        student = Student.get_by_email(user.email())
        student.name = new_name
        student.put()

    @classmethod
    def set_enrollment_status_for_current(cls, is_enrolled):
        """Changes student enrollment status."""
        user = users.get_current_user()
        if not user:
            raise Exception('No current user.')
        student = Student.get_by_email(user.email())
        student.is_enrolled = is_enrolled
        student.put()
class GradingRecord(base.ModelWithFieldAttributes): """Explicitly group SurveyRecords with a common project. Because Mentors and Students take different surveys, we cannot simply link survey records by a common project and survey. Instead, we establish a GradingRecord. A GradingRecord links a group of survey records with a common project, and links back to its records. This entity can be edited by Program Administrators to edit the outcome of a the Grading surveys without touching the real survey's answers. Also if a ProjectSurvey has been coupled to the GradingSurveyGroup this must be on record as well for the GradingRecord to state a pass, even if the Mentor has filled in a passing grade. """ #: The GradingSurveyGroup to which this record belongs grading_survey_group = db.ReferenceProperty( reference_class=GradingSurveyGroup, required=True, collection_name='grading_records') #: Mentor's GradingProjectSurveyRecord for this evaluation. Iff exists. mentor_record = db.ReferenceProperty( reference_class=GradingProjectSurveyRecord, required=False, collection_name='mentor_grading_records') #: Student's ProjectSurveyRecord for this evaluation. Iff exists. student_record = db.ReferenceProperty( reference_class=ProjectSurveyRecord, required=False, collection_name='student_grading_records') #: Project for this evaluation. project = db.ReferenceProperty( reference_class=StudentProject, required=True, collection_name='grading_records') #: Grade decision set for this grading record. #: pass: Iff the mentor_record states that the student has passed. #: And if a ProjectSurvey has been set in the GradingSurveyGroup #: then the student_record must be set as well. #: fail: If the mentor_record states that the student has failed. The #: student_record does not matter in this case. However if the mentor #: states that the student has passed, a ProjectSurvey has been #: set in the GradingSurveyGroup and the student_record property is not #: set the decision will be fail. 
#: undecided: If no mentor_record has been set. grade_decision = db.StringProperty(required=True, default='undecided', choices=['pass', 'fail', 'undecided']) #: Boolean that states if the grade_decision property has been locked #: This is to prevent an automatic update from a GradingSurveyGroup to #: overwrite the decision made by for example a Program Administrator. locked = db.BooleanProperty(required=False, default=False, verbose_name=ugettext('Grade Decision locked')) #: Property containing the date that this GradingRecord was created. created = db.DateTimeProperty(auto_now_add=True) #: Property containing the last date that this GradingRecord was modified. modified = db.DateTimeProperty(auto_now=True)
class List(db.polymodel.PolyModel, model_plus.Model): ''' NO USAR ESTA LISTA, USAR LOS MODELOS ESPECIFICOS :D ''' name = db.StringProperty(required=True) description = db.TextProperty() keys = db.ListProperty(db.Key) created = db.DateTimeProperty(auto_now_add = True) modified = db.DateTimeProperty(auto_now=True) active = db.BooleanProperty(default=True) _short_url = db.URLProperty(indexed=False) count = db.IntegerProperty(default=0) # numero de sugerencias en la lista _counters = None _new = False @property def id(self): return int(self.key().id()) @property def short_url(self): from os import environ if environ['HTTP_HOST'] == 'localhost:8080': return 'http://%s%s' % (environ['HTTP_HOST'], self.get_absolute_url()) if self._short_url is None: self._get_short_url() if self._short_url is not None: self.put() else: from os import environ return 'http://%s%s' % (environ['HTTP_HOST'], self.get_absolute_url()) return self._short_url @property def counters(self): if self._counters is None: self._counters = ListCounter.all().ancestor(self.key()).get() if self._counters is None: self._counters = ListCounter(parent=self) self._counters.put() return self._counters @classproperty def objects(self): return ListHelper() def _pre_put(self): self.count = len(self.keys) if not self.is_saved(): self._get_short_url() self._new = True def put(self, from_comment=False): if self.is_saved(): super(List, self).put() if from_comment: return self from watchers import modified_list, deleted_list if not self.active: list_deleted.send(sender=self) else: list_modified.send(sender=self) else: super(List, self).put() counter = ListCounter(parent=self) a = db.put_async(counter) from watchers import new_list list_new.send(sender=self) a.get_result() def delete(self): children = db.query_descendants(self).fetch(100) for c in children: c.delete() return db.delete_async(self) def to_dict(self, resolve=False, instances=None): dict = {'id': self.id, 'name': self.name, 'description': self.description, 
'modified': self.modified if self.modified is not None else 0, 'created': self.created if self.created is not None else 0, 'tags': self.tags if hasattr(self, 'tags') else None, 'count': self.count, 'counters': self.counters.to_dict() if self.counters is not None else None, 'keys': [i.id() for i in self.keys], 'visibility': self._get_visibility(), 'get_absolute_url': self.get_absolute_url(), 'get_absolute_fburl': self.get_absolute_fburl(), 'short_url': self.short_url, } if resolve: if instances is not None: dict['instances'] = [instances[k] for k in self.keys] dict['user'] = instances.get(ListSuggestion.user.get_value_for_datastore(self), self.user) else: dict['instances'] = db.get(self.keys) else: dict['user'] = self.user.username return dict def to_json(self): from libs.jsonrpc.jsonencoder import JSONEncoder return simplejson.dumps(self.to_dict(), cls=JSONEncoder) def __str__(self): return unicode(self.name).encode('utf-8') def __unicode__(self): return self.name def get_absolute_url(self): return '/list/%s/' % str(self.id) def get_absolute_fburl(self): return '/fb%s' % self.get_absolute_url() def _get_short_url(self): from libs.vavag import VavagRequest from os import environ try: # parche hasta conseguir que se cachee variable global client = VavagRequest(settings.SHORTENER_ACCESS['user'], settings.SHORTENER_ACCESS['key']) response = client.set_pack('http://%s%s' % (environ['HTTP_HOST'], self.get_absolute_url())) self._short_url = response['packUrl'] except Exception, e: import logging logging.error('ERROR EN VAVAG: %s' % e.message) self._short_url = None
class Feed(db.Model):
    """A single feed subscription belonging to a Book."""
    book = db.ReferenceProperty(Book)
    title = db.StringProperty()
    url = db.StringProperty()
    isfulltext = db.BooleanProperty()
    time = db.DateTimeProperty()  # when the feed was added; used for sorting
class KeUser(db.Model):  # kindleEar User
    name = db.StringProperty(required=True)
    passwd = db.StringProperty(required=True)
    secret_key = db.StringProperty()
    kindle_email = db.StringProperty()
    enable_send = db.BooleanProperty()
    send_days = db.StringListProperty()
    send_time = db.IntegerProperty()
    timezone = db.IntegerProperty()
    book_type = db.StringProperty()
    device = db.StringProperty()
    expires = db.DateTimeProperty()
    ownfeeds = db.ReferenceProperty(Book)  # every user has their own custom RSS feed
    use_title_in_feed = db.BooleanProperty()  # prefer the article title from the feed, or from the web page
    titlefmt = db.StringProperty()  # format for adding the date to the metadata title
    merge_books = db.BooleanProperty()  # whether to merge books into a single one
    share_fuckgfw = db.BooleanProperty()  # whether archiving/sharing needs to bypass the GFW
    evernote = db.BooleanProperty()  # whether to share to Evernote
    evernote_mail = db.StringProperty()  # Evernote email address
    wiz = db.BooleanProperty()  # WizNote
    wiz_mail = db.StringProperty()
    pocket = db.BooleanProperty(default=False)  # send to Pocket (address scrubbed in source; presumably [email protected])
    pocket_access_token = db.StringProperty(default='')
    pocket_acc_token_hash = db.StringProperty(default='')
    instapaper = db.BooleanProperty()
    instapaper_username = db.StringProperty()
    instapaper_password = db.StringProperty()
    xweibo = db.BooleanProperty()
    tweibo = db.BooleanProperty()
    facebook = db.BooleanProperty()  # share links to Facebook
    twitter = db.BooleanProperty()
    tumblr = db.BooleanProperty()
    browser = db.BooleanProperty()
    qrcode = db.BooleanProperty()  # whether to append a QR code of the article URL at the end of articles
    cover = db.BlobProperty()  # per-user custom cover image (binary content)

    @property
    def whitelist(self):
        # NOTE(review): the remainder of this class was mangled by an
        # automated credential scrubber ('******' residue below); several
        # query helpers appear to have been collapsed into this single
        # statement (note the unbound `title` name). Preserved byte-for-byte
        # -- restore from upstream version control before use.
        return WhiteList.all().filter('user = '******'user = '******'user = '******'title = ', title).get()
class Page(db.Model):
    """A styled page: its URL, its Style entities, and live edit channels."""
    name = db.StringProperty(required=True)
    url = db.LinkProperty(required=True)
    site = db.ReferenceProperty(Site)
    _styles = db.ListProperty(db.Key)   # keys of Style entities, in order
    channels = db.ListProperty(db.Key)  # keys of live PageChannel entities
    preview_img = db.BlobProperty(required=False, default=None)
    preview_urls = db.ListProperty(db.Link, default=None)  # *additional* preview urls
    import_state = db.IntegerProperty(default=0)
    on_cdn = db.BooleanProperty(default=False)
    _style_cache = None  # in-process cache of resolved Style entities

    def _set_styles(self, styles):
        # Keep the resolved entities and their keys in sync.
        self._style_cache = styles
        self._styles = [style.key() for style in styles]

    def _get_styles(self):
        if not self._style_cache:
            self._style_cache = [Style.get(k) for k in self._styles]
        return self._style_cache
    styles = property(_get_styles, _set_styles)

    def delete(self):
        """Delete the page, its styles, and lock+drop every open channel."""
        for key in self.channels:
            channel = PageChannel.get(key)
            if channel:
                channel.send_message({'cmd': 'lock'})
                channel.delete()
        for style in self.styles:
            style.delete()
        db.delete(self)

    def clean_channels(self):
        """Drop channels that have gone stale, locking them defensively."""
        stale = []
        for key in self.channels:
            channel = PageChannel.get(key)
            if not channel or channel.is_stale():
                stale.append(key)
        if stale:
            for key in stale:
                self.channels.remove(key)
                channel = PageChannel.get(key)
                if channel:
                    # If the channel is still here, it's probably stale.
                    # Send 'lock' and remove, so it can't clobber anyone else.
                    channel.send_message({'cmd': 'lock'})
                    channel.delete()
            self.put()

    def get_channels(self):
        """Resolve channel keys to entities, pruning dead keys as we go."""
        channels = []
        stale = []
        for key in self.channels:
            channel = PageChannel.get(key)
            if channel:
                channels.append(channel)
            else:
                stale.append(key)
        if stale:
            for key in stale:
                self.channels.remove(key)
            self.put()
        return channels

    def update_locks(self):
        """First channel in the list holds the edit lock; others are locked."""
        owner = None
        channels = self.get_channels()
        if channels:
            owner_user = channels[0].user
            owner = dict(name=owner_user.nickname(), email=owner_user.email())
            channels[0].send_message(dict(cmd='unlock', user=owner))
        lock_msg = dict(cmd='lock', user=owner)
        for channel in channels[1:]:
            channel.send_message(lock_msg)

    def add_channel(self, channel):
        # Re-add at the back of the queue (remove first to avoid duplicates).
        self.remove_channel(channel)
        self.channels.append(channel.key())
        self.put()

    def add_channel_first(self, channel):
        # Put this channel at the head, i.e. make it the lock owner.
        self.remove_channel(channel)
        self.channels.insert(0, channel.key())
        self.put()

    def remove_channel(self, channel, delete=False):
        if channel.key() in self.channels:
            self.channels.remove(channel.key())
            self.put()
        if delete:
            channel.delete()

    def put(self, *args, **kwargs):
        # Refresh _styles from the cached entities before every save.
        self._set_styles(self.styles)
        super(Page, self).put(*args, **kwargs)

    def queue_preview(self):
        """Enqueue a screenshot/preview fetch task for this page."""
        taskqueue.add(queue_name='fetch-preview',
                      url=url_for('tasks.fetch_preview'),
                      params={'page_key': self.key()})

    def queue_upload(self):
        """Enqueue a CSS upload task for this page."""
        taskqueue.add(queue_name='upload-css',
                      url=url_for('tasks.upload_style'),
                      params={'page_key': self.key()})

    def queue_refresh(self):
        self.queue_upload()
        self.queue_preview()

    def _css(self, preview, compress):
        """Concatenate the CSS of all styles.

        Uses each style's preview revision when `preview` is set and one
        exists, otherwise the published revision.
        """
        css = StringIO()
        for style in self.styles:
            rev = style.preview_rev if (
                preview and style.preview_rev) else style.published_rev
            if compress:
                css.write(rev.compressed)
            else:
                css.write(scss.Scss().compile('@option compress:no;' + rev.raw))
        return css.getvalue()

    def compressed_css(self, preview):
        return self._css(preview, compress=True)

    def uncompressed_css(self, preview):
        return self._css(preview, compress=False)

    def last_modified(self, preview):
        """Latest edit time across the revisions _css would serve."""
        max_last_edit = datetime.min
        for style in self.styles:
            rev = style.preview_rev if (
                preview and style.preview_rev) else style.published_rev
            max_last_edit = max(max_last_edit, rev.dt_last_edit)
        return max_last_edit

    def styles_json(self):
        # NOTE: It is okay to return an array here because we only display this
        # to users via editor.html. If we ever return this directly as the
        # response, we'll want to wrap it to avoid the exploit described at
        # http://haacked.com/archive/2009/06/25/json-hijacking.aspx
        styles_obj = [style.json_obj() for style in self.styles]
        return json.dumps(styles_obj, default=dt_handler, sort_keys=True,
                          indent=4 * ' ' if settings.debug else None)

    def upload_to_cdn(self):
        """Push the compressed CSS to Google Cloud Storage; track on_cdn."""
        if not settings.use_google_cloud_storage:
            return
        path = files.gs.create('/gs/%s/%s.css' % (settings.google_bucket,
                                                  str(self.key())),
                               mime_type='text/css', acl='public-read',
                               cache_control='private,max-age=300')
        try:
            fd = files.open(path, 'a')
            fd.write(self.compressed_css(False).encode('utf-8'))
            self.on_cdn = True
            self.save()
        except Exception:
            self.on_cdn = False
            self.save()
            raise
        finally:
            # NOTE(review): if files.open() itself raises, `fd` is unbound
            # here and this finally raises NameError -- confirm upstream.
            fd.close()
            files.finalize(path)

    @staticmethod
    def get_or_404(key):
        """Resolve `key` (numeric id, Page key, or Style key) or abort(404)."""
        page = None
        if isinstance(key, int) or (isinstance(key, basestring)
                                    and key.isdigit()):
            page = Page.get_by_id(int(key))
        else:
            try:
                key_obj = db.Key(key)
            except BadKeyError:
                abort(404)
            if (key_obj.kind() == 'Style'):
                # A Style key: find the page that owns that style.
                page = Page.gql('WHERE _styles=:1', key_obj).get()
            else:
                page = Page.get(key)
        if not page:
            abort(404)
        return page

    @staticmethod
    def get_edit_or_404(page_id):
        """As get_or_404, but the current user must be a site member."""
        page = Page.get_or_404(page_id)
        if gae_users.get_current_user() not in page.site.users:
            abort(404)
        return page

    @staticmethod
    def get_admin_or_404(page_id):
        """As get_or_404, but the current user must be a site admin."""
        page = Page.get_or_404(page_id)
        site = page.site
        if not site or gae_users.get_current_user() not in site.admins:
            abort(404)
        return page

    @staticmethod
    def new_page(site, name, url):
        '''
        Do all the work in adding a new page to a site.
        '''
        style = Style(name=name, site=site)
        style.put()
        first_rev = StyleRevision(parent=style)
        first_rev.raw = render_template('first_run.css')
        first_rev.put()
        style.published_rev = first_rev
        style.put()
        page = Page(name=name, url=url, site=site, _styles=[style.key()])
        page.put()
        page.queue_refresh()
        return page
class UserSettings(db.Model):
    """Per-user UI/preference flags, keyed by the user's id."""
    user = db.UserProperty(required=True)
    seen_example = db.BooleanProperty(default=False)
    seen_guiders = db.StringListProperty()
    # the last version (list of ints) this person has viewed the release
    # notes for
    seen_version = db.ListProperty(int, default=None)
    locale = db.StringProperty(default=None)
    chimped = db.BooleanProperty(default=False)

    @staticmethod
    def _current_settings(required=False):
        """Fetch (or create) the settings row for the signed-in user.

        Factored out of eight methods that each repeated the
        get_current_user + get_or_insert boilerplate.

        Args:
            required: when True, raise instead of returning None if nobody
                (or an id-less user) is signed in.

        Returns:
            The UserSettings entity, or None when not signed in.
        """
        user = gae_users.get_current_user()
        if not user or not user.user_id():
            if required:
                raise Exception("Logged in user expected")
            return None
        return UserSettings.get_or_insert(user.user_id(), user=user)

    @staticmethod
    def has_seen_example():
        return UserSettings._current_settings(required=True).seen_example

    @staticmethod
    def mark_example_as_seen():
        settings = UserSettings._current_settings(required=True)
        settings.seen_example = True
        settings.put()

    @staticmethod
    def show_guider(guider_name):
        """True when the guider should be shown to the current user."""
        settings = UserSettings._current_settings()
        if settings is None:
            return False
        return guider_name not in settings.seen_guiders

    @staticmethod
    def mark_guider_as_seen(guider_name):
        settings = UserSettings._current_settings()
        if settings is None:
            return
        if guider_name not in settings.seen_guiders:
            settings.seen_guiders.append(guider_name)
            settings.put()

    @staticmethod
    def has_seen_version(version):
        """True when the current user has already seen `version`'s notes."""
        settings = UserSettings._current_settings()
        if settings is None:
            # don't bother displaying "new version available" to
            # non-authenticated users
            return True
        if not settings.seen_version:
            settings.seen_version = [0, 0, 0]
            settings.put()
        # List-of-ints comparison: lexicographic, matching version ordering.
        return settings.seen_version >= version

    @staticmethod
    def mark_version_as_seen(version):
        settings = UserSettings._current_settings()
        if settings is None:
            return
        settings.seen_version = version
        settings.put()

    @staticmethod
    def get_locale():
        settings = UserSettings._current_settings()
        if settings is None:
            return None
        return settings.locale

    @staticmethod
    def set_locale(locale):
        settings = UserSettings._current_settings()
        if settings is None:
            return
        settings.locale = locale
        settings.put()
class User(db.Model):
    """Universal user model. Can be used with App Engine's default users API,
    own auth or third party authentication methods (OpenId, OAuth etc).
    """
    #: Creation date.
    created = db.DateTimeProperty(auto_now_add=True)
    #: Modification date.
    updated = db.DateTimeProperty(auto_now=True)
    #: User defined unique name, also used as key_name.
    username = db.StringProperty(required=True)
    #: Password, only set for own authentication.
    password = db.StringProperty(required=False)
    #: User email
    email = db.EmailProperty()
    #: Authentication identifier, depending on the auth method used.
    #: For example:
    #: * own:username
    #: * gae:user_id
    #: * openid:identifier
    #: * twitter:username
    #: * facebook:username
    auth_id = db.StringProperty(required=True)
    uid = db.StringProperty(required=True)
    #added by Bryan - basic profile info
    profile_url = db.StringProperty(required=True)
    profile_image_url = db.StringProperty()
    #added by Bryan from sample GAE simple example
    access_token = db.StringProperty(required=True) #can't require yet
    access_token_expires = db.DateTimeProperty() #can't require yet
    #id = db.StringProperty()#required=True)
    name = db.StringProperty(required=True)
    domain = db.StringProperty(required=True)
    # Session id, renewed periodically for improved security.
    session_id = db.StringProperty(required=True)
    # Session id last renewal date.
    session_updated = db.DateTimeProperty(auto_now_add=True)
    # Admin flag.
    is_admin = db.BooleanProperty(required=True, default=False)
    friend_ids = db.StringListProperty()
    character = db.ReferenceProperty(reference_class=Character)
    is_deleted = db.BooleanProperty(default=False)

    @classmethod
    def get_by_username(cls, username):
        # username doubles as the entity's key_name, so this is a key lookup.
        return cls.get_by_key_name(username)

    @classmethod
    def get_by_auth_id(cls, auth_id):
        # Indexed query; returns the first matching user or None.
        return cls.all().filter('auth_id =', auth_id).get()

    @classmethod
    def create(cls, username, auth_id, **kwargs):
        #def create(cls, username, auth_id, profile_url, profile_image_url, **kwargs):
        """Creates a new user and returns it.

        If the username already exists, returns None.

        :param username:
            Unique username.
        :param auth_id:
            Authentication id, according the the authentication method used.
        :param kwargs:
            Additional entity attributes.  NOTE(review): must include
            'expires_seconds' (used to compute access_token_expires) —
            a missing key raises KeyError.  Extra keys are forwarded
            unchanged to cls(**kwargs).
        :returns:
            The newly created user or None if the username already exists.
        """
        kwargs['username'] = username
        kwargs['key_name'] = username
        now = datetime.datetime.now()
        # Convert the caller-supplied lifetime into an absolute expiry time.
        kwargs['access_token_expires'] = now + datetime.timedelta(
            seconds=int(kwargs['expires_seconds']))
        #kwargs['profile_url'] = profile_url
        #kwargs['profile_image_url'] = profile_image_url
        # auth_id is expected to look like 'domain|identifier'.
        kwargs['domain'] = auth_id.split('|')[0]
        kwargs['auth_id'] = auth_id
        # Generate an initial session id.
        kwargs['session_id'] = gen_salt(length=32)
        kwargs['friend_ids'] = []
        if 'password_hash' in kwargs:
            # Password is already hashed.
            kwargs['password'] = kwargs.pop('password_hash')
        elif 'password' in kwargs:
            # Password is not hashed: generate a hash.
            kwargs['password'] = gen_pwhash(kwargs['password'])
        config = Config.get_for_current_environment()
        def txn():
            # Transactional uniqueness check + creation of the user and its
            # associated Character in a single commit.
            if cls.get_by_username(username) is not None:
                # Username already exists.
                return None
            user = cls(**kwargs)
            character = Character.create(config=config, user=user,
                image_url=user.profile_image_url, is_fake=False)
            user.character = character
            db.put((user, character))
            return user
        return db.run_in_transaction(txn)

    def set_password(self, new_password):
        """Sets a new, plain password.

        :param new_password:
            A plain, not yet hashed password.
        :returns:
            None.
        """
        self.password = gen_pwhash(new_password)

    def check_password(self, password):
        """Checks if a password is valid. This is done with form login

        :param password:
            Password to be checked.
        :returns:
            True is the password is valid, False otherwise.
        """
        # Calls the module-level check_password() helper; the method name
        # does not shadow it inside the body.
        if not check_password(self.password, password):
            return False
        # Check if session id needs to be renewed.
        self._renew_session()
        return True

    def check_session(self, session_id):
        """Checks if a session id is valid.

        :param session_id:
            Session id to be checked.
        :returns:
            True is the session id is valid, False otherwise.
        """
        if self.session_id != session_id:
            return False
        # Check if session id needs to be renewed.
        self._renew_session()
        return True

    def _renew_session(self, force=False):
        """Renews the session id if its expiration time has passed.

        :param force:
            True to force the session id to be renewed, False to check
            if the expiration time has passed.
        :returns:
            None.
        """
        if force is False:
            # Only renew the session id if it is too old.
            expires = datetime.timedelta(seconds=get_config('tipfy.ext.auth',
                'session_max_age'))
            force = (self.session_updated + expires < datetime.datetime.now())
        if force is True:
            self.session_id = gen_salt(length=32)
            self.session_updated = datetime.datetime.now()
            # Persists the renewed session id immediately.
            self.put()

    def __unicode__(self):
        """Returns this entity's username.

        :returns:
            Username, as unicode.
        """
        return unicode(self.username)

    def __str__(self):
        """Returns this entity's username.

        :returns:
            Username, as unicode.
        """
        return self.__unicode__()

    def __eq__(self, obj):
        """Compares this user entity with another one.

        :returns:
            True if both entities have same key, False otherwise.
        """
        if not obj:
            return False
        return str(self.key()) == str(obj.key())

    def __ne__(self, obj):
        """Compares this user entity with another one.

        :returns:
            True if both entities don't have same key, False otherwise.
        """
        return not self.__eq__(obj)
class Task(db.Model):
    """A single to-do item belonging to an App Engine user."""
    # Owner of the task (optional).
    user = db.UserProperty()
    # Display text of the task.
    name = db.StringProperty(required=True)
    # True once the task has been completed; may be None if never set.
    done = db.BooleanProperty()
class Base(db.Model):
    """Base class providing methods common to both Person and Note entities,
    whose key names are partitioned using the repo name as a prefix."""

    # max records to fetch in one go.
    FETCH_LIMIT = 200

    # Even though the repo is part of the key_name, it is also stored
    # redundantly as a separate property so it can be indexed and queried upon.
    repo = db.StringProperty(required=True)

    # We can't use an inequality filter on expiry_date (together with other
    # inequality filters), so we use a periodic task to set the is_expired flag
    # on expired records, and filter using the flag.  A record's life cycle is:
    #
    # 1. Record is created with some expiry_date.
    # 2. expiry_date passes.
    # 3. tasks.DeleteExpired sets is_expired to True; record vanishes from UI.
    # 4. delete.EXPIRED_TTL_DAYS days pass.
    # 5. tasks.DeleteExpired wipes the record.

    # We set default=False to ensure all entities are indexed by is_expired.
    # NOTE: is_expired should ONLY be modified in Person.put_expiry_flags().
    is_expired = db.BooleanProperty(required=False, default=False)

    @classmethod
    def all(cls, keys_only=False, filter_expired=True):
        """Returns a query for all records of this kind; by default this
        filters out the records marked as expired.

        Args:
            keys_only - If true, return only the keys.
            filter_expired - If true, omit records with is_expired == True.
        Returns:
            query - A Query object for the results.
        """
        query = super(Base, cls).all(keys_only=keys_only)
        if filter_expired:
            query.filter('is_expired =', False)
        return query

    @classmethod
    def all_in_repo(cls, repo, filter_expired=True):
        """Gets a query for all entities in a given repository."""
        return cls.all(filter_expired=filter_expired).filter('repo =', repo)

    def get_record_id(self):
        """Returns the record ID of this record."""
        # Key name format is '<repo>:<record_id>'; split off the repo prefix.
        repo, record_id = self.key().name().split(':', 1)
        return record_id
    record_id = property(get_record_id)

    def get_original_domain(self):
        """Returns the domain name of this record's original repository."""
        return self.record_id.split('/', 1)[0]
    original_domain = property(get_original_domain)

    def is_original(self):
        """Returns True if this record was created in this repository."""
        return is_original(self.repo, self.record_id)

    def is_clone(self):
        """Returns True if this record was copied from another repository."""
        return not self.is_original()

    @classmethod
    def get_key(cls, repo, record_id):
        """Get entity key from its record id."""
        return db.Key.from_path(cls.kind(), repo + ':' + record_id)

    @classmethod
    def get_all(cls, repo, record_ids, limit=200):
        """Gets the entities with the given record_ids in a given repository.

        Missing records are silently omitted from the result.
        NOTE(review): the `limit` parameter is currently unused -- every
        requested id is fetched; confirm whether callers expect it to cap
        the result before honoring it.
        """
        keys = [cls.get_key(repo, record_id) for record_id in record_ids]
        return [record for record in db.get(keys) if record is not None]

    @classmethod
    def get(cls, repo, record_id, filter_expired=True):
        """Gets the entity with the given record_id in a given repository.

        Returns None when no such record exists, or when it is expired and
        filter_expired is True.
        """
        record = cls.get_by_key_name(repo + ':' + record_id)
        if record:
            if not (filter_expired and record.is_expired):
                return record
        return None

    @classmethod
    def create_original(cls, repo, **kwargs):
        """Creates a new original entity with the given field values."""
        # TODO(ryok): Consider switching to URL-like record id format,
        # which is more consistent with repo id format.
        record_id = '%s.%s/%s.%d' % (repo, HOME_DOMAIN,
                                     cls.__name__.lower(),
                                     UniqueId.create_id())
        return cls(key_name=repo + ':' + record_id, repo=repo, **kwargs)

    @classmethod
    def create_clone(cls, repo, record_id, **kwargs):
        """Creates a new clone entity with the given field values."""
        assert is_clone(repo, record_id)
        return cls(key_name=repo + ':' + record_id, repo=repo, **kwargs)

    # TODO(kpy): Rename this function (maybe to create_with_record_id?).
    @classmethod
    def create_original_with_record_id(cls, repo, record_id, **kwargs):
        """Creates an original entity with the given record_id and field
        values, overwriting any existing entity with the same record_id.
        This should be rarely used in practice (e.g. for an administrative
        import into a home repository), hence the long method name."""
        return cls(key_name=repo + ':' + record_id, repo=repo, **kwargs)
class Bottle(CarryableObject, PourableObject):
    """A bottle: an object that can be both carried and poured from."""
    # True when the bottle is closed; may be None if never set.
    is_closed = db.BooleanProperty()
class Person(Base):
    """The datastore entity kind for storing a PFIF person record.

    Never call Person() directly; use Person.create_clone() or
    Person.create_original().

    Methods that start with "get_" return actual values or lists of values;
    other methods return queries or generators for values.
    """

    # If you add any new fields, be sure they are handled in wipe_contents().

    # entry_date should update every time a record is created or re-imported.
    entry_date = db.DateTimeProperty(required=True)
    expiry_date = db.DateTimeProperty(required=False)

    author_name = db.StringProperty(default='', multiline=True)
    author_email = db.StringProperty(default='')
    author_phone = db.StringProperty(default='')

    # the original date we saw this record; it should not change.
    original_creation_date = db.DateTimeProperty(auto_now_add=True)

    # source_date is the date that the original repository last changed
    # any of the fields in the pfif record.
    source_date = db.DateTimeProperty()

    source_name = db.StringProperty(default='')
    source_url = db.StringProperty(default='')

    # TODO(ryok): consider marking this required.
    full_name = db.StringProperty(multiline=True)
    given_name = db.StringProperty()
    family_name = db.StringProperty()
    alternate_names = db.StringProperty(default='', multiline=True)
    description = db.TextProperty(default='')
    sex = db.StringProperty(default='', choices=pfif.PERSON_SEX_VALUES)
    date_of_birth = db.StringProperty(default='')  # YYYY, YYYY-MM, YYYY-MM-DD
    age = db.StringProperty(default='')  # NN or NN-MM
    home_street = db.StringProperty(default='')
    home_neighborhood = db.StringProperty(default='')
    home_city = db.StringProperty(default='')
    home_state = db.StringProperty(default='')
    home_postal_code = db.StringProperty(default='')
    home_country = db.StringProperty(default='')
    photo_url = db.TextProperty(default='')
    profile_urls = db.TextProperty(default='')

    # This reference points to a locally stored Photo entity.  ONLY set this
    # property when storing a new Photo object that is owned by this Person
    # record and can be safely deleted when the Person is deleted.
    photo = db.ReferenceProperty(default=None)

    # The following properties are not part of the PFIF data model; they are
    # cached on the Person for efficiency.

    # Value of the 'status' and 'source_date' properties on the Note
    # with the latest source_date with the 'status' field present.
    latest_status = db.StringProperty(default='')
    latest_status_source_date = db.DateTimeProperty()

    # Value of the 'author_made_contact' and 'source_date' properties on the
    # Note with the latest source_date with the 'author_made_contact' field
    # present.
    latest_found = db.BooleanProperty()
    latest_found_source_date = db.DateTimeProperty()

    # Last write time of this Person or any Notes on this Person.
    # This reflects any change to the Person page.
    last_modified = db.DateTimeProperty(auto_now=True)

    # This flag is set to true only when the record author disabled
    # adding new notes to a record.
    notes_disabled = db.BooleanProperty(default=False)

    # attributes used by indexing.py
    names_prefixes = db.StringListProperty()
    # TODO(ryok): index address components.
    _fields_to_index_properties = ['given_name', 'family_name', 'full_name']
    _fields_to_index_by_prefix_properties = [
        'given_name', 'family_name', 'full_name'
    ]

    @staticmethod
    def past_due_records(repo):
        """Returns a query for all Person records with expiry_date in the
        past, or None, regardless of their is_expired flags."""
        import utils
        return Person.all(filter_expired=False).filter(
            'expiry_date <=', utils.get_utcnow()).filter('repo =', repo)

    @staticmethod
    def potentially_expired_records(repo,
                                    days_to_expire=DEFAULT_EXPIRATION_DAYS):
        """Returns a query for all Person records with source date older than
        days_to_expire (or empty source_date), regardless of is_expired flags
        value."""
        import utils
        cutoff_date = utils.get_utcnow() - timedelta(days_to_expire)
        return Person.all(filter_expired=False).filter(
            'source_date <=', cutoff_date).filter('repo =', repo)

    @property
    def person_record_id(self):
        """PFIF-style alias for the Base.record_id."""
        return self.record_id

    @property
    def primary_full_name(self):
        """First line of full_name, or '' when full_name is empty."""
        return self.full_name.splitlines()[0] if self.full_name else ''

    @property
    def full_name_list(self):
        """full_name split into its lines (may be empty)."""
        return self.full_name.splitlines() if self.full_name else []

    @property
    def alternate_names_list(self):
        """alternate_names split into its lines (may be empty)."""
        return self.alternate_names.splitlines(
            ) if self.alternate_names else []

    @property
    def profile_urls_list(self):
        """profile_urls split into its lines (may be empty)."""
        return self.profile_urls.splitlines() if self.profile_urls else []

    @property
    def photo_url_no_scheme(self):
        """photo_url with its URL scheme stripped by utils."""
        import utils
        return utils.strip_url_scheme(self.photo_url)

    def get_notes(self, filter_expired=True):
        """Returns a list of all the Notes on this Person, omitting expired
        Notes by default."""
        return Note.get_by_person_record_id(
            self.repo, self.record_id, filter_expired=filter_expired)

    def get_subscriptions(self, subscription_limit=200):
        """Retrieves a list of all the Subscriptions for this Person."""
        return Subscription.get_by_person_record_id(
            self.repo, self.record_id, limit=subscription_limit)

    def get_linked_person_ids(self, note_limit=200):
        """Retrieves IDs of Persons marked as duplicates of this Person.

        NOTE(review): note_limit is retained for interface compatibility
        but is not honored; the previous code passed it positionally into
        get_notes(), where it landed in the boolean filter_expired
        parameter (truthy, so behavior was the same as the default).
        """
        return [
            note.linked_person_record_id
            for note in self.get_notes()
            if note.linked_person_record_id
        ]

    def get_linked_persons(self, note_limit=200):
        """Retrieves Persons marked as duplicates of this Person."""
        return Person.get_all(self.repo,
                              self.get_linked_person_ids(note_limit))

    def get_all_linked_persons(self):
        """Retrieves all Persons transitively linked to this Person."""
        linked_person_ids = set([self.record_id])
        linked_persons = []
        # Maintain a list of ids of duplicate persons that have not
        # yet been processed.
        new_person_ids = set(self.get_linked_person_ids())
        # Iteratively process all new_person_ids by retrieving linked
        # duplicates and storing those not yet processed.
        # Processed ids are stored in the linked_person_ids set, and
        # their corresponding records are in the linked_persons list.
        while new_person_ids:
            linked_person_ids.update(new_person_ids)
            new_persons = Person.get_all(self.repo, list(new_person_ids))
            for person in new_persons:
                new_person_ids.update(person.get_linked_person_ids())
            linked_persons += new_persons
            new_person_ids -= linked_person_ids
        return linked_persons

    def get_associated_emails(self):
        """Gets a set of all the e-mail addresses to notify when this record
        is changed."""
        email_addresses = set([
            note.author_email for note in self.get_notes()
            if note.author_email
        ])
        if self.author_email:
            email_addresses.add(self.author_email)
        return email_addresses

    def get_effective_expiry_date(self):
        """Gets the expiry_date, or if no expiry_date is present, returns the
        source_date plus the configurable default_expiration_days interval.

        If there's no source_date, we use original_creation_date.

        Returns:
          A datetime date (not None).
        """
        # Local import, consistent with the sibling methods; the fallback
        # branch below references utils.get_utcnow().
        import utils
        if self.expiry_date:
            return self.expiry_date
        else:
            expiration_days = config.get_for_repo(
                self.repo, 'default_expiration_days') or (
                DEFAULT_EXPIRATION_DAYS)
            # in theory, we should always have original_creation_date, but
            # since it was only added recently, we might have legacy
            # records without it.
            start_date = (self.source_date or
                          self.original_creation_date or
                          utils.get_utcnow())
            return start_date + timedelta(expiration_days)

    def put_expiry_flags(self):
        """Updates the is_expired flags on this Person and related Notes to
        make them consistent with the effective_expiry_date() on this Person,
        and commits the changes to the datastore."""
        import utils
        now = utils.get_utcnow()
        expired = self.get_effective_expiry_date() <= now

        if self.is_expired != expired:
            # NOTE: This should be the ONLY code that modifies is_expired.
            self.is_expired = expired

            # if we neglected to capture the original_creation_date,
            # make a best effort to grab it now, for posterity.
            if not self.original_creation_date:
                self.original_creation_date = self.source_date

            # If the record is expiring (being replaced with a placeholder,
            # see http://zesty.ca/pfif/1.3/#data-expiry) or un-expiring (being
            # restored from deletion), we want the source_date and entry_date
            # updated so downstream clients will see this as the newest state.
            self.source_date = now
            self.entry_date = now

            # All the Notes on the Person also expire or unexpire, to match.
            notes = self.get_notes(filter_expired=False)
            for note in notes:
                note.is_expired = expired

            # Store these changes in the datastore.
            db.put(notes + [self])
            # TODO(lschumacher): photos don't have expiration currently.

    def wipe_contents(self):
        """Sets all the content fields to None (leaving timestamps and the
        expiry flag untouched), stores the empty record, and permanently
        deletes any related Notes and Photos.  Call this method ONLY on
        records that have already expired."""
        # We rely on put_expiry_flags to have properly set the source_date,
        # entry_date, and is_expired flags on Notes, as necessary.
        assert self.is_expired

        # Permanently delete all related Photos and Notes, but not self.
        self.delete_related_entities()

        # 'prop' (renamed from 'property' to avoid shadowing the builtin).
        for name, prop in self.properties().items():
            # Leave the repo, is_expired flag, and timestamps untouched.
            if name not in ['repo', 'is_expired', 'original_creation_date',
                            'source_date', 'entry_date', 'expiry_date']:
                setattr(self, name, prop.default)
        self.put()  # Store the empty placeholder record.

    def delete_related_entities(self, delete_self=False):
        """Permanently delete all related Photos and Notes, and also self if
        delete_self is True."""
        # Delete all related Notes.
        notes = self.get_notes(filter_expired=False)
        # Delete the locally stored Photos.  We use get_value_for_datastore
        # to get just the keys and prevent auto-fetching the Photo data.
        photo = Person.photo.get_value_for_datastore(self)
        note_photos = [Note.photo.get_value_for_datastore(n) for n in notes]

        entities_to_delete = filter(None, notes + [photo] + note_photos)
        if delete_self:
            entities_to_delete.append(self)
        db.delete(entities_to_delete)

    def update_from_note(self, note):
        """Updates any necessary fields on the Person to reflect a new
        Note."""
        # We want to transfer only the *non-empty, newer* values to the
        # Person.
        if note.author_made_contact is not None:  # for boolean, None means
                                                  # unspecified
            # datetime stupidly refuses to compare to None, so check for
            # None first.
            if (self.latest_found_source_date is None or
                note.source_date >= self.latest_found_source_date):
                self.latest_found = note.author_made_contact
                self.latest_found_source_date = note.source_date
        if note.status:  # for string, '' means unspecified
            if (self.latest_status_source_date is None or
                note.source_date >= self.latest_status_source_date):
                self.latest_status = note.status
                self.latest_status_source_date = note.source_date

    def update_index(self, which_indexing):
        """Updates the index properties named in which_indexing ('new'
        and/or 'old')."""
        # setup new indexing
        if 'new' in which_indexing:
            indexing.update_index_properties(self)
        # setup old indexing
        if 'old' in which_indexing:
            prefix.update_prefix_properties(self)

    def update_latest_status(self):
        """Scans all notes on this Person and fixes latest_status if
        needed."""
        status = None
        status_source_date = None
        # The last qualifying note wins (notes are ordered by source_date).
        for note in self.get_notes():
            if note.status and not note.hidden:
                status = note.status
                status_source_date = note.source_date
        if status != self.latest_status:
            self.latest_status = status
            self.latest_status_source_date = status_source_date
            self.put()
class GymMembership(db.Model):
    """Links a Climber (and the Google account that created the link) to a
    Gym."""
    # The climber who is a member; back-reference: Climber.memberships.
    climber = db.ReferenceProperty(Climber, collection_name='memberships')
    # Automatically set to the user who created this membership entity.
    user = db.UserProperty(auto_current_user_add=True)
    # The gym the climber belongs to; back-reference: Gym.memberships.
    gym = db.ReferenceProperty(Gym, collection_name='memberships')
    # Presumably marks the member as an owner/administrator of the gym --
    # TODO confirm against callers.
    owner = db.BooleanProperty(default=False)
class Note(Base):
    """The datastore entity kind for storing a PFIF note record.  Never call
    Note() directly; use Note.create_clone() or Note.create_original()."""

    # The entry_date should update every time a record is re-imported.
    entry_date = db.DateTimeProperty(required=True)

    person_record_id = db.StringProperty(required=True)

    # Use this field to store the person_record_id of a duplicate Person
    # entry.
    linked_person_record_id = db.StringProperty(default='')

    author_name = db.StringProperty(default='', multiline=True)
    author_email = db.StringProperty(default='')
    author_phone = db.StringProperty(default='')

    # the original date we saw this record; it should not change.
    original_creation_date = db.DateTimeProperty(auto_now_add=True)

    # source_date is the date that the original repository last changed
    # any of the fields in the pfif record.
    source_date = db.DateTimeProperty()

    status = db.StringProperty(default='', choices=pfif.NOTE_STATUS_VALUES)
    author_made_contact = db.BooleanProperty()
    email_of_found_person = db.StringProperty(default='')
    phone_of_found_person = db.StringProperty(default='')
    last_known_location = db.StringProperty(default='')
    text = db.TextProperty(default='')
    photo_url = db.TextProperty(default='')

    # This reference points to a locally stored Photo entity.  ONLY set this
    # property when storing a new Photo object that is owned by this Note
    # record and can be safely deleted when the Note is deleted.
    photo = db.ReferenceProperty(default=None)

    # True if the note has been marked as spam.  Will cause the note to be
    # initially hidden from display upon loading a record page.
    hidden = db.BooleanProperty(default=False)

    # True if the note has been reviewed for spam content at /admin/review.
    reviewed = db.BooleanProperty(default=False)

    def get_note_record_id(self):
        """Returns the record ID of this note (key name minus the repo
        prefix, via Base.record_id)."""
        return self.record_id
    note_record_id = property(get_note_record_id)

    @property
    def photo_url_no_scheme(self):
        """photo_url with its URL scheme stripped by utils."""
        import utils
        return utils.strip_url_scheme(self.photo_url)

    @staticmethod
    def get_by_person_record_id(repo, person_record_id, filter_expired=True):
        """Gets a list of all the Notes on a Person, ordered by
        source_date."""
        return list(
            Note.generate_by_person_record_id(repo, person_record_id,
                                              filter_expired))

    @staticmethod
    def generate_by_person_record_id(repo, person_record_id,
                                     filter_expired=True):
        """Generates all the Notes on a Person record ordered by
        source_date.

        Fetches in batches of FETCH_LIMIT, resuming from the query cursor
        between batches, so arbitrarily many notes can be yielded.
        """
        query = Note.all_in_repo(repo, filter_expired=filter_expired).filter(
            'person_record_id =', person_record_id).order('source_date')
        notes = query.fetch(Note.FETCH_LIMIT)
        while notes:
            for note in notes:
                yield note
            query.with_cursor(query.cursor())  # Continue where fetch left off.
            notes = query.fetch(Note.FETCH_LIMIT)
class OnOffSwitch(db.Model):
    """A single persisted boolean switch."""
    # Current state of the switch; may be None if never set.  The meaning
    # of True/False is defined by callers -- not visible here.
    status = db.BooleanProperty()
class Authorization(db.Model):
    """An API authorization key.  Key name: repo + ':' + auth_key."""

    # Default values used when creating/displaying an authorization record.
    DEFAULT_SETTINGS = {
        'contact_name': '',
        'contact_email': '',
        'organization_name': '',
        'domain_write_permission': '',
        'read_permission': False,
        'full_read_permission': False,
        'search_permission': True,
        'subscribe_permission': False,
        'mark_notes_reviewed': False,
        'is_valid': True,
        'key': '',
    }

    # Even though the repo is part of the key_name, it is also stored
    # redundantly as a separate property so it can be indexed and queried upon.
    repo = db.StringProperty(required=True)

    # If this field is non-empty, this authorization token allows the client
    # to write records with this original domain.
    domain_write_permission = db.StringProperty()

    # If this flag is true, this authorization token allows the client to read
    # non-sensitive fields (i.e. filtered by utils.filter_sensitive_fields).
    read_permission = db.BooleanProperty()

    # If this flag is true, this authorization token allows the client to read
    # all fields (i.e. not filtered by utils.filter_sensitive_fields).
    full_read_permission = db.BooleanProperty()

    # If this flag is true, this authorization token allows the client to use
    # the search API and return non-sensitive fields (i.e. filtered
    # by utils.filter_sensitive_fields).
    search_permission = db.BooleanProperty()

    # If this flag is true, this authorization token allows the client to use
    # the API to subscribe any e-mail address to updates on any person.
    subscribe_permission = db.BooleanProperty()

    # If this flag is true, notes written with this authorization token are
    # marked as "reviewed" and won't show up in admin's review list.
    mark_notes_reviewed = db.BooleanProperty()

    # If this flag is true, notes written with this authorization token are
    # allowed to have status == 'believed_dead'.
    believed_dead_permission = db.BooleanProperty()

    # If this flag is true, this key can be used to get overall statistics.
    stats_permission = db.BooleanProperty()

    # If this flag is False, the API access with this key won't be allowed.
    is_valid = db.BooleanProperty(default=True)

    # Bookkeeping information for humans, not used programmatically.
    contact_name = db.StringProperty()
    contact_email = db.StringProperty()
    organization_name = db.StringProperty()

    @property
    def api_key(self):
        """The auth key portion of the key name (i.e. without the leading
        'repo:' prefix), or None when the entity has no key yet."""
        if not self.has_key():
            return None
        return self.key().name().split(':')[1]

    @classmethod
    def get(cls, repo, key):
        """Gets the Authorization entity for a given repository and key."""
        return cls.get_by_key_name('%s:%s' % (repo, key))

    @classmethod
    def create(cls, repo, key, **kwargs):
        """Creates an Authorization entity for a given repository and key."""
        return cls(key_name='%s:%s' % (repo, key), repo=repo, **kwargs)
class StaticList(FolderishMixin, ContentishMixin):
    """A content object holding a manually curated list of other content
    objects, referenced by key or path via the `list_items` text field
    (one 'key:<key>' or 'path:<path>' entry per line)."""
    zope.interface.implements(interfaces.IStaticListView)

    _template = "query"
    body = db.TextProperty(default=u'')
    hidden = db.BooleanProperty(default=False)
    # When True, listed items are re-parented under this list (affects
    # their absolute URLs).
    reparent = db.BooleanProperty(default=False)
    # Newline-separated 'method:identifier' entries; see contentValues().
    list_items = db.TextProperty(default=u'')
    custom_view = db.StringProperty(default=u'')

    def template(self):
        """Returns the custom view name if one is set, else the default."""
        if self.custom_view:
            return self.custom_view
        return super(StaticList, self).template()

    def reparent_absolute_url(self, obj, request):
        """Builds the URL of `obj` as if it lived under this list."""
        url = "%s%s/" % (self.absolute_url(request), str(obj.key()))
        return url

    def resultwrapper(self, item):
        """Place holder method."""
        return ResultAdapter(self, dict())

    def content_summary(self, REQUEST=None, limit=None):
        """Returns a list of summary dicts for the listed items, optionally
        truncated to `limit` entries.  Results are served from / stored in
        the root cache for non-admin principals."""
        results = []
        root = self.getRoot()
        request = REQUEST if REQUEST else self._request()
        cache_key = str(self.absolute_url().rstrip('/')) + ":summary"
        cached_result = root.getcached(cache_key)
        # Admins bypass the cache so they always see fresh content.
        if cached_result and not getattr(request.principal, 'ADMIN', False):
            return cached_result
        values = self.contentValues(request)
        if limit:
            values = values[0:min(limit, len(values))]
        # BUG FIX: previously iterated self.contentValues(request) again,
        # which ignored the limit applied above and re-resolved every item.
        for i in values:
            if self.reparent:
                i.__parent__ = self
                i.__name__ = str(i.key())
                url = self.reparent_absolute_url(i, request)
            else:
                url = i.absolute_url(request)
            title = i.title_or_id()
            description = i.description or ''
            summary = {
                'name': i.name,
                'url': url,
                'title': title,
                'key': str(i.key()),
                'description': description,
                'kind': i.kind(),
                'hidden': getattr(i, 'hidden', False),
                'modified': i.modified,
                'isfolder': interfaces.IFolderish.providedBy(i)
            }
            summary['heading_tab'] = getattr(i, 'heading_tab', False)
            results.append(summary)
            if hasattr(i, 'image_thumbnail'):
                summary['thumbnail'] = i.thumb_tag()
            if hasattr(i, 'image'):
                summary['thumbnail'] = url + 'mini'
        if not getattr(request.principal, 'ADMIN', False):
            root.setcached(cache_key, results)
        return results

    def contentValues(self, REQUEST=None, kind=None):
        """Resolves list_items into content objects.  Each line has the
        form 'key:<datastore key>' or 'path:<path>'; malformed or
        unresolvable lines are skipped.  `kind` is currently unused."""
        results = []
        for i in self.list_items.split('\n'):
            try:
                method, key = i.strip().split(':', 1)
            except ValueError:
                # Line without a ':' separator -- ignore it.
                continue
            item = None
            try:
                if method == 'key':
                    item = self[key]
                elif method == 'path':
                    item = self.traverse(key)
            except KeyError:
                continue
            if item:
                if isinstance(item, NonContentishMixin) or self.reparent:
                    try:
                        # BUG FIX: was 'item.__parent__self' (a no-op
                        # attribute access), which never re-parented items.
                        item.__parent__ = self
                        item.__name__ = str(item.key())
                    except AttributeError:
                        pass
                results.append(item)
        return results

    def contentNames(self):
        # NOTE(review): this returns the *bound key methods* of the items,
        # not key values -- callers may expect i.key() instead; left
        # unchanged because the return type is caller-visible.
        return [i.key for i in self.contentValues()]

    def __repr__(self):
        return '<StaticList path="%s">' % self.getPath()

    def __getitem__(self, name):
        """Looks up a child object by datastore key string, using the root
        cache.  Raises KeyError for bad keys or missing objects."""
        root = self.getRoot()
        try:
            key = db.Key(name)
        except db.BadKeyError:
            raise KeyError
        cache_key = str(key)
        obj = root.getcached(cache_key)
        if not obj:
            obj = db.get(name)
            root.setcached(cache_key, obj)
        if obj:
            if isinstance(obj, NonContentishMixin) or self.reparent:
                try:
                    obj.__parent__ = self
                    obj.__name__ = str(obj.key())
                except AttributeError:
                    pass
            return obj
        else:
            raise KeyError('Object not found')
class Feature(DictModel):
    """Container for a feature."""

    DEFAULT_MEMCACHE_KEY = '%s|features' % (settings.MEMCACHE_KEY_PREFIX)

    MAX_CHUNK_SIZE = 500  # max num features to save for each memcache chunk.

    @classmethod
    def get_feature_chunk_memcache_keys(self, key_prefix):
        """Return the memcache chunk keys covering every stored feature."""
        num_features = len(Feature.all().fetch(limit=None, keys_only=True))
        l = list_to_chunks(range(0, num_features), self.MAX_CHUNK_SIZE)
        return ['%s|chunk%s' % (key_prefix, i) for i, val in enumerate(l)]

    @classmethod
    def set_feature_chunk_memcache_keys(self, key_prefix, feature_list):
        """Split feature_list into a chunk-keyed dict for memcache.set_multi."""
        chunks = list_to_chunks(feature_list, self.MAX_CHUNK_SIZE)
        vals = []
        for i, chunk in enumerate(chunks):
            vals.append(('%s|chunk%s' % (key_prefix, i), chunk))
        # d = OrderedDict(sorted(dict(vals).items(), key=lambda t: t[0]))
        d = dict(vals)
        return d

    @classmethod
    def _first_of_milestone(self, feature_list, milestone, start=0):
        """Index of the first v1-format feature in `milestone`, or -1."""
        for i in xrange(start, len(feature_list)):
            f = feature_list[i]
            if (str(f['shipped_milestone']) == str(milestone) or
                    f['impl_status_chrome'] == str(milestone)):
                return i
            elif (f['shipped_milestone'] == None and
                  str(f['shipped_android_milestone']) == str(milestone)):
                # Android-only features fall under their android milestone.
                return i
        return -1

    @classmethod
    def _first_of_milestone_v2(self, feature_list, milestone, start=0):
        """Index of the first v2-format feature in `milestone`, or -1."""
        for i in xrange(start, len(feature_list)):
            f = feature_list[i]
            desktop_milestone = f['browsers']['chrome'].get('desktop', None)
            android_milestone = f['browsers']['chrome'].get('android', None)
            status = f['browsers']['chrome']['status'].get('text', None)
            if (str(desktop_milestone) == str(milestone) or
                    status == str(milestone)):
                return i
            elif (desktop_milestone == None and
                  str(android_milestone) == str(milestone)):
                return i
        return -1

    @classmethod
    def _annotate_first_of_milestones(self, feature_list, version=None):
        """Mark the first feature of each milestone with 'first_of_milestone'.

        Any failure (e.g. omaha fetch) is logged and annotation is skipped.
        """
        try:
            omaha_data = util.get_omaha_data()
            win_versions = omaha_data[0]['versions']

            # Find the latest canary major version from the list of windows
            # versions.
            canary_versions = [
                x for x in win_versions
                if x.get('channel') and x.get('channel').startswith('canary')
            ]
            LATEST_VERSION = int(
                canary_versions[0].get('version').split('.')[0])

            # Visit pre-release buckets first, then milestones newest-first,
            # then the "no longer pursuing" bucket.
            milestones = range(1, LATEST_VERSION + 1)
            milestones.reverse()
            versions = [
                IMPLEMENTATION_STATUS[NO_ACTIVE_DEV],
                IMPLEMENTATION_STATUS[PROPOSED],
                IMPLEMENTATION_STATUS[IN_DEVELOPMENT],
            ]
            versions.extend(milestones)
            versions.append(IMPLEMENTATION_STATUS[NO_LONGER_PURSUING])

            first_of_milestone_func = Feature._first_of_milestone
            if version == 2:
                first_of_milestone_func = Feature._first_of_milestone_v2

            last_good_idx = 0
            for i, ver in enumerate(versions):
                idx = first_of_milestone_func(feature_list, ver,
                                              start=last_good_idx)
                if idx != -1:
                    feature_list[idx]['first_of_milestone'] = True
                    last_good_idx = idx
        except Exception as e:
            logging.error(e)

    def format_for_template(self, version=None):
        """Return this feature as a dict shaped for template rendering.

        version=2 produces the nested 'browsers'/'standards' schema; any
        other value produces the legacy flat schema.
        """
        d = self.to_dict()

        if version == 2:
            if self.is_saved():
                d['id'] = self.key().id()
            else:
                d['id'] = None
            d['category'] = FEATURE_CATEGORIES[self.category]
            # BUG FIX: 'by' and 'when' were swapped — 'by' received the
            # creation timestamp and 'when' the creating user.
            d['created'] = {
                'by': d.pop('created_by', None),
                'when': d.pop('created', None),
            }
            d['updated'] = {
                'by': d.pop('updated_by', None),
                'when': d.pop('updated', None),
            }
            d['standards'] = {
                'spec': d.pop('spec_link', None),
                'status': {
                    'text': STANDARDIZATION[self.standardization],
                    'val': d.pop('standardization', None),
                },
                'visibility': {
                    'text': VISIBILITY_CHOICES[self.visibility],
                    'val': d.pop('visibility', None),
                },
                'footprint': {
                    'val': d.pop('footprint', None),
                    #'text': FOOTPRINT_CHOICES[self.footprint]
                }
            }
            d['resources'] = {
                'samples': d.pop('sample_links', []),
                'docs': d.pop('doc_links', []),
            }
            d['tags'] = d.pop('search_tags', [])
            d['browsers'] = {
                'chrome': {
                    'bug': d.pop('bug_url', None),
                    'blink_components': d.pop('blink_components', []),
                    'owners': d.pop('owner', []),
                    'origintrial': self.impl_status_chrome == ORIGIN_TRIAL,
                    'intervention': self.impl_status_chrome == INTERVENTION,
                    'prefixed': d.pop('prefixed', False),
                    'flag': self.impl_status_chrome == BEHIND_A_FLAG,
                    'status': {
                        'text': IMPLEMENTATION_STATUS[self.impl_status_chrome],
                        'val': d.pop('impl_status_chrome', None)
                    },
                    'desktop': d.pop('shipped_milestone', None),
                    'android': d.pop('shipped_android_milestone', None),
                    'webview': d.pop('shipped_webview_milestone', None),
                    'ios': d.pop('shipped_ios_milestone', None),
                },
                'opera': {
                    'desktop': d.pop('shipped_opera_milestone', None),
                    'android': d.pop('shipped_opera_android_milestone', None),
                },
                'ff': {
                    'view': {
                        'text': VENDOR_VIEWS[self.ff_views],
                        'val': d.pop('ff_views', None),
                        'url': d.pop('ff_views_link', None),
                    }
                },
                'edge': {
                    'view': {
                        'text': VENDOR_VIEWS[self.ie_views],
                        'val': d.pop('ie_views', None),
                        'url': d.pop('ie_views_link', None),
                    }
                },
                'safari': {
                    'view': {
                        'text': VENDOR_VIEWS[self.safari_views],
                        'val': d.pop('safari_views', None),
                        'url': d.pop('safari_views_link', None),
                    }
                },
                'webdev': {
                    'view': {
                        'text': WEB_DEV_VIEWS[self.web_dev_views],
                        'val': d.pop('web_dev_views', None),
                    }
                }
            }

            # Human-readable milestone: desktop wins, then android, then the
            # status text itself.
            if self.shipped_milestone:
                d['browsers']['chrome']['status'][
                    'milestone_str'] = self.shipped_milestone
            elif self.shipped_milestone is None and self.shipped_android_milestone:
                d['browsers']['chrome']['status'][
                    'milestone_str'] = self.shipped_android_milestone
            else:
                d['browsers']['chrome']['status']['milestone_str'] = d[
                    'browsers']['chrome']['status']['text']

            # NOTE(review): this drops the 'created' block built above from
            # the v2 payload entirely — confirm that is intended.
            del d['created']

            del_none(d)  # Further prune response by removing null/[] values.
        else:
            if self.is_saved():
                d['id'] = self.key().id()
            else:
                d['id'] = None
            d['category'] = FEATURE_CATEGORIES[self.category]
            d['visibility'] = VISIBILITY_CHOICES[self.visibility]
            d['impl_status_chrome'] = IMPLEMENTATION_STATUS[
                self.impl_status_chrome]
            d['meta'] = {
                'origintrial': self.impl_status_chrome == ORIGIN_TRIAL,
                'intervention': self.impl_status_chrome == INTERVENTION,
                'needsflag': self.impl_status_chrome == BEHIND_A_FLAG,
            }
            if self.shipped_milestone:
                d['meta']['milestone_str'] = self.shipped_milestone
            elif self.shipped_milestone is None and self.shipped_android_milestone:
                d['meta']['milestone_str'] = self.shipped_android_milestone
            else:
                d['meta']['milestone_str'] = d['impl_status_chrome']
            d['ff_views'] = {
                'value': self.ff_views,
                'text': VENDOR_VIEWS[self.ff_views]
            }
            d['ie_views'] = {
                'value': self.ie_views,
                'text': VENDOR_VIEWS[self.ie_views]
            }
            d['safari_views'] = {
                'value': self.safari_views,
                'text': VENDOR_VIEWS[self.safari_views]
            }
            d['standardization'] = {
                'value': self.standardization,
                'text': STANDARDIZATION[self.standardization]
            }
            d['web_dev_views'] = {
                'value': self.web_dev_views,
                'text': WEB_DEV_VIEWS[self.web_dev_views]
            }

        return d

    def format_for_edit(self):
        """Return a dict shaped for the edit form (lists joined to text)."""
        d = self.to_dict()
        #d['id'] = self.key().id
        d['owner'] = ', '.join(self.owner)
        d['doc_links'] = '\r\n'.join(self.doc_links)
        d['sample_links'] = '\r\n'.join(self.sample_links)
        d['search_tags'] = ', '.join(self.search_tags)
        d['blink_components'] = self.blink_components[
            0]  #TODO: support more than one component.
        return d

    @classmethod
    def get_all(self, limit=None, order='-updated', filterby=None,
                update_cache=False):
        """Fetch all features (memcached), optionally filtered/ordered."""
        KEY = '%s|%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, order, limit)

        # TODO(ericbidelman): Support more than one filter.
        if filterby is not None:
            s = ('%s%s' % (filterby[0], filterby[1])).replace(' ', '')
            KEY += '|%s' % s

        feature_list = memcache.get(KEY)

        if feature_list is None or update_cache:
            query = Feature.all().order(order)  #.order('name')

            # TODO(ericbidelman): Support more than one filter.
            if filterby:
                query.filter(filterby[0], filterby[1])

            features = query.fetch(limit)
            feature_list = [f.format_for_template() for f in features]
            memcache.set(KEY, feature_list)

        return feature_list

    @classmethod
    def get_all_with_statuses(self, statuses, update_cache=False):
        """Fetch features whose impl status is in `statuses` (memcached)."""
        if not statuses:
            return []

        KEY = '%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, sorted(statuses))

        feature_list = memcache.get(KEY)

        if feature_list is None or update_cache:
            # There's no way to do an OR in a single datastore query, and
            # there's a very good chance that the self.get_all() results will
            # already be in memcache, so use an array comprehension to grab
            # the features we want from the array of everything.
            feature_list = [
                feature for feature in self.get_all(update_cache=update_cache)
                if feature['impl_status_chrome'] in statuses
            ]
            memcache.set(KEY, feature_list)

        return feature_list

    @classmethod
    def get_feature(self, feature_id, update_cache=False):
        """Fetch one formatted feature by datastore id (memcached)."""
        KEY = '%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, feature_id)
        feature = memcache.get(KEY)

        if feature is None or update_cache:
            unformatted_feature = Feature.get_by_id(feature_id)
            if unformatted_feature:
                feature = unformatted_feature.format_for_template()
                feature[
                    'updated_display'] = unformatted_feature.updated.strftime(
                        "%Y-%m-%d")
                feature['new_crbug_url'] = unformatted_feature.new_crbug_url()
                memcache.set(KEY, feature)

        return feature

    @classmethod
    def get_chronological(self, limit=None, update_cache=False, version=None):
        """Fetch all features in chronological order, chunk-cached."""
        KEY = '%s|%s|%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, 'cronorder',
                               limit, version)

        keys = Feature.get_feature_chunk_memcache_keys(KEY)
        feature_list = memcache.get_multi(keys)

        # If we didn't get the expected number of chunks back (or a cache
        # update was requested), do a db query.
        if len(feature_list.keys()) != len(keys) or update_cache:
            # Features with no active, in dev, proposed features.
            q = Feature.all()
            q.order('impl_status_chrome')
            q.order('name')
            q.filter('impl_status_chrome <=', IN_DEVELOPMENT)
            pre_release = q.fetch(None)

            # Shipping features. Exclude features that do not have a desktop
            # shipping milestone.
            q = Feature.all()
            q.order('-shipped_milestone')
            q.order('name')
            q.filter('shipped_milestone !=', None)
            shipping_features = q.fetch(None)

            # Features with an android shipping milestone but no desktop
            # milestone.
            q = Feature.all()
            q.order('-shipped_android_milestone')
            q.order('name')
            q.filter('shipped_milestone =', None)
            android_only_shipping_features = q.fetch(None)

            # No longer pursuing features.
            q = Feature.all()
            q.order('impl_status_chrome')
            q.order('name')
            q.filter('impl_status_chrome =', NO_LONGER_PURSUING)
            no_longer_pursuing_features = q.fetch(None)

            shipping_features.extend(android_only_shipping_features)
            shipping_features = [
                f for f in shipping_features
                if (IN_DEVELOPMENT < f.impl_status_chrome < NO_LONGER_PURSUING)
            ]

            def getSortingMilestone(feature):
                feature._sort_by_milestone = (
                    feature.shipped_milestone
                    or feature.shipped_android_milestone)
                return feature

            # Sort the feature list on either Android shipping milestone or
            # desktop shipping milestone, depending on which is specified. If
            # a desktop milestone is defined, that will take default.
            shipping_features = map(getSortingMilestone, shipping_features)

            # First sort by name, then sort by feature milestone (latest
            # first).
            shipping_features.sort(key=lambda f: f.name, reverse=False)
            shipping_features.sort(key=lambda f: f._sort_by_milestone,
                                   reverse=True)

            # Constructor the proper ordering.
            pre_release.extend(shipping_features)
            pre_release.extend(no_longer_pursuing_features)

            feature_list = [
                f.format_for_template(version) for f in pre_release
            ]

            self._annotate_first_of_milestones(feature_list, version=version)

            # Memcache doesn't support saving values > 1MB. Break up features
            # list into chunks so we don't hit the limit.
            memcache.set_multi(
                Feature.set_feature_chunk_memcache_keys(KEY, feature_list))
        else:
            temp_feature_list = []
            # Reconstruct feature list by ordering chunks.
            for key in sorted(feature_list.keys()):
                temp_feature_list.extend(feature_list[key])
            feature_list = temp_feature_list

        return feature_list

    @classmethod
    def get_shipping_samples(self, limit=None, update_cache=False):
        """Fetch features that have sample links, shipped first (memcached)."""
        KEY = '%s|%s|%s' % (Feature.DEFAULT_MEMCACHE_KEY, 'samples', limit)

        feature_list = memcache.get(KEY)

        if feature_list is None or update_cache:
            # Get all shipping features. Ordered by shipping milestone
            # (latest first).
            q = Feature.all()
            q.filter('impl_status_chrome IN',
                     [ENABLED_BY_DEFAULT, ORIGIN_TRIAL, INTERVENTION])
            q.order('-impl_status_chrome')
            q.order('-shipped_milestone')
            q.order('name')
            features = q.fetch(None)

            # Get non-shipping features (sans removed or deprecated ones) and
            # append to bottom of list.
            q = Feature.all()
            q.filter('impl_status_chrome <', ENABLED_BY_DEFAULT)
            q.order('-impl_status_chrome')
            q.order('-shipped_milestone')
            q.order('name')
            others = q.fetch(None)
            features.extend(others)

            # Filter out features without sample links.
            feature_list = [
                f.format_for_template() for f in features
                if len(f.sample_links)
            ]

            memcache.set(KEY, feature_list)

        return feature_list

    def crbug_number(self):
        """Extract the numeric issue id from bug_url, or None."""
        if not self.bug_url:
            return
        m = re.search(r'[\/|?id=]([0-9]+)$', self.bug_url)
        if m:
            return m.group(1)

    def new_crbug_url(self):
        """Return a prefilled crbug 'new issue' URL for this feature."""
        url = 'https://bugs.chromium.org/p/chromium/issues/entry'
        # BUG FIX: `'components=' + x or DEFAULT` bound as
        # `('components=' + x) or DEFAULT`; the left side is always truthy,
        # so the fallback component could never apply. Parenthesized so an
        # empty component falls back to the default.
        params = [
            'components=' +
            (self.blink_components[0] or BlinkComponent.DEFAULT_COMPONENT)
        ]
        crbug_number = self.crbug_number()
        if crbug_number and self.impl_status_chrome in (
                NO_ACTIVE_DEV, PROPOSED, IN_DEVELOPMENT, BEHIND_A_FLAG,
                ORIGIN_TRIAL, INTERVENTION):
            params.append('blocking=' + crbug_number)
        if self.owner:
            params.append('cc=' + ','.join(self.owner))
        return url + '?' + '&'.join(params)

    def __init__(self, *args, **kwargs):
        super(Feature, self).__init__(*args, **kwargs)

        # Stash existing values when entity is created so we can diff
        # property values later in put() to know what's changed.
        # https://stackoverflow.com/a/41344898
        for prop_name, prop in self.properties().iteritems():
            old_val = getattr(self, prop_name, None)
            setattr(self, '_old_' + prop_name, old_val)

    def __notify_feature_subscribers_of_changes(self, is_update):
        """Async notifies subscribers of new features and property changes to
        features by posting to a task queue."""
        # Diff values to see what properties have changed.
        changed_props = []
        for prop_name, prop in self.properties().iteritems():
            new_val = getattr(self, prop_name, None)
            old_val = getattr(self, '_old_' + prop_name, None)
            if new_val != old_val:
                changed_props.append({
                    'prop_name': prop_name,
                    'old_val': old_val,
                    'new_val': new_val
                })

        payload = json.dumps({
            'changes': changed_props,
            'is_update': is_update,
            'feature': self.format_for_template(version=2)
        })

        # Create task to email subscribers.
        queue = taskqueue.Queue()  #name='emailer')
        task = taskqueue.Task(method="POST",
                              url='/tasks/email-subscribers',
                              target='notifier',
                              payload=payload)
        queue.add(task)

        # Create task to send push notifications
        queue = taskqueue.Queue()
        task = taskqueue.Task(method="POST",
                              url='/tasks/send_notifications',
                              target='notifier',
                              payload=payload)
        queue.add(task)

    def put(self, **kwargs):
        """Save the entity, then notify subscribers of any diffs."""
        is_update = self.is_saved()
        key = super(Feature, self).put(**kwargs)
        self.__notify_feature_subscribers_of_changes(is_update)
        return key

    # Metadata.
    created = db.DateTimeProperty(auto_now_add=True)
    updated = db.DateTimeProperty(auto_now=True)
    updated_by = db.UserProperty(auto_current_user=True)
    created_by = db.UserProperty(auto_current_user_add=True)

    # General info.
    category = db.IntegerProperty(required=True)
    name = db.StringProperty(required=True)
    summary = db.StringProperty(required=True, multiline=True)

    # Chromium details.
    bug_url = db.LinkProperty()
    blink_components = db.StringListProperty(
        required=True, default=[BlinkComponent.DEFAULT_COMPONENT])
    impl_status_chrome = db.IntegerProperty(required=True)
    shipped_milestone = db.IntegerProperty()
    shipped_android_milestone = db.IntegerProperty()
    shipped_ios_milestone = db.IntegerProperty()
    shipped_webview_milestone = db.IntegerProperty()
    shipped_opera_milestone = db.IntegerProperty()
    shipped_opera_android_milestone = db.IntegerProperty()

    owner = db.ListProperty(db.Email)
    footprint = db.IntegerProperty()
    visibility = db.IntegerProperty(required=True)

    #webbiness = db.IntegerProperty() # TODO: figure out what this is

    # Standards details.
    standardization = db.IntegerProperty(required=True)
    spec_link = db.LinkProperty()
    prefixed = db.BooleanProperty()

    ff_views = db.IntegerProperty(required=True, default=NO_PUBLIC_SIGNALS)
    ie_views = db.IntegerProperty(required=True, default=NO_PUBLIC_SIGNALS)
    safari_views = db.IntegerProperty(required=True,
                                      default=NO_PUBLIC_SIGNALS)

    ff_views_link = db.LinkProperty()
    ie_views_link = db.LinkProperty()
    safari_views_link = db.LinkProperty()

    # Web dev details.
    web_dev_views = db.IntegerProperty(required=True)
    doc_links = db.StringListProperty()
    sample_links = db.StringListProperty()
    #tests = db.StringProperty()

    search_tags = db.StringListProperty()

    comments = db.StringProperty(multiline=True)
class Entry(db.Model):
    """A blog entry with publishing state and tag-based querying."""
    #id = 1
    title = db.StringProperty(required=True, default="(Untitled)",
                              indexed=False)
    published = db.DateTimeProperty()
    updated = db.DateTimeProperty(auto_now=True)
    tags = db.StringListProperty()
    content = db.TextProperty()
    public = db.BooleanProperty(
    )  # None = Draft, True = Public, False = Private
    attachments = db.IntegerProperty(default=0, indexed=True)

    @classmethod
    def get_by_query(cls, query=None):
        """Fetch entries matching a query dict.

        Recognized keys: id, order, public, tags, start_time, end_time,
        limit (default 25), page, time_offset.

        Returns the single entry when 'id' is given, otherwise a
        (results, total) tuple.
        """
        # BUG FIX: the default was a mutable dict literal (`query={}`),
        # shared across calls — a classic Python pitfall.
        if query is None:
            query = {}
        if 'id' in query:
            result = [
                cls.get_by_id(query['id']),
            ]
        else:
            q = cls.all()
            if 'order' in query:
                q.order(query['order'])
            else:
                q.order("-published")
            if 'public' in query:
                q.filter("public = ", query['public'])
            if 'tags' in query:
                q.filter("tags IN ", query['tags'])
            # Times are normalized to UTC before comparing against the
            # stored (UTC) 'published' values.
            if 'start_time' in query:
                q.filter("published >= ",
                         lp.time.changetz(query['start_time'], '+0000'))
            if 'end_time' in query:
                q.filter("published < ",
                         lp.time.changetz(query['end_time'], '+0000'))
            if 'limit' in query:
                limit = query['limit']
            else:
                limit = 25
            if 'page' in query:
                offset = limit * (query['page'] - 1)
            else:
                offset = 0
            result = q.fetch(limit, offset)
            total = q.count()
        # Attach the datastore id and apply the requested timezone offset.
        for i in range(len(result)):
            result[i].id = result[i].key().id()
            if 'time_offset' in query:
                result[i].published = lp.time.changetz(
                    result[i].published, query['time_offset'])
        if 'id' in query:
            return result[0]
        else:
            return (result, total)

    @classmethod
    def save(cls, entries):
        """Normalize timestamps to UTC, validate tags, and persist.

        Raises lp.error.ValidationError when a tag contains a space.
        """
        if not isinstance(entries, (list, tuple)):
            entries = [
                entries,
            ]
        for i in range(len(entries)):
            entries[i].published = lp.time.changetz(entries[i].published,
                                                    '+0000')
            for tag in entries[i].tags:
                if ' ' in tag:
                    # This is just a safety validation. The Handler should be
                    # responsible for the validity of tag string.
                    raise lp.error.ValidationError(
                        "Tag cannot contain spaces.")
        return db.put(entries)

    @classmethod
    def delete_by_id(cls, ids):
        """Delete entries by a list of numeric ids (no fetch needed)."""
        keys = [db.Key.from_path(cls.kind(), id, parent=None) for id in ids]
        db.delete(keys)

    @classmethod
    def delete(cls, models):
        # NOTE(review): this classmethod shadows db.Model.delete (an
        # instance method); instance.delete() still resolves here in py2 —
        # confirm callers always pass model lists.
        db.delete(models)
class ICalendarSource(db.Model):
    """An external iCal (or Google Calendar / RSS) feed attached to a site."""
    site = db.ReferenceProperty(Eventsite, required=True)
    name = db.StringProperty(required=True, indexed=False)
    source_link = db.LinkProperty(indexed=False, required=False)
    ical_href = db.LinkProperty(required=True, indexed=False)
    status = db.StringProperty(required=True)
    trusted = db.BooleanProperty()
    default_tags = db.StringListProperty()
    submitted_by = db.ReferenceProperty(Profile, required=False,
                                        collection_name='icals_submitted')
    submitted_at = db.DateTimeProperty(auto_now_add=True)
    approved_by = db.ReferenceProperty(Profile, indexed=False,
                                       collection_name="icals_approved")
    approved_on = db.DateTimeProperty(indexed=False)
    last_fetch = db.DateTimeProperty(required=False)
    content = db.TextProperty(required=False)
    is_rssfeed = db.BooleanProperty()

    @property
    def approval_form(self):
        """An approval form prefilled from this source's current settings."""
        return sources.forms.ICalApprovalForm(
            initial={'trusted': self.trusted,
                     'tags': ','.join(self.default_tags)})

    @DerivedProperty
    def slug(self):
        return str(slugify(self.name))

    def fetch(self, started=None, timestamp=None):
        """Fetch the remote calendar and enqueue a task to split it.

        Google Calendar URLs are fetched through the gdata query API
        (expanded recurrences, ~3 months ahead); anything else is fetched
        as a raw iCal document.
        """
        if not started:
            started = str(datetime.now())  # NOTE(review): unused afterwards
        if not timestamp:
            timestamp = datetime.now().strftime("%Y%m%d%H%M")
        if self.ical_href.startswith('http://www.google.com/calendar/ical/'):
            # Translate the public ical URL into a gdata event query.
            gcal_id = unquote(self.ical_href[36:].split('/')[0])
            query = gdata.calendar.service.CalendarEventQuery(
                gcal_id, 'public', 'full-noattendees')
            query.start_min = self.site.today.strftime("%Y-%m-%d")
            query.recurrence_expansion_end = (
                date.today() + relativedelta(months=3)).strftime("%Y-%m-%d")
            query.start_max = (
                date.today() + relativedelta(months=3)).strftime("%Y-%m-%d")
            query.singleevents = 'true'
            result = urlfetch.fetch(query.ToUri(), allow_truncated=False,
                                    deadline=10)
            if result.status_code == 200:
                self._cache_and_enqueue(result, timestamp, 'gdata',
                                        '/sources/split_gdata/')
        else:
            result = urlfetch.fetch(self.ical_href, allow_truncated=True,
                                    deadline=5)
            if result.status_code == 200:
                self._cache_and_enqueue(result, timestamp, 'ical',
                                        '/sources/split_ical/')
        return

    def _cache_and_enqueue(self, result, timestamp, label, task_url):
        """Cache fetched calendar content and enqueue the split task.

        Shared tail of fetch(); deduplicates the previously copy-pasted
        gdata/ical success branches. `label` only affects log output.
        """
        detection = chardet.detect(result.content)
        self.last_fetch = datetime.now()
        self.put()
        cache_key = "%s-%s-%s" % (self.site.slug, self.slug, timestamp)
        # 10-minute window for the split task to pick the content up.
        memcache.add(cache_key,
                     result.content.decode(detection['encoding']), 600)
        logging.warning("cached %s with key %s" % (label, cache_key))
        # Using cache_key as the task name also deduplicates enqueues.
        taskqueue.add(url=task_url,
                      params={'ical_key': self.key(),
                              'cache_key': cache_key,
                              'timestamp': timestamp},
                      name=cache_key)
        logging.warning("enqueued splitting of %s" % self.ical_href)
class Resource(DictModel):
    """Container for all kinds of resource."""

    title = db.StringProperty(required=True)
    description = db.StringProperty()
    author = db.ReferenceProperty(Author, collection_name='author_one_set')
    second_author = db.ReferenceProperty(Author,
                                         collection_name='author_two_set')
    url = db.StringProperty()
    social_url = db.StringProperty()
    browser_support = db.StringListProperty()
    update_date = db.DateProperty()
    publication_date = db.DateProperty()
    #generic tags and html5 feature group tags('offline', 'multimedia', etc.)
    tags = db.StringListProperty()
    draft = db.BooleanProperty(default=True)  # Don't publish by default.

    @classmethod
    def get_all(self, order=None, limit=None, qfilter=None):
        """Fetch published (non-draft) resources, memcached per
        order/filter/limit combination."""
        limit = limit or common.MAX_FETCH_LIMIT

        key = '%s|tutorials' % (common.MEMCACHE_KEY_PREFIX, )

        if order is not None:
            key += '|%s' % (order, )

        if qfilter is not None:
            key += '|%s%s' % (qfilter[0], qfilter[1])

        key += '|%s' % (str(limit), )

        #import logging
        #logging.info(key)

        results = memcache.get(key)
        if results is None:
            query = self.all()
            # NOTE(review): order may be None here; confirm callers always
            # pass an order or that query.order(None) is acceptable.
            query.order(order)
            if qfilter is not None:
                query.filter(qfilter[0], qfilter[1])
            query.filter('draft =', False)  # Never return drafts by default.
            results = query.fetch(limit=limit)
            memcache.set(key, results)

        return results

    @classmethod
    def get_tutorials_by_author(self, author_id):
        """Fetch an author's non-draft resources (both author slots),
        newest first, memcached per author."""
        tutorials_by_author = memcache.get(
            '%s|tutorials_by|%s' % (common.MEMCACHE_KEY_PREFIX, author_id))

        if tutorials_by_author is None:
            # BUG FIX (perf): the author entity was fetched from the
            # datastore twice — once per collection. Fetch it once.
            author = Author.get_by_key_name(author_id)
            tutorials_by_author = [
                x for x in author.author_one_set if not x.draft
            ]
            temp2 = [x for x in author.author_two_set if not x.draft]
            tutorials_by_author.extend(temp2)

            # Order by published date. Latest first.
            tutorials_by_author.sort(key=lambda x: x.publication_date,
                                     reverse=True)

            memcache.set(
                '%s|tutorials_by|%s' % (common.MEMCACHE_KEY_PREFIX,
                                        author_id), tutorials_by_author)

        return tutorials_by_author
class Tiddlywiki(db.Model):
    """A hosted TiddlyWiki: a named, per-owner collection of tiddler keys
    rendered into the empty.html template or an RSS-like XML feed.

    keyname: name
    parent: owner
    """
    name = db.StringProperty(required=True)
    namespace = db.ReferenceProperty(Namespace, required=True)
    # When True, only the owner may view (see accessible_by).
    private_access = db.BooleanProperty(required=True)
    owner = db.ReferenceProperty(User, required=True)
    title = db.StringProperty()
    subtitle = db.StringProperty()
    # Sorted list of Tiddler datastore keys contained in this wiki.
    tiddlers = db.ListProperty(db.Key)

    @classmethod
    def create_or_update(cls, name, owner, namespace, private_access,
                         title='', subtitle='', tiddlers=''):
        """Create or overwrite the (owner, name) wiki; only the owner may.

        Raises OwnerException when the current user is not the owner.
        """
        # Only owner can update an instance
        t = Tiddlywiki(key_name=name, parent=owner, owner=owner.key(),
                       name=name, namespace=namespace.key(),
                       private_access=private_access,
                       title=title, subtitle=subtitle, tiddlers=tiddlers)
        if t and not t.own_by(User.get_current_user()):
            if User.get_current_user():
                username = User.get_current_user().username
            else:
                username = '******'  # anonymous user
            raise OwnerException(
                "user '%s' try to update a tiddlywiki owns by '%s'." %
                (username, owner.username))
        t.put()
        return t

    @classmethod
    def list_in_html(cls, user, type_url):
        """Render the user's wikis as an HTML <ul> of links under type_url."""
        query = db.Query(Tiddlywiki)
        query.ancestor(user)
        results = query.fetch(9999)
        r = "<ul>\n"
        for tiddlywiki in results:
            r += "<li><a href=\"%s/%s\">%s</a></li>\n" % (
                type_url, util.url_encode(tiddlywiki.name), tiddlywiki.name)
        r += "</ul>\n"
        return r

    @classmethod
    def query_for_user(cls, owner, for_user):
        """Return owner's wikis visible to for_user.

        Non-owners (or anonymous) only see non-private wikis.
        """
        if (not owner or not for_user
                or (owner.system_user != for_user.system_user)):
            query = db.GqlQuery(
                "SELECT * FROM Tiddlywiki " + " WHERE owner = :1" +
                " AND private_access = :2 " + " ORDER BY name", owner, False)
        else:
            query = db.GqlQuery(
                "SELECT * FROM Tiddlywiki " + " WHERE owner = :1" +
                " ORDER BY name", owner)
        r = query.fetch(9999)
        return r

    def __replace_chunk(self, source, begin_marker, end_marker, sub):
        """Replace the text between begin_marker and end_marker (exclusive)
        with sub; return source unchanged if the markers don't match."""
        match = re.match("^(.*?%s).*?(%s.*?)$" % (begin_marker, end_marker),
                         source, re.DOTALL)
        if match:
            begin = match.group(1)
            end = match.group(2)
            source = begin + sub + end
        return source

    def __update_markup_block(self, source, block_name, tiddler_name):
        """Fill a <!--NAME-START-->/<!--NAME-END--> block with the text of
        tiddler_name, resolved wiki-local first, then namespace, then shadow.
        """
        # tiddler in this tiddlywiki
        tiddler = Namespace.get_tiddler(self.name + "::" + tiddler_name,
                                        self.owner.username,
                                        self.namespace.name)
        if not tiddler:
            # tiddler in Namespace
            tiddler = Namespace.get_tiddler(tiddler_name,
                                            self.owner.username,
                                            self.namespace.name)
        if not tiddler:
            # tiddler in Shadow
            tiddler = Namespace.get_tiddler(tiddler_name)
        if tiddler:
            source = self.__replace_chunk(source,
                                          "<!--%s-START-->" % block_name,
                                          "<!--%s-END-->" % block_name,
                                          "\n" + tiddler.text + "\n")
        return source

    def accessible_by(self, user):
        """True when the wiki is public, or user is the owner."""
        return not self.private_access or (user and self.own_by(user))

    def addTiddler(self, tiddler):
        """Add a tiddler's key (if absent), keep the list sorted, and save."""
        if tiddler:
            if not self.tiddlers:
                self.tiddlers = []
            if self.tiddlers.count(tiddler.key()) == 0:
                self.tiddlers.append(tiddler.key())
                self.tiddlers.sort()
                self.put()

    def removeTiddler(self, tiddler):
        """Remove a tiddler's key from the list and save."""
        if tiddler:
            if self.tiddlers:
                self.tiddlers.remove(tiddler.key())
                self.put()

    def delete(self):
        """Delete this wiki; only the owner may.

        Raises OwnerException when the current user is not the owner.
        """
        # Only owner can delete an instance
        if not self.own_by(User.get_current_user()):
            raise OwnerException(
                "user '%s' try to update a namespace owns by '%s'." %
                (User.get_current_user().username, self.owner.username))
        return super(Tiddlywiki, self).delete()

    def display_in_html(self, out, url):
        """Render the wiki into `out` using the empty.html template.

        Substitutes title/subtitle/url shadow tiddlers, fills the markup
        blocks, then streams the store area with all contained tiddlers
        (plus UploadTiddlerPlugin and, for the owner, a transient
        zzTiddlyHomeTweaks tiddler that is deleted after rendering).
        """
        #get template
        path = os.path.join(os.path.dirname(__file__), 'empty.html')
        f = open(path)
        try:
            data = f.read()
        finally:
            f.close()
        #Edit title and subtitle in html and shadow titlers
        data = self.__replace_chunk(data, "<title>", "</title>",
                                    self.title + ' - ' + self.subtitle)
        data = re.sub(r'SiteTitle: "My TiddlyWiki"',
                      'SiteTitle: "' + self.title + '"', data)
        data = re.sub(
            r'SiteSubtitle: "a reusable non-linear personal web notebook"',
            'SiteSubtitle: "' + self.subtitle + '"', data)
        data = re.sub(r'SiteUrl: "http://www\.tiddlywiki\.com/"',
                      'SiteUrl: "' + url + '"', data)
        #Update markupBlock
        data = self.__update_markup_block(data, "PRE-HEAD", "MarkupPreHead")
        data = self.__update_markup_block(data, "POST-HEAD", "MarkupPostHead")
        data = self.__update_markup_block(data, "PRE-BODY", "MarkupPreBody")
        data = self.__update_markup_block(data, "POST-SCRIPT",
                                          "MarkupPostBody")
        #find storearea and insert tiddlers
        match = re.match(
            "^(.*?<div id=\"storeArea\">).*?(</div>.*?<!--POST-STOREAREA-->.*?)$",
            data, re.DOTALL)
        if match:
            begin = match.group(1)
            end = match.group(2)
            out.write(begin)
            # Collect contained tiddlers, keyed (and later sorted) by title.
            tiddlers = {}
            for t in self.tiddlers:
                if t:
                    tiddler = Tiddler.get(t)
                    if tiddler:
                        tiddlers[tiddler.title] = tiddler
            # add dynamic tiddlywiki
            if 'UploadTiddlerPlugin' not in tiddlers:
                tiddlers['UploadTiddlerPlugin'] = Namespace.get_tiddler(
                    'UploadTiddlerPlugin')
            tiddler = None
            if self.namespace.own_by(User.get_current_user()):
                # Owner view: inject a transient per-wiki tweaks tiddler.
                tiddler = Tiddler.create_or_update(
                    self.namespace,
                    'zzTiddlyHomeTweaks',
                    self.owner.username,
                    tags='systemConfig excludeLists excludeSearch',
                    newTitle='zzTiddlyHomeTweaks',
                    text=config.tweaks_tiddler % {
                        'username': self.owner.username,
                        'filename': self.name,
                        'storeUrl': config.storeTiddler_url
                    })
                tiddlers['zzTiddlyHomeTweaks'] = tiddler
            keys = tiddlers.keys()
            keys.sort()
            for ti in keys:
                if ti:
                    out.write(tiddlers[ti].displayInStorearea(self.name))
            out.write(end)
            # The tweaks tiddler is only for this response; remove it again.
            if tiddler:
                tiddler.delete()
        else:
            raise Error("Maformed empty.html file")

    def display_in_xml(self, out):
        """Render the wiki's tiddlers as an XML feed into `out`, most
        recently modified first (unmodified ones sort as 'today')."""
        #get template
        path = os.path.join(os.path.dirname(__file__), 'empty.xml')
        f = open(path)
        try:
            data = f.read()
        finally:
            f.close()
        path = os.path.join(os.path.dirname(__file__), 'item.xml')
        f = open(path)
        try:
            item = f.read()
        finally:
            f.close()
        #find storearea
        items = ''
        tiddlers = []
        for t in self.tiddlers:
            t = Tiddler.get(t)
            if t and t.modified:
                tiddlers.append((t.modified, t))
            else:
                tiddlers.append((datetime.today(), t))
        tiddlers.sort()
        tiddlers.reverse()
        for (modified, t) in tiddlers:
            if t:
                # Prefer pre-rendered HTML over raw text.
                if t.html:
                    text = util.html_escape(t.html)
                else:
                    text = util.html_escape(t.text)
                items += item % {
                    'title': t.title,
                    'text': text,
                    'tag': t.tags,
                    'link': config.TH_url + self.owner.username + '/' +
                            config.namespace_name + '/' +
                            self.namespace.name + '/' +
                            config.tiddler_name + '/' +
                            util.url_encode(t.title) + '.html',
                    'date': modified.strftime("%Y-%m-%d %H:%M"),
                }
        data = data % {
            'title': self.title,
            'link': 'http://bidix.appspot.com/',
            'description': self.subtitle,
            'username': self.owner.username,
            'pubDate': datetime.utcnow().strftime("%Y-%m-%d %H:%M"),
            'lastBuildDate': datetime.utcnow().strftime("%Y-%m-%d %H:%M"),
            'items': items,
        }
        out.write(data)

    def store(self, file):
        """Replace this wiki's tiddlers from an uploaded TiddlyWiki file.

        Parses the store area between <div id="storeArea"> and
        <!--POST-STOREAREA--> and re-creates each tiddler div.
        """
        file = file.decode('utf-8')
        if file:
            #find storearea
            storeareaMatch = re.search(
                "(<div id=\"storeArea\">.*?)<!--POST-STOREAREA-->", file,
                re.DOTALL)
            if storeareaMatch:
                storearea = storeareaMatch.group()
                if storearea:
                    tiddlers = re.findall("(<div title=.*?</div>)",
                                          storearea, re.DOTALL)
                    self.tiddlers = []
                    for t in tiddlers:
                        ti = Tiddler.from_div(self.namespace, t, self.name)
                        self.tiddlers.append(ti.key())
                    self.put()

    def own_by(self, user):
        """True when user is this wiki's parent (i.e. its owner)."""
        return user and self.parent() and (self.parent().key() == user.key())
class EntityPropertyMetadata(BaseModel):
    """Describes one property of a modeled entity: its type, constraints,
    and how it should be displayed/edited in generated views."""
    # Type and key/constraint info.
    type_info = db.ReferenceProperty(TypeInfo, required=False)
    max_length = db.IntegerProperty(default=0)
    is_primary_key = db.BooleanProperty(default=False)
    is_foreign_key = db.BooleanProperty(default=False)
    is_required = db.BooleanProperty(default=False)
    is_search_key = db.BooleanProperty(default=False)
    is_full_text = db.BooleanProperty(default=False)
    # View participation flags.
    in_list_view = db.BooleanProperty(default=False)
    in_show_view = db.BooleanProperty(default=False)
    in_edit_view = db.BooleanProperty(default=False)
    # Display layout hints.
    display_width = db.IntegerProperty(default=0)
    display_height = db.IntegerProperty(default=0)
    display_group = db.StringProperty()
    display_index = db.IntegerProperty(default=0)
    # Value constraints and defaults.
    enumeration = db.ReferenceProperty(Enumeration, required=False)
    min_value = db.IntegerProperty()
    max_value = db.IntegerProperty()
    default_value = db.StringProperty()
    flags = db.IntegerProperty()
    # Owning entity and relation info.
    entity_metadata = db.ReferenceProperty(EntityMetadata, required=False)
    entity_relation = db.StringProperty()
    reg_exp = db.StringProperty()  # validation pattern
    # Lifecycle / generation state.
    is_deleted = db.BooleanProperty(default=False)
    has_generated = db.BooleanProperty(default=False)
    # Naming for generated code/UI.
    human_name = db.StringProperty()
    camel_name = db.StringProperty()
    is_read_only = db.BooleanProperty(default=False)
    display_type = db.StringProperty()
    ref_type = db.StringProperty()
    metadata = db.ReferenceProperty(Metadata, required=False)
    org = db.StringProperty()
class DirEntry(db.Model):
    """A directory entry from Dropbox, or Fake.

    Fake entries have no parent_dir and are not directories.
    Of the real entries, only the unique root (key name '/') has no
    parent_dir.
    """
    # Parent directory; None for the root and for fake file entries.
    parent_dir = db.SelfReferenceProperty(collection_name='dir_members')
    modified = db.DateTimeProperty(required=True, default=BEGINNING_OF_TIME)
    revision = db.IntegerProperty(required=True, default=0)
    is_dir = db.BooleanProperty(required=True, default=False)
    bytes = db.IntegerProperty(required=True, default=0)
    # Dropbox folder content hash; trailing underscore avoids the builtin.
    hash_ = db.TextProperty()

    @classmethod
    def get_root_entry(cls):
        # Singleton root directory, keyed '/'.
        return cls.get_or_insert(key_name='/', is_dir=True)

    def accept_visitor(self, visitor):
        """
        Visitor must support
        visitor.visit_dir(entry, member_responses)
        visitor.visit_file(entry)
        """
        if self.is_dir:
            # Depth-first: collect member responses before visiting self.
            responses = [e.accept_visitor(visitor) for e in self.dir_members]
            return visitor.visit_dir(self, responses)
        return visitor.visit_file(self)

    def __str__(self):
        return '%s %s rev. %d@%s' % (
            ((self.is_dir and 'D') or 'F'),
            self.get_path(),
            self.revision,
            self.modified.isoformat())

    def delete_below(self):
        """
        Delete all nodes below, including descendants. Does not delete self.
        TODO: reimplement to use fast delete
        """
        for m in self.dir_members:
            m.delete_below()
            m.delete()
        # Also delete any datastore descendants (entity-group children).
        for d in db.query_descendants(self).run():
            d.delete()

    def get_path(self):
        # The full path doubles as the datastore key name.
        return self.key().name()

    def download_content(self, gov):
        """
        Download and return the content of the corresponding file.
        Returns None for directories; raises DropboxError on HTTP >= 400.
        """
        if self.is_dir:
            return
        site = gov.site
        filename = site.dropbox_base_dir + self.get_path()
        conn = gov.db_client.get_file(site.dropbox_config['root'], filename)
        if conn.status >= 400:
            conn.close()
            msg = 'While reading %s. Reason: %s' % (filename, conn.reason)
            gov.access_error_notify(msg)
            raise DropboxError(conn.status, msg)
        content = conn.read()
        logging.debug('Downloaded revision %s. Status: %s' % (filename, conn.status))
        conn.close()
        return content

    def is_root(self):
        """
        We only allow file entries to be fake
        """
        if self.parent_dir is None and self.is_dir:
            assert self.key().name() == '/', 'DirEntry %s is an orphan directory!' % self
            return True
        # NOTE(review): falls through to an implicit None (falsy) here.

    def is_fake(self):
        # True for parentless non-directory entries; a parentless directory
        # must be the root.  Implicitly returns None (falsy) otherwise.
        if self.parent_dir is None:
            if self.is_dir:
                assert self.key().name() == '/', 'DirEntry %s is an orphan directory!' % self
            else:
                return True

    # (converter, dropbox-metadata key, model attribute) triples; note that
    # Dropbox's 'hash' maps to 'hash_' to avoid clashing with the builtin.
    field_converters = [
        (lambda a: parse_dropbox_datetime(a), 'modified', 'modified'),
        (lambda a: a, 'revision', 'revision'),
        (lambda a: a, 'is_dir', 'is_dir'),
        (lambda a: a, 'bytes', 'bytes'),
        (lambda a: a, 'hash', 'hash_'),
    ]

    @classmethod
    def make_attr_dict(cls, metadata_dict):
        # Translate a Dropbox metadata dict into model-attribute kwargs,
        # skipping metadata keys absent from the input.
        return dict(
            (attr_name, f(metadata_dict.get(meta_name)))
            for (f, meta_name, attr_name) in cls.field_converters
            if meta_name in metadata_dict)

    def set_from_dict(self, attr_dict):
        """
        Update an entry according to dict. Return list of changed attributes.
        Note that 'hash' is 'mapped to hash_' to avoid clash with builtin
        """
        modlist = []
        for f, dict_name, attr_name in self.field_converters:
            new_value = f(attr_dict.get(dict_name, None))
            if new_value != getattr(self, attr_name):
                setattr(self, attr_name, new_value)
                modlist.append(dict_name)
        return modlist

    def _sync(self, response, normalize_path, update, remove, visit):
        """
        A helper for handlers.dropbox.perform_sync
        Adds entry objects to update/remove iff datastore put/delete is required
        Adds all directories below to 'visit'
        Will append to update, remove and visit.
        """
        data = response.data
        logging.debug('DBSync: Dropbox response data:%s' % data)
        ## Case -A: unknown
        if response.status == 404:
            logging.debug('DBSync: Dropbox returned 404')
            remove.append(self)
            return
        ## Case A: single file
        if (response.status == 200 and not data['is_dir']) or (response.status == 304 and not self.is_dir):
            logging.debug('DirEntry: Handling a single file %s' % (self))
            ## Corner case -- changed status from dir
            if self.is_dir:
                logging.info('DirEntry %s changed from dir to file' % self)
                remove.append(self)
            ml = self.set_from_dict(data)
            if ml:
                update.append(self)
            return
        ## Case B: Unmodified directory (folder hash matched)
        if response.status == 304:
            logging.debug('Matching hash for %s' % self)
            visit.extend([de for de in self.dir_members if de.is_dir])
            return
        ## Case C: Directory without matching hash
        ## Corner case: changed status from file to dir:
        if not self.is_dir:
            logging.debug('Entry %s changed from file to dir' % self)
            remove.append(self)
        self_modlist = self.set_from_dict(data)
        assert self.is_dir, 'Somehow %s is not a dir...?' % self
        logging.debug('Parsing full listing for %s' % self)
        ## Build dictionary of current dropbox and datastore entries
        db_contents = dict([(normalize_path(e['path']), e) for e in data['contents']])
        ds_contents = dict([(e.get_path(), e) for e in self.dir_members])
        db_keys = set(db_contents.keys())
        ds_keys = set(ds_contents.keys())
        ## Handle the three Venn-sectors one at a time:
        db_not_ds = db_keys.difference(ds_keys)
        db_and_ds = db_keys.intersection(ds_keys)
        ds_not_db = ds_keys.difference(db_keys)
        # ds_not_db: Removed from dropbox
        remove.extend([ds_contents[k] for k in ds_not_db])
        # db_not_ds: Created in dropbox
        for k, entry in itertools.izip(
                db_not_ds, DirEntry.get_by_key_name(db_not_ds)):
            # First, check that there is no orphan around:
            if not entry:
                entry = DirEntry.get_or_insert(
                    key_name=k,
                    parent_dir=self,
                    **self.make_attr_dict(db_contents[k]))
                logging.debug('Creating entry: %s' % entry)
            else:
                # Re-adopt an orphan left by an earlier partial sync.
                logging.debug('Found orphan entry: %s' % entry)
                modlist = entry.set_from_dict(db_contents[k])
                entry.parent_dir = self
            # NOTE(review): nesting of this assert reconstructed from the
            # flattened source; presumed to hold for both branches (new
            # entries carry no folder hash yet).
            assert entry.hash_ is None
            if entry.is_dir:
                # Visitor will check is_saved and save
                visit.append(entry)
            else:
                update.append(entry)
        # db_and_ds: Existing entries
        for k in db_and_ds:
            entry = ds_contents[k]
            if db_contents[k]['is_dir']:
                if not entry.is_dir:
                    logging.debug('Entry %s changed file->dir, should be handled when visiting' % entry)
                # Visit all dirs.
                # Handle file -> dir corner case when visiting
                visit.append(entry)
            else:
                # only update files now:
                modlist = entry.set_from_dict(db_contents[k])
                if modlist:
                    update.append(entry)
                # NOTE(review): set_from_dict above already set is_dir from
                # the Dropbox data (False here), so this branch looks
                # unreachable — possibly intended to run before the update.
                if entry.is_dir:
                    # Status change dir -> file
                    remove.append(entry)
        ## Maybe update self (new hash/initial call)
        if self_modlist or not self.is_saved():
            update.append(self)

    @classmethod
    def flush_resources(cls):
        """Flush all resources from database"""
        root = cls.get_root_entry()
        root.delete_below()
        root.delete()

    @classmethod
    def verify_all_resources(cls, gov):
        """
        This function will call the 'handle_metadata_changes' for all
        resources in the exact same order done if everything was being
        resynced from Dropbox after a purge.
        To completely fix the database, a call should be followed by a call
        to Resource.find_orphans
        """
        # Find the root and any fake resources:
        roots = []
        nmax = 0
        # Grow the fetch window until the query returns fewer results than
        # requested, i.e. we have all parentless entries.
        while len(roots) == nmax:
            nmax += 100
            roots = cls.all().filter('parent_dir = ', None).order('__key__').fetch(nmax)
        if len(roots) == 0:
            logging.debug('verify_all_resources called on empty database')
            return
        # '/' sorts first by key, so the root is the first result.
        root = roots.pop(0)
        assert root.is_root(), "Weirdness - no root!"
        # First the real dropbox files:
        visit = [root]
        while visit:
            visiting = visit.pop()
            logging.debug('VerifyAll: Processing all members of %s' % visiting)
            members = visiting.dir_members
            visit.extend([d for d in members if d.is_dir])
            gov.handle_metadata_changes(updated=[f for f in members if not f.is_dir] + [visiting])
        # only fakes are left in roots:
        if roots:
            assert all(f.is_fake() for f in roots), 'Not all remaining roots are fake!'
            logging.debug('VerifyAll: Processing fake files %s' % ', '.join(str(f) for f in roots))
            gov.handle_metadata_changes(updated=roots)