class UserCounters(ndb.Model):
    name = ndb.StringProperty(indexed=False)
    lastLogin = ndb.DateTimeProperty(auto_now_add=True)
    videoWatchHistory = ndb.StringProperty()
    loginCounter = ndb.IntegerProperty()
    videoWatchCounter = ndb.IntegerProperty()

class Purchase(ndb.Model):
    price = ndb.IntegerProperty()

class Slip(ndb.Model):
    number = ndb.IntegerProperty(required=True)
    current_boat = ndb.StringProperty()
    arrival_date = ndb.StringProperty()
    departure_history = ndb.StringProperty(repeated=True)

class DataFragment(ndb.Model):
    start = ndb.IntegerProperty()
    end = ndb.IntegerProperty()

class BusyUser(ndb.Model):
    worker_count = ndb.IntegerProperty()
    shout = ndb.BooleanProperty()

class TaskToRun(ndb.Model):
    """Defines a TaskRequest ready to be scheduled on a bot.

    This specific request for a specific task can be executed multiple times,
    each execution will create a new child task_result.TaskResult of
    task_result.TaskResultSummary.

    This entity must be kept small and contain the minimum data to enable the
    queries for two reasons:
    - it is updated inside a transaction for each scheduling event, e.g. when
      a bot gets assigned this task item to work on.
    - all the ones currently active are fetched at once in a cron job.

    The key id is:
    - lower 4 bits is the try number. The only supported values are 1 and 2.
    - next 5 bits are TaskResultSummary.current_task_slice (shifted by 4
      bits).
    - rest is 0.
    """
    # This entity is used in transactions. It is not worth using either cache.
    # https://cloud.google.com/appengine/docs/standard/python/ndb/cache
    _use_cache = False
    _use_memcache = False

    # Used to know when retries are enqueued. The very first TaskToRun
    # (try_number=1) has the same value as TaskRequest.created_ts, but
    # following ones have created_ts set at the time the new entity is
    # created: when the task is reenqueued.
    created_ts = ndb.DateTimeProperty(indexed=False)

    # Moment by which this TaskSlice has to be requested by a bot.
    # expiration_ts is based on TaskSlice.expiration_ts. This is used to
    # figure out TaskSlice fallback and enable a cron job query to clean up
    # stale tasks.
    expiration_ts = ndb.DateTimeProperty()

    # Everything above is immutable, everything below is mutable.

    # priority and request creation timestamp are mixed together to allow
    # queries to order the results by this field, sorting by priority first
    # and then timestamp. See _gen_queue_number() for details. This value is
    # only set when the task is available to run, i.e.
    # ndb.TaskResult.query(ancestor=self.key).get().state==AVAILABLE.
    # If this task is not ready to be scheduled, it must be None.
    queue_number = ndb.IntegerProperty()

    @property
    def task_slice_index(self):
        """Returns the TaskRequest.task_slice() index this entity represents
        as pending.
        """
        return task_to_run_key_slice_index(self.key)

    @property
    def try_number(self):
        """Returns the try number, 1 or 2."""
        return task_to_run_key_try_number(self.key)

    @property
    def is_reapable(self):
        """Returns True if the task is ready to be scheduled."""
        return bool(self.queue_number)

    @property
    def request_key(self):
        """Returns the TaskRequest ndb.Key that is parent to the task to
        run."""
        return task_to_run_key_to_request_key(self.key)

    @property
    def run_result_key(self):
        """Returns the TaskRunResult ndb.Key that will be created for this
        TaskToRun once reaped.
        """
        summary_key = task_pack.request_key_to_result_summary_key(
            self.request_key)
        return task_pack.result_summary_key_to_run_result_key(
            summary_key, self.try_number)

    @property
    def task_id(self):
        """Returns an encoded task id for this TaskToRun.

        Note: this includes the try_number but not the task_slice_index.
        """
        return task_pack.pack_run_result_key(self.run_result_key)

    def to_dict(self):
        """Purely used for unit testing."""
        out = super(TaskToRun, self).to_dict()
        # Consistent formatting makes it easier to reason about.
        if out['queue_number']:
            out['queue_number'] = '0x%016x' % out['queue_number']
        out['try_number'] = self.try_number
        out['task_slice_index'] = self.task_slice_index
        return out

    def _pre_put_hook(self):
        super(TaskToRun, self)._pre_put_hook()
        if self.expiration_ts is None and self.queue_number:
            raise datastore_errors.BadValueError(
                '%s.queue_number must be None when expiration_ts is None'
                % self.__class__.__name__)

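# A hedged sketch (not from the original module) of the key id layout that
# the TaskToRun docstring describes: the try number occupies the lower 4 bits
# and the task slice index the next 5 bits. The helper names here are
# hypothetical; the module itself relies on task_to_run_key_try_number() and
# task_to_run_key_slice_index().
def _pack_task_to_run_id(try_number, task_slice_index):
    """Packs a try number (1 or 2) and a slice index into a key id."""
    assert try_number in (1, 2), try_number
    assert 0 <= task_slice_index < (1 << 5), task_slice_index
    return try_number | (task_slice_index << 4)


def _unpack_task_to_run_id(key_id):
    """Returns (try_number, task_slice_index) for a packed key id."""
    return key_id & 0xF, (key_id >> 4) & 0x1F

# e.g. _pack_task_to_run_id(2, 3) == 0x32,
# and _unpack_task_to_run_id(0x32) == (2, 3).
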
class Entry(ndb.Model):
    guid = ndb.StringProperty(required=True)
    creating = ndb.BooleanProperty(default=False)
    title = ndb.StringProperty(indexed=False)
    summary = ndb.TextProperty(indexed=False)
    link = ndb.StringProperty()
    short_url = ndb.StringProperty()
    added = ndb.DateTimeProperty(auto_now_add=True)
    published = ndb.BooleanProperty(default=False)
    published_post = ndb.BooleanProperty(default=False)
    published_channel = ndb.BooleanProperty(default=False)
    overflow = ndb.BooleanProperty(default=False)
    overflow_reason = ndb.IntegerProperty(default=0)
    published_at = ndb.DateTimeProperty()
    status = ndb.IntegerProperty(default=ENTRY_STATE.ACTIVE)
    language = ndb.StringProperty()
    extra_info = ndb.JsonProperty(indexed=False)
    image_url = ndb.StringProperty()
    image_width = ndb.IntegerProperty()
    image_height = ndb.IntegerProperty()
    thumbnail_image_url = ndb.StringProperty()
    thumbnail_image_width = ndb.IntegerProperty()
    thumbnail_image_height = ndb.IntegerProperty()
    video_oembed = ndb.PickleProperty(indexed=False)
    tags = ndb.StringProperty(repeated=True)
    author = ndb.StringProperty(indexed=False)
    feed_item = ndb.PickleProperty(indexed=False)
    meta_tags = ndb.JsonProperty(indexed=False)
    images_in_html = ndb.JsonProperty(repeated=True, indexed=False)

    def to_json(self, feed=None, format=False):
        include = ['title', 'link', 'published', 'published_at', 'added']
        data = {}
        for attr in include:
            data[attr] = getattr(self, attr, None)

        # Format both timestamps; the keys in data are 'published_at' and
        # 'added'.
        for dt in ['published_at', 'added']:
            if data.get(dt):
                data['%s_in_secs' % dt] = time.mktime(data[dt].timetuple())
                data[dt] = format_date(data[dt])

        if self.overflow:
            data['overflow_reason'] = OVERFLOW_REASON.for_display(
                self.overflow_reason)

        if self.key:
            data['id'] = self.key.urlsafe()
            feed = feed or self.key.parent().get()

        if format:
            data['html'] = {}
            for post, kind in feed.format_entry_for_adn(self).get_result():
                data['html'][kind] = build_html_from_post(post)
            if feed and feed.channel_id:
                data['alert'] = broadcast_format_for_adn(feed, self)

        width = None
        height = None
        if feed.include_thumb and self.thumbnail_image_url:
            data['thumbnail_image_url'] = self.thumbnail_image_url
            width = self.thumbnail_image_width
            height = self.thumbnail_image_height

        if feed.include_video and self.video_oembed:
            data['thumbnail_image_url'] = self.video_oembed['thumbnail_url']
            width = self.video_oembed['thumbnail_width']
            height = self.video_oembed['thumbnail_height']

        if width and height:
            width, height = fit_to_box(width, height, 100, 100)
            data['thumbnail_image_width'] = width
            data['thumbnail_image_height'] = height

        return data

    @classmethod
    def entry_preview(cls, entries, feed, format=False):
        return [entry.to_json(feed=feed, format=format) for entry in entries]

    @classmethod
    @ndb.synctasklet
    def entry_preview_for_feed(cls, feed):
        parsed_feed, resp = yield fetch_parsed_feed_for_url(feed.feed_url)

        # Try and fix bad feed_urls on the fly.
        new_feed_url = find_feed_url(resp, feed.feed_url)
        if new_feed_url:
            parsed_feed, resp = yield fetch_parsed_feed_for_url(new_feed_url)

        entries = []
        futures = []
        for item in parsed_feed.entries[0:3]:
            futures.append((item, prepare_entry_from_item(item, feed=feed)))

        for item, future in futures:
            entry = cls(**(yield future))
            if entry:
                entries.append(entry)

        raise ndb.Return(cls.entry_preview(entries, feed, format=True))

    @classmethod
    @ndb.tasklet
    def drain_queue(cls, feed):
        more = True
        cursor = None
        while more:
            entries, cursor, more = yield cls.latest_unpublished(
                feed).fetch_page_async(25, start_cursor=cursor)
            for entry in entries:
                entry.overflow = True
                entry.published = True
                entry.overflow_reason = OVERFLOW_REASON.FEED_OVERFLOW
                yield entry.put_async()

    @classmethod
    @ndb.tasklet
    def publish_for_feed(cls, feed, skip_queue=False):
        if not feed:
            logger.info("Asked to publish for a non-existent feed")
            raise ndb.Return(0)

        minutes_schedule = DEFAULT_PERIOD_SCHEDULE
        max_stories_to_publish = MAX_STORIES_PER_PERIOD
        if feed.manual_control:
            minutes_schedule = feed.schedule_period
            max_stories_to_publish = feed.max_stories_per_period

        if feed.dump_excess_in_period:
            max_stories_to_publish = 1

        # How many stories have been published in the last period_length.
        now = datetime.now()
        period_ago = now - timedelta(minutes=minutes_schedule)
        latest_published_entries = yield cls.latest_published(
            feed, since=period_ago).count_async()
        max_stories_to_publish = (
            max_stories_to_publish - latest_published_entries)

        entries_posted = 0
        # If we still have time left in this period, publish some more.
        if max_stories_to_publish > 0 or skip_queue:
            # If we are skipping the queue.
            if skip_queue:
                max_stories_to_publish = max_stories_to_publish or 1

            latest_entries = yield cls.latest_unpublished(feed).fetch_async(
                max_stories_to_publish + 1)

            more_to_publish = False
            if len(latest_entries) > max_stories_to_publish:
                more_to_publish = True
                latest_entries = latest_entries[0:max_stories_to_publish]

            for entry in latest_entries:
                yield publish_entry(entry, feed)
                entries_posted += 1

            if not more_to_publish:
                feed.is_dirty = False
                yield feed.put_async()

            if more_to_publish and feed.dump_excess_in_period:
                yield cls.drain_queue(feed)

        raise ndb.Return(entries_posted)

    @classmethod
    @ndb.tasklet
    def process_parsed_feed(cls, parsed_feed, feed, overflow,
                            overflow_reason=OVERFLOW_REASON.BACKLOG):
        raise ndb.Return(
            process_parsed_feed(cls, parsed_feed, feed, overflow,
                                overflow_reason))

    @classmethod
    @ndb.tasklet
    def update_for_feed(cls, feed, publish=False, skip_queue=False,
                        overflow=False,
                        overflow_reason=OVERFLOW_REASON.BACKLOG):
        parsed_feed, resp, feed = yield fetch_parsed_feed_for_feed(feed)
        num_new_items = 0
        drain_queue = False
        # There should be no data in here anyway.
        if resp.status_code != 304:
            etag = resp.headers.get('ETag')

            modified_feed = False
            # Update feed location.
            if resp.was_permanente_redirect:
                feed.feed_url = resp.final_url
                modified_feed = True
                publish = False
            elif etag and feed.etag != etag:
                feed.etag = etag
                modified_feed = True

            if 'language' in parsed_feed.feed:
                lang = get_language(parsed_feed.feed.language)
                if lang != feed.language:
                    feed.language = lang
                    modified_feed = True

            if modified_feed:
                yield feed.put_async()

            new_guids, old_guids = yield cls.process_parsed_feed(
                parsed_feed, feed, overflow, overflow_reason)
            num_new_items = len(new_guids)
            if len(new_guids + old_guids) >= 5 and len(new_guids) == len(
                    new_guids + old_guids):
                drain_queue = True

        if publish:
            yield cls.publish_for_feed(feed, skip_queue)

        if drain_queue:
            yield cls.drain_queue(feed)

        raise ndb.Return((parsed_feed, num_new_items))

    @classmethod
    @ndb.tasklet
    def delete_for_feed(cls, feed):
        more = True
        cursor = None
        while more:
            entries, cursor, more = yield cls.latest_for_feed(
                feed).fetch_page_async(25, start_cursor=cursor)
            entries_keys = [x.key for x in entries]
            ndb.delete_multi_async(entries_keys)

    @classmethod
    def latest_for_feed(cls, feed):
        return cls.query(cls.creating == False, ancestor=feed.key)

    @classmethod
    def latest_for_feed_by_added(cls, feed):
        return cls.query(cls.creating == False,
                         ancestor=feed.key).order(-cls.added)

    @classmethod
    def latest_unpublished(cls, feed):
        return cls.query(cls.published == False, cls.creating == False,
                         ancestor=feed.key).order(-cls.added)

    @classmethod
    def latest(cls, feed, include_overflow=False, overflow_cats=None,
               order_by='added'):
        q = cls.query(cls.published == True, cls.creating == False,
                      ancestor=feed.key)

        logger.info('Order by: %s', order_by)
        if order_by == 'added':
            q = q.order(cls.added)
        if order_by == '-published_at':
            q = q.order(-cls.published_at)

        if overflow_cats is None:
            overflow_cats = [
                OVERFLOW_REASON.MALFORMED, OVERFLOW_REASON.FEED_OVERFLOW
            ]

        if include_overflow:
            overflow_and_in_cat = ndb.AND(
                cls.overflow_reason.IN(overflow_cats), cls.overflow == True)
            not_overflow_or_overflow_in_cat = ndb.OR(
                cls.overflow == False, overflow_and_in_cat)
            q = q.filter(not_overflow_or_overflow_in_cat)
        else:
            q = q.filter(cls.overflow == False)

        return q

    @classmethod
    def latest_published(cls, feed, since=None):
        q = cls.query(
            cls.published == True, cls.creating == False,
            ancestor=feed.key).order(-cls.published_at).order(-cls.added)
        if since:
            q = q.filter(cls.published_at >= since)
        return q

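# A small, self-contained sketch (not part of the original model) of the
# rate-limiting arithmetic in Entry.publish_for_feed() above: entries already
# published in the trailing window are subtracted from the per-period cap,
# and publishing continues only while the remainder is positive.
def remaining_publish_budget(max_per_period, published_in_window):
    """Returns how many more entries may be published this period."""
    return max(0, max_per_period - published_in_window)

# e.g. remaining_publish_budget(5, 3) == 2
# and remaining_publish_budget(2, 4) == 0.
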
class VersionedModel(BaseModel):
    """Model that handles storage of the version history of model instances.

    To use this class, you must declare a SNAPSHOT_METADATA_CLASS and a
    SNAPSHOT_CONTENT_CLASS. The former must contain the String fields
    'committer_id', 'commit_type' and 'commit_message', and a JSON field for
    the Python list of dicts, 'commit_cmds'. The latter must contain the JSON
    field 'content'. The item that is being versioned must be serializable to
    a JSON blob.

    Note that commit() should be used for VersionedModels, as opposed to
    put() for direct subclasses of BaseModel.
    """

    # The class designated as the snapshot model. This should be a subclass
    # of BaseSnapshotMetadataModel.
    SNAPSHOT_METADATA_CLASS = None
    # The class designated as the snapshot content model. This should be a
    # subclass of BaseSnapshotContentModel.
    SNAPSHOT_CONTENT_CLASS = None
    # Whether reverting is allowed. Default is False.
    ALLOW_REVERT = False

    # IMPORTANT: Subclasses should only overwrite things above this line.

    # The possible commit types.
    _COMMIT_TYPE_CREATE = 'create'
    _COMMIT_TYPE_REVERT = 'revert'
    _COMMIT_TYPE_EDIT = 'edit'
    _COMMIT_TYPE_DELETE = 'delete'
    # A list containing the possible commit types.
    COMMIT_TYPE_CHOICES = [
        _COMMIT_TYPE_CREATE, _COMMIT_TYPE_REVERT, _COMMIT_TYPE_EDIT,
        _COMMIT_TYPE_DELETE
    ]
    # The reserved prefix for keys that are automatically inserted into a
    # commit_cmd dict by this model.
    _AUTOGENERATED_PREFIX = 'AUTO'
    # The command string for a revert commit.
    CMD_REVERT_COMMIT = '%s_revert_version_number' % _AUTOGENERATED_PREFIX
    # The command string for a delete commit.
    CMD_DELETE_COMMIT = '%s_mark_deleted' % _AUTOGENERATED_PREFIX
    # The current version number of this instance. In each PUT operation,
    # this number is incremented and a snapshot of the modified instance is
    # stored in the snapshot metadata and content models. The snapshot
    # version number starts at 1 when the model instance is first created.
    # All data in this instance represents the version at HEAD; data about
    # the previous versions is stored in the snapshot models.
    version = ndb.IntegerProperty(default=0)

    def _require_not_marked_deleted(self):
        """Checks whether the model instance is deleted."""
        if self.deleted:
            raise Exception('This model instance has been deleted.')

    def compute_snapshot(self):
        """Generates a snapshot (dict) from the model property values."""
        return self.to_dict(exclude=['created_on', 'last_updated'])

    def _reconstitute(self, snapshot_dict):
        """Populates the model instance with the snapshot.

        Args:
            snapshot_dict: dict(str, *). The snapshot with the model property
                values.

        Returns:
            VersionedModel. The instance of the VersionedModel class
            populated with the snapshot.
        """
        self.populate(**snapshot_dict)
        return self

    def _reconstitute_from_snapshot_id(self, snapshot_id):
        """Gets a reconstituted instance of this model class, based on the
        given snapshot id.

        Args:
            snapshot_id: str.

        Returns:
            VersionedModel. Reconstituted instance.
        """
        snapshot_model = self.SNAPSHOT_CONTENT_CLASS.get(snapshot_id)
        snapshot_dict = snapshot_model.content
        reconstituted_model = self._reconstitute(snapshot_dict)
        # TODO(sll): The 'created_on' and 'last_updated' values here will be
        # slightly different from the values the entity model would have had,
        # since they correspond to the corresponding fields for the snapshot
        # content model instead. Figure out whether this is a problem or not,
        # and whether we need to record the contents of those fields in the
        # actual entity model (in which case we also need a way to deal with
        # old snapshots that don't have this information).
        reconstituted_model.created_on = snapshot_model.created_on
        reconstituted_model.last_updated = snapshot_model.last_updated
        return reconstituted_model

    @classmethod
    def get_snapshot_id(cls, instance_id, version_number):
        """Gets a unique snapshot id for this instance and version.

        Args:
            instance_id: str.
            version_number: int.

        Returns:
            str. The unique snapshot id corresponding to the given instance
            and version.
        """
        return '%s%s%s' % (instance_id, _VERSION_DELIMITER, version_number)

    def _trusted_commit(self, committer_id, commit_type, commit_message,
                        commit_cmds):
        """Evaluates and executes commit. Main function for all commit types.

        Args:
            committer_id: str. The user_id of the user who committed the
                change.
            commit_type: str. Unique identifier of commit type. Possible
                values are in COMMIT_TYPE_CHOICES.
            commit_message: str. The commit description message.
            commit_cmds: list(dict). A list of commands, describing changes
                made in this model, should give sufficient information to
                reconstruct the commit. Dict always contains:
                    cmd: str. Unique command.
                And then additional arguments for that command. For example:

                {'cmd': 'AUTO_revert_version_number',
                 'version_number': 4}

        Raises:
            Exception: No snapshot metadata class has been defined.
            Exception: No snapshot content class has been defined.
            Exception: commit_cmds is not a list of dicts.
        """
        if self.SNAPSHOT_METADATA_CLASS is None:
            raise Exception('No snapshot metadata class defined.')
        if self.SNAPSHOT_CONTENT_CLASS is None:
            raise Exception('No snapshot content class defined.')
        if not isinstance(commit_cmds, list):
            raise Exception(
                'Expected commit_cmds to be a list of dicts, received %s'
                % commit_cmds)

        self.version += 1

        snapshot = self.compute_snapshot()
        snapshot_id = self.get_snapshot_id(self.id, self.version)

        snapshot_metadata_instance = self.SNAPSHOT_METADATA_CLASS.create(
            snapshot_id, committer_id, commit_type, commit_message,
            commit_cmds)
        snapshot_content_instance = self.SNAPSHOT_CONTENT_CLASS.create(
            snapshot_id, snapshot)

        transaction_services.run_in_transaction(
            BaseModel.put_multi,
            [snapshot_metadata_instance, snapshot_content_instance, self])

    def delete(self, committer_id, commit_message, force_deletion=False):
        """Deletes this model instance.

        Args:
            committer_id: str. The user_id of the user who committed the
                change.
            commit_message: str. The commit description message.
            force_deletion: bool. If True this model is deleted completely
                from storage, otherwise it is only marked as deleted.
                Default is False.

        Raises:
            Exception: This model instance has already been deleted.
        """
        if force_deletion:
            current_version = self.version

            version_numbers = [
                python_utils.UNICODE(num + 1)
                for num in python_utils.RANGE(current_version)
            ]
            snapshot_ids = [
                self.get_snapshot_id(self.id, version_number)
                for version_number in version_numbers
            ]

            metadata_keys = [
                ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id)
                for snapshot_id in snapshot_ids
            ]
            ndb.delete_multi(metadata_keys)

            content_keys = [
                ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id)
                for snapshot_id in snapshot_ids
            ]
            ndb.delete_multi(content_keys)

            super(VersionedModel, self).delete()
        else:
            self._require_not_marked_deleted()  # pylint: disable=protected-access
            self.deleted = True

            commit_cmds = [{'cmd': self.CMD_DELETE_COMMIT}]

            self._trusted_commit(committer_id, self._COMMIT_TYPE_DELETE,
                                 commit_message, commit_cmds)

    @classmethod
    def delete_multi(cls, entity_ids, committer_id, commit_message,
                     force_deletion=False):
        """Deletes the given cls instances with the given entity_ids.

        Args:
            entity_ids: list(str). Ids of entities to delete.
            committer_id: str. The user_id of the user who committed the
                change.
            commit_message: str. The commit description message.
            force_deletion: bool. If True these models are deleted completely
                from storage, otherwise they are only marked as deleted.
                Default is False.

        Raises:
            Exception: This model instance has already been deleted.
        """
        versioned_models = cls.get_multi(entity_ids)
        if force_deletion:
            all_models_metadata_keys = []
            all_models_content_keys = []
            for model in versioned_models:
                model_version_numbers = [
                    python_utils.UNICODE(num + 1)
                    for num in python_utils.RANGE(model.version)
                ]
                model_snapshot_ids = [
                    model.get_snapshot_id(model.id, version_number)
                    for version_number in model_version_numbers
                ]

                all_models_metadata_keys.extend([
                    ndb.Key(model.SNAPSHOT_METADATA_CLASS, snapshot_id)
                    for snapshot_id in model_snapshot_ids
                ])
                all_models_content_keys.extend([
                    ndb.Key(model.SNAPSHOT_CONTENT_CLASS, snapshot_id)
                    for snapshot_id in model_snapshot_ids
                ])
            versioned_models_keys = [model.key for model in versioned_models]
            transaction_services.run_in_transaction(
                ndb.delete_multi,
                all_models_metadata_keys + all_models_content_keys +
                versioned_models_keys)
        else:
            for model in versioned_models:
                model._require_not_marked_deleted()  # pylint: disable=protected-access
                model.deleted = True

            commit_cmds = [{'cmd': cls.CMD_DELETE_COMMIT}]
            snapshot_metadata_models = []
            snapshot_content_models = []
            for model in versioned_models:
                model.version += 1
                snapshot = model.compute_snapshot()
                snapshot_id = model.get_snapshot_id(model.id, model.version)

                snapshot_metadata_models.append(
                    model.SNAPSHOT_METADATA_CLASS.create(
                        snapshot_id, committer_id, cls._COMMIT_TYPE_DELETE,
                        commit_message, commit_cmds))
                snapshot_content_models.append(
                    model.SNAPSHOT_CONTENT_CLASS.create(
                        snapshot_id, snapshot))

            transaction_services.run_in_transaction(
                BaseModel.put_multi,
                snapshot_metadata_models + snapshot_content_models +
                versioned_models)

    def put(self, *args, **kwargs):
        """For VersionedModels, this method is replaced with commit()."""
        raise NotImplementedError

    def commit(self, committer_id, commit_message, commit_cmds):
        """Saves a version snapshot and updates the model.

        Args:
            committer_id: str. The user_id of the user who committed the
                change.
            commit_message: str. The commit description message.
            commit_cmds: list(dict). A list of commands, describing changes
                made in this model, should give sufficient information to
                reconstruct the commit. Dict always contains:
                    cmd: str. Unique command.
                And then additional arguments for that command. For example:

                {'cmd': 'AUTO_revert_version_number',
                 'version_number': 4}

        Raises:
            Exception: This model instance has already been deleted.
            Exception: commit_cmd is in invalid format.
        """
        self._require_not_marked_deleted()

        for item in commit_cmds:
            if not isinstance(item, dict):
                raise Exception(
                    'Expected commit_cmds to be a list of dicts, received %s'
                    % commit_cmds)

        for commit_cmd in commit_cmds:
            if 'cmd' not in commit_cmd:
                raise Exception(
                    'Invalid commit_cmd: %s. Expected a \'cmd\' key.'
                    % commit_cmd)
            if commit_cmd['cmd'].startswith(self._AUTOGENERATED_PREFIX):
                raise Exception(
                    'Invalid change list command: %s' % commit_cmd['cmd'])

        commit_type = (
            self._COMMIT_TYPE_CREATE if self.version == 0
            else self._COMMIT_TYPE_EDIT)

        self._trusted_commit(committer_id, commit_type, commit_message,
                             commit_cmds)

    @classmethod
    def revert(cls, model, committer_id, commit_message, version_number):
        """Reverts model to previous version.

        Args:
            model: VersionedModel.
            committer_id: str. The user_id of the user who committed the
                change.
            commit_message: str. The commit description message.
            version_number: int. Version to revert to.

        Raises:
            Exception: This model instance has been deleted.
            Exception: Reverting is not allowed on this model.
        """
        model._require_not_marked_deleted()  # pylint: disable=protected-access

        if not model.ALLOW_REVERT:
            raise Exception(
                'Reverting objects of type %s is not allowed.'
                % model.__class__.__name__)

        commit_cmds = [{
            'cmd': model.CMD_REVERT_COMMIT,
            'version_number': version_number
        }]

        # Do not overwrite the version number.
        current_version = model.version

        # If a new property is introduced after a certain version of a model,
        # the property should be its default value when an old snapshot of
        # the model is applied during reversion. E.g. states_schema_version
        # in ExplorationModel may be added after some version of a saved
        # exploration. If that exploration is reverted to a version that does
        # not have a states_schema_version property, it should revert to the
        # default states_schema_version value rather than taking the
        # states_schema_version value from the latest exploration version.

        # pylint: disable=protected-access
        snapshot_id = model.get_snapshot_id(model.id, version_number)
        new_model = cls(id=model.id)
        new_model._reconstitute_from_snapshot_id(snapshot_id)
        new_model.version = current_version

        new_model._trusted_commit(committer_id, cls._COMMIT_TYPE_REVERT,
                                  commit_message, commit_cmds)
        # pylint: enable=protected-access

    @classmethod
    def get_version(cls, entity_id, version_number, strict=True):
        """Gets model instance representing the given version.

        The snapshot content is used to populate this model instance. The
        snapshot metadata is not used.

        Args:
            entity_id: str.
            version_number: int.
            strict: bool. Whether to fail noisily if no entity with the given
                id exists in the datastore. Default is True.

        Returns:
            VersionedModel. Model instance representing given version.

        Raises:
            Exception: This model instance has been deleted.
        """
        # pylint: disable=protected-access
        current_version_model = cls.get(entity_id, strict=strict)

        if current_version_model is None:
            return None

        current_version_model._require_not_marked_deleted()

        snapshot_id = cls.get_snapshot_id(entity_id, version_number)

        try:
            return cls(
                id=entity_id,
                version=version_number)._reconstitute_from_snapshot_id(
                    snapshot_id)
        except cls.EntityNotFoundError as e:
            if not strict:
                return None
            raise e
        # pylint: enable=protected-access

    @classmethod
    def get_multi_versions(cls, entity_id, version_numbers):
        """Gets model instances for each version specified in
        version_numbers.

        Args:
            entity_id: str. ID of the entity.
            version_numbers: list(int). List of version numbers.

        Returns:
            list(VersionedModel). Model instances representing the given
            versions.

        Raises:
            ValueError. The given entity_id is invalid.
            ValueError. Requested version number cannot be higher than the
                current version number.
            ValueError. At least one version number is invalid.
        """
        instances = []

        entity = cls.get(entity_id, strict=False)
        if not entity:
            raise ValueError('The given entity_id %s is invalid.' % entity_id)
        current_version = entity.version
        max_version = max(version_numbers)
        if max_version > current_version:
            raise ValueError(
                'Requested version number %s cannot be higher than the '
                'current version number %s.'
                % (max_version, current_version))

        snapshot_ids = []
        # pylint: disable=protected-access
        for version in version_numbers:
            snapshot_id = cls.get_snapshot_id(entity_id, version)
            snapshot_ids.append(snapshot_id)

        snapshot_models = cls.SNAPSHOT_CONTENT_CLASS.get_multi(snapshot_ids)
        for snapshot_model in snapshot_models:
            if snapshot_model is None:
                raise ValueError('At least one version number is invalid.')
            snapshot_dict = snapshot_model.content
            reconstituted_model = cls(id=entity_id)._reconstitute(
                snapshot_dict)
            reconstituted_model.created_on = snapshot_model.created_on
            reconstituted_model.last_updated = snapshot_model.last_updated

            instances.append(reconstituted_model)
        # pylint: enable=protected-access
        return instances

    @classmethod
    def get(cls, entity_id, strict=True, version=None):
        """Gets model instance.

        Args:
            entity_id: str.
            strict: bool. Whether to fail noisily if no entity with the given
                id exists in the datastore. Default is True.
            version: int. Version we want to get. Default is None.

        Returns:
            VersionedModel. If version is None, get the newest version of the
            model. Otherwise, get the specified version.
        """
        if version is None:
            return super(VersionedModel, cls).get(entity_id, strict=strict)
        else:
            return cls.get_version(entity_id, version, strict=strict)

    @classmethod
    def get_snapshots_metadata(cls, model_instance_id, version_numbers,
                               allow_deleted=False):
        """Gets a list of dicts, each representing a model snapshot.

        One dict is returned for each version number in the list of version
        numbers requested. If any of the version numbers does not exist, an
        error is raised.

        Args:
            model_instance_id: str. Id of requested model.
            version_numbers: list(int). List of version numbers.
            allow_deleted: bool. If False, an error is raised if the current
                model has been deleted. Default is False.

        Returns:
            list(dict). Each dict contains metadata for a particular
            snapshot. It has the following keys:
                committer_id: str. The user_id of the user who committed the
                    change.
                commit_message: str. The commit description message.
                commit_cmds: list(dict). A list of commands, describing
                    changes made in this model, should give sufficient
                    information to reconstruct the commit. Dict always
                    contains:
                        cmd: str. Unique command.
                    And then additional arguments for that command. For
                    example:

                    {'cmd': 'AUTO_revert_version_number',
                     'version_number': 4}

                commit_type: str. Unique identifier of commit type. Possible
                    values are in COMMIT_TYPE_CHOICES.
                version_number: int.
                created_on_ms: float. Snapshot creation time in milliseconds
                    since the Epoch.

        Raises:
            Exception: There is no model instance corresponding to at least
                one of the given version numbers.
        """
        # pylint: disable=protected-access
        if not allow_deleted:
            cls.get(model_instance_id)._require_not_marked_deleted()

        snapshot_ids = [
            cls.get_snapshot_id(model_instance_id, version_number)
            for version_number in version_numbers
        ]
        # pylint: enable=protected-access
        metadata_keys = [
            ndb.Key(cls.SNAPSHOT_METADATA_CLASS, snapshot_id)
            for snapshot_id in snapshot_ids
        ]
        returned_models = ndb.get_multi(metadata_keys)

        for ind, model in enumerate(returned_models):
            if model is None:
                raise Exception(
                    'Invalid version number %s for model %s with id %s'
                    % (version_numbers[ind], cls.__name__, model_instance_id))

        return [{
            'committer_id': model.committer_id,
            'commit_message': model.commit_message,
            'commit_cmds': model.commit_cmds,
            'commit_type': model.commit_type,
            'version_number': version_numbers[ind],
            'created_on_ms': utils.get_time_in_millisecs(model.created_on),
        } for (ind, model) in enumerate(returned_models)]

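# A hedged usage sketch of the VersionedModel contract documented above. The
# concrete model, its snapshot classes, and the 'create_new'/'edit_city' cmds
# are all hypothetical; only commit(), get_version() and revert() come from
# this class.
#
#     class AddressSnapshotMetadataModel(BaseSnapshotMetadataModel):
#         pass
#
#     class AddressSnapshotContentModel(BaseSnapshotContentModel):
#         pass
#
#     class AddressModel(VersionedModel):
#         SNAPSHOT_METADATA_CLASS = AddressSnapshotMetadataModel
#         SNAPSHOT_CONTENT_CLASS = AddressSnapshotContentModel
#         ALLOW_REVERT = True
#         city = ndb.StringProperty()
#
#     model = AddressModel(id='addr1', city='Zurich')
#     model.commit('user_1', 'Create address', [{'cmd': 'create_new'}])  # v1
#     model.city = 'Geneva'
#     model.commit('user_1', 'Move', [{'cmd': 'edit_city'}])  # v2
#     old = AddressModel.get_version('addr1', 1)  # reconstituted snapshot
#     AddressModel.revert(model, 'user_1', 'Back to v1', 1)  # v3
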
class QuestionModel(base_models.VersionedModel):
    """Model for storing Questions.

    The ID of instances of this class has the form
    {{random_hash_of_12_chars}}.
    """

    SNAPSHOT_METADATA_CLASS = QuestionSnapshotMetadataModel
    SNAPSHOT_CONTENT_CLASS = QuestionSnapshotContentModel
    ALLOW_REVERT = True

    # An object representing the question state data.
    question_state_data = ndb.JsonProperty(indexed=False, required=True)
    # The schema version for the question state data.
    question_state_data_schema_version = ndb.IntegerProperty(
        required=True, indexed=True)
    # The ISO 639-1 code for the language this question is written in.
    language_code = ndb.StringProperty(required=True, indexed=True)
    # The skill ids linked to this question.
    linked_skill_ids = ndb.StringProperty(indexed=True, repeated=True)

    @staticmethod
    def get_deletion_policy():
        """Question should be kept but the creator should be anonymized."""
        return base_models.DELETION_POLICY.LOCALLY_PSEUDONYMIZE

    @classmethod
    def has_reference_to_user_id(cls, user_id):
        """Check whether QuestionModel snapshots reference the given user.

        Args:
            user_id: str. The ID of the user whose data should be checked.

        Returns:
            bool. Whether any models refer to the given user ID.
        """
        return cls.SNAPSHOT_METADATA_CLASS.exists_for_user_id(user_id)

    @staticmethod
    def get_user_id_migration_policy():
        """QuestionModel doesn't have any field with user ID."""
        return base_models.USER_ID_MIGRATION_POLICY.NOT_APPLICABLE

    @classmethod
    def _get_new_id(cls):
        """Generates a unique ID for the question of the form
        {{random_hash_of_12_chars}}.

        Returns:
            new_id: str. ID of the new QuestionModel instance.

        Raises:
            Exception: The ID generator for QuestionModel is producing too
                many collisions.
        """
        for _ in python_utils.RANGE(base_models.MAX_RETRIES):
            new_id = utils.convert_to_hash(
                python_utils.UNICODE(
                    utils.get_random_int(base_models.RAND_RANGE)),
                base_models.ID_LENGTH)
            if not cls.get_by_id(new_id):
                return new_id

        raise Exception(
            'The id generator for QuestionModel is producing too many '
            'collisions.')

    def _trusted_commit(self, committer_id, commit_type, commit_message,
                        commit_cmds):
        """Record the event to the commit log after the model commit.

        Note that this extends the superclass method.

        Args:
            committer_id: str. The user_id of the user who committed the
                change.
            commit_type: str. The type of commit. Possible values are in
                core.storage.base_models.COMMIT_TYPE_CHOICES.
            commit_message: str. The commit description message.
            commit_cmds: list(dict). A list of commands, describing changes
                made in this model, which should give sufficient information
                to reconstruct the commit. Each dict always contains:
                    cmd: str. Unique command.
                and then additional arguments for that command.
        """
        super(QuestionModel, self)._trusted_commit(
            committer_id, commit_type, commit_message, commit_cmds)

        committer_user_settings_model = (
            user_models.UserSettingsModel.get_by_id(committer_id))
        committer_username = (
            committer_user_settings_model.username
            if committer_user_settings_model else '')

        question_commit_log = QuestionCommitLogEntryModel.create(
            self.id, self.version, committer_id, committer_username,
            commit_type, commit_message, commit_cmds,
            constants.ACTIVITY_STATUS_PUBLIC, False)
        question_commit_log.question_id = self.id
        question_commit_log.put()

    @classmethod
    def create(cls, question_state_data, language_code, version,
               linked_skill_ids):
        """Creates a new QuestionModel entry.

        Args:
            question_state_data: dict. A dict representing the question
                state data.
            language_code: str. The ISO 639-1 code for the language this
                question is written in.
            version: int. The version of the question.
            linked_skill_ids: list(str). The skill ids linked to the
                question.

        Returns:
            QuestionModel. Instance of the new QuestionModel entry.

        Raises:
            Exception: A model with the same ID already exists.
        """
        instance_id = cls._get_new_id()
        question_model_instance = cls(
            id=instance_id,
            question_state_data=question_state_data,
            language_code=language_code,
            version=version,
            linked_skill_ids=linked_skill_ids)

        return question_model_instance

    @classmethod
    def put_multi_questions(cls, questions):
        """Puts multiple question models into the datastore.

        Args:
            questions: list(Question). The list of question objects to put
                into the datastore.
        """
        cls.put_multi(questions)

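# Hedged usage sketch: QuestionModel.create() only builds the instance, and
# the VersionedModel docstring requires commit() rather than put() to save
# it. The state dict and the 'create_new' cmd below are placeholders, not
# values mandated by this module.
#
#     question = QuestionModel.create(
#         question_state_data={'some': 'state'},
#         language_code='en',
#         version=0,
#         linked_skill_ids=['skill_1'])
#     question.commit(
#         committer_id, 'Created a new question', [{'cmd': 'create_new'}])
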
class ExpSummaryModel(base_models.BaseModel):
    """Summary model for an Oppia exploration.

    This should be used whenever the content blob of the exploration is not
    needed (e.g. in search results, etc).

    An ExpSummaryModel instance stores the following information:
        id, title, category, objective, language_code, tags, last_updated,
        created_on, status (private, public), community_owned, owner_ids,
        editor_ids, viewer_ids, version.

    The key of each instance is the exploration id.
    """

    # What this exploration is called.
    title = ndb.StringProperty(required=True)
    # The category this exploration belongs to.
    category = ndb.StringProperty(required=True, indexed=True)
    # The objective of this exploration.
    objective = ndb.TextProperty(required=True, indexed=False)
    # The ISO 639-1 code for the language this exploration is written in.
    language_code = ndb.StringProperty(required=True, indexed=True)
    # Tags associated with this exploration.
    tags = ndb.StringProperty(repeated=True, indexed=True)
    # Aggregate user-assigned ratings of the exploration.
    ratings = ndb.JsonProperty(default=None, indexed=False)
    # Scaled average rating for the exploration.
    scaled_average_rating = ndb.FloatProperty(indexed=True)
    # Time when the exploration model was last updated (not to be
    # confused with last_updated, which is the time when the
    # exploration *summary* model was last updated).
    exploration_model_last_updated = ndb.DateTimeProperty(indexed=True)
    # Time when the exploration model was created (not to be confused
    # with created_on, which is the time when the exploration *summary*
    # model was created).
    exploration_model_created_on = ndb.DateTimeProperty(indexed=True)
    # Time when the exploration was first published.
    first_published_msec = ndb.FloatProperty(indexed=True)
    # The publication status of this exploration.
    status = ndb.StringProperty(
        default=constants.ACTIVITY_STATUS_PRIVATE, indexed=True,
        choices=[
            constants.ACTIVITY_STATUS_PRIVATE,
            constants.ACTIVITY_STATUS_PUBLIC
        ]
    )
    # Whether this exploration is owned by the community.
    community_owned = ndb.BooleanProperty(required=True, indexed=True)
    # The user_ids of owners of this exploration.
    owner_ids = ndb.StringProperty(indexed=True, repeated=True)
    # The user_ids of users who are allowed to edit this exploration.
    editor_ids = ndb.StringProperty(indexed=True, repeated=True)
    # The user_ids of users who are allowed to voiceover this exploration.
    voice_artist_ids = ndb.StringProperty(indexed=True, repeated=True)
    # The user_ids of users who are allowed to view this exploration.
    viewer_ids = ndb.StringProperty(indexed=True, repeated=True)
    # The user_ids of users who have contributed (humans who have made a
    # positive (not just a revert) change to the exploration's content).
    # NOTE TO DEVELOPERS: contributor_ids and contributors_summary need to be
    # synchronized, meaning that the keys in contributors_summary need to be
    # equal to the contributor_ids list.
    contributor_ids = ndb.StringProperty(indexed=True, repeated=True)
    # A dict representing the contributors of non-trivial commits to this
    # exploration. Each key of this dict is a user_id, and the corresponding
    # value is the number of non-trivial commits that the user has made.
    contributors_summary = ndb.JsonProperty(default={}, indexed=False)
    # The version number of the exploration after this commit. Only populated
    # for commits to an exploration (as opposed to its rights, etc.).
    version = ndb.IntegerProperty()
    # DEPRECATED in v2.8.3. Do not use.
    translator_ids = ndb.StringProperty(indexed=True, repeated=True)

    @staticmethod
    def get_deletion_policy():
        """Exploration summary is deleted only if the corresponding
        exploration is not public.
        """
        return base_models.DELETION_POLICY.KEEP_IF_PUBLIC

    @classmethod
    def has_reference_to_user_id(cls, user_id):
        """Check whether ExpSummaryModel references user.

        Args:
            user_id: str. The ID of the user whose data should be checked.

        Returns:
            bool. Whether any models refer to the given user ID.
        """
        return cls.query(ndb.OR(
            cls.owner_ids == user_id,
            cls.editor_ids == user_id,
            cls.voice_artist_ids == user_id,
            cls.viewer_ids == user_id,
            cls.contributor_ids == user_id
        )).get(keys_only=True) is not None

    @staticmethod
    def get_user_id_migration_policy():
        """ExpSummaryModel has multiple fields with user ID."""
        return base_models.USER_ID_MIGRATION_POLICY.CUSTOM

    @classmethod
    def migrate_model(cls, old_user_id, new_user_id):
        """Migrate model to use the new user ID in the owner_ids, editor_ids,
        voice_artist_ids, viewer_ids and contributor_ids.

        Args:
            old_user_id: str. The old user ID.
            new_user_id: str. The new user ID.
        """
        migrated_models = []
        for model in cls.query(ndb.OR(
                cls.owner_ids == old_user_id,
                cls.editor_ids == old_user_id,
                cls.voice_artist_ids == old_user_id,
                cls.viewer_ids == old_user_id,
                cls.contributor_ids == old_user_id)).fetch():
            model.owner_ids = [
                new_user_id if owner_id == old_user_id else owner_id
                for owner_id in model.owner_ids]
            model.editor_ids = [
                new_user_id if editor_id == old_user_id else editor_id
                for editor_id in model.editor_ids]
            model.voice_artist_ids = [
                new_user_id if voice_art_id == old_user_id else voice_art_id
                for voice_art_id in model.voice_artist_ids]
            model.viewer_ids = [
                new_user_id if viewer_id == old_user_id else viewer_id
                for viewer_id in model.viewer_ids]
            model.contributor_ids = [
                new_user_id if contributor_id == old_user_id
                else contributor_id
                for contributor_id in model.contributor_ids]
            if old_user_id in model.contributors_summary:
                model.contributors_summary[new_user_id] = (
                    model.contributors_summary[old_user_id])
                del model.contributors_summary[old_user_id]
            migrated_models.append(model)
        cls.put_multi(migrated_models, update_last_updated_time=False)

    @classmethod
    def get_non_private(cls):
        """Returns an iterable with non-private ExpSummary models.

        Returns:
            iterable. An iterable with non-private ExpSummary models.
        """
        return ExpSummaryModel.query().filter(
            ExpSummaryModel.status != constants.ACTIVITY_STATUS_PRIVATE
        ).filter(
            ExpSummaryModel.deleted == False  # pylint: disable=singleton-comparison
        ).fetch(feconf.DEFAULT_QUERY_LIMIT)

    @classmethod
    def get_top_rated(cls, limit):
        """Fetches the top-rated exp summaries that are public, in descending
        order of scaled_average_rating.

        Args:
            limit: int. The maximum number of results to return.

        Returns:
            iterable. An iterable with the top-rated exp summaries that are
            public, in descending order of scaled_average_rating.
        """
        return ExpSummaryModel.query().filter(
            ExpSummaryModel.status == constants.ACTIVITY_STATUS_PUBLIC
        ).filter(
            ExpSummaryModel.deleted == False  # pylint: disable=singleton-comparison
        ).order(
            -ExpSummaryModel.scaled_average_rating
        ).fetch(limit)

    @classmethod
    def get_private_at_least_viewable(cls, user_id):
        """Fetches private exp summaries that are at least viewable by the
        given user.

        Args:
            user_id: The id of the given user.

        Returns:
            iterable. An iterable with private exp summaries that are at
            least viewable by the given user.
        """
        return ExpSummaryModel.query().filter(
            ExpSummaryModel.status == constants.ACTIVITY_STATUS_PRIVATE
        ).filter(
            ndb.OR(ExpSummaryModel.owner_ids == user_id,
                   ExpSummaryModel.editor_ids == user_id,
                   ExpSummaryModel.voice_artist_ids == user_id,
                   ExpSummaryModel.viewer_ids == user_id)
        ).filter(
            ExpSummaryModel.deleted == False  # pylint: disable=singleton-comparison
        ).fetch(feconf.DEFAULT_QUERY_LIMIT)

    @classmethod
    def get_at_least_editable(cls, user_id):
        """Fetches exp summaries that are at least editable by the given
        user.

        Args:
            user_id: The id of the given user.

        Returns:
            iterable. An iterable with exp summaries that are at least
            editable by the given user.
        """
        return ExpSummaryModel.query().filter(
            ndb.OR(ExpSummaryModel.owner_ids == user_id,
                   ExpSummaryModel.editor_ids == user_id)
        ).filter(
            ExpSummaryModel.deleted == False  # pylint: disable=singleton-comparison
        ).fetch(feconf.DEFAULT_QUERY_LIMIT)

    @classmethod
    def get_recently_published(cls, limit):
        """Fetches exp summaries that are recently published.

        Args:
            limit: int. The maximum number of results to return.

        Returns:
            iterable. An iterable with exp summaries that are recently
            published. The returned list is sorted by the time of
            publication, with the latest being first in the list.
        """
        return ExpSummaryModel.query().filter(
            ExpSummaryModel.status == constants.ACTIVITY_STATUS_PUBLIC
        ).filter(
            ExpSummaryModel.deleted == False  # pylint: disable=singleton-comparison
        ).order(
            -ExpSummaryModel.first_published_msec
        ).fetch(limit)

    @staticmethod
    def get_export_policy():
        """Model data has already been exported as a part of the
        ExplorationModel and thus does not need a separate export_data
        function.
        """
        return base_models.EXPORT_POLICY.NOT_APPLICABLE

    def verify_model_user_ids_exist(self):
        """Check if UserSettingsModel exists for all the ids in owner_ids,
        editor_ids, voice_artist_ids, viewer_ids and contributor_ids.
        """
        user_ids = (self.owner_ids + self.editor_ids +
                    self.voice_artist_ids + self.viewer_ids +
                    self.contributor_ids)
        user_ids = [user_id for user_id in user_ids
                    if user_id not in feconf.SYSTEM_USERS]
        user_settings_models = user_models.UserSettingsModel.get_multi(
            user_ids, include_deleted=True)
        return all(model is not None for model in user_settings_models)

class BaseCommitLogEntryModel(BaseModel):
    """Base model for the models that store the log of commits to a
    construct.
    """

    # Update superclass model to make these properties indexed.
    created_on = ndb.DateTimeProperty(auto_now_add=True, indexed=True)
    last_updated = ndb.DateTimeProperty(auto_now=True, indexed=True)

    # The id of the user.
    user_id = ndb.StringProperty(indexed=True, required=True)
    # The username of the user, at the time of the edit.
    username = ndb.StringProperty(indexed=True, required=True)
    # The type of the commit: 'create', 'revert', 'edit', 'delete'.
    commit_type = ndb.StringProperty(indexed=True, required=True)
    # The commit message.
    commit_message = ndb.TextProperty(indexed=False)
    # The commit_cmds dict for this commit.
    commit_cmds = ndb.JsonProperty(indexed=False, required=True)
    # The status of the entity after the edit event ('private', 'public').
    post_commit_status = ndb.StringProperty(indexed=True, required=True)
    # Whether the entity is community-owned after the edit event.
    post_commit_community_owned = ndb.BooleanProperty(indexed=True)
    # Whether the entity is private after the edit event. Having a
    # separate field for this makes queries faster, since an equality query
    # on this property is faster than an inequality query on
    # post_commit_status.
    post_commit_is_private = ndb.BooleanProperty(indexed=True)
    # The version number of the model after this commit.
    version = ndb.IntegerProperty()

    @classmethod
    def has_reference_to_user_id(cls, user_id):
        """Check whether BaseCommitLogEntryModel references user.

        Args:
            user_id: str. The ID of the user whose data should be checked.

        Returns:
            bool. Whether any models refer to the given user ID.
        """
        return cls.query(cls.user_id == user_id).get(
            keys_only=True) is not None

    @staticmethod
    def get_user_id_migration_policy():
        """BaseCommitLogEntryModel has one field that contains user ID."""
        return USER_ID_MIGRATION_POLICY.ONE_FIELD

    @classmethod
    def get_user_id_migration_field(cls):
        """Return field that contains user ID."""
        return cls.user_id

    @classmethod
    def create(cls, entity_id, version, committer_id, committer_username,
               commit_type, commit_message, commit_cmds, status,
               community_owned):
        """This method returns an instance of the CommitLogEntryModel for a
        construct with the common fields filled.

        Args:
            entity_id: str. The ID of the construct corresponding to this
                commit log entry model (e.g. the exp_id for an exploration,
                the story_id for a story, etc.).
            version: int. The version number of the model after the commit.
            committer_id: str. The user_id of the user who committed the
                change.
            committer_username: str. The username of the user who committed
                the change.
            commit_type: str. The type of commit. Possible values are in
                core.storage.base_models.COMMIT_TYPE_CHOICES.
            commit_message: str. The commit description message.
            commit_cmds: list(dict). A list of commands, describing changes
                made in this model, which should give sufficient information
                to reconstruct the commit. Each dict always contains:
                    cmd: str. Unique command.
                and then additional arguments for that command.
            status: str. The status of the entity after the commit.
            community_owned: bool. Whether the entity is community_owned
                after the commit.

        Returns:
            CommitLogEntryModel. Returns the respective CommitLogEntryModel
            instance of the construct from which this is called.
        """
        return cls(
            id=cls._get_instance_id(entity_id, version),
            user_id=committer_id,
            username=committer_username,
            commit_type=commit_type,
            commit_message=commit_message,
            commit_cmds=commit_cmds,
            version=version,
            post_commit_status=status,
            post_commit_community_owned=community_owned,
            post_commit_is_private=(
                status == constants.ACTIVITY_STATUS_PRIVATE))

    @classmethod
    def _get_instance_id(cls, target_entity_id, version):
        """This method should be implemented in the inherited classes.

        Args:
            target_entity_id: str. The ID of the construct corresponding to
                this commit log entry model (e.g. the exp_id for an
                exploration, the story_id for a story, etc.).
            version: int. The version number of the model after the commit.

        Raises:
            NotImplementedError: The method is not overridden in derived
                classes.
        """
        raise NotImplementedError

    @classmethod
    def get_all_commits(cls, page_size, urlsafe_start_cursor):
        """Fetches a list of all the commits sorted by their last updated
        attribute.

        Args:
            page_size: int. The maximum number of entities to be returned.
            urlsafe_start_cursor: str or None. If provided, the list of
                returned entities starts from this datastore cursor.
                Otherwise, the returned entities start from the beginning of
                the full list of entities.

        Returns:
            3-tuple (results, cursor, more). As described in fetch_page() at:
            https://developers.google.com/appengine/docs/python/ndb/queryclass,
            where:
                results: List of query results.
                cursor: str or None. A query cursor pointing to the next
                    batch of results. If there are no more results, this
                    might be None.
                more: bool. If True, there are (probably) more results after
                    this batch. If False, there are no further results after
                    this batch.
        """
        return cls._fetch_page_sorted_by_last_updated(
            cls.query(), page_size, urlsafe_start_cursor)

    @classmethod
    def get_commit(cls, target_entity_id, version):
        """Returns the commit corresponding to an instance id and version
        number.

        Args:
            target_entity_id: str. The ID of the construct corresponding to
                this commit log entry model (e.g. the exp_id for an
                exploration, the story_id for a story, etc.).
            version: int. The version number of the instance after the
                commit.

        Returns:
            BaseCommitLogEntryModel. The commit with the target entity id and
            version number.
        """
        commit_id = cls._get_instance_id(target_entity_id, version)
        return cls.get_by_id(commit_id)

class ExplorationModel(base_models.VersionedModel):
    """Versioned storage model for an Oppia exploration.

    This class should only be imported by the exploration services file and
    the exploration model test file.
    """

    SNAPSHOT_METADATA_CLASS = ExplorationSnapshotMetadataModel
    SNAPSHOT_CONTENT_CLASS = ExplorationSnapshotContentModel
    ALLOW_REVERT = True

    # What this exploration is called.
    title = ndb.StringProperty(required=True)
    # The category this exploration belongs to.
    category = ndb.StringProperty(required=True, indexed=True)
    # The objective of this exploration.
    objective = ndb.TextProperty(default='', indexed=False)
    # The ISO 639-1 code for the language this exploration is written in.
    language_code = ndb.StringProperty(
        default=constants.DEFAULT_LANGUAGE_CODE, indexed=True)
    # Tags (topics, skills, concepts, etc.) associated with this
    # exploration.
    tags = ndb.StringProperty(repeated=True, indexed=True)
    # A blurb for this exploration.
    blurb = ndb.TextProperty(default='', indexed=False)
    # 'Author notes' for this exploration.
    author_notes = ndb.TextProperty(default='', indexed=False)

    # The version of the states blob schema.
    states_schema_version = ndb.IntegerProperty(
        required=True, default=0, indexed=True)
    # The name of the initial state of this exploration.
    init_state_name = ndb.StringProperty(required=True, indexed=False)
    # A dict representing the states of this exploration. This dict should
    # not be empty.
    states = ndb.JsonProperty(default={}, indexed=False)
    # The dict of parameter specifications associated with this exploration.
    # Each specification is a dict whose keys are param names and whose
    # values are each dicts with a single key, 'obj_type', whose value is a
    # string.
    param_specs = ndb.JsonProperty(default={}, indexed=False)
    # The list of parameter changes to be performed once at the start of a
    # reader's encounter with an exploration.
    param_changes = ndb.JsonProperty(repeated=True, indexed=False)
    # A boolean indicating whether automatic text-to-speech is enabled in
    # this exploration.
    auto_tts_enabled = ndb.BooleanProperty(default=True, indexed=True)
    # A boolean indicating whether correctness feedback is enabled in this
    # exploration.
    correctness_feedback_enabled = ndb.BooleanProperty(
        default=False, indexed=True)

    # DEPRECATED in v2.0.0.rc.2. Do not use. Retaining it here because
    # deletion caused GAE to raise an error on fetching a specific version of
    # the exploration model.
    # TODO(sll): Fix this error and remove this property.
    skill_tags = ndb.StringProperty(repeated=True, indexed=True)
    # DEPRECATED in v2.0.1. Do not use.
    # TODO(sll): Remove this property from the model.
    default_skin = ndb.StringProperty(default='conversation_v1')
    # DEPRECATED in v2.5.4. Do not use.
    skin_customizations = ndb.JsonProperty(indexed=False)

    @staticmethod
    def get_deletion_policy():
        """Exploration is deleted only if it is not public."""
        return base_models.DELETION_POLICY.KEEP_IF_PUBLIC

    @staticmethod
    def get_export_policy():
        """Model does not contain user data."""
        return base_models.EXPORT_POLICY.NOT_APPLICABLE

    @classmethod
    def has_reference_to_user_id(cls, user_id):
        """Check whether ExplorationModel or its snapshots reference the
        given user.

        Args:
            user_id: str. The ID of the user whose data should be checked.

        Returns:
            bool. Whether any models refer to the given user ID.
        """
        return cls.SNAPSHOT_METADATA_CLASS.exists_for_user_id(user_id)

    @staticmethod
    def get_user_id_migration_policy():
        """ExplorationModel doesn't have any field with user ID."""
        return base_models.USER_ID_MIGRATION_POLICY.NOT_APPLICABLE

    @classmethod
    def get_exploration_count(cls):
        """Returns the total number of explorations."""
        return cls.get_all().count()

    def _trusted_commit(
            self, committer_id, commit_type, commit_message, commit_cmds):
        """Record the event to the commit log after the model commit.

        Note that this extends the superclass method.

        Args:
            committer_id: str. The user_id of the user who committed the
                change.
            commit_type: str. The type of commit. Possible values are in
                core.storage.base_models.COMMIT_TYPE_CHOICES.
            commit_message: str. The commit description message.
            commit_cmds: list(dict). A list of commands, describing changes
                made in this model, which should give sufficient information
                to reconstruct the commit. Each dict always contains:
                    cmd: str. Unique command.
                and then additional arguments for that command.
        """
        super(ExplorationModel, self)._trusted_commit(
            committer_id, commit_type, commit_message, commit_cmds)

        committer_user_settings_model = (
            user_models.UserSettingsModel.get_by_id(committer_id))
        committer_username = (
            committer_user_settings_model.username
            if committer_user_settings_model else '')

        exp_rights = ExplorationRightsModel.get_by_id(self.id)

        # TODO(msl): Test if put_async() leads to any problems (make
        # sure summary dicts get updated correctly when explorations
        # are changed).
        exploration_commit_log = ExplorationCommitLogEntryModel.create(
            self.id, self.version, committer_id, committer_username,
            commit_type, commit_message, commit_cmds, exp_rights.status,
            exp_rights.community_owned
        )
        exploration_commit_log.exploration_id = self.id
        exploration_commit_log.put()

    @classmethod
    def delete_multi(
            cls, entity_ids, committer_id, commit_message,
            force_deletion=False):
        """Deletes the given cls instances with the given entity_ids.

        Note that this extends the superclass method.

        Args:
            entity_ids: list(str). Ids of entities to delete.
            committer_id: str. The user_id of the user who committed the
                change.
            commit_message: str. The commit description message.
            force_deletion: bool. If True these models are deleted completely
                from storage, otherwise they are only marked as deleted.
                Default is False.
        """
        super(ExplorationModel, cls).delete_multi(
            entity_ids, committer_id,
            commit_message, force_deletion=force_deletion)

        if not force_deletion:
            committer_user_settings_model = (
                user_models.UserSettingsModel.get_by_id(committer_id))
            committer_username = (
                committer_user_settings_model.username
                if committer_user_settings_model else '')

            commit_log_models = []
            exp_rights_models = ExplorationRightsModel.get_multi(
                entity_ids, include_deleted=True)

            versioned_models = cls.get_multi(
                entity_ids, include_deleted=True)
            versioned_and_exp_rights_models = python_utils.ZIP(
                versioned_models, exp_rights_models)
            for model, rights_model in versioned_and_exp_rights_models:
                exploration_commit_log = (
                    ExplorationCommitLogEntryModel.create(
                        model.id, model.version, committer_id,
                        committer_username, cls._COMMIT_TYPE_DELETE,
                        commit_message, [{'cmd': cls.CMD_DELETE_COMMIT}],
                        rights_model.status, rights_model.community_owned
                    ))
                exploration_commit_log.exploration_id = model.id
                commit_log_models.append(exploration_commit_log)
            ndb.put_multi_async(commit_log_models)

class PurchaseLogMonthly(ndb.Model):
    name = ndb.StringProperty(indexed=False)
    ofYearMonth = ndb.IntegerProperty(indexed=False)
    ofYear = ndb.IntegerProperty(indexed=False)
    ofMonth = ndb.IntegerProperty(indexed=False)

class GlobalCounters(ndb.Model):
    videoWatchCounter = ndb.IntegerProperty()
    loginCounter = ndb.IntegerProperty()
    purchasesCounter = ndb.IntegerProperty()
    revenueCounter = ndb.IntegerProperty()

class State(ndb.Model):
    id = ndb.IntegerProperty(required=True)
    name = ndb.StringProperty(required=True)

class StorySource(ndb.Model):
    title = ndb.StringProperty(required=True)
    storyCount = ndb.IntegerProperty(required=True, default=0)
    wordCount = ndb.IntegerProperty(required=True, default=0)
    listUrl = ndb.StringProperty()
    exclusions = ndb.StringProperty(repeated=True)

class Metadata(ndb.Model, CsvMixin):
    omitFields = ["ad0", "ad1", "ad2", "ad3", "ad4", "ad5"]

    metadataId = ndb.IntegerProperty()
    receivedDateTime = ndb.DateTimeProperty()
    sender = ndb.KeyProperty()
    rawData = ndb.KeyProperty()
    dataList = ndb.KeyProperty(repeated=True)
    executedCommandIds = ndb.IntegerProperty(repeated=True)
    executedResults = ndb.StringProperty(repeated=True)
    dataJson = ndb.JsonProperty(indexed=False)

    # fieldnames = ["metadataId", "receivedDateTime", "sender", "rawData",
    #               "dataList", "executedCommandIds", "executedResults"]

    # def getFields(self):
    #     fields = []
    #     fields.append(self.metadataId)
    #     fields.append(self.receivedDateTime)
    #     fields.append(self.sender)
    #     fields.append(self.rawData)
    #     fields.append(self.dataList)
    #     # fields.append(self.executedCommandIds)
    #     # fields.append(self.executedResults)
    #     return fields

    @classmethod
    def queryRecent(cls):
        query = ndb.Query(kind="Metadata")
        query = query.order(-cls.metadataId)
        return query

    @classmethod
    def queryRange(cls, start, end):
        assert isinstance(start, int)
        assert isinstance(end, int)
        query = ndb.Query(kind="Metadata")
        query = query.order(-cls.metadataId)
        # Accept the bounds in either order.
        if start <= end:
            query = query.filter(cls.metadataId >= start)
            query = query.filter(cls.metadataId <= end)
        else:
            query = query.filter(cls.metadataId <= start)
            query = query.filter(cls.metadataId >= end)
        return query

    @classmethod
    def fetchRange(cls, start, end, limit=100):
        return cls.queryRange(start, end).fetch(keys_only=True, limit=limit)

    @classmethod
    def queryDateRange(cls, start, end):
        assert isinstance(start, datetime)
        assert isinstance(end, datetime)
        query = ndb.Query(kind="Metadata")
        # Accept the bounds in either order.
        if start <= end:
            query = query.filter(cls.receivedDateTime >= start)
            query = query.filter(cls.receivedDateTime <= end)
            return query
        else:
            query = query.filter(cls.receivedDateTime <= start)
            query = query.filter(cls.receivedDateTime >= end)
            query = query.order(-cls.receivedDateTime)
            return query

    @classmethod
    def queryDateRangeAndData(cls, start, end, data):
        assert isinstance(start, datetime)
        assert isinstance(end, datetime)
        query = ndb.Query(kind="Metadata")
        if start <= end:
            query = query.filter(cls.receivedDateTime >= start)
            query = query.filter(cls.receivedDateTime <= end)
            query = query.filter(cls.dataList == data)
            return query
        else:
            query = query.filter(cls.receivedDateTime <= start)
            query = query.filter(cls.receivedDateTime >= end)
            query = query.order(-cls.receivedDateTime)
            query = query.filter(cls.dataList == data)
            return query

    @classmethod
    def queryAfter(cls, start):
        assert isinstance(start, datetime)
        query = ndb.Query(kind="Metadata")
        # Restrict to metadata received at or after `start`.
        query = query.filter(cls.receivedDateTime >= start)
        return query

    @classmethod
    def queryByData(cls, data_key):
        assert isinstance(data_key, ndb.Key)
        query = ndb.Query(kind="Metadata")
        query = query.filter(cls.dataList == data_key)
        return query

    @classmethod
    def fetchByData(cls, data_key):
        return cls.queryByData(data_key).fetch(keys_only=True, limit=100)

    @classmethod
    def putMetadata(cls, sender, raw_data, data_keys):
        assert isinstance(sender, Sender)
        assert isinstance(raw_data, RawData)
        assert isinstance(data_keys, list)
        metadata = Metadata()
        metadata.metadataId = Counter.GetNextId("metadataId")
        now = datetime.now()  # assumes `from datetime import datetime`,
                              # matching the isinstance checks above
        info(now.strftime('%Y/%m/%d %H:%M:%S%z'))
        metadata.receivedDateTime = now
        # sender and rawData are KeyProperty fields, so store the keys of
        # the entities, not the entities themselves.
        metadata.sender = sender.key
        metadata.rawData = raw_data.key
        metadata.dataList = data_keys
        return metadata.put()

    @classmethod
    def fetchDateRangeAndData(cls, start, end, data_key):
        assert isinstance(start, datetime)
        assert isinstance(end, datetime)
        assert isinstance(data_key, ndb.Key)
        query = ndb.Query(kind="Metadata")
        query = query.filter(cls.receivedDateTime >= start)
        query = query.filter(cls.receivedDateTime <= end)
        query = query.filter(cls.dataList == data_key)
        query = query.order(cls.receivedDateTime)
        return query.fetch(keys_only=True)

    @classmethod
    def fetchDateRangeAndDataList(cls, start, end, data_keys):
        assert isinstance(start, datetime)
        assert isinstance(end, datetime)
        assert isinstance(data_keys, list)
        metadata_keys_set = set()
        for data_key in data_keys:
            metadata_keys_set.update(
                cls.fetchDateRangeAndData(start, end, data_key))
        return metadata_keys_set

    @classmethod
    def fetchByDataList(cls, data_keys):
        assert isiterable(data_keys)
        metadata_keys = set()
        for data_key in data_keys:
            metadata_keys.update(cls.fetchByData(data_key))
        return list(metadata_keys)

    def adjustDataList(self):
        """Removes omitted Data entries from dataList and saves all Data
        values to dataJson."""
        # dataList holds ndb.Key values, so fetch the Data entities first.
        datas = ndb.get_multi(self.dataList)
        if self.dataJson is None:
            d = {}
            for data in datas:
                assert isinstance(data, Data)
                d[data.field] = data.string
            self.dataJson = d
        keys = []
        for data in datas:
            if data.field in self.omitFields:
                continue
            keys.append(data.key)
        self.dataList = keys
        self.put_async()
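# A minimal usage sketch (not part of the original module) for the Metadata
# range helpers above; the numeric bounds are illustrative placeholders.
def example_metadata_queries():
    # The ten most recently numbered Metadata keys.
    recent_keys = Metadata.queryRecent().fetch(10, keys_only=True)
    # queryRange()/fetchRange() accept the bounds in either order, so these
    # two calls return the same keys.
    keys_a = Metadata.fetchRange(100, 200)
    keys_b = Metadata.fetchRange(200, 100)
    return recent_keys, keys_a, keys_b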
class Rec(ndb.Model):
    storyKey = ndb.KeyProperty(required=True, kind='Story')
    title = ndb.StringProperty()
    order = ndb.IntegerProperty(required=True)
class StatisticsRealtimeModel(
        jobs.BaseRealtimeDatastoreClassForContinuousComputations):
    num_starts = ndb.IntegerProperty(default=0)
    num_completions = ndb.IntegerProperty(default=0)
class TopicModel(base_models.VersionedModel):
    """Model for storing Topics.

    This class should only be imported by the topic services file
    and the topic model test file.
    """
    SNAPSHOT_METADATA_CLASS = TopicSnapshotMetadataModel
    SNAPSHOT_CONTENT_CLASS = TopicSnapshotContentModel
    ALLOW_REVERT = False

    # The name of the topic.
    name = ndb.StringProperty(required=True, indexed=True)
    # The description of the topic.
    description = ndb.TextProperty(indexed=False)
    # This consists of the list of canonical story ids that are part of
    # this topic.
    canonical_story_ids = ndb.StringProperty(repeated=True, indexed=True)
    # This consists of the list of additional (non-canonical) story ids that
    # are part of this topic.
    additional_story_ids = ndb.StringProperty(repeated=True, indexed=True)
    # This consists of the list of uncategorized skill ids that are not part
    # of any subtopic.
    uncategorized_skill_ids = ndb.StringProperty(repeated=True, indexed=True)
    # The list of subtopics that are part of the topic.
    subtopics = ndb.JsonProperty(repeated=True, indexed=False)
    # The schema version of the subtopic dict.
    subtopic_schema_version = ndb.IntegerProperty(required=True, indexed=True)
    # The id for the next subtopic.
    next_subtopic_id = ndb.IntegerProperty(required=True)
    # The ISO 639-1 code for the language this topic is written in.
    language_code = ndb.StringProperty(required=True, indexed=True)

    def _trusted_commit(
            self, committer_id, commit_type, commit_message, commit_cmds):
        """Record the event to the commit log after the model commit.

        Note that this extends the superclass method.

        Args:
            committer_id: str. The user_id of the user who committed the
                change.
            commit_type: str. The type of commit. Possible values are in
                core.storage.base_models.COMMIT_TYPE_CHOICES.
            commit_message: str. The commit description message.
            commit_cmds: list(dict). A list of commands, describing changes
                made in this model, which should give sufficient information
                to reconstruct the commit. Each dict always contains:
                    cmd: str. Unique command.
                and then additional arguments for that command.
        """
        super(TopicModel, self)._trusted_commit(
            committer_id, commit_type, commit_message, commit_cmds)

        committer_user_settings_model = (
            user_models.UserSettingsModel.get_by_id(committer_id))
        committer_username = (
            committer_user_settings_model.username
            if committer_user_settings_model else '')

        topic_commit_log_entry = TopicCommitLogEntryModel.create(
            self.id, self.version, committer_id, committer_username,
            commit_type, commit_message, commit_cmds,
            feconf.ACTIVITY_STATUS_PUBLIC, False)
        topic_commit_log_entry.topic_id = self.id
        topic_commit_log_entry.put()
class StartTime(ndb.Model):
    """Used for the startTime property of the Conference class: storing the
    hour and minute as integers allows greater-than/less-than queries over
    conference start times."""
    hour = ndb.IntegerProperty()
    minute = ndb.IntegerProperty()
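# A sketch of the query pattern StartTime enables. The model below is a
# hypothetical stand-in for the real Conference class (defined elsewhere):
# keeping hour and minute as plain integers makes them usable in inequality
# filters, which the time-of-day part of a DateTimeProperty is not.
class _ExampleConference(ndb.Model):  # hypothetical, for illustration only
    name = ndb.StringProperty()
    startTime = ndb.StructuredProperty(StartTime)

def conferences_starting_at_or_after(hour):
    # E.g. hour=9 matches conferences starting at 09:00 or later.
    return _ExampleConference.query(
        _ExampleConference.startTime.hour >= hour).fetch()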
class CameraModel(ndb.Model):
    camera_id = ndb.StringProperty()
    count = ndb.IntegerProperty()
    timestamp = ndb.DateTimeProperty()
class User(ndb.Model):
    last_updated = ndb.IntegerProperty()
    fragments = ndb.JsonProperty(repeated=True)
class ClickCounter(ndb.Model):
    clicks = ndb.IntegerProperty()
class Counter(ndb.Model):
    podcast_downloads = ndb.IntegerProperty(default=0)
    podcast_plays = ndb.IntegerProperty(default=0)
    registrations = ndb.IntegerProperty(default=0)
    created = ndb.DateTimeProperty(auto_now_add=True)
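# A minimal sketch of the usual read-modify-write pattern for a counter model
# like Counter above; running it in a transaction prevents lost updates from
# concurrent requests. The 'global' key name is an assumption for
# illustration. Note that a single counter entity caps write throughput at
# roughly one update per second, so high-traffic counters are usually sharded.
@ndb.transactional
def increment_registrations(counter_id='global'):
    counter = Counter.get_by_id(counter_id)
    if counter is None:
        counter = Counter(id=counter_id)
    counter.registrations += 1
    counter.put()
    return counter.registrations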
class Slice(ndb.Model):
    # To verify the slice belongs to a budget; indexed to retrieve popular
    # slices.
    budgetId = ndb.StringProperty()
    # Indexed to retrieve popular reasons per title.
    title = ndb.StringProperty()
    reason = ndb.StringProperty()
    # Indexed to check that the slice sum is valid.
    creator = ndb.StringProperty()
    # Whether the slice came from the budget creator via the edit page.
    fromEditPage = ndb.BooleanProperty()
    voteCount = ndb.IntegerProperty(default=0)
    sizeToCount = ndb.JsonProperty(default={})  # map[size -> vote-count]
    score = ndb.FloatProperty(default=0)
    # For matching input words to make suggestions.
    words = ndb.StringProperty(repeated=True)

    # Key slices by budgetId + hash(content) to prevent duplicates. This also
    # prevents voting for a slice by an ID that was deleted (down-voted)
    # between display and vote.
    @staticmethod
    def toKeyId(budgetId, title, reason):
        hasher = hashlib.md5()
        if title is None:
            title = ''
        if reason is None:
            reason = ''
        hasher.update(text.utf8(title + '\t' + reason))
        return '{}-{}'.format(budgetId, hasher.hexdigest())

    @staticmethod
    def create(budgetId, title, reason, creator=None, fromEditPage=False):
        slice = Slice(
            id=Slice.toKeyId(budgetId, title, reason),
            budgetId=budgetId, title=title, reason=reason,
            creator=creator, fromEditPage=fromEditPage)
        # Index content words.
        content = ' '.join([w for w in [title, reason] if w])
        words = text.uniqueInOrder(
            text.removeStopWords(text.tokenize(content)))
        words = words[0:conf.MAX_WORDS_INDEXED]  # Limit words indexed.
        slice.words = text.tuples(words, maxSize=2)
        return slice

    @staticmethod
    def get(budgetId, title, reason):
        return Slice.get_by_id(Slice.toKeyId(budgetId, title, reason))

    def hasTitle(self):
        return self.title and self.title.strip()

    def hasTitleAndReason(self):
        return (self.title and self.title.strip()
                and self.reason and self.reason.strip())

    def incrementSizeCount(self, size, increment):
        size = str(size)  # The JSON field stores keys as strings.
        countOld = self.sizeToCount.get(size, 0)
        # Do not allow negative counts.
        self.sizeToCount[size] = max(0, countOld + increment)
        # Filter out zero counts.
        self.sizeToCount = {
            s: c for s, c in self.sizeToCount.iteritems() if 0 < c}

    def medianSize(self):
        return stats.medianKey(self.sizeToCount)

    def sumScoreBelowSize(self, size):
        return voteCountToScore(
            self.countVotesBelowSize(size), self.title, self.reason)

    def sumScoreAboveSize(self, size):
        return voteCountToScore(
            self.countVotesAboveSize(size), self.title, self.reason)

    def countVotesBelowSize(self, size):
        resultSum = sum(
            [c for s, c in self.sizeToCount.iteritems() if int(s) < size])
        logging.debug(
            'countVotesBelowSize() resultSum=' + str(resultSum)
            + ' size=' + str(size) + ' sizeToCount=' + str(self.sizeToCount))
        return resultSum

    def countVotesAboveSize(self, size):
        resultSum = sum(
            [c for s, c in self.sizeToCount.iteritems() if size < int(s)])
        logging.debug(
            'countVotesAboveSize() resultSum=' + str(resultSum)
            + ' size=' + str(size) + ' sizeToCount=' + str(self.sizeToCount))
        return resultSum
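# A short round-trip sketch for Slice's content-hash keying: because the key
# id is budgetId plus an md5 of title+reason, creating the "same" slice twice
# produces the same key, so put() overwrites instead of duplicating. The
# budget id and text below are placeholders.
def example_slice_roundtrip():
    s = Slice.create('budget-1', 'Parks', 'More green space', creator='alice')
    s.put()
    # get() recomputes the identical key from the same content.
    same = Slice.get('budget-1', 'Parks', 'More green space')
    assert same.key == s.key
    return same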
class Purchase(ndb.Model):
    customer = ndb.KeyProperty(kind=Customer)
    price = ndb.IntegerProperty()
class EveLinkCache(ndb.Model):
    value = ndb.PickleProperty()
    expiration = ndb.IntegerProperty()
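# A hedged sketch of how a pickle-backed cache row like EveLinkCache is
# typically used: treat a missing or expired row as a miss. Reading
# `expiration` as epoch seconds is an assumption based on the IntegerProperty.
import time

def cache_get(key_name):
    entry = EveLinkCache.get_by_id(key_name)
    if entry is None or entry.expiration < int(time.time()):
        return None  # miss or stale
    return entry.value

def cache_set(key_name, value, duration_secs):
    EveLinkCache(id=key_name, value=value,
                 expiration=int(time.time()) + duration_secs).put()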
class CollectionModel(base_models.VersionedModel):
    """Versioned storage model for an Oppia collection.

    This class should only be imported by the collection services file
    and the collection model test file.
    """
    SNAPSHOT_METADATA_CLASS = CollectionSnapshotMetadataModel
    SNAPSHOT_CONTENT_CLASS = CollectionSnapshotContentModel
    ALLOW_REVERT = True

    # What this collection is called.
    title = ndb.StringProperty(required=True)
    # The category this collection belongs to.
    category = ndb.StringProperty(required=True, indexed=True)
    # The objective of this collection.
    objective = ndb.TextProperty(default='', indexed=False)
    # The language code of this collection.
    language_code = ndb.StringProperty(
        default=constants.DEFAULT_LANGUAGE_CODE, indexed=True)
    # Tags associated with this collection.
    tags = ndb.StringProperty(repeated=True, indexed=True)

    # The version of all property blob schemas.
    schema_version = ndb.IntegerProperty(
        required=True, default=1, indexed=True)

    # A dict representing the contents of a collection. Currently, this
    # contains the list of nodes. This dict should contain collection data
    # whose structure might need to be changed in the future.
    collection_contents = ndb.JsonProperty(default={}, indexed=False)

    # DEPRECATED in v2.4.2. Do not use.
    nodes = ndb.JsonProperty(default={}, indexed=False)

    @staticmethod
    def get_deletion_policy():
        """Collection is deleted only if it is not public."""
        return base_models.DELETION_POLICY.KEEP_IF_PUBLIC

    @staticmethod
    def get_export_policy():
        """Model does not contain user data."""
        return base_models.EXPORT_POLICY.NOT_APPLICABLE

    @classmethod
    def has_reference_to_user_id(cls, user_id):
        """Check whether CollectionModel snapshots reference the given user.

        Args:
            user_id: str. The ID of the user whose data should be checked.

        Returns:
            bool. Whether any models refer to the given user ID.
        """
        return cls.SNAPSHOT_METADATA_CLASS.exists_for_user_id(user_id)

    @staticmethod
    def get_user_id_migration_policy():
        """CollectionModel doesn't have any field with user ID."""
        return base_models.USER_ID_MIGRATION_POLICY.NOT_APPLICABLE

    @classmethod
    def get_collection_count(cls):
        """Returns the total number of collections."""
        return cls.get_all().count()

    def _trusted_commit(
            self, committer_id, commit_type, commit_message, commit_cmds):
        """Record the event to the commit log after the model commit.

        Note that this extends the superclass method.

        Args:
            committer_id: str. The user_id of the user who committed the
                change.
            commit_type: str. The type of commit. Possible values are in
                core.storage.base_models.COMMIT_TYPE_CHOICES.
            commit_message: str. The commit description message.
            commit_cmds: list(dict). A list of commands, describing changes
                made in this model, which should give sufficient information
                to reconstruct the commit. Each dict always contains:
                    cmd: str. Unique command.
                and then additional arguments for that command.
        """
        super(CollectionModel, self)._trusted_commit(
            committer_id, commit_type, commit_message, commit_cmds)

        committer_user_settings_model = (
            user_models.UserSettingsModel.get_by_id(committer_id))
        committer_username = (
            committer_user_settings_model.username
            if committer_user_settings_model else '')

        collection_rights = CollectionRightsModel.get_by_id(self.id)

        # TODO(msl): test if put_async() leads to any problems (make
        # sure summary dicts get updated correctly when collections
        # are changed).
        collection_commit_log = CollectionCommitLogEntryModel.create(
            self.id, self.version, committer_id, committer_username,
            commit_type, commit_message, commit_cmds,
            collection_rights.status, collection_rights.community_owned
        )
        collection_commit_log.collection_id = self.id
        collection_commit_log.put()

    @classmethod
    def delete_multi(
            cls, entity_ids, committer_id, commit_message,
            force_deletion=False):
        """Deletes the given cls instances with the given entity_ids.

        Note that this extends the superclass method.

        Args:
            entity_ids: list(str). Ids of entities to delete.
            committer_id: str. The user_id of the user who committed the
                change.
            commit_message: str. The commit description message.
            force_deletion: bool. If True these models are deleted completely
                from storage, otherwise they are only marked as deleted.
                Default is False.
        """
        super(CollectionModel, cls).delete_multi(
            entity_ids, committer_id, commit_message,
            force_deletion=force_deletion)

        if not force_deletion:
            committer_user_settings_model = (
                user_models.UserSettingsModel.get_by_id(committer_id))
            committer_username = (
                committer_user_settings_model.username
                if committer_user_settings_model else '')

            commit_log_models = []
            collection_rights_models = CollectionRightsModel.get_multi(
                entity_ids, include_deleted=True)
            versioned_models = cls.get_multi(entity_ids, include_deleted=True)
            for model, rights_model in python_utils.ZIP(
                    versioned_models, collection_rights_models):
                collection_commit_log = CollectionCommitLogEntryModel.create(
                    model.id, model.version, committer_id, committer_username,
                    cls._COMMIT_TYPE_DELETE, commit_message,
                    [{'cmd': cls.CMD_DELETE_COMMIT}], rights_model.status,
                    rights_model.community_owned
                )
                collection_commit_log.collection_id = model.id
                commit_log_models.append(collection_commit_log)
            ndb.put_multi_async(commit_log_models)
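# Note on the put_multi_async() call that ends delete_multi() above: nothing
# waits on the returned futures there. On App Engine the usual safeguards are
# to decorate the request handler with @ndb.toplevel (which blocks until all
# pending async NDB operations finish) or to wait on the futures explicitly:
#
#     futures = ndb.put_multi_async(commit_log_models)
#     ndb.Future.wait_all(futures)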
class PurchaseLogEntry(ndb.Model):
    purchasedOn = ndb.DateTimeProperty(auto_now_add=False)
    userName = ndb.StringProperty(indexed=False)
    videoName = ndb.StringProperty(indexed=False)
    videoPrice = ndb.IntegerProperty()