class BitMapTest(ndb.Model):
    """Test entity holding a pickled bitmap under a name."""
    # Earlier revision used a BlobProperty; compression was considered for
    # both but never enabled:
    #bitmap = ndb.BlobProperty(indexed=False) # compressed=True
    bitmap = ndb.PickleProperty(indexed=False)  #compressed=True
    name = ndb.StringProperty()
class Config(model.Base, model.ConfigAuth):
    """Singleton application configuration entity (see get_master_db)."""
    analytics_id = ndb.StringProperty(default='', verbose_name='Tracking ID')
    announcement_html = ndb.TextProperty(default='', verbose_name='Announcement HTML')
    announcement_type = ndb.StringProperty(
        default='info', choices=['info', 'warning', 'success', 'danger'])
    anonymous_recaptcha = ndb.BooleanProperty(
        default=False,
        verbose_name='Use reCAPTCHA in forms for unauthorized users')
    brand_name = ndb.StringProperty(default=config.APPLICATION_ID)
    bucket_name = ndb.StringProperty(default=config.DEFAULT_GCS_BUCKET_NAME)
    check_unique_email = ndb.BooleanProperty(
        default=True,
        verbose_name='Check for uniqueness of the verified emails')
    email_authentication = ndb.BooleanProperty(
        default=False,
        verbose_name='Email authentication for sign in/sign up')
    feedback_email = ndb.StringProperty(default='')
    # NOTE(review): util.uuid() is evaluated once at class-definition time, so
    # every instance created in the same process shares one default — confirm
    # this is intended (same for `salt` below).
    flask_secret_key = ndb.StringProperty(default=util.uuid())
    letsencrypt_challenge = ndb.StringProperty(
        default='', verbose_name=u'Let’s Encrypt Challenge')
    letsencrypt_response = ndb.StringProperty(
        default='', verbose_name=u'Let’s Encrypt Response')
    notify_on_new_user = ndb.BooleanProperty(
        default=True,
        verbose_name='Send an email notification when a user signs up')
    recaptcha_private_key = ndb.StringProperty(default='', verbose_name='Private Key')
    recaptcha_public_key = ndb.StringProperty(default='', verbose_name='Public Key')
    salt = ndb.StringProperty(default=util.uuid())
    verify_email = ndb.BooleanProperty(default=True, verbose_name='Verify user emails')

    @property
    def has_anonymous_recaptcha(self):
        """True when reCAPTCHA is configured AND enabled for anonymous users."""
        return bool(self.anonymous_recaptcha and self.has_recaptcha)

    @property
    def has_email_authentication(self):
        """True when email sign-in is enabled and its prerequisites
        (feedback address + email verification) are configured."""
        return bool(self.email_authentication and self.feedback_email
                    and self.verify_email)

    @property
    def has_recaptcha(self):
        """True when both reCAPTCHA keys are set."""
        return bool(self.recaptcha_private_key and self.recaptcha_public_key)

    @classmethod
    def get_master_db(cls):
        """Return (creating on first call) the single 'master' config entity."""
        return cls.get_or_insert('master')

    # Serializable fields for the API layer, merged with the mixins' fields.
    FIELDS = {
        'analytics_id': fields.String,
        'announcement_html': fields.String,
        'announcement_type': fields.String,
        'anonymous_recaptcha': fields.Boolean,
        'brand_name': fields.String,
        'bucket_name': fields.String,
        'check_unique_email': fields.Boolean,
        'email_authentication': fields.Boolean,
        'feedback_email': fields.String,
        'flask_secret_key': fields.String,
        'letsencrypt_challenge': fields.String,
        'letsencrypt_response': fields.String,
        'notify_on_new_user': fields.Boolean,
        'recaptcha_private_key': fields.String,
        'recaptcha_public_key': fields.String,
        'salt': fields.String,
        'verify_email': fields.Boolean,
    }
    FIELDS.update(model.Base.FIELDS)
    FIELDS.update(model.ConfigAuth.FIELDS)
class UserPhoto(ndb.Model):
    """Associates an uploaded blobstore photo with a user (by string id)."""
    user = ndb.StringProperty()
    blob_key = ndb.BlobKeyProperty()
class BuildbucketTryJobResult(models.TryJobResult):
    """A TryJobResult created from a BuildBucket build.

    Not stored, but only passed to pathset.html template.
    """

    build_id = ndb.StringProperty()

    @property
    def is_from_buildbucket(self):
        # Used in build_result.html template.
        return True

    @classmethod
    def convert_status_to_result(cls, build):
        """Converts build status to TryJobResult.result.

        See buildbucket docs here:
        https://chromium.googlesource.com/infra/infra/+/master/appengine/cr-buildbucket/doc/index.md#Build
        """
        status = build.get('status')
        if status == 'SCHEDULED':
            return cls.TRYPENDING
        if status == 'COMPLETED':
            if build.get('result') == 'SUCCESS':
                return cls.SUCCESS
            if build.get('result') == 'FAILURE':
                # Non-build failures (e.g. infra failures) fall through to
                # EXCEPTION below.
                if build.get('failure_reason') == 'BUILD_FAILURE':
                    return cls.FAILURE
            if build.get('result') == 'CANCELED':
                if build.get('cancelation_reason') == 'TIMEOUT':
                    return cls.SKIPPED
            # Any other completed outcome is treated as an exception.
            return cls.EXCEPTION
        if status == 'STARTED':
            return cls.STARTED
        logging.warning('Unexpected build %s status: %s', build.get('id'),
                        status)
        return None

    @staticmethod
    def parse_tags(tag_list):
        """Parses a list of colon-delimited tags to a map."""
        # Splits on the first ':' only, so tag values may contain colons.
        return dict(tag.split(':', 1) for tag in tag_list)

    @classmethod
    def from_build(cls, build):
        """Converts a BuildBucket build to BuildBucketTryJobResult."""
        tags = cls.parse_tags(build.get('tags', []))
        result_details = load_json_dict_safe(build, 'result_details_json')
        parameters = load_json_dict_safe(build, 'parameters_json')
        # Prefer result_details properties; fall back to request parameters.
        properties = (result_details.get('properties') or
                      parameters.get('properties'))
        if not isinstance(properties, dict):
            properties = {}

        def read_prop(name, expected_type):
            # Returns the property only if it has the expected type.
            return dict_get_safe(properties, name, expected_type)

        requester = None
        requester_str = read_prop('requester', basestring)
        if requester_str:
            try:
                requester = users.User(requester_str)
            except users.UserNotFoundError:
                # Unknown requester is tolerated; field stays None.
                pass

        timestamp = timestamp_to_datetime(build.get('status_changed_ts'))
        if timestamp is None:
            logging.warning('Build %s has status_changed_ts=None', build['id'])
        return cls(
            id=build['id'],  # Required for to_dict() serialization.
            build_id=build['id'],
            url=dict_get_safe(build, 'url', basestring),
            result=cls.convert_status_to_result(build),
            master=tags.get('master'),
            builder=dict_get_safe(parameters, 'builder_name', basestring),
            slave=read_prop('slavename', basestring),
            buildnumber=read_prop('buildnumber', int),
            reason=read_prop('reason', basestring),
            revision=read_prop('revision', basestring),
            timestamp=timestamp,
            clobber=read_prop('clobber', bool),
            tests=read_prop('testfilter', list) or [],
            project=read_prop('patch_project', basestring),
            requester=requester,
            category=read_prop('category', basestring),
            build_properties=json.dumps(properties, sort_keys=True),
        )
class User(ndb.Model):
    """Basic user record."""
    # NOTE(review): `id` duplicates the ndb key concept and `self` is an odd
    # property name (shadows the conventional instance name in method scope);
    # both look accidental — confirm against callers. Also note a second
    # `class User` appears later in this file.
    id = ndb.StringProperty()
    fname = ndb.StringProperty(required=True)
    lname = ndb.StringProperty(required=True)
    email = ndb.StringProperty(required=True)
    self = ndb.StringProperty()
class FluxSessions(ndb.Model):
    """One recorded session for a user, timestamped at creation."""
    uId = ndb.StringProperty()
    DateTime = ndb.DateTimeProperty(auto_now_add=True)
    AverageTemp = ndb.FloatProperty()
    mlUsed = ndb.FloatProperty()
class sessIdPointers(ndb.Model):
    """Pointer record holding a session id string."""
    SessionId = ndb.StringProperty()
class ExampleStore(ndb.Model):
    """Datastore storage for markup examples in several serializations.

    Each entity stores one example (HTML plus microdata/RDFa/JSON-LD
    renderings and pickled metadata), keyed by its `keyvalue` identifier.
    """
    original_html = ndb.TextProperty('h', indexed=False)
    microdata = ndb.TextProperty('m', indexed=False)
    rdfa = ndb.TextProperty('r', indexed=False)
    jsonld = ndb.TextProperty('j', indexed=False)
    egmeta = ndb.PickleProperty('e', indexed=False)
    keyvalue = ndb.StringProperty('o', indexed=True)
    layer = ndb.StringProperty('l', indexed=False)

    @staticmethod
    def initialise():
        """Delete all stored examples (up to 10 sweeps).

        Returns:
            dict: {"ExampleStore": total number of entities deleted}.
        """
        # FIX: removed a dead local `EXAMPLESTORECACHE = []` that shadowed
        # nothing and had no effect (the module-level cache used by add()
        # was never touched by it).
        import time
        log.info("[%s]ExampleStore initialising Data Store"
                 % (getInstanceId(short=True)))
        loops = 0
        ret = 0
        while loops < 10:
            keys = ExampleStore.query().fetch(
                keys_only=True, use_memcache=False, use_cache=False)
            count = len(keys)
            if count == 0:
                break
            log.info("[%s]ExampleStore deleting %s keys"
                     % (getInstanceId(short=True), count))
            ndb.delete_multi(keys, use_memcache=False, use_cache=False)
            ret += count
            loops += 1
            time.sleep(0.01)
        return {"ExampleStore": ret}

    @staticmethod
    def add(example):
        """Queue one example entity into the module-level write cache."""
        e = ExampleStore(id=example.keyvalue,
                         original_html=example.original_html,
                         microdata=example.microdata,
                         rdfa=example.rdfa,
                         jsonld=example.jsonld,
                         egmeta=example.egmeta,
                         keyvalue=example.keyvalue,
                         layer=example.layer)
        EXAMPLESTORECACHE.append(e)

    @staticmethod
    def store(examples):
        """Queue `examples` and flush the cache to the datastore."""
        for e in examples:
            ExampleStore.add(e)
        if len(EXAMPLESTORECACHE):
            ndb.put_multi(EXAMPLESTORECACHE, use_cache=False)

    def get(self, name):
        """Return the named serialization ('' for unknown names)."""
        if name == 'original_html':
            return self.original_html
        if name == 'microdata':
            return self.microdata
        if name == 'rdfa':
            return self.rdfa
        if name == 'jsonld':
            return self.jsonld
        return ""

    @staticmethod
    def getEgmeta(id):
        """Return the pickled example metadata for `id`, or {} if empty."""
        em = ExampleStore.get_by_id(id)
        # BUG FIX: was `em.emeta`, which is not a property of this model and
        # raised AttributeError; the property is `egmeta`.
        ret = em.egmeta
        if ret:
            return ret
        return {}
class RecallTaskModel(ndb.Model):
    """Model for each running/completed message recall task."""

    owner_email = ndb.StringProperty(required=True)
    message_criteria = ndb.StringProperty(required=True)
    # Derived from the owner's email so tasks can be filtered per domain.
    domain = ndb.ComputedProperty(lambda self: self.owner_email.split('@')[1])
    start_datetime = ndb.DateTimeProperty(required=True, auto_now_add=True)
    end_datetime = ndb.DateTimeProperty(indexed=False, auto_now=True)
    task_state = ndb.StringProperty(required=True, default=TASK_STARTED,
                                    choices=TASK_STATES)
    # NOTE(review): default=True means a task is considered aborted until the
    # final SetTaskState() update flips it — confirm that is intended.
    is_aborted = ndb.BooleanProperty(required=True, default=True)

    @classmethod
    def FetchTaskFromSafeId(cls, user_domain, task_key_urlsafe):
        """Utility to query and fetch a specific task from its safe id.

        get() is a shortcut for fetch() of one record only.

        Args:
          user_domain: String to force safety check of proper domain.
          task_key_urlsafe: String representation of task key safe for urls.

        Returns:
          A single task model object.
        """
        return cls.GetQueryBySafeUrlTaskKey(user_domain,
                                            task_key_urlsafe).get()

    @classmethod
    def FetchOneUIPageOfTasksForDomain(cls, user_domain, urlsafe_cursor):
        """Utility to query and fetch all tasks.

        Args:
          user_domain: String to force safety check of proper domain.
          urlsafe_cursor: String cursor from previous fetch_page() calls.

        Returns:
          Iterable of one page of RecallTaskModel tasks.
        """
        return cls.GetQueryForAllTasks(user_domain).fetch_page(
            _TASK_ROWS_FETCH_PAGE,
            start_cursor=Cursor(urlsafe=urlsafe_cursor))

    @classmethod
    def GetTaskByKey(cls, task_key_id):
        """Helper to retrieve a task entity using its key.

        Retries with exponential backoff to ride out datastore eventual
        consistency.

        Args:
          task_key_id: String (serializable) unique id of the task record.

        Returns:
          The RecallTaskModel entity.

        Raises:
          recall_errors.MessageRecallDataError: if the task is not found
            after _GET_ENTITY_RETRIES attempts.
        """
        retries = 0
        task = None
        while not task and retries < _GET_ENTITY_RETRIES:
            task = cls.get_by_id(int(task_key_id))
            if task:
                return task
            time.sleep(2**retries)
            retries += 1
        # BUG FIX: the id was passed as a second constructor argument and
        # never interpolated into the message; format it explicitly.
        raise recall_errors.MessageRecallDataError(
            'Cannot locate RecallTaskModel id=%s.' % task_key_id)

    @classmethod
    def GetQueryBySafeUrlTaskKey(cls, user_domain, task_key_urlsafe):
        """Prepare a Query object to retrieve one specific task.

        Args:
          user_domain: String to force safety check of proper domain.
          task_key_urlsafe: String representation of task key safe for urls.

        Returns:
          Query object filtered to one specific task.
        """
        return cls.query(cls.domain == user_domain,
                         cls.key == ndb.Key(urlsafe=task_key_urlsafe))

    @classmethod
    def GetQueryForAllTasks(cls, user_domain):
        """Prepare a Query object to retrieve all tasks for a domain.

        Args:
          user_domain: String to force safety check of proper domain.

        Returns:
          Query object filtered to all tasks for a domain.
        """
        return cls.query(cls.domain == user_domain).order(-cls.start_datetime)

    @classmethod
    def SetTaskState(cls, task_key_id, new_state, is_aborted=True):
        """Utility method to update the state of the master task record.

        Args:
          task_key_id: String (serializable) unique id of the task record.
          new_state: String update for the ndb StringProperty field.
          is_aborted: Boolean; False when performing final update.
        """
        task = cls.GetTaskByKey(task_key_id)
        if task:
            # Once a task reaches TASK_DONE its state is final.
            if task.task_state != TASK_DONE:
                task.task_state = new_state
                if new_state == TASK_DONE:
                    _LOG.warning('RecallTaskModel id=%s Done.', task_key_id)
                task.is_aborted = is_aborted
                task.put()

    def GetErrorReasonCountForTask(self):
        """Count the #error reasons associated with the current task.

        Returns:
          Integer number of error reasons associated with the current task.
        """
        return error_reason.ErrorReasonModel.GetErrorReasonCountForTask(
            task_key_id=self.key.id())

    def GetQueryForAllTaskUsers(self, user_state_filters=None,
                                message_state_filters=None):
        """Prepare a Query object to retrieve all users for a specific task.

        Args:
          user_state_filters: List of strings to filter users from the
            USER_STATES list in domain_user.py. e.g. ['Done', 'Suspended']
          message_state_filters: List of strings to filter users from the
            MESSAGE_STATES list in domain_user.py. e.g. ['Found', 'Purged']

        Returns:
          Query object filtered to all users in a specific task.
        """
        return domain_user.DomainUserToCheckModel.GetQueryForAllTaskUsers(
            task_key_id=self.key.id(),
            user_state_filters=user_state_filters,
            message_state_filters=message_state_filters)

    def GetUserCountForTask(self, user_state_filters=None,
                            message_state_filters=None):
        """Count the #users associated with the current task and message state.

        The states may be empty to count all users or the states may have
        elements to count users in a particular state.

        Args:
          user_state_filters: List of strings to filter users from the
            USER_STATES list in domain_user.py. e.g. ['Done', 'Suspended']
          message_state_filters: List of strings to filter users from the
            MESSAGE_STATES list in domain_user.py. e.g. ['Found', 'Purged']

        Returns:
          Integer number of users associated with the current task with the
          supplied message states.
        """
        return domain_user.DomainUserToCheckModel.GetUserCountForTask(
            task_key_id=self.key.id(),
            user_state_filters=user_state_filters,
            message_state_filters=message_state_filters)

    def GetUserCountForTaskWithTerminalUserStates(self):
        """Helper to count users who have completed processing.

        Returns:
          Integer number of users associated with the current task with
          terminal user states.
        """
        return (domain_user.DomainUserToCheckModel.
                GetUserCountForTaskWithTerminalUserStates(
                    task_key_id=self.key.id()))

    def AmIAborted(self):
        """True if this (finished) task ended in the aborted state."""
        return self.is_aborted and (self.task_state == TASK_DONE)

    @classmethod
    def IsTaskAborted(cls, task_key_id):
        """Convenience method to check if another task aborted the recall.

        Args:
          task_key_id: key id of the RecallTask model object for this recall.

        Returns:
          True if task found and aborted is True else False.
        """
        task = cls.GetTaskByKey(task_key_id=task_key_id)
        return task.is_aborted and (task.task_state == TASK_DONE)
class School(ndb.Model):
    """A school and one of its facilities."""
    name = ndb.StringProperty(required=True)
    facility = ndb.StringProperty(required=True)
class SentEmailModel(base_models.BaseModel):
    """Records the content and metadata of an email sent from Oppia.

    This model is read-only; entries cannot be modified once created.

    The id/key of instances of this class has the form
        [INTENT].[random hash].
    """
    # TODO(sll): Implement functionality to get all emails sent to a particular
    # user with a given intent within a given time period.

    # The user ID of the email recipient.
    recipient_id = ndb.StringProperty(required=True, indexed=True)
    # The email address of the recipient.
    recipient_email = ndb.StringProperty(required=True)
    # The user ID of the email sender. For site-generated emails this is equal
    # to feconf.SYSTEM_COMMITTER_ID.
    sender_id = ndb.StringProperty(required=True)
    # The email address used to send the notification.
    sender_email = ndb.StringProperty(required=True)
    # The intent of the email.
    intent = ndb.StringProperty(
        required=True,
        indexed=True,
        choices=[
            feconf.EMAIL_INTENT_SIGNUP, feconf.EMAIL_INTENT_MARKETING,
            feconf.EMAIL_INTENT_DAILY_BATCH,
            feconf.EMAIL_INTENT_EDITOR_ROLE_NOTIFICATION,
            feconf.EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION,
            feconf.EMAIL_INTENT_SUBSCRIPTION_NOTIFICATION,
            feconf.EMAIL_INTENT_SUGGESTION_NOTIFICATION,
            feconf.EMAIL_INTENT_PUBLICIZE_EXPLORATION,
            feconf.EMAIL_INTENT_UNPUBLISH_EXPLORATION,
            feconf.EMAIL_INTENT_DELETE_EXPLORATION,
            feconf.EMAIL_INTENT_REPORT_BAD_CONTENT,
            feconf.EMAIL_INTENT_QUERY_STATUS_NOTIFICATION,
            feconf.BULK_EMAIL_INTENT_TEST
        ])
    # The subject line of the email.
    subject = ndb.TextProperty(required=True)
    # The HTML content of the email body.
    html_body = ndb.TextProperty(required=True)
    # The datetime the email was sent, in UTC.
    sent_datetime = ndb.DateTimeProperty(required=True, indexed=True)
    # The hash of the recipient id, email subject and message body.
    email_hash = ndb.StringProperty(indexed=True)

    @classmethod
    def _generate_id(cls, intent):
        """Generate a new unique id of the form [INTENT].[random hash].

        Raises:
            Exception: if no collision-free id is found within
                base_models.MAX_RETRIES attempts.
        """
        id_prefix = '%s.' % intent
        for _ in range(base_models.MAX_RETRIES):
            # BUG FIX: the original used '%s.%s' % (id_prefix, ...), which
            # produced a double separator ('INTENT..hash') since id_prefix
            # already ends with '.'. This now matches the documented id form.
            new_id = '%s%s' % (
                id_prefix,
                utils.convert_to_hash(
                    str(utils.get_random_int(base_models.RAND_RANGE)),
                    base_models.ID_LENGTH))
            if not cls.get_by_id(new_id):
                return new_id
        raise Exception(
            'The id generator for SentEmailModel is producing too many '
            'collisions.')

    @classmethod
    def create(cls, recipient_id, recipient_email, sender_id, sender_email,
               intent, subject, html_body, sent_datetime):
        """Creates a new SentEmailModel entry.

        Returns:
            The created (and already persisted) SentEmailModel instance.
        """
        instance_id = cls._generate_id(intent)
        email_model_instance = cls(id=instance_id,
                                   recipient_id=recipient_id,
                                   recipient_email=recipient_email,
                                   sender_id=sender_id,
                                   sender_email=sender_email,
                                   intent=intent,
                                   subject=subject,
                                   html_body=html_body,
                                   sent_datetime=sent_datetime)
        email_model_instance.put()
        # Backward-compatible improvement: return the instance so callers can
        # use it (the original returned None implicitly).
        return email_model_instance

    def put(self):
        """Store the entity, refreshing email_hash first."""
        email_hash = self._generate_hash(self.recipient_id, self.subject,
                                         self.html_body)
        self.email_hash = email_hash
        super(SentEmailModel, self).put()

    @classmethod
    def get_by_hash(cls, email_hash, sent_datetime_lower_bound=None):
        """Returns all messages with a given email_hash.

        This also takes an optional sent_datetime_lower_bound argument,
        which is a datetime instance. If this is given, only
        SentEmailModel instances sent after sent_datetime_lower_bound
        should be returned.
        """
        if sent_datetime_lower_bound is not None:
            if not isinstance(sent_datetime_lower_bound, datetime.datetime):
                raise Exception('Expected datetime, received %s of type %s' %
                                (sent_datetime_lower_bound,
                                 type(sent_datetime_lower_bound)))
        query = cls.query().filter(cls.email_hash == email_hash)
        if sent_datetime_lower_bound is not None:
            query = query.filter(
                cls.sent_datetime > sent_datetime_lower_bound)
        messages = query.fetch()
        return messages

    @classmethod
    def _generate_hash(cls, recipient_id, email_subject, email_body):
        """Generate hash for a given recipient_id, email_subject and cleaned
        email_body.
        """
        hash_value = utils.convert_to_hash(
            recipient_id + email_subject + email_body, 100)
        return hash_value

    @classmethod
    def check_duplicate_message(cls, recipient_id, email_subject, email_body):
        """Check for a given recipient_id, email_subject and cleaned
        email_body, whether a similar message has been sent in the last
        DUPLICATE_EMAIL_INTERVAL_MINS.
        """
        email_hash = cls._generate_hash(recipient_id, email_subject,
                                        email_body)
        datetime_now = datetime.datetime.utcnow()
        time_interval = datetime.timedelta(
            minutes=feconf.DUPLICATE_EMAIL_INTERVAL_MINS)
        sent_datetime_lower_bound = datetime_now - time_interval
        messages = cls.get_by_hash(
            email_hash, sent_datetime_lower_bound=sent_datetime_lower_bound)
        for message in messages:
            # Hash collisions are possible, so compare the actual fields too.
            if (message.recipient_id == recipient_id
                    and message.subject == email_subject
                    and message.html_body == email_body):
                return True
        return False
class Event(ndb.Model):
    """An activity at a time and place, with a creator and Profile attendees."""
    activity = ndb.StringProperty(required=True)
    location = ndb.StringProperty(required=True)
    timeDate = ndb.DateTimeProperty(required=True)
    creator = ndb.StringProperty(required=True)
    # Keys of Profile entities attending this event.
    attendees = ndb.KeyProperty(kind=Profile, repeated=True)
class Profile(ndb.Model):
    """A member profile (name, email, class year)."""
    name = ndb.StringProperty(required=True)
    email = ndb.StringProperty(required=True)
    classYear = ndb.StringProperty(required=True)
class UsNyInmate(Inmate):
    """Inmate specialization carrying the NY-specific inmate id."""
    us_ny_inmate_id = ndb.StringProperty()
class UserPost(ndb.Model):
    """A user's post: an uploaded blob with a caption and timestamp."""
    post_date = ndb.DateTimeProperty()
    post_file = ndb.BlobKeyProperty()
    post_caption = ndb.StringProperty()
    post_user = ndb.StringProperty()
class Invite(ndb.Model):
    '''A database entry representing a single invitation of an email
    address to an Event.'''
    email = ndb.StringProperty()
    event_key = ndb.KeyProperty(Event)
class Rooms(ndb.Model):
    """A room linking a user to comments.

    BUG FIX: the original declared ``user1 = User.key``, which assigns the
    ``User`` model's key *descriptor* object instead of declaring a datastore
    property on this model; a KeyProperty referencing User is the working
    equivalent.
    """
    # Key of the first participant (a User entity).
    user1 = ndb.KeyProperty(kind='User')
    # A second participant was sketched but never implemented:
    # user2 = ndb.KeyProperty(kind='User')
    comments = ndb.StringProperty()
class UserCalender(ndb.Model):
    """Per-user availability for a given date (all stored as strings)."""
    # NOTE(review): 'avaliableTimes' (and the class name 'UserCalender') are
    # misspelled, but renaming would break stored data and callers — left as-is.
    avaliableTimes = ndb.StringProperty()
    email = ndb.StringProperty()
    date = ndb.StringProperty()
class Users(auth_models.User):
    """Auth user extended with sensor and location associations."""
    email = ndb.StringProperty()
    name = ndb.StringProperty()
    Sensors = ndb.StringProperty(repeated=True)
    locations = ndb.StringProperty(repeated=True)
    sublocations = ndb.StringProperty(repeated=True)
class User(ndb.Model):
    """Minimal user record holding only an email address."""
    # NOTE(review): this redefines `User` declared earlier in this file,
    # shadowing it for all later references — confirm which is intended.
    email = ndb.StringProperty()
class Dataset(StorageObject):
    """Generic text data files.

    On put, the raw data (attached transiently as `self.data` by create())
    is serialized per its content type and written to Google Cloud Storage;
    only metadata lives in the datastore.
    """
    # Anyone owning the parent also owns the dataset. A null parent id means the
    # dataset is only accessible by super admins.
    parent_id = ndb.StringProperty()
    # Makes it easier to query for datasets with no parent.
    has_parent = ndb.ComputedProperty(lambda self: bool(self.parent_id))

    # Maps accepted content types to serializers producing the bytes that are
    # written to GCS.
    allowed_content_types = {
        'application/json': lambda d: json.dumps(d),
        'text/csv': lambda d: d.encode('utf-8'),
    }

    @classmethod
    def property_types(klass):
        # Tell the api to accept boolean query params for this computed prop.
        types = super(klass, klass).property_types()
        types['has_parent'] = bool
        return types

    @classmethod
    def create(klass, filename, data, content_type, parent_id=None, **kwargs):
        """Build a Dataset with its GCS path derived from a hash of the data.

        Raises:
          Exception: if content_type is not in allowed_content_types.
        """
        if content_type not in klass.allowed_content_types:
            raise Exception("Forbidden content type: {}".format(content_type))
        ds = super(klass, klass).create(filename=filename,
                                        content_type=content_type,
                                        parent_id=parent_id, **kwargs)
        ds.data = data  # will write this to gcs on put; not for datastore
        ds.gcs_path = '/{bucket}{namespace}/{file_hash}'.format(
            bucket=app_identity.get_application_id() + '-datasets',
            namespace=os.environ['GCS_UPLOAD_PREFIX'],
            file_hash=hashlib.md5(Dataset._dumps(ds)).hexdigest(),
        )
        return ds

    @classmethod
    def _dumps(klass, dataset):
        # Serialize the transient data via the content-type-specific lambda.
        return klass.allowed_content_types[dataset.content_type](dataset.data)

    def before_put(self, *args, **kwargs):
        """Write the transient data to GCS and record its size, if present."""
        if not hasattr(self, 'data'):
            # This property only exists immediately after creation. Putting an
            # entity which was loaded from the db won't have it, so do nothing.
            return
        open_kwargs = {
            'content_type': self.content_type,
            'retry_params': gcs.RetryParams(backoff_factor=1.1),
            'options': {
                'Content-Disposition':
                    'attachment; filename={}'.format(self.filename),
                # Theoretically allows figuring out an attachment history for
                # a given task.
                'x-goog-meta-dataset-id': self.uid,
            }
        }
        with gcs.open(self.gcs_path, 'w', **open_kwargs) as gcs_file:
            gcs_file.write(Dataset._dumps(self))
            # Grab the size so it can be saved on the entity.
            self.size = gcs_file.tell()
class _TaskResultCommon(ndb.Model):
    """Contains properties that is common to both TaskRunResult and
    TaskResultSummary.

    It is not meant to be instantiated on its own.

    TODO(maruel): Overhaul this entity:
    - Get rid of TaskOutput as it is not needed anymore (?)
    """
    # Bot that ran this task.
    bot_id = ndb.StringProperty()

    # Bot version (as a hash) of the code running the task.
    bot_version = ndb.StringProperty()

    # Bot dimensions at the moment the bot reaped the task. Not set for old
    # tasks.
    bot_dimensions = datastore_utils.DeterministicJsonProperty(
        json_type=dict, compressed=True)

    # Active server version(s). Note that during execution, the active server
    # version may have changed, this list will list all versions seen as the
    # task was updated.
    server_versions = ndb.StringProperty(repeated=True)

    # This entity is updated everytime the bot sends data so it is equivalent
    # to 'last_ping'.
    modified_ts = ndb.DateTimeProperty()

    # Records that the task failed, e.g. one process had a non-zero exit code.
    # The task may be retried if desired to weed out flakiness.
    failure = ndb.ComputedProperty(_calculate_failure)

    # Internal infrastructure failure, in which case the task should be
    # retried automatically if possible.
    internal_failure = ndb.BooleanProperty(default=False)

    # Number of TaskOutputChunk entities for the output.
    stdout_chunks = ndb.IntegerProperty(indexed=False)

    # Process exit code.
    exit_code = ndb.IntegerProperty(indexed=False, name='exit_codes')

    # Task duration in seconds as seen by the process who started the child
    # task, excluding all overheads. If the task was not isolated, this is the
    # value returned by task_runner. If the task was isolated, this is the
    # value returned by run_isolated.
    duration = ndb.FloatProperty(indexed=False, name='durations')

    # Time when a bot reaped this task.
    started_ts = ndb.DateTimeProperty()

    # Time when the bot completed the task. Note that if the job was improperly
    # handled, for example state is BOT_DIED, abandoned_ts is used instead of
    # completed_ts.
    completed_ts = ndb.DateTimeProperty()
    abandoned_ts = ndb.DateTimeProperty()

    # Children tasks that were triggered by this task. This is set when the
    # task reentrantly creates other Swarming tasks. Note that the task_id is
    # to a TaskResultSummary.
    children_task_ids = ndb.StringProperty(
        validator=_validate_task_summary_id, repeated=True)

    # File outputs of the task. Only set if TaskRequest.properties.sources_ref
    # is set. The isolateserver and namespace should match.
    outputs_ref = ndb.LocalStructuredProperty(task_request.FilesRef)

    @property
    def can_be_canceled(self):
        """Returns True if the task is in a state that can be canceled."""
        # TOOD(maruel): To be able to add State.RUNNING, the following must be
        # done:
        # task_scheduler.cancel_task() must be strictly a transaction relative
        # to task_scheduler.bot_kill_task() and
        # task_scheduler.bot_update_task().
        #
        # The tricky part is to keep this code performant. On the other hand,
        # all the entities under the transaction (TaskToRun, TaskResultSummary
        # and TaskRunResult) are under the same entity root, so it's definitely
        # feasible, likely using a transaction is not a problem in practice.
        # The important part would be to ensure that TaskOuputChunks are not
        # also stored as part of the transaction, since they do not need to.
        # https://code.google.com/p/swarming/issues/detail?id=62
        return self.state == State.PENDING

    @property
    def duration_as_seen_by_server(self):
        """Returns the timedelta the task spent executing, including
        server<->bot communication overhead.

        This is the task duration as seen by the server, not by the bot.

        Task abandoned or not yet completed are not applicable and return None.
        """
        if not self.started_ts or not self.completed_ts:
            return None
        return self.completed_ts - self.started_ts

    def duration_now(self, now):
        """Returns the timedelta the task spent executing as of now, including
        overhead while running but excluding overhead after running..
        """
        if self.duration is not None:
            return datetime.timedelta(seconds=self.duration)
        if not self.started_ts or self.abandoned_ts:
            return None
        return (self.completed_ts or now) - self.started_ts

    @property
    def ended_ts(self):
        # End time regardless of how the task ended (completed or abandoned).
        return self.completed_ts or self.abandoned_ts

    @property
    def is_exceptional(self):
        """Returns True if the task is in an exceptional state. Mostly for html
        view.
        """
        return self.state in State.STATES_EXCEPTIONAL

    @property
    def is_pending(self):
        """Returns True if the task is still pending. Mostly for html view."""
        return self.state == State.PENDING

    @property
    def is_running(self):
        """Returns True if the task is still pending. Mostly for html view."""
        return self.state == State.RUNNING

    @property
    def performance_stats(self):
        """Returns the PerformanceStats associated with this task results.

        Returns an empty instance if none is available.
        """
        # Keeps a cache. It's still paying the full latency cost of a DB fetch.
        if not hasattr(self, '_performance_stats_cache'):
            key = None if self.deduped_from else self.performance_stats_key
            # pylint: disable=attribute-defined-outside-init
            self._performance_stats_cache = (
                (key.get() if key else None) or
                PerformanceStats(isolated_download=IsolatedOperation(),
                                 isolated_upload=IsolatedOperation()))
        return self._performance_stats_cache

    @property
    def overhead_isolated_inputs(self):
        """Returns the overhead from isolated setup in timedelta."""
        # Implicitly returns None when no download duration is recorded.
        perf = self.performance_stats
        if perf.isolated_download.duration is not None:
            return datetime.timedelta(seconds=perf.isolated_download.duration)

    @property
    def overhead_isolated_outputs(self):
        """Returns the overhead from isolated results upload in timedelta."""
        perf = self.performance_stats
        if perf.isolated_upload.duration is not None:
            return datetime.timedelta(seconds=perf.isolated_upload.duration)

    @property
    def overhead_server(self):
        """Returns the overhead from server<->bot communication in timedelta."""
        # Server overhead = wall-clock seen by server minus everything the bot
        # accounted for (task duration + bot + isolated overheads). Negative
        # results (clock skew) are suppressed by returning None.
        perf = self.performance_stats
        if perf.bot_overhead is not None:
            duration = self.duration + perf.bot_overhead
            duration += (perf.isolated_download.duration or 0)
            duration += (perf.isolated_upload.duration or 0)
            out = (self.duration_as_seen_by_server -
                   datetime.timedelta(seconds=duration))
            if out.total_seconds() >= 0:
                return out

    @property
    def overhead_task_runner(self):
        """Returns the overhead from task_runner in timedelta, excluding
        isolated overhead.

        This is purely bookeeping type of overhead.
        """
        perf = self.performance_stats
        if perf.bot_overhead is not None:
            return datetime.timedelta(seconds=perf.bot_overhead)

    @property
    def pending(self):
        """Returns the timedelta the task spent pending to be scheduled.

        Returns None if not started yet or if the task was deduped from another
        one.
        """
        if not self.deduped_from and self.started_ts:
            return self.started_ts - self.created_ts
        return None

    def pending_now(self, now):
        """Returns the timedelta the task spent pending to be scheduled as of
        now.

        Similar to .pending except that its return value is not deterministic.
        """
        if self.deduped_from:
            return None
        return (self.started_ts or now) - self.created_ts

    @property
    def request(self):
        """Returns the TaskRequest that is related to this entity."""
        # Keeps a cache. It's still paying the full latency cost of a DB fetch.
        if not hasattr(self, '_request_cache'):
            # pylint: disable=attribute-defined-outside-init
            self._request_cache = self.request_key.get()
        return self._request_cache

    @property
    def run_result_key(self):
        """Returns the active TaskRunResult key."""
        # Subclasses must provide the concrete key.
        raise NotImplementedError()

    def to_string(self):
        return state_to_string(self)

    def to_dict(self):
        out = super(_TaskResultCommon, self).to_dict()
        # stdout_chunks is an implementation detail.
        out.pop('stdout_chunks')
        out['id'] = self.task_id
        return out

    def signal_server_version(self, server_version):
        """Adds `server_version` to self.server_versions if relevant."""
        # Only append when it differs from the most recently recorded version.
        if not self.server_versions or self.server_versions[
                -1] != server_version:
            self.server_versions.append(server_version)

    def get_output(self):
        """Returns the output, either as str or None if no output is
        present.
        """
        return self.get_output_async().get_result()

    @ndb.tasklet
    def get_output_async(self):
        """Returns the stdout as a ndb.Future.

        Use out.get_result() to get the data as a str or None if no output is
        present.
        """
        if not self.run_result_key or not self.stdout_chunks:
            # The task was not reaped or no output was streamed for this index
            # yet.
            raise ndb.Return(None)
        output_key = _run_result_key_to_output_key(self.run_result_key)
        out = yield TaskOutput.get_output_async(output_key, self.stdout_chunks)
        raise ndb.Return(out)

    def _pre_put_hook(self):
        """Use extra validation that cannot be validated throught
        'validator'.
        """
        super(_TaskResultCommon, self)._pre_put_hook()
        if self.state == State.EXPIRED:
            if self.failure or self.exit_code is not None:
                raise datastore_errors.BadValueError(
                    'Unexpected State, a task can\'t fail if it hasn\'t '
                    'started yet')
        if self.state == State.TIMED_OUT and not self.failure:
            raise datastore_errors.BadValueError('Timeout implies task failure')
        if not self.modified_ts:
            raise datastore_errors.BadValueError('Must update .modified_ts')
        # duration and exit_code are set (or cleared) together.
        if (self.duration is None) != (self.exit_code is None):
            raise datastore_errors.BadValueError(
                'duration and exit_code must both be None or not None')
        if self.state in State.STATES_DONE:
            if self.duration is None:
                raise datastore_errors.BadValueError(
                    'duration and exit_code must be set with state %s' %
                    State.to_string(self.state))
        elif self.state != State.BOT_DIED:
            # With BOT_DIED, it can be either ways.
            if self.duration is not None:
                raise datastore_errors.BadValueError(
                    'duration and exit_code must not be set with state %s' %
                    State.to_string(self.state))
        if self.deduped_from:
            if self.state != State.COMPLETED:
                raise datastore_errors.BadValueError(
                    'state must be COMPLETED on deduped task')
            if self.failure:
                raise datastore_errors.BadValueError(
                    'failure can\'t be True on deduped task')
        # Normalize: dedupe children and sort them by task id (hex) value.
        self.children_task_ids = sorted(
            set(self.children_task_ids), key=lambda x: int(x, 16))

    @classmethod
    def _properties_fixed(cls):
        """Returns all properties with their member name, excluding computed
        properties.
        """
        return [
            prop._code_name for prop in cls._properties.itervalues()
            if not isinstance(prop, ndb.ComputedProperty)
        ]
class HistoryMark(ndb.Model):
    """A mark on the history timeline: a person, event or period.

    Each mark belongs to a country and may link to a parent period, event or
    person via KeyProperty references.
    """
    created = ndb.DateProperty(auto_now_add=True)
    updated = ndb.DateProperty(auto_now=True)
    sort_index = ndb.IntegerProperty(default=0)
    # Expected values: PERSON_CATEGORY, EVENT_CATEGORY or PERIOD_CATEGORY.
    category = ndb.IntegerProperty()
    available = ndb.BooleanProperty(default=False)
    name = ndb.StringProperty(required=True)
    start = ndb.DateProperty(required=True)
    end = ndb.DateProperty(required=True)
    image = ndb.StringProperty(default="")
    description = ndb.StringProperty(default="")
    text = ndb.KeyProperty(kind=Text)
    test = ndb.KeyProperty(kind=Test)
    country = ndb.KeyProperty(required=True, kind=HistoryCountry)
    period = ndb.KeyProperty()  # kind=HistoryPeriod ## parent
    event = ndb.KeyProperty()  # kind=HistoryEvent ## parent
    person = ndb.KeyProperty()  # kind=HistoryPerson ## parent
    group_title = ndb.StringProperty(default=DEFAULT_GROUP_TITLE)
    dependencies = ndb.KeyProperty(repeated=True)  # kind=self

    @classmethod
    def get_new_marks(cls, country, amount, timestamp):
        """Returns up to `amount` newest available marks for `country`.

        NOTE: `timestamp` is currently unused (the `created` filter below is
        disabled); the parameter is kept for caller compatibility.
        """
        return cls.query(
            cls.country == country.key,
            # cls.created < datetime.fromtimestamp(timestamp),
            cls.available == True).order(-cls.created).fetch(amount)

    @classmethod
    def get_by_mark(cls, mark, consider_avail=True):
        """Returns the marks attached to `mark`, dispatching on its category.

        Returns None for an unknown category (preserved original behavior).
        """
        if mark.category == PERSON_CATEGORY:
            return cls.get_by_person(mark, consider_avail)
        if mark.category == EVENT_CATEGORY:
            return cls.get_by_event(mark, consider_avail)
        if mark.category == PERIOD_CATEGORY:
            return cls.get_by_period(mark, consider_avail)
        return None

    @classmethod
    def _get_by_parent_key(cls, prop, parent, consider_avail):
        # Shared helper for the three per-parent-kind queries below.
        return [mark for mark in cls.query(prop == parent.key).fetch()
                if not consider_avail or mark.available]

    @classmethod
    def get_by_period(cls, period, consider_avail=True):
        """Marks whose parent period is `period`."""
        return cls._get_by_parent_key(cls.period, period, consider_avail)

    @classmethod
    def get_by_event(cls, event, consider_avail=True):
        """Marks whose parent event is `event`."""
        return cls._get_by_parent_key(cls.event, event, consider_avail)

    @classmethod
    def get_by_person(cls, person, consider_avail=True):
        """Marks whose parent person is `person`."""
        return cls._get_by_parent_key(cls.person, person, consider_avail)

    def get_count(self):
        """Recursively counts tests and videos reachable from this mark."""
        from models.video import YoutubeVideo
        # Robustness fix: `test` is an optional key; previously this crashed
        # with AttributeError when self.test was None.
        result = 1 if self.test and self.test.get().max_questions > 0 else 0
        result += sum(mark.get_count()
                      for mark in HistoryPerson.get_by_mark(self))
        result += sum(mark.get_count()
                      for mark in HistoryEvent.get_by_mark(self))
        result += sum(video.get_count()
                      for video in YoutubeVideo.get_by_mark(self))
        return result

    def get_period(self):
        """Walks parent links until a mark carrying a period key is found.

        Returns self when this mark IS a period (note: a model instance, not a
        key — preserved original behavior), the period ndb.Key of the nearest
        ancestor otherwise, or None when the chain has no period.
        """
        if self.category == PERIOD_CATEGORY:
            return self
        mark = self
        while mark.period is None:
            if mark.person:
                mark = mark.person.get()
            elif mark.event:
                mark = mark.event.get()
            else:
                # Bug fix: without this branch the loop spun forever when a
                # mark had neither a person nor an event parent and no period.
                return None
        return mark.period

    def dict(self):
        """Serializes the mark for API responses."""
        from methods.mapping import get_year_title, timestamp
        return {
            'id': str(self.key.id()),
            'category': str(self.category),
            'created': str(timestamp(self.created)),
            'name': self.name,
            'image': self.image,
            'description': self.description,
            'group_title': self.group_title,
            'year_title': get_year_title(self.start, self.end),
        }
class TaskResultSummary(_TaskResultCommon):
  """Represents the overall result of a task.

  Parent is a TaskRequest. Key id is always 1.

  This includes the relevant result taking in account all tries. This entity
  is basically a cache plus a bunch of indexes to speed up common queries.

  Its primary purpose is for status pages listing all the active tasks or
  recently completed tasks.
  """
  # These properties are directly copied from TaskRequest. They are only copied
  # here to simplify searches with the Web UI and to enable DB queries based on
  # both user and results properties (e.g. all requests from X which succeeded).
  # They are immutable.
  # TODO(maruel): Investigate what is worth copying over.
  created_ts = ndb.DateTimeProperty(required=True)
  name = ndb.StringProperty()
  user = ndb.StringProperty()
  tags = ndb.StringProperty(repeated=True)

  # Value of TaskRequest.properties.properties_hash only when these conditions
  # are met:
  # - TaskRequest.properties.idempotent is True
  # - self.state == State.COMPLETED
  # - self.failure == False
  # - self.internal_failure == False
  properties_hash = ndb.BlobProperty(indexed=True)

  # State of this task. The value from TaskRunResult will be copied over.
  state = StateProperty(default=State.PENDING)

  # Represent the last try attempt of the task. Starts at 1 EXCEPT when the
  # results were deduped, in this case it's 0.
  try_number = ndb.IntegerProperty()

  # Effective cost of this task for each try. Use self.cost_usd for the sum.
  # It's empty on deduped task, since nothing was executed.
  costs_usd = ndb.FloatProperty(repeated=True, indexed=False)

  # Cost saved for deduped task. This is the value of
  # TaskResultSummary.cost_usd from self.deduped_from.
  cost_saved_usd = ndb.FloatProperty(indexed=False)

  # Set to the task run id when the task result was retrieved from another
  # task. A task run id is a reference to a TaskRunResult generated via
  # pack_run_result_key(). The reason to store the packed version instead of
  # a KeyProperty is that it's much shorter and it futureproofs refactoring of
  # the entity hierarchy.
  #
  # Note that when it's set, there's no TaskRunResult child since there was no
  # run.
  deduped_from = ndb.StringProperty(indexed=False)

  @property
  def cost_usd(self):
    """Returns the sum of the cost of each try."""
    return sum(self.costs_usd) if self.costs_usd else 0.

  @property
  def performance_stats_key(self):
    # Returns None implicitly when the task never ran (no run result key).
    key = self.run_result_key
    if key:
      return task_pack.run_result_key_to_performance_stats_key(key)

  @property
  def request_key(self):
    """Returns the TaskRequest ndb.Key that is related to this entity."""
    return task_pack.result_summary_key_to_request_key(self.key)

  @property
  def run_result_key(self):
    """Returns the key of the TaskRunResult this summary reflects, if any."""
    if self.deduped_from:
      # Return the run results for the original task.
      return task_pack.unpack_run_result_key(self.deduped_from)
    if not self.try_number:
      # The task was not yet reaped by a bot.
      return None
    return task_pack.result_summary_key_to_run_result_key(
        self.key, self.try_number)

  @property
  def task_id(self):
    # The externally visible task id is the packed summary key.
    return task_pack.pack_result_summary_key(self.key)

  def reset_to_pending(self):
    """Resets this entity to pending state."""
    self.duration = None
    self.exit_code = None
    self.internal_failure = False
    self.outputs_ref = None
    self.started_ts = None
    self.state = State.PENDING

  def set_from_run_result(self, run_result, request):
    """Copies all the relevant properties from a TaskRunResult into this
    TaskResultSummary.

    If the task completed, succeeded and is idempotent, self.properties_hash
    is set.
    """
    assert isinstance(run_result, TaskRunResult), run_result
    for property_name in _TaskResultCommon._properties_fixed():
      setattr(self, property_name, getattr(run_result, property_name))
    # Include explicit support for 'state' and 'try_number'.
    # TaskRunResult.state is a ComputedProperty so it can't be copied as-is,
    # and try_number is a generated property.
    # pylint: disable=W0201
    self.state = run_result.state
    self.try_number = run_result.try_number
    # Pad costs_usd so the cost of this try lands at index try_number - 1.
    while len(self.costs_usd) < run_result.try_number:
      self.costs_usd.append(0.)
    self.costs_usd[run_result.try_number - 1] = run_result.cost_usd

    if (self.state == State.COMPLETED and not self.failure
        and not self.internal_failure and request.properties.idempotent
        and not self.deduped_from):
      # Signal the results are valid and can be reused.
      self.properties_hash = request.properties.properties_hash
      assert self.properties_hash

  def need_update_from_run_result(self, run_result):
    """Returns True if set_from_run_result() would modify this instance.

    E.g. they are different and TaskResultSummary needs to be updated from the
    corresponding TaskRunResult.
    """
    assert isinstance(run_result, TaskRunResult), run_result
    # A previous try is still sending update. Ignore it from a result summary
    # PoV.
    if self.try_number and self.try_number > run_result.try_number:
      return False

    for property_name in _TaskResultCommon._properties_fixed():
      if getattr(self, property_name) != getattr(run_result, property_name):
        return True
    # Include explicit support for 'state' and 'try_number'.
    # TaskRunResult.state is a ComputedProperty so it can't be copied as-is,
    # and try_number is a generated property.
    # pylint: disable=W0201
    return (self.state != run_result.state
            or self.try_number != run_result.try_number)

  def to_dict(self):
    out = super(TaskResultSummary, self).to_dict()
    if out['properties_hash']:
      # Python 2 str.encode('hex'): render the raw hash bytes as hex text.
      out['properties_hash'] = out['properties_hash'].encode('hex')
    return out
class MarketplaceModel(ndb.Model):
    """User's marketplace (store) entity."""
    # Store display name; not indexed, so it cannot be used in queries.
    name = ndb.StringProperty(required=True, indexed=False)
    # Set once, automatically, when the entity is first written.
    created_date = ndb.DateTimeProperty(auto_now_add=True)
class Chatroom(ndb.Model):
    """Sub model for representing a Chatroom."""
    # Channel identifier for the room; indexed so rooms can be queried by it.
    channel = ndb.StringProperty(indexed=True)
    # Set once, automatically, when the room is first written.
    created = ndb.DateTimeProperty(auto_now_add=True)
class Message(ndb.Model):
    """A stored message: a text body plus its creation timestamp."""
    body = ndb.StringProperty()
    # Set once, automatically, when the message is first written.
    created = ndb.DateTimeProperty(auto_now_add=True)
class Dream(ndb.Model):
    """A recorded dream together with a sentiment label."""
    dream_text = ndb.StringProperty(required=True)
    # Sentiment label for the dream text; presumably assigned by an analysis
    # step before the entity is stored -- TODO confirm with the caller.
    sentiment = ndb.StringProperty(required=True)
class BotInfo(_BotCommon): """This entity declare the knowledge about a bot that successfully connected. Parent is BotRoot. Key id is 'info'. This entity is a cache of the last BotEvent and is additionally updated on poll, which does not create a BotEvent. """ # One of: NOT_IN_MAINTENANCE = 1 << 9 # 512 IN_MAINTENANCE = 1 << 8 # 256 # One of: ALIVE = 1 << 7 # 128 DEAD = 1 << 6 # 64 # One of: HEALTHY = 1 << 3 # 8 QUARANTINED = 1 << 2 # 4 # One of: IDLE = 1 << 1 # 2 BUSY = 1 << 0 # 1 # First time this bot was seen. first_seen_ts = ndb.DateTimeProperty(auto_now_add=True, indexed=False) # Must only be set when self.task_id is set. task_name = ndb.StringProperty(indexed=False) # Avoid having huge amounts of indices to query by quarantined/idle. composite = ndb.IntegerProperty(repeated=True) def _calc_composite(self): """Returns the value for BotInfo.composite, which permits quick searches.""" return [ self.IN_MAINTENANCE if self.maintenance_msg else self.NOT_IN_MAINTENANCE, self.DEAD if self.should_be_dead else self.ALIVE, self.QUARANTINED if self.quarantined else self.HEALTHY, self.BUSY if self.task_id else self.IDLE ] @property def should_be_dead(self): # check if the last seen is over deadline return self.last_seen_ts <= self._deadline() @property def is_dead(self): assert self.composite, 'Please store first' return self.DEAD in self.composite @property def is_alive(self): assert self.composite, 'Please store first' return self.ALIVE in self.composite def to_dict(self, exclude=None): out = super(BotInfo, self).to_dict(exclude=exclude) # Inject the bot id, since it's the entity key. out['id'] = self.id out['is_dead'] = self.is_dead return out def to_proto(self, out): """Converts self to a swarming_pb2.Bot.""" # This populates most of the data. 
super(BotInfo, self).to_proto(out) # https://crbug.com/757931: QUARANTINED_BY_SERVER # https://crbug.com/870723: OVERHEAD_BOT_INTERNAL # https://crbug.com/870723: HOST_REBOOTING # https://crbug.com/913978: RESERVED # TODO(maruel): Populate bot.info.host and bot.info.devices. # https://crbug.com/916570 def _pre_put_hook(self): super(BotInfo, self)._pre_put_hook() if not self.task_id: self.task_name = None self.composite = self._calc_composite() @classmethod def yield_dead_bots(cls): """Yields bots who should be dead.""" return cls.query(cls.last_seen_ts <= cls._deadline()) @staticmethod def _deadline(): dt = datetime.timedelta( seconds=config.settings().bot_death_timeout_secs) return utils.utcnow() - dt
class CommunityPosts(ndb.Model):
    """A community post with audit fields (who/when created and updated)."""
    createdBy = ndb.StringProperty()
    # Not auto_now_add: the caller is responsible for setting the timestamps.
    createdAt = ndb.DateTimeProperty()
    updatedBy = ndb.StringProperty()
    updatedAt = ndb.DateTimeProperty()
    content = ndb.StringProperty()