class College(ndb.Model):
    university_key = ndb.KeyProperty()  # university_key
    name = ndb.StringProperty()
    departments = ndb.KeyProperty()
    created_by = ndb.StringProperty()
class Comment(ndb.Model):
    text = ndb.StringProperty()
    date = ndb.DateTimeProperty(auto_now_add=True)
    note_key = ndb.KeyProperty(kind=Note)
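# Illustrative usage sketch (not part of the original source): a Comment points
# at its parent Note through the KeyProperty, so lookups go through ndb.Key.
# The Note id below is hypothetical.
note_key = ndb.Key('Note', 12345)
comment = Comment(text='Nice note!', note_key=note_key)
comment.put()
note_comments = Comment.query(Comment.note_key == note_key).fetch()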
class Event(ndb.Model):
    """
    Events represent FIRST Robotics Competition events, both official and unofficial.
    key_name is like '2010ct'
    """
    name = ndb.StringProperty()
    event_type_enum = ndb.IntegerProperty(required=True)
    short_name = ndb.StringProperty(indexed=False)  # Should not contain "Regional" or "Division", like "Hartford"
    event_short = ndb.StringProperty(required=True, indexed=False)  # Smaller abbreviation like "CT"
    first_code = ndb.StringProperty()  # Event code used in FIRST's API, if different from event_short
    year = ndb.IntegerProperty(required=True)
    event_district_enum = ndb.IntegerProperty(default=DistrictType.NO_DISTRICT)  # Deprecated, use district_key instead
    district_key = ndb.KeyProperty(kind=District)
    start_date = ndb.DateTimeProperty()
    end_date = ndb.DateTimeProperty()
    playoff_type = ndb.IntegerProperty()

    # venue, venue_address, city, state_prov, country, and postalcode are from FIRST
    venue = ndb.StringProperty(indexed=False)  # Name of the event venue
    venue_address = ndb.StringProperty(indexed=False)  # Most detailed venue address (includes venue, street, and location separated by \n)
    city = ndb.StringProperty()  # Equivalent to locality. From FRCAPI
    state_prov = ndb.StringProperty()  # Equivalent to region. From FRCAPI
    country = ndb.StringProperty()  # From FRCAPI
    postalcode = ndb.StringProperty()  # From ElasticSearch only. String because it can be like "95126-1215"
    # Normalized address from the Google Maps API, constructed using the above
    normalized_location = ndb.StructuredProperty(Location)
    timezone_id = ndb.StringProperty()  # such as 'America/Los_Angeles' or 'Asia/Jerusalem'
    official = ndb.BooleanProperty(default=False)  # Is the event FIRST-official?
    first_eid = ndb.StringProperty()  # from USFIRST
    parent_event = ndb.KeyProperty()  # This is the division -> event champs relationship
    divisions = ndb.KeyProperty(repeated=True)  # event champs -> all divisions
    facebook_eid = ndb.StringProperty(indexed=False)  # from Facebook
    custom_hashtag = ndb.StringProperty(indexed=False)  # Custom HashTag
    website = ndb.StringProperty(indexed=False)
    webcast_json = ndb.TextProperty(indexed=False)  # list of dicts, valid keys include 'type' and 'channel'
    enable_predictions = ndb.BooleanProperty(default=False)
    remap_teams = ndb.JsonProperty()  # Map of temporary team numbers to pre-rookie and B teams

    created = ndb.DateTimeProperty(auto_now_add=True, indexed=False)
    updated = ndb.DateTimeProperty(auto_now=True, indexed=False)

    def __init__(self, *args, **kw):
        # Store the set of affected reference keys for cache clearing.
        # Keys must be model properties.
        self._affected_references = {
            'key': set(),
            'year': set(),
            'district_key': set()
        }
        self._awards = None
        self._details = None
        self._location = None
        self._city_state_country = None
        self._matches = None
        self._teams = None
        self._venue_address_safe = None
        self._webcast = None
        self._updated_attrs = []  # Used in EventManipulator to track what changed
        self._week = None
        super(Event, self).__init__(*args, **kw)

    @ndb.tasklet
    def get_awards_async(self):
        from database import award_query
        self._awards = yield award_query.EventAwardsQuery(self.key_name).fetch_async()

    @property
    def alliance_selections(self):
        if self.details is None:
            return None
        else:
            return self.details.alliance_selections

    @property
    def alliance_teams(self):
        """
        Load a list of team keys playing in elims
        """
        alliances = self.alliance_selections
        if alliances is None:
            return []
        teams = []
        for alliance in alliances:
            for pick in alliance['picks']:
                teams.append(pick)
        return teams
    @property
    def awards(self):
        if self._awards is None:
            self.get_awards_async().wait()
        return self._awards

    @property
    def details(self):
        if self._details is None:
            self._details = EventDetails.get_by_id(self.key.id())
        elif type(self._details) == Future:
            self._details = self._details.get_result()
        return self._details

    def prep_details(self):
        if self._details is None:
            self._details = ndb.Key(EventDetails, self.key.id()).get_async()

    @property
    def district_points(self):
        if self.details is None:
            return None
        else:
            return self.details.district_points

    @ndb.tasklet
    def get_matches_async(self):
        if self._matches is None:
            from database import match_query
            self._matches = yield match_query.EventMatchesQuery(self.key_name).fetch_async()

    def prep_matches(self):
        if self._matches is None:
            from database import match_query
            self._matches = match_query.EventMatchesQuery(self.key_name).fetch_async()

    @property
    def matches(self):
        if self._matches is None:
            self.get_matches_async().wait()
        elif type(self._matches) == Future:
            self._matches = self._matches.get_result()
        return self._matches

    def local_time(self):
        import pytz
        now = datetime.datetime.now()
        if self.timezone_id is not None:
            tz = pytz.timezone(self.timezone_id)
            try:
                now = now + tz.utcoffset(now)
            except (pytz.NonExistentTimeError, pytz.AmbiguousTimeError):
                # may happen during DST
                now = now + tz.utcoffset(now + datetime.timedelta(hours=1))  # add offset to get out of non-existent time
        return now

    def withinDays(self, negative_days_before, days_after):
        if not self.start_date or not self.end_date:
            return False
        now = self.local_time()
        after_start = self.start_date.date() + datetime.timedelta(days=negative_days_before) <= now.date()
        before_end = self.end_date.date() + datetime.timedelta(days=days_after) >= now.date()
        return (after_start and before_end)

    @property
    def now(self):
        if self.timezone_id is not None:
            return self.withinDays(0, 0)
        else:
            return self.within_a_day  # overestimate what is "now" if no timezone

    @property
    def within_a_day(self):
        return self.withinDays(-1, 1)

    @property
    def past(self):
        return self.end_date.date() < self.local_time().date() and not self.now

    @property
    def future(self):
        return self.start_date.date() > self.local_time().date() and not self.now

    @property
    def starts_today(self):
        return self.start_date.date() == self.local_time().date()

    @property
    def ends_today(self):
        return self.end_date.date() == self.local_time().date()

    @property
    def week(self):
        """
        Returns the week of the event relative to the first official season event as an integer.
        Returns None if the event is not of type NON_CMP_EVENT_TYPES or is not official.
        """
        if self.event_type_enum not in EventType.NON_CMP_EVENT_TYPES or not self.official:
            return None

        # Cache week_start for the same context
        cache_key = '{}_week_start:{}'.format(self.year, ndb.get_context().__hash__())
        week_start = context_cache.get(cache_key)
        if week_start is None:
            e = Event.query(
                Event.year == self.year,
                Event.event_type_enum.IN(EventType.NON_CMP_EVENT_TYPES),
                Event.start_date != None).order(Event.start_date).fetch(
                    1, projection=[Event.start_date])
            if e:
                first_start_date = e[0].start_date
                diff_from_wed = (first_start_date.weekday() - 2) % 7  # 2 is Wednesday
                week_start = first_start_date - datetime.timedelta(days=diff_from_wed)
            else:
                week_start = None
            context_cache.set(cache_key, week_start)

        if self._week is None and week_start is not None:
            days = (self.start_date - week_start).days
            self._week = days / 7

        return self._week

    @property
    def is_season_event(self):
        return self.event_type_enum in EventType.SEASON_EVENT_TYPES

    @ndb.tasklet
    def get_teams_async(self):
        from database import team_query
        self._teams = yield team_query.EventTeamsQuery(self.key_name).fetch_async()

    @property
    def teams(self):
        if self._teams is None:
            self.get_teams_async().wait()
        return self._teams

    @ndb.toplevel
    def prepAwardsMatchesTeams(self):
        yield self.get_awards_async(), self.get_matches_async(), self.get_teams_async()

    @ndb.toplevel
    def prepTeams(self):
        yield self.get_teams_async()

    @ndb.toplevel
    def prepTeamsMatches(self):
        yield self.get_matches_async(), self.get_teams_async()

    @property
    def matchstats(self):
        if self.details is None:
            return None
        else:
            return self.details.matchstats

    @property
    def rankings(self):
        if self.details is None:
            return None
        else:
            return self.details.rankings

    @property
    def location(self):
        if self._location is None:
            split_location = []
            if self.city:
                split_location.append(self.city)
            if self.state_prov:
                if self.postalcode:
                    split_location.append(self.state_prov + ' ' + self.postalcode)
                else:
                    split_location.append(self.state_prov)
            if self.country:
                split_location.append(self.country)
            self._location = ', '.join(split_location)
        return self._location

    @property
    def city_state_country(self):
        if not self._city_state_country and self.nl:
            self._city_state_country = self.nl.city_state_country
        if not self._city_state_country:
            location_parts = []
            if self.city:
                location_parts.append(self.city)
            if self.state_prov:
                location_parts.append(self.state_prov)
            if self.country:
                country = self.country
                if self.country == 'US':
                    country = 'USA'
                location_parts.append(country)
            self._city_state_country = ', '.join(location_parts)
        return self._city_state_country

    @property
    def nl(self):
        return self.normalized_location

    @property
    def venue_or_venue_from_address(self):
        if self.venue:
            return self.venue
        else:
            try:
                return self.venue_address.split('\r\n')[0]
            except Exception:
                return None

    @property
    def venue_address_safe(self):
        """
        Construct a (not detailed) venue address if the detailed venue address doesn't exist.
        """
        if not self.venue_address:
            if not self.venue or not self.location:
                self._venue_address_safe = None
            else:
                self._venue_address_safe = "{}\n{}".format(
                    self.venue.encode('utf-8'), self.location.encode('utf-8'))
        else:
            self._venue_address_safe = self.venue_address.replace('\r\n', '\n')
        return self._venue_address_safe

    @property
    def webcast(self):
        """
        Lazily parse the webcast JSON.
        """
        if self._webcast is None:
            try:
                self._webcast = json.loads(self.webcast_json)
            except Exception:
                self._webcast = None
        return self._webcast
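# Illustrative usage sketch (not part of the original source): Event ids follow
# the '<year><event_short>' convention noted in the docstring; '2016nyny' is a
# hypothetical key. The prep_* methods start async fetches that the lazy
# properties resolve later.
event = Event.get_by_id('2016nyny')
if event:
    event.prep_details()
    event.prep_matches()
    location_str = event.city_state_country
    match_count = len(event.matches)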
class GlobalOrganization(ndb.Model):
    orgKey = ndb.KeyProperty(kind="Organization")
    organizationNamespace = ndb.IntegerProperty()
class Employee(ndb.Model):
    """Profile -- User profile object"""
    name = ndb.StringProperty()
    address = ndb.StringProperty()
    enrollementNumber = ndb.IntegerProperty()
    createdBy = ndb.KeyProperty()
class ChildProcess(ndb.Model):
    background_job_key = ndb.KeyProperty(kind=BackgroundJob)
class WebsiteMeta(BaseModel):
    website = db.KeyProperty(kind="Website", required=True)
    meta_key = db.StringProperty(required=True)
    meta_value = db.StringProperty(required=True)
class Thread(ndb.Model):
    thread_id = ndb.IntegerProperty(required=True)
    drawings = ndb.KeyProperty(Drawing, repeated=True)
    captions = ndb.KeyProperty(Caption, repeated=True)
class BailOut(ndb.Model):
    thread = ndb.KeyProperty(Thread, required=True)
    last_edit = ndb.KeyProperty(required=True)
class Message(ndb.Model):
    sender = ndb.KeyProperty(kind='User', indexed=False)
    receiver = ndb.KeyProperty(kind='User', indexed=False)
    text = ndb.TextProperty()
    created = ndb.DateTimeProperty(auto_now_add=True)
class AppliedTag(ndb.Model):
    target = ndb.KeyProperty()
    applied_by = ndb.KeyProperty(kind='User')
    applied = ndb.DateTimeProperty(auto_now_add=True)
    tag = ndb.KeyProperty(kind=Tag)
class CustomURL(ndb.Model):
    ''' Describes a custom URL mapping. '''
    slug = ndb.StringProperty('s', indexed=True, required=True)
    target = ndb.KeyProperty('t', indexed=True, required=True)
class Media(polymodel.PolyModel):
    ''' Describes an attachment between an Asset and a site object. '''
    asset = ndb.KeyProperty('a', indexed=True, required=True)
    caption = ndb.StringProperty('c', indexed=True, required=False)
    description = ndb.TextProperty('d', indexed=False, required=False)
class User(ModelUtils, EndpointsModel, webapp2_extras.appengine.auth.models.User):
    """
    User Base Model
    """
    email = ndb.StringProperty(required=True)
    first_name = ndb.StringProperty(required=True)
    last_name = ndb.StringProperty(required=True)
    phone = ndb.StringProperty(required=True)
    stripeCustId = ndb.StringProperty(default=None)
    alias = ndb.StringProperty(default=None)
    appointments = ndb.KeyProperty(kind='Appointments', default=None, repeated=True)

    def id_setter(self, value):
        # Allocate IDs if DNE
        if value == '' or value is None or value == 'None':
            first, last = User.allocate_ids(2)
            self.UpdateFromKey(ndb.Key('User', int(first)))
        elif not isinstance(value, basestring) and not isinstance(value, int):
            raise endpoints.BadRequestException('ID not string or int')
        else:
            self.UpdateFromKey(ndb.Key('User', int(value)))

    @EndpointsAliasProperty(setter=id_setter, required=True)
    def id(self):
        if self.key is not None:
            return str(self.key.id())

    def set_password(self, raw_password):
        """Sets the password for the current user.

        :param raw_password: The raw password which will be hashed and stored.
        """
        self.password = security.generate_password_hash(raw_password, length=12)

    @classmethod
    def get_by_auth_token(cls, user_id, token, subject='auth'):
        """Returns a user object based on a user ID and token.

        :param user_id: The user_id of the requesting user.
        :param token: The token string to be verified.
        :returns: A tuple ``(User, timestamp)``, with a user object and
            the token timestamp, or ``(None, None)`` if both were not found.
        """
        token_key = cls.token_model.get_key(user_id, subject, token)
        user_key = ndb.Key(cls, user_id)
        # Use get_multi() to save a RPC call.
        valid_token, user = ndb.get_multi([token_key, user_key])
        if valid_token and user:
            timestamp = int(time.mktime(valid_token.created.timetuple()))
            if hasattr(user, 'force_login'):
                user.force_login = False
                user.put()
                return None, None
            else:
                return user, timestamp
        return None, None
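# Illustrative usage sketch (hypothetical id/token, not part of the original
# source): get_by_auth_token() returns (None, None) for a bad token or when the
# account was flagged with force_login, so callers must handle both outcomes.
user, token_ts = User.get_by_auth_token(5629499534213120, 'token-abc123')
if user is None:
    pass  # invalid/expired token, or a forced re-login was requested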
class BuildCampOrder(ndb.Model):
    """Models a build camp order in the game."""
    # The resource that the camp will harvest.
    tile_resource_key = ndb.KeyProperty(kind='TileResource')
class Edit(ndb.Model):
    user = ndb.KeyProperty(TeleUser, required=True)
    thread = ndb.KeyProperty(Thread, required=True)
    addition = ndb.KeyProperty(required=True)
class HarvestingCamp(ndb.Model):
    """Models a Harvesting Camp player structure."""
    # Which resource the camp is harvesting.
    tile_resource_key = ndb.KeyProperty(kind='TileResource')
class InterimRecord(ndb.Model):
    routeKey = ndb.KeyProperty(kind=InterimRoute)
    recordData = ndb.StringProperty(required=True)
class BackgroundJob(ndb.Model):
    user_key = ndb.KeyProperty(kind=User)
    deployment_key = ndb.KeyProperty(kind=Deployment)
    job_type = ndb.IntegerProperty(indexed=True)
    status_message = ndb.TextProperty()  # TextProperty cannot be indexed
    status = ndb.IntegerProperty(indexed=True)
class Contact(model.Base):
    user_key = ndb.KeyProperty(kind=model.User, required=True)
    name = ndb.StringProperty(required=True)
    email = ndb.StringProperty(default='')
    phone = ndb.StringProperty(default='')
    address = ndb.StringProperty(default='')
class StudentAttendence(ndb.Model):
    studentKey = ndb.KeyProperty()
    date = ndb.DateProperty()
    isPresent = ndb.BooleanProperty(default=True)
    employeeKey = ndb.KeyProperty()
    when = ndb.DateTimeProperty(auto_now=True)
class Feature(ndb.Model):
    lawyer = ndb.KeyProperty(kind=Lawyer)
    case = ndb.KeyProperty(kind=Case)
    info = ndb.StringProperty()
    created = ndb.DateTimeProperty(auto_now_add=True)
    updated = ndb.DateTimeProperty(auto_now=True)

    @classmethod
    def save(cls, *args, **kwargs):
        feature_id = str(kwargs.get('id'))
        if feature_id and feature_id.isdigit():
            feature = cls.get_by_id(int(feature_id))
        else:
            feature = cls()
        lawyer_id = str(kwargs.get('lawyer'))
        if lawyer_id.isdigit():
            lawyer_key = ndb.Key('Lawyer', int(lawyer_id))
            feature.lawyer = lawyer_key
        case_id = str(kwargs.get('case'))
        if case_id.isdigit():
            case_key = ndb.Key('Case', int(case_id))
            feature.case = case_key
        if kwargs.get('info'):
            feature.info = kwargs.get('info')
        feature.put()
        return feature

    # lawyer/find/lawyer-details
    @classmethod
    def allFeatureCase(cls, lawyer):
        feature_list = []
        lawyer_id = str(lawyer)
        if lawyer_id.isdigit():
            lawyer_key = ndb.Key('Lawyer', int(lawyer_id))
            features = cls.query(cls.lawyer == lawyer_key).fetch()
            for feature in features:
                feature_list.append(feature.to_dict())
        if not feature_list:
            feature_list = None
        return feature_list

    @classmethod
    def get_all_feature(cls, lawyer_id):
        list_of_features = []
        if lawyer_id:
            lawyer = Lawyer.get_by_id(int(lawyer_id))
            features = cls.query(cls.lawyer == lawyer.key).fetch()
            if features:
                for feature in features:
                    list_of_features.append(feature.to_dict())
        if not list_of_features:
            list_of_features = None
        return list_of_features

    def to_dict(self):
        data = {}
        data['info'] = self.info
        data['case'] = None
        if self.case:
            case = self.case.get()
            data['case'] = case.to_dict()
        return data
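# Illustrative usage sketch (hypothetical numeric ids, not part of the original
# source): save() resolves the passed Lawyer/Case ids into keys, and
# get_all_feature() returns the denormalized dicts (or None when nothing matches).
feature = Feature.save(lawyer='5629499534213120', case='5066549580791808',
                       info='Initial consultation')
features = Feature.get_all_feature('5629499534213120') or []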
class Token(ndb.Model):
    studentKey = ndb.KeyProperty(kind=Student)
    employeeKey = ndb.KeyProperty(kind=Employee)
    tokenNumber = ndb.IntegerProperty()
class Price(ndb.Model):
    prediction_id = ndb.KeyProperty(kind=Prediction)
    date = ndb.DateTimeProperty()
    value = ndb.FloatProperty()
class Device(base_model.BaseModel):
    """Datastore model representing a device.

    Attributes:
        serial_number: str, unique serial number used to identify the device.
        asset_tag: str, unique org-specific identifier for the device.
        enrolled: bool, indicates the enrollment status of the device.
        device_model: str, identifies the model name of the device.
        due_date: datetime, the date that device is due for return.
        last_known_healthy: datetime, the date to indicate the last known healthy status.
        shelf: ndb.Key, the shelf key the device is placed on.
        assigned_user: str, the email of the user who is assigned to the device.
        assignment_date: datetime, the date the device was assigned to a user.
        current_ou: str, the current organizational unit the device belongs to.
        ou_changed_date: datetime, the date the organizational unit was changed.
        locked: bool, indicates whether or not the device is locked.
        lost: bool, indicates whether or not the device is lost.
        mark_pending_return_date: datetime, the date a user marked device returned.
        chrome_device_id: str, a unique device ID.
        last_heartbeat: datetime, the date of the last time the device checked in.
        damaged: bool, indicates if the device is damaged.
        damaged_reason: str, a string denoting the reason for being reported as damaged.
        last_reminder: Reminder, level, time, and count of the last reminder the device had.
        next_reminder: Reminder, level, time, and count of the next reminder.
    """
    serial_number = ndb.StringProperty()
    asset_tag = ndb.StringProperty()
    enrolled = ndb.BooleanProperty(default=True)
    device_model = ndb.StringProperty()
    due_date = ndb.DateTimeProperty()
    last_known_healthy = ndb.DateTimeProperty()
    shelf = ndb.KeyProperty(kind='Shelf')
    assigned_user = ndb.StringProperty()
    assignment_date = ndb.DateTimeProperty()
    current_ou = ndb.StringProperty()
    ou_changed_date = ndb.DateTimeProperty()
    locked = ndb.BooleanProperty(default=False)
    lost = ndb.BooleanProperty(default=False)
    mark_pending_return_date = ndb.DateTimeProperty()
    chrome_device_id = ndb.StringProperty()
    last_heartbeat = ndb.DateTimeProperty()
    damaged = ndb.BooleanProperty(default=False)
    damaged_reason = ndb.StringProperty()
    last_reminder = ndb.StructuredProperty(Reminder)
    next_reminder = ndb.StructuredProperty(Reminder)

    _INDEX_NAME = constants.DEVICE_INDEX_NAME
    _SEARCH_PARAMETERS = {
        'a': 'asset_tag',
        'at': 'asset_tag',
        's': 'serial_number',
        'sn': 'serial_number',
        'u': 'assigned_user',
        'au': 'assigned_user'
    }

    @property
    def is_assigned(self):
        return bool(self.assigned_user)

    @property
    def is_on_shelf(self):
        return bool(self.shelf)

    @property
    def overdue(self):
        if self.due_date:
            return bool(self.due_date < datetime.datetime.utcnow())
        return False

    @property
    def identifier(self):
        return self.asset_tag or self.serial_number

    @property
    def guest_enabled(self):
        return self.current_ou == constants.ORG_UNIT_DICT['GUEST']

    def _post_put_hook(self, future):
        """Overrides the _post_put_hook method."""
        del future  # Unused.
        index = Device.get_index()
        index.put(self.to_document())

    @classmethod
    def list_by_user(cls, user):
        """Returns a list of devices assigned to a user.

        Args:
            user: str, the user's email address.

        Returns:
            A query of devices assigned to the user.
        """
        return cls.query(
            ndb.AND(cls.assigned_user == user,
                    cls.mark_pending_return_date == None)).fetch()  # pylint: disable=g-equals-none,singleton-comparison

    @classmethod
    def enroll(cls, user_email, serial_number=None, asset_tag=None):
        """Enrolls a new device.

        Args:
            user_email: str, email address of the user making the request.
            serial_number: str, serial number of the device.
            asset_tag: str, optional, asset tag of the device.

        Returns:
            The enrolled device object.

        Raises:
            DeviceCreationError: raised when moving the device's OU fails, when the
                directory API responds with incomplete information, or if the
                device is not found in the directory API.
        """
        device_identifier_mode = config_model.Config.get('device_identifier_mode')
        if not asset_tag and device_identifier_mode in (
                config_defaults.DeviceIdentifierMode.BOTH_REQUIRED,
                config_defaults.DeviceIdentifierMode.ASSET_TAG):
            raise datastore_errors.BadValueError(_ASSET_TAGS_REQUIRED_MSG)
        elif not serial_number and device_identifier_mode in (
                config_defaults.DeviceIdentifierMode.BOTH_REQUIRED,
                config_defaults.DeviceIdentifierMode.SERIAL_NUMBER):
            raise datastore_errors.BadValueError(_SERIAL_NUMBERS_REQUIRED_MSG)
        directory_client = directory.DirectoryApiClient(user_email)
        device = cls.get(serial_number=serial_number, asset_tag=asset_tag)
        now = datetime.datetime.utcnow()

        existing_device = bool(device)
        if existing_device:
            device = _update_existing_device(device, user_email, asset_tag)
        else:
            device = cls(serial_number=serial_number, asset_tag=asset_tag)

        identifier = serial_number or asset_tag
        logging.info('Enrolling device %s', identifier)
        device = events.raise_event('device_enroll', device=device)

        if device.serial_number:
            serial_number = device.serial_number
        else:
            raise DeviceCreationError('No serial number for device.')

        if not existing_device:
            # If this implementation of the app can translate asset tags to serial
            # numbers, recheck for an existing device now that we may have the serial.
            if device_identifier_mode == config_defaults.DeviceIdentifierMode.ASSET_TAG:
                device_by_serial = cls.get(serial_number=serial_number)
                if device_by_serial:
                    device = _update_existing_device(
                        device_by_serial, user_email, asset_tag)
                    existing_device = True

        try:
            # Get a Chrome OS Device object as per
            # https://developers.google.com/admin-sdk/directory/v1/reference/chromeosdevices
            directory_device_object = directory_client.get_chrome_device_by_serial(
                serial_number)
        except directory.DeviceDoesNotExistError as err:
            raise DeviceCreationError(str(err))
        try:
            device.chrome_device_id = directory_device_object[directory.DEVICE_ID]
            device.current_ou = directory_device_object[directory.ORG_UNIT_PATH]
            device.device_model = directory_device_object[directory.MODEL]
        except KeyError:
            raise DeviceCreationError(_DIRECTORY_INFO_INCOMPLETE_MSG)
        try:
            directory_client.move_chrome_device_org_unit(
                device_id=directory_device_object[directory.DEVICE_ID],
                org_unit_path=constants.ORG_UNIT_DICT['DEFAULT'])
        except directory.DirectoryRPCError as err:
            raise DeviceCreationError(
                _FAILED_TO_MOVE_DEVICE_MSG %
                (serial_number, constants.ORG_UNIT_DICT['DEFAULT'], str(err)))
        device.current_ou = constants.ORG_UNIT_DICT['DEFAULT']
        device.ou_changed_date = now
        device.last_known_healthy = now
        device.put()
        device.stream_to_bq(user_email, 'Enrolling device.')
        return device

    def unenroll(self, user_email):
        """Unenrolls a device, removing it from the Grab n Go program.

        This moves the device to the root Chrome OU; however, it does not change
        its lost or locked attributes, nor does it unlock it if it's locked
        (i.e., disabled in the Directory API).

        Args:
            user_email: str, email address of the user making the request.

        Returns:
            The unenrolled device.

        Raises:
            FailedToUnenrollError: raised when moving the device's OU fails.
""" unenroll_ou = config_model.Config.get('unenroll_ou') directory_client = directory.DirectoryApiClient(user_email) try: directory_client.move_chrome_device_org_unit( device_id=self.chrome_device_id, org_unit_path=unenroll_ou) except directory.DirectoryRPCError as err: raise FailedToUnenrollError( _FAILED_TO_MOVE_DEVICE_MSG % (self.identifier, unenroll_ou, str(err))) self.enrolled = False self.due_date = None self.shelf = None self.assigned_user = None self.assignment_date = None self.current_ou = unenroll_ou self.ou_changed_date = datetime.datetime.utcnow() self.mark_pending_return_date = None self.last_reminder = None self.next_reminder = None self = events.raise_event('device_unenroll', device=self) self.put() self.stream_to_bq(user_email, 'Unenrolling device.') return self @classmethod def create_unenrolled(cls, device_id, user_email): """Creates a Device but leave it unenrolled from the Grab n Go program. Args: device_id: str, a Chrome Device ID to pass to the directory API. user_email: str, email address of the user making the request. Returns: The newly created device. Raises: DeviceCreationError: if the Directory API doesn't find this device in the org or the info retrieved from the Directory API is incomplete. """ directory_client = directory.DirectoryApiClient(user_email) directory_info = directory_client.get_chrome_device(device_id) if not directory_info: raise DeviceCreationError('Device ID not found in org.') try: device = cls(serial_number=directory_info[directory.SERIAL_NUMBER], enrolled=False, device_model=directory_info.get(directory.MODEL), current_ou=directory_info[directory.ORG_UNIT_PATH], chrome_device_id=directory_info[directory.DEVICE_ID]) except KeyError: raise DeviceCreationError(_DIRECTORY_INFO_INCOMPLETE_MSG) device.put() return device @classmethod def get(cls, asset_tag=None, chrome_device_id=None, serial_number=None, unknown_identifier=None): """Retrieves a device object using one of several device identifiers. Args: asset_tag: str, the asset tag of the device. chrome_device_id: str, the Chrome device ID of a device. serial_number: str, the serial number of a device. unknown_identifier: str, either an asset tag or serial number of the device, and this function will attempt both. Returns: A device model, or None if one cannot be found. Raises: DeviceIdentifierError: if there is no device identifier supplied, or if an invalid URL-safe key is supplied. """ if asset_tag: return cls.query(cls.asset_tag == asset_tag).get() elif chrome_device_id: return cls.query(cls.chrome_device_id == chrome_device_id).get() elif serial_number: return cls.query(cls.serial_number == serial_number).get() elif unknown_identifier: return (cls.query(cls.serial_number == unknown_identifier).get() or cls.query(cls.asset_tag == unknown_identifier).get()) else: raise DeviceIdentifierError( 'No identifier supplied to get device.') def calculate_return_dates(self): """Calculates maximum and default return dates for a loan. Returns: A ReturnDates NamedTuple of datetimes. Raises: ReturnDatesCalculationError: When trying to calculate return dates for a device that has not been assigned. 
""" if not self.is_assigned: raise ReturnDatesCalculationError(_NOT_ASSIGNED_MSG) loan_duration = config_model.Config.get('loan_duration') max_loan_duration = config_model.Config.get('maximum_loan_duration') default_date = self.assignment_date + datetime.timedelta( days=loan_duration) max_loan_date = self.assignment_date + datetime.timedelta( days=max_loan_duration) return ReturnDates(max_loan_date, default_date) def lock(self, user_email): """Disables a device via the Directory API. Args: user_email: string, email address of the user making the request. """ logging.info('Contacting Directory to lock (disable) Device %s.', self.identifier) client = directory.DirectoryApiClient(user_email) try: client.disable_chrome_device(self.chrome_device_id) except directory.DeviceAlreadyDisabledError as err: logging.error(_ALREADY_DISABLED_MSG, err) else: self.stream_to_bq(user_email, 'Disabling device.') self.locked = True self.put() def unlock(self, user_email): """Re-enables a device via the Directory API. Args: user_email: str, email address of the user making the request. """ logging.info('Contacting Directory to unlock (re-enable) Device %s.', self.identifier) client = directory.DirectoryApiClient(user_email) client.reenable_chrome_device(self.chrome_device_id) if self.lost: self.lost = False self.locked = False self.move_to_default_ou(user_email=user_email) self.stream_to_bq(user_email, 'Re-enabling disabled device.') self.put() def loan_assign(self, user_email): """Assigns a device to a user. Args: user_email: str, email address of the user to whom the device should be assigned. Returns: The key of the datastore record. Raises: AssignmentError: if the device is not enrolled. """ if not self.enrolled: raise AssignmentError('Cannot assign an unenrolled device.') if self.assigned_user and self.assigned_user != user_email: self._loan_return(user_email) self.assigned_user = user_email self.assignment_date = datetime.datetime.utcnow() self.mark_pending_return_date = None self.shelf = None self.due_date = self.calculate_return_dates().default self.move_to_default_ou(user_email=user_email) self = events.raise_event('device_loan_assign', device=self) self.put() self.stream_to_bq(user_email, 'Beginning new loan.') return self.key def resume_loan(self, user_email, message='Resuming loan.'): """Resumes a loan if it has been marked pending return. Args: user_email: str, email address of the user initiating the resume. message: str, the optional string to stream to bigquery. """ if self.mark_pending_return_date: self.mark_pending_return_date = None self.put() self.stream_to_bq(user_email, message) def loan_resumes_if_late(self, user_email): """Resumes a loan on a device if it was marked returned, but later used. This allows a user who has marked their device returned to keep using it for the return_grace_period, but beyond that it restores the loan, with any ongoing reminders and consequences that entails. Args: user_email: str, email address of the user initiating the return. """ if self.mark_pending_return_date: time_since = datetime.datetime.utcnow( ) - self.mark_pending_return_date if time_since.total_seconds() / 60.0 > config_model.Config.get( 'return_grace_period'): self.resume_loan(user_email, message='Resuming loan, since use continued.') @validate_assignee_or_admin def loan_extend(self, user_email, extend_date_time): """Requests an extension to the provided date. Args: user_email: str, user_email of the user requesting the extension. extend_date_time: DateTime, the requested date to extend to. 
        Raises:
            ExtendError: if the date is out of an acceptable range.
            UnassignedDeviceError: if the device is not assigned; guest mode should
                not be allowed.
        """
        if not self.is_assigned:
            raise UnassignedDeviceError(_UNASSIGNED_DEVICE)
        extend_date = extend_date_time.date()
        if extend_date < datetime.date.today():
            raise ExtendError('Extension date cannot be in the past.')
        return_dates = self.calculate_return_dates()
        if extend_date <= return_dates.max.date():
            self.due_date = datetime.datetime.combine(
                extend_date, return_dates.default.time())
        else:
            raise ExtendError('Extension date outside allowable date range.')
        self.put()
        self.stream_to_bq(user_email, 'Extending loan.')

    def _loan_return(self, user_email):
        """Returns a device in a loan.

        Args:
            user_email: str, user_email of the user initiating the return.

        Returns:
            The key of the datastore record.
        """
        if self.lost:
            self.lost = False
        if self.locked:
            self.unlock(user_email)
        self.assigned_user = None
        self.assignment_date = None
        self.due_date = None
        self.mark_pending_return_date = None
        self.move_to_default_ou(user_email=user_email)
        self.last_reminder = None
        self.next_reminder = None
        self = events.raise_event('device_loan_return', device=self)
        self.put()
        self.stream_to_bq(user_email, 'Marking device as returned.')
        return self.key

    def record_heartbeat(self):
        """Records a heartbeat for a device."""
        now = datetime.datetime.utcnow()
        self.last_heartbeat = now
        self.last_known_healthy = now
        self.put()

    @validate_assignee_or_admin
    def mark_pending_return(self, user_email):
        """Marks a device as returned, as reported by the user.

        Args:
            user_email: str, the email of the acting user.

        Raises:
            UnassignedDeviceError: if the device is not assigned; guest mode should
                not be allowed.
        """
        if not self.is_assigned:
            raise UnassignedDeviceError(_UNASSIGNED_DEVICE)
        self.mark_pending_return_date = datetime.datetime.utcnow()
        self.move_to_default_ou(user_email=user_email)
        self.stream_to_bq(user_email, 'Marking device as Pending Return.')
        self.put()

    def set_last_reminder(self, reminder_level):
        """Records the last_reminder for a loaned device, overwriting the existing one.

        Args:
            reminder_level: int, the level of the reminder, matching the reminder
                rule's reminder_level.
        """
        count = 0
        if self.last_reminder and self.last_reminder.level == reminder_level:
            count = self.last_reminder.count or 0
        self.last_reminder = Reminder(
            level=reminder_level, time=datetime.datetime.utcnow(), count=count + 1)
        self.put()

    def set_next_reminder(self, reminder_level, delay_delta):
        """Sets the next_reminder for a loaned device, overwriting the existing one.

        Args:
            reminder_level: int, the level of the reminder, matching the reminder
                rule's reminder_level.
            delay_delta: datetime.timedelta, noting time to wait until the reminder
                should happen, which this method will record as a UTC datetime.
        """
        reminder_time = datetime.datetime.utcnow() + delay_delta
        self.next_reminder = Reminder(level=reminder_level, time=reminder_time)
        self.put()

    @validate_assignee_or_admin
    def mark_damaged(self, user_email, damaged_reason=None):
        """Marks a device as damaged.

        Args:
            user_email: string, the user that marked the device as damaged.
            damaged_reason: string, the reason the device is considered damaged.
""" if not damaged_reason: damaged_reason = 'No reason provided' self.damaged = True self.damaged_reason = damaged_reason self.move_to_default_ou(user_email=user_email) self.stream_to_bq( user_email, 'Marking device as damaged, reason: {reason}'.format( reason=damaged_reason)) self.put() @validate_assignee_or_admin def mark_undamaged(self, user_email): """Resets a device's damaged state. Args: user_email: string, the user that is marking a device as undamaged """ self.damaged = False self.stream_to_bq(user_email, "Clearning the device's damaged state.") self.put() @validate_assignee_or_admin def mark_lost(self, user_email): """Marks a device as lost. Args: user_email: str, The email of the acting user. """ self.lost = True self.assigned_user = None self.assignment_date = None self.due_date = None self.last_reminder = None self.next_reminder = None self.move_to_default_ou(user_email=user_email) self.lock(user_email) self.stream_to_bq(user_email, 'Marking device lost and locking it.') @validate_assignee_or_admin def enable_guest_mode(self, user_email): """Moves a device into guest mode if allowed. Args: user_email: str, The email of the acting user. Raises: GuestNotAllowedError: when the allow_guest_mode config is not True. EnableGuestError: if there is an RPC error in the Directory API, or the allow_guest_mode setting is not True. UnassignedDeviceError: if the device is not assigned, guest mode should not be allowed. """ if not self.is_assigned: raise UnassignedDeviceError(_UNASSIGNED_DEVICE) if config_model.Config.get('allow_guest_mode'): directory_client = directory.DirectoryApiClient(user_email) guest_ou = constants.ORG_UNIT_DICT['GUEST'] try: directory_client.move_chrome_device_org_unit( device_id=self.chrome_device_id, org_unit_path=guest_ou) except directory.DirectoryRPCError as err: raise EnableGuestError(str(err)) else: self.current_ou = guest_ou self.ou_changed_date = datetime.datetime.utcnow() self.stream_to_bq(user_email, 'Moving device into Guest Mode.') self.put() if config_model.Config.get('timeout_guest_mode'): countdown = datetime.timedelta( hours=config_model.Config.get( 'guest_mode_timeout_in_hours')).total_seconds() deferred.defer(self._disable_guest_mode, user_email, _countdown=countdown) else: raise GuestNotAllowedError(_GUEST_MODE_DISABLED_MSG) def _disable_guest_mode(self, user_email): """Moves a device back to the default OU if still assigned. Args: user_email: str, The email of the acting user. """ if self.assigned_user == user_email: self.move_to_default_ou(user_email=user_email) self.put() def move_to_default_ou(self, user_email): """Corrects the current ou to be default during user actions. Args: user_email: str, The email of the acting user. Raises: UnableToMoveToDefaultOUError: when the directory api call fails to move the device into the default OU. """ if self.current_ou != constants.ORG_UNIT_DICT['DEFAULT']: directory_client = directory.DirectoryApiClient( user_email=user_email) try: directory_client.move_chrome_device_org_unit( device_id=self.chrome_device_id, org_unit_path=constants.ORG_UNIT_DICT['DEFAULT']) except directory.DirectoryRPCError as err: raise UnableToMoveToDefaultOUError( _FAILED_TO_MOVE_DEVICE_MSG % (self.identifier, constants.ORG_UNIT_DICT['DEFAULT'], str(err))) else: self.current_ou = constants.ORG_UNIT_DICT['DEFAULT'] self.ou_changed_date = datetime.datetime.utcnow() def device_audit_check(self): """Checks a device to make sure it passes all prechecks for audit. 
        Raises:
            DeviceNotEnrolledError: when a device is not enrolled in the application.
            UnableToMoveToShelfError: when a device cannot be checked into a shelf.
        """
        if not self.enrolled:
            raise DeviceNotEnrolledError(DEVICE_NOT_ENROLLED_MSG % self.identifier)
        if self.damaged:
            raise UnableToMoveToShelfError(_DEVICE_DAMAGED_MSG % self.identifier)

    def move_to_shelf(self, shelf, user_email):
        """Checks a device into a shelf.

        Args:
            shelf: shelf_model.Shelf obj, the shelf to check device into.
            user_email: str, the email of the user taking the action.

        Raises:
            UnableToMoveToShelfError: when a device cannot be checked into a shelf.
        """
        if not shelf.enabled:
            raise UnableToMoveToShelfError(
                'Unable to check device {} to shelf. Shelf {} is not '
                'active.'.format(self.identifier, shelf.location))
        logging.info(
            'Checking device %s into shelf %s.', self.identifier, shelf.location)
        self.shelf = shelf.key
        self.last_known_healthy = datetime.datetime.utcnow()
        self._loan_return(user_email=user_email)
        self.stream_to_bq(
            user_email,
            'Placing device: {} on shelf: {}'.format(self.identifier, shelf.location))

    def remove_from_shelf(self, shelf, user_email):
        """Removes a device's associated shelf.

        Args:
            shelf: shelf_model.Shelf obj, the shelf to remove device from.
            user_email: str, the email of the user taking the action.
        """
        if self.shelf:
            if self.shelf.get().location == shelf.location:
                self.shelf = None
                self.put()
                self.stream_to_bq(
                    user_email,
                    'Removing device: {} from shelf: {}'.format(
                        self.identifier, shelf.location))
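# Illustrative usage sketch (hypothetical identifiers/emails, not part of the
# original source): enroll() talks to the Directory API via the app's own
# directory/config modules, so this only shows the intended call order.
device = Device.enroll(user_email='admin@example.com', serial_number='5CD81234XY')
device.loan_assign('borrower@example.com')
due = device.calculate_return_dates().default
device.mark_pending_return('borrower@example.com')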
class BigQueryRow(base_model.BaseModel):
    """Datastore model representing a single row in BigQuery.

    Attributes:
        ndb_key: ndb.Key, the key of the ndb entity being streamed to BigQuery.
        model_type: str, the model type being streamed to BigQuery.
        timestamp: datetime, the timestamp of when the action occurred.
        actor: str, the acting user of the action.
        method: str, method name performing the action.
        summary: str, human-readable summary of what is occurring.
        entity: json, a flattened representation of the entity.
        streamed: bool, indicates if the data was streamed successfully.
    """
    ndb_key = ndb.KeyProperty(required=True)
    model_type = ndb.StringProperty(required=True)
    timestamp = ndb.DateTimeProperty(required=True)
    actor = ndb.StringProperty(required=True)
    method = ndb.StringProperty(required=True)
    summary = ndb.StringProperty(required=True)
    entity = ndb.JsonProperty(required=True)
    streamed = ndb.BooleanProperty(default=False)

    @classmethod
    def add(cls, model_instance, timestamp, actor, method, summary):
        """Adds a row to the queue to be submitted to BigQuery.

        Args:
            model_instance: ndb model, the instance of the affected model.
            timestamp: datetime, a timestamp of when the change occurred.
            actor: str, user performing the action.
            method: str, the method name performing the action.
            summary: str, human-readable summary of what is occurring.

        Returns:
            The created row entity.
        """
        row = cls(
            ndb_key=model_instance.key,
            model_type=type(model_instance).__name__,
            timestamp=timestamp,
            actor=actor,
            method=method,
            summary=summary,
            entity=model_instance.to_json_dict())
        row.put()
        return row

    @classmethod
    def _fetch_unstreamed_rows(cls):
        """Retrieves all rows that have not been streamed."""
        return cls.query(cls.streamed == False).fetch(  # pylint: disable=g-explicit-bool-comparison,singleton-comparison
            limit=constants.BIGQUERY_ROW_MAX_BATCH_SIZE)

    @classmethod
    def _get_last_unstreamed_row(cls):
        """Retrieves the last row that was not streamed."""
        return cls.query(cls.streamed == False).order(  # pylint: disable=g-explicit-bool-comparison,singleton-comparison
            cls.streamed, cls.timestamp).get()

    @classmethod
    def _time_threshold_reached(cls):
        """Checks if the time threshold for a BigQuery stream was met."""
        threshold = datetime.datetime.utcnow() - datetime.timedelta(
            minutes=constants.BIGQUERY_ROW_TIME_THRESHOLD)
        return cls._get_last_unstreamed_row().timestamp <= threshold

    @classmethod
    def _row_threshold_reached(cls):
        """Checks if the unstreamed row threshold for a BigQuery stream was met."""
        return (cls.query(cls.streamed == False).count(  # pylint: disable=g-explicit-bool-comparison,singleton-comparison
            limit=constants.BIGQUERY_ROW_MAX_BATCH_SIZE) >=
                constants.BIGQUERY_ROW_SIZE_THRESHOLD)

    @classmethod
    def threshold_reached(cls):
        """Determines whether or not entities should be streamed to BigQuery."""
        return cls._time_threshold_reached() or cls._row_threshold_reached()

    @classmethod
    def stream_rows(cls):
        """Streams all unstreamed rows if a threshold has been reached."""
        logging.info('Streaming rows to BigQuery.')
        if not cls.threshold_reached():
            logging.info('Not streaming rows, thresholds not met.')
            return
        bq_client = bigquery.BigQueryClient()
        rows = cls._fetch_unstreamed_rows()
        tables = _format_for_bq(rows)
        try:
            for table_name in tables:
                bq_client.stream_table(table_name, tables[table_name])
        except bigquery.InsertError:
            logging.error('Unable to stream rows.')
            return
        _set_streamed(rows)
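# Illustrative usage sketch (hypothetical actor/summary, not part of the
# original source): rows are queued as models change and flushed later,
# typically from a cron or task handler, once the row/time thresholds are met.
device = Device.get(serial_number='5CD81234XY')  # any model exposing to_json_dict()
BigQueryRow.add(model_instance=device,
                timestamp=datetime.datetime.utcnow(),
                actor='admin@example.com',
                method='loan_assign',
                summary='Beginning new loan.')
BigQueryRow.stream_rows()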
class PhoneNumber(ndb.Model):
    """A model representing a phone number."""
    contact = ndb.KeyProperty(Contact)
    phone_type = ndb.StringProperty(
        choices=('home', 'work', 'fax', 'mobile', 'other'))
    number = ndb.StringProperty()
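# Illustrative usage sketch (hypothetical contact id, not part of the original
# source): each PhoneNumber row carries a key back to its Contact.
contact_key = ndb.Key('Contact', 4242)
home = PhoneNumber(contact=contact_key, phone_type='home', number='+1 555 0100')
home.put()
numbers = PhoneNumber.query(PhoneNumber.contact == contact_key).fetch()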
class MoveOrder(ndb.Model):
    """Models a move order in the game."""
    # The tile to move the unit to.
    destination_map_tile_key = ndb.KeyProperty(kind='MapTile')
class Board(ndb.Model):
    name = ndb.StringProperty()
    users = ndb.KeyProperty(repeated=True)
    created = ndb.DateTimeProperty(auto_now_add=True)
    updated = ndb.DateTimeProperty(auto_now=True)

    def to_dict(self):
        board = {}
        board['id'] = self.key.id()
        board['name'] = self.name
        board['users'] = []
        for user in self.users:
            board['users'].append(user.id())
        board['created'] = self.created.isoformat() + 'Z'
        board['updated'] = self.updated.isoformat() + 'Z'
        return board

    def add_user(self, user_id):
        user = ndb.Key(User, int(user_id))
        self.users.append(user)
        self.put()
        return self

    def remove_user(self, user_id):
        user = ndb.Key(User, int(user_id))
        self.users.remove(user)
        self.put()
        return self

    @classmethod
    def save(cls, **kwargs):
        if kwargs.get('id'):
            board = cls.get_by_id(int(kwargs['id']))
        else:
            board = cls()
        if kwargs.get('name'):
            board.name = kwargs['name']
        if kwargs.get('user_id'):
            # Store a User key, matching add_user()/remove_user() above.
            user = ndb.Key(User, int(kwargs['user_id']))
            board.users = [user]
        board.put()
        return board

    @classmethod
    def get_user_boards(cls, user_id):
        results = []
        user = ndb.Key(User, int(user_id))
        query = cls.query(cls.users == user)
        query = query.order(-cls.updated)
        boards = query.fetch()
        for board in boards:
            results.append(board.to_dict())
        return results

    @classmethod
    def get_tasks(cls, board_id):
        results = []
        board = ndb.Key(cls, int(board_id))
        query = Task.query(Task.board == board)
        query = query.order(-Task.updated)
        tasks = query.fetch()
        for task in tasks:
            results.append(task.to_dict())
        return results
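# Illustrative usage sketch (hypothetical ids, not part of the original source):
# boards track their members as a repeated KeyProperty of User keys.
board = Board.save(name='Sprint planning', user_id='42')
board.add_user('43')
boards_for_user = Board.get_user_boards('42')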
class Event(mixin.Base, polymodel.PolyModel):
    """Blockable Event.

    key = Key(User, user_email) -> Key(Host, host_id) ->
          Key(..., Blockable, hash) -> Key(Event, '1')

    NOTE: The Blockable key may be of any length (e.g. for Bundles).
    NOTE: The Event id is always '1'.

    Attributes:
        blockable_key: key, key to the blockable associated with this event.
        cert_key: key, key to the cert associated with this event.
        host_id: str, unique ID for the host on which this event occurred.
        file_name: str, filename of the blockable on last block.
        file_path: str, path of the blockable on last block.
        publisher: str, publisher of this file.
        version: str, version number of this file.
        executing_user: str, user who executed the binary (may be a system user).
        event_type: str, reason this event was initially created.
        recorded_dt: datetime, when this event was received by the server.
        first_blocked_dt: datetime, time of the first block.
        last_blocked_dt: datetime, time of the last block.
        count: int, the number of times a given event has occurred.
    """
    blockable_key = ndb.KeyProperty()
    cert_key = ndb.KeyProperty()
    file_name = ndb.StringProperty()
    file_path = ndb.StringProperty()
    publisher = ndb.StringProperty()
    version = ndb.StringProperty()
    host_id = ndb.StringProperty()
    executing_user = ndb.StringProperty()
    event_type = ndb.StringProperty(
        choices=constants.EVENT_TYPE.SET_ALL, required=True)
    recorded_dt = ndb.DateTimeProperty(auto_now_add=True)
    first_blocked_dt = ndb.DateTimeProperty()
    last_blocked_dt = ndb.DateTimeProperty()
    count = ndb.IntegerProperty(default=1)

    @property
    def run_by_local_admin(self):
        """Whether the Event was generated by the platform's admin user.

        Due to the platform-specific nature of "admin user," this property should
        be overridden by each platform's derivative models.

        Returns:
            bool, see method description.
        """
        return False

    @property
    def user_key(self):
        if not self.key:
            return None
        return ndb.Key(flat=self.key.pairs()[0])

    def _DedupeMoreRecentEvent(self, more_recent_event):
        """Updates if the related Event is more recent than the current one."""
        self.last_blocked_dt = more_recent_event.last_blocked_dt
        self.file_name = more_recent_event.file_name
        self.file_path = more_recent_event.file_path
        self.executing_user = more_recent_event.executing_user
        self.event_type = more_recent_event.event_type

    def _DedupeEarlierEvent(self, earlier_event):
        """Updates if the related Event occurred earlier than the current one."""
        self.first_blocked_dt = earlier_event.first_blocked_dt

    def Dedupe(self, related_event):
        """Updates the current Event state with another, related Event."""
        self.count += related_event.count or 1

        # related_event registered an Event earlier than the earliest recorded date
        if self.first_blocked_dt > related_event.first_blocked_dt:
            self._DedupeEarlierEvent(related_event)

        # related_event registered an Event more recently than the most recent
        # recorded date
        if self.last_blocked_dt < related_event.last_blocked_dt:
            self._DedupeMoreRecentEvent(related_event)

    @classmethod
    def DedupeMultiple(cls, events):
        """Dedupes an iterable of new-style Events.

        Args:
            events: An iterable of new-style Event entities to be deduped.

        Returns:
            A list of deduped Events.
        """
        distinct_events = {}
        for event in events:
            duped_event = distinct_events.get(event.key)
            if duped_event:
                duped_event.Dedupe(event)
            else:
                distinct_events[event.key] = event
        return distinct_events.values()

    def to_dict(self, include=None, exclude=None):  # pylint: disable=g-bad-name
        result = super(Event, self).to_dict(include=include, exclude=exclude)
        result['blockable_id'] = self.blockable_key.id()
        return result