class PlayHistory(Model):
    __tablename__ = 'play_history'

    id = Column(Integer, primary_key=True)
    song_title = Column(String(64))
    block_number = Column(Integer)
    time_played = Column(DateTime(), default=datetime.utcnow)
    length_played = Column(Interval())
class MatchLog(Base):
    __tablename__ = 'match_log'

    id = Column(String(255), primary_key=True)
    upload_id = Column(String(255), ForeignKey('upload_log.id'))
    match_start_timestamp = Column(DateTime())
    match_complete_timestamp = Column(DateTime())
    match_status = Column(Boolean())
    runtime = Column(Interval())
class Proxy(Base):
    __tablename__ = 'proxy'

    id = Column(Integer, primary_key=True)
    ip = Column(String(15), unique=True)
    port = Column(Integer)
    # Pass the callable itself, not its result; calling datetime.now() here would
    # freeze the timestamp at class-definition time for every row.
    created_at = Column(DateTime, default=datetime.now)
    updated_at = Column(DateTime, default=datetime.now)
    status = Column(SMALLINT, default=ProxyStatus.UnAvailable)
    on_duration = Column(Interval(), default=timedelta())
class Model(Base):
    __tablename__ = 'article'

    id = Column(Integer, primary_key=True)
    string_field = Column(String(32))
    unicode_field = Column(Unicode(32))
    text_field = Column(Text())
    int_field = Column(Integer())
    float_field = Column(Float())
    bool_field = Column(Boolean())
    date_field = Column(Date())
    datetime_field = Column(DateTime())
    interval_field = Column(Interval())
    time_field = Column(Time())
    enum_field = Column(Enum(MyEnum))
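# A minimal, self-contained sketch showing that Interval() round-trips as a Python
# datetime.timedelta, even on a backend without a native interval type. The engine,
# the MyEnum definition and the Article demo class below are assumptions made for
# this example only; they are not part of the model above.
import enum
from datetime import timedelta

from sqlalchemy import Column, Enum, Integer, Interval, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

DemoBase = declarative_base()

class MyEnum(enum.Enum):  # assumed enum, stands in for the one used above
    draft = 1
    published = 2

class Article(DemoBase):  # hypothetical stand-in mirroring the model above
    __tablename__ = 'article_demo'
    id = Column(Integer, primary_key=True)
    interval_field = Column(Interval())
    enum_field = Column(Enum(MyEnum))

engine = create_engine('sqlite://')
DemoBase.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

with Session() as session:
    session.add(Article(interval_field=timedelta(hours=1, minutes=30),
                        enum_field=MyEnum.draft))
    session.commit()
    row = session.query(Article).one()
    assert row.interval_field == timedelta(hours=1, minutes=30)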
class DBSensor(Base, Sensor):
    """Mixin class for a table with sensors."""

    __tablename__ = "sensor"

    # two columns for db purposes: id is a row identifier
    id = Column(Integer, primary_key=True)
    # type is useful so we can use polymorphic inheritance
    # (https://docs.sqlalchemy.org/en/13/orm/inheritance.html#single-table-inheritance)
    type = Column(String(50), nullable=False)
    name = Column(String(120), nullable=False, default="")
    unit = Column(String(80), nullable=False, default="")
    timezone = Column(String(80), nullable=False, default="UTC")
    event_resolution = Column(Interval(), nullable=False, default=timedelta(hours=0))
    knowledge_horizon_fnc = Column(String(80), nullable=False)
    knowledge_horizon_par = Column(JSON(), default={}, nullable=False)

    def __init__(
        self,
        name: str = "",
        unit: str = "",
        timezone: str = "UTC",
        event_resolution: Optional[timedelta] = None,
        knowledge_horizon: Optional[
            Union[timedelta, Tuple[Callable[[datetime, Any], timedelta], dict]]
        ] = None,
    ):
        Sensor.__init__(self, name, unit, timezone, event_resolution, knowledge_horizon)
        Base.__init__(self)

    def __repr__(self):
        return "<DBSensor: %s (%s)>" % (self.id, self.name)

    @declared_attr
    def __mapper_args__(cls):
        if cls.__name__ == "DBSensor":
            return {
                "polymorphic_on": cls.type,
                "polymorphic_identity": "sensor",
            }
        else:
            return {"polymorphic_identity": cls.__name__}
class IncomingRequest(Base):
    __tablename__ = 'incoming_request'

    id = Column(BigInteger(), primary_key=True, index=True)

    # request
    request_datetime = Column(DateTime(timezone=True), index=True, nullable=False)
    request_url = Column(Unicode(), index=True, nullable=False)
    request_method = Column(Unicode(), index=True, nullable=False)
    request_headers = Column(JSON, nullable=True)
    request_body = Column(LargeBinary(), nullable=True)

    # response
    response_datetime = Column(DateTime(timezone=True), index=True, nullable=True)
    response_status_code = Column(Integer(), index=True, nullable=True)
    response_headers = Column(JSON, nullable=True)
    response_body = Column(LargeBinary(), nullable=True)

    # extra bookkeeping
    elapsed_time = Column(Interval(), nullable=True)
    exception = Column(Unicode(), nullable=True)
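# A rough sketch of how the two halves of a row and the elapsed Interval might be
# filled in. record_request/record_response are hypothetical helpers, and `session`
# is assumed to be a configured SQLAlchemy session; nothing here is dictated by the
# model itself beyond the column types.
from datetime import datetime, timezone

def record_request(session, url: str, method: str) -> IncomingRequest:
    """Hypothetical helper: create a row when a request arrives."""
    row = IncomingRequest(
        request_datetime=datetime.now(timezone.utc),
        request_url=url,
        request_method=method,
    )
    session.add(row)
    session.flush()  # assign the primary key while the request is in flight
    return row

def record_response(session, row: IncomingRequest, status_code: int) -> None:
    """Hypothetical helper: fill in the response side once it is known."""
    row.response_datetime = datetime.now(timezone.utc)
    row.response_status_code = status_code
    # Interval columns accept a plain timedelta, so the subtraction can be stored directly.
    row.elapsed_time = row.response_datetime - row.request_datetime
    session.commit()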
class User(Member):
    __tablename__ = "member_user"
    __mapper_args__ = {'polymorphic_identity': 'user'}

    id = Column(String(32), ForeignKey('member.id', onupdate="cascade"), primary_key=True)
    last_check = Column(
        DateTime(), nullable=False, default=now,
        doc="The last time the user checked their messages. You probably want to use the new_messages derived boolean instead.")
    new_messages = Column(Boolean(), nullable=False, default=False)  # FIXME: derived
    location_current = Golumn(
        Point(2), nullable=True,
        doc="Current location, for geo-targeted assignments. Nullable for privacy")
    location_updated = Column(DateTime(), nullable=False, default=now)
    #dob = Column(DateTime(), nullable=True)  # Needs to be stored in user settings, but not necessarily in the main db record
    email = Column(Unicode(250), nullable=True)
    email_unverified = Column(Unicode(250), nullable=True)
    summary_email_start = Column(
        DateTime(), nullable=True,
        doc="Users can opt in to having summary emails rather than an email on each notification")
    summary_email_interval = Column(Interval(), nullable=True)

    login_details = relationship("UserLogin", backref='user', cascade="all,delete-orphan")
    flaged = relationship(
        "FlaggedEntity",
        backref=backref('raising_member'),
        cascade="all,delete-orphan",
        primaryjoin="Member.id==FlaggedEntity.raising_member_id")

    __to_dict__ = copy.deepcopy(Member.__to_dict__)
    _extra_user_fields = {
        'location_current': lambda member: location_to_string(member.location_home),
        'location_updated': None,
    }
    __to_dict__['default'].update(_extra_user_fields)
    __to_dict__['full'].update(_extra_user_fields)

    def __unicode__(self):
        return self.name or self.id

    def hash(self):
        h = hashlib.md5(Member.hash(self))
        for field in ("email", ):
            h.update(unicode(getattr(self, field)).encode('utf-8'))
        for login in self.login_details:
            h.update(login.token)
        return h.hexdigest()

    @property
    def email_normalized(self):
        return self.email or self.email_unverified
class Member(Base):
    "Abstract class"
    __tablename__ = "member"
    __type__ = Column(member_type)
    __mapper_args__ = {
        'polymorphic_on': __type__,
        'extension': CacheChangeListener(),
    }

    _member_status = Enum("pending", "active", "suspended", name="member_status")

    id = Column(String(32), primary_key=True)
    name = Column(Unicode(250), nullable=False)
    join_date = Column(DateTime(), nullable=False, default=now)
    status = Column(_member_status, nullable=False, default="pending")
    avatar = Column(String(40), nullable=True)
    utc_offset = Column(Interval(), nullable=False, default="0 hours")
    location_home = Golumn(Point(2), nullable=True)
    payment_account_id = column_property(
        Column(Integer(), ForeignKey('payment_account.id'), nullable=True),
        extension=MemberPaymentAccountIdChangeListener())
    #payment_account_id = Column(Integer(), ForeignKey('payment_account.id'), nullable=True)
    salt = Column(Binary(length=256), nullable=False, default=_generate_salt)
    description = Column(UnicodeText(), nullable=False, default=u"")
    verified = Column(Boolean(), nullable=False, default=False)
    extra_fields = Column(JSONType(mutable=True), nullable=False, default={})

    num_following = Column(Integer(), nullable=False, default=0, doc="Controlled by postgres trigger")
    num_followers = Column(Integer(), nullable=False, default=0, doc="Controlled by postgres trigger")
    num_unread_messages = Column(Integer(), nullable=False, default=0, doc="Controlled by postgres trigger")
    num_unread_notifications = Column(Integer(), nullable=False, default=0, doc="Controlled by postgres trigger")
    last_message_timestamp = Column(DateTime(), nullable=True, doc="Controlled by postgres trigger")
    last_notification_timestamp = Column(DateTime(), nullable=True, doc="Controlled by postgres trigger")
    # AllanC - TODO - derived field trigger needed
    account_type = Column(
        account_types, nullable=False, default='free',
        doc="Controlled by Python MapperExtension event on PaymentAccount")

    flags = relationship(
        "FlaggedEntity",
        backref=backref('offending_member'),
        cascade="all,delete-orphan",
        primaryjoin="Member.id==FlaggedEntity.offending_member_id")
    content_edits = relationship("ContentEditHistory", backref=backref('member', order_by=id))
    groups_roles = relationship(
        "GroupMembership",
        backref="member",
        cascade="all,delete-orphan",
        lazy='joined')  # AllanC - TODO: needs eagerload group? does lazy=joined do it?
    ratings = relationship("Rating", backref=backref('member'), cascade="all,delete-orphan")
    feeds = relationship("Feed", backref=backref('member'), cascade="all,delete-orphan")

    # AllanC - I wanted to remove these, but they are still used by actions.py because they are needed to set up the base test data
    following = relationship(
        "Member",
        primaryjoin="Member.id==Follow.follower_id",
        secondaryjoin="(Member.id==Follow.member_id) & (Follow.type!='trusted_invite')",
        secondary=Follow.__table__)
    followers = relationship(
        "Member",
        primaryjoin="Member.id==Follow.member_id",
        secondaryjoin="(Member.id==Follow.follower_id) & (Follow.type!='trusted_invite')",
        secondary=Follow.__table__)
    followers_trusted = relationship(
        "Member",
        primaryjoin="Member.id==Follow.member_id",
        secondaryjoin="(Member.id==Follow.follower_id) & (Follow.type=='trusted')",
        secondary=Follow.__table__)
    assigments = relationship("MemberAssignment", backref=backref("member"), cascade="all,delete-orphan")

    # Content relation shortcuts
    #content = relationship("Content", backref=backref('creator'), primaryjoin=and_("Member.id==Content.creator_id"))  # ,"Content.__type__!='comment'" - can't get this to work, we want to filter out comments  #, cascade="all,delete-orphan"
    #content_assignments = relationship("AssignmentContent")
    #content_articles = relationship("ArticleContent")
    #content_drafts = relationship("DraftContent")
    #See civicboom_init.py
    # content
    # content_assignments_active
    # content_assignments_previous
    # assignments_accepted = relationship("MemberAssignment", backref=backref("member"), cascade="all,delete-orphan")
    #interests = relationship("")
    #messages_to           = relationship("Message", primaryjoin=and_(Message.source_id!=null(), Message.target_id==id), backref=backref('target', order_by=id))
    #messages_from         = relationship("Message", primaryjoin=and_(Message.source_id==id, Message.target_id!=null()), backref=backref('source', order_by=id))
    #messages_public       = relationship("Message", primaryjoin=and_(Message.source_id==id, Message.target_id==null()))
    #messages_notification = relationship("Message", primaryjoin=and_(Message.source_id==null(), Message.target_id==id))
    #groups = relationship("Group", secondary=GroupMembership.__table__)  # Could be reinstated with only "active" groups, need to add criteria

    __table_args__ = (
        CheckConstraint("id ~* '^[a-z0-9_-]{4,}$'"),
        CheckConstraint("length(name) > 0"),
        CheckConstraint("substr(extra_fields,1,1)='{' AND substr(extra_fields,length(extra_fields),1)='}'"),
        {})

    __to_dict__ = copy.deepcopy(Base.__to_dict__)
    __to_dict__.update({
        'default': {
            'id': None,
            # Normalize the member name and return the username (id) if no name is present
            'name': lambda member: member.name if member.name else member.id,
            # AllanC - 'username' should be deprecated as it is a mirror of the id.
            #          This may need careful combing of the templates before removal.
            'username': None,
            'avatar_url': None,
            'type': lambda member: member.__type__,
            # AllanC - location is remmed out because we do not want to show locations until we can have a text description or area
            'location_home': lambda member: '',  # location_to_string(member.location_home)
            'num_followers': None,
            'num_following': None,
            'account_type': None,
            'url': lambda member: member.__link__(),
        },
    })
    __to_dict__.update({'full': copy.deepcopy(__to_dict__['default'])})
    __to_dict__['full'].update({
        'num_followers': None,
        # total offset in seconds (days converted to seconds, plus the remaining seconds)
        'utc_offset': lambda member: (member.utc_offset.days * 86400 + member.utc_offset.seconds),
        'join_date': None,
        'website': lambda member: member.extra_fields.get('website'),
        'google_profile': lambda member: member.extra_fields.get('google_profile'),
        'description': None,
        'push_assignment': lambda member: member.extra_fields.get('push_assignment'),
        #'followers'           : lambda member: [m.to_dict() for m in member.followers],
        #'following'           : lambda member: [m.to_dict() for m in member.following],
        #'messages_public'     : lambda member: [m.to_dict() for m in member.messages_public[:5]],
        #'assignments_accepted': lambda member: [a.to_dict() for a in member.assignments_accepted if a.private==False],
        #'content_public'      : lambda member: [c.to_dict() for c in member.content_public],
        #'groups_public'       : lambda member: [update_dict(gr.group.to_dict(), {'role': gr.role}) for gr in member.groups_roles if gr.status=="active" and gr.group.member_visibility=="public"],  # AllanC - also duplicated in members_actions.groups ... can this be unified?
    })

    @property
    def username(self):
        import warnings
        warnings.warn("Member.username used", DeprecationWarning)
        return self.id

    _config = None
    # extra_fields_raw = synonym('extra_fields', descriptor=property(_get_extra_fields_raw, _set_extra_fields_raw))

    @property
    def config(self):
        if self.extra_fields == None:
            self.extra_fields = {}
        if not self._config:
            self._config = _ConfigManager(self.extra_fields)
        return self._config

    def __unicode__(self):
        return "%s (%s)" % (self.name, self.id)

    def __str__(self):
        return unicode(self).encode('ascii', 'replace')

    def __link__(self):
        from civicboom.lib.web import url
        return url('member', id=self.id, sub_domain='www', qualified=True)

    def hash(self):
        h = hashlib.md5()
        # TODO: include relationship fields in this list?
        for field in ("id", "name", "join_date", "status", "avatar", "utc_offset"):
            h.update(unicode(getattr(self, field)).encode('utf-8'))
        return h.hexdigest()

    def invalidate_cache(self, remove=False):
        from civicboom.lib.cache import invalidate_member
        invalidate_member(self, remove=remove)

    def action_list_for(self, member, **kwargs):
        action_list = []
        #if self.can_message(member):
        #    action_list.append('editable')
        if self != member:
            if 'push_assignment' in self.extra_fields:
                action_list.append('push_assignment')
        if self == member:
            action_list.append('settings')
            action_list.append('logout')
            if member.has_account_required('plus'):
                action_list.append('invite_trusted_followers')
        elif member and member.has_account_required('plus'):
            if self.is_following(member):
                if member.is_follower_trusted(self):
                    action_list.append('follower_distrust')
                else:
                    action_list.append('follower_trust')
            elif not member.is_follow_trusted_invitee(self):
                action_list.append('follower_invite_trusted')
        # GregM:
        if member and (member.is_following(self) or member.is_follow_trusted_inviter(self)):
            action_list.append('unfollow')
        if member and (not member.is_following(self) or member.is_follow_trusted_inviter(self)):
            if self != member:
                action_list.append('follow')
        if self != member:
            action_list.append('message')
        if member and member.__type__ == 'group' and not member.get_membership(self):
            # If the observing member is a group, show the "invite to my group" action
            action_list.append('invite')
        return action_list

    def send_email(self, **kargs):
        from civicboom.lib.communication.email_lib import send_email
        send_email(self, **kargs)

    def send_notification(self, m, **kwargs):
        import civicboom.lib.communication.messages as messages
        messages.send_notification(self, m, **kwargs)

    def send_notification_to_followers(self, m, private=False):
        followers_to = self.followers
        if private:
            followers_to = self.followers_trusted
        import civicboom.lib.communication.messages as messages
        messages.send_notification(followers_to, m)

    def follow(self, member, delay_commit=False):
        from civicboom.lib.database.actions import follow
        return follow(self, member, delay_commit=delay_commit)

    def unfollow(self, member, delay_commit=False):
        from civicboom.lib.database.actions import unfollow
        return unfollow(self, member, delay_commit=delay_commit)

    def follower_trust(self, member, delay_commit=False):
        from civicboom.lib.database.actions import follower_trust
        return follower_trust(self, member, delay_commit=delay_commit)

    def follower_distrust(self, member, delay_commit=False):
        from civicboom.lib.database.actions import follower_distrust
        return follower_distrust(self, member, delay_commit=delay_commit)

    # GregM: Added kwargs to allow the invite controller to add a role (needed for group invite; trying to genericise things as much as possible)
    def follower_invite_trusted(self, member, delay_commit=False, **kwargs):
        from civicboom.lib.database.actions import follower_invite_trusted
        return follower_invite_trusted(self, member, delay_commit=delay_commit)

    def is_follower(self, member):
        #if not member:
        #    return False
        #from civicboom.controllers.members import MembersController
        #member_search = MembersController().index
        #return bool(member_search(member=self, followed_by=member)['data']['list']['count'])
        from civicboom.lib.database.actions import is_follower
        return is_follower(self, member)

    def is_follower_trusted(self, member):
        from civicboom.lib.database.actions import is_follower_trusted
        return is_follower_trusted(self, member)

    def is_follow_trusted_invitee(self, member):
        # Was: is_follower_invited_trust
        from civicboom.lib.database.actions import is_follow_trusted_invitee as _is_follow_trusted_invitee
        return _is_follow_trusted_invitee(self, member)

    def is_follow_trusted_inviter(self, member):
        # Was: is_following_invited_trust
        from civicboom.lib.database.actions import is_follow_trusted_invitee as _is_follow_trusted_invitee
        return _is_follow_trusted_invitee(member, self)

    def is_following(self, member):
        #if not member:
        #    return False
        #from civicboom.controllers.members import MembersController
        #member_search = MembersController().index
        #return bool(member_search(member=self, follower_of=member)['data']['list']['count'])
        from civicboom.lib.database.actions import is_follower
        return is_follower(member, self)

    @property
    def url(self):
        from civicboom.lib.web import url
        return url('member', id=self.id, qualified=True)

    @property
    def avatar_url(self, size=80):
        # if specified, use the specified avatar
        if self.avatar:
            return wh_url("avatars", self.avatar)
        # for members with email addresses, fall back to gravatar
        if hasattr(self, "email"):
            email = self.email or self.email_unverified
            if email:
                hash = hashlib.md5(email.lower()).hexdigest()
                #default = "identicon"
                default = wh_url("public", "images/default/avatar_%s.png" % self.__type__)
                args = urllib.urlencode({'d': default, 's': str(size), 'r': "pg"})
                return "https://secure.gravatar.com/avatar/%s?%s" % (hash, args)
        # last resort, fall back to our own default
        return wh_url("public", "images/default/avatar_%s.png" % self.__type__)

    def add_to_interests(self, content):
        from civicboom.lib.database.actions import add_to_interests
        return add_to_interests(self, content)

    def has_account_required(self, required_account_type):
        return has_account_required(required_account_type, self.account_type)

    def can_publish_assignment(self):
        # AllanC - could be replaced with some form of get_permission('publish')? we could have lots of permission-related methods ... just a thought
        #from civicboom.lib.constants import can_publish_assignment
        #return can_publish_assignment(self)
        # AllanC - TODO - check member payment level to ascertain what the limit is - set limit to this user's level
        # if not member.payment_level:
        limit = None
        from pylons import config
        if has_account_required('corp', self.account_type):
            pass
        elif has_account_required('plus', self.account_type):
            limit = config['payment.plus.assignment_limit']
        elif has_account_required('free', self.account_type):  # self.account_type == 'free'
            limit = config['payment.free.assignment_limit']
        if not limit:
            return True
        if len(self.active_assignments_period) >= limit:
            return False
        return True

    #@property
    #def payment_account(self):
    #    return self._payment_account
    #@payment_account.setter
    def set_payment_account(self, value, delay_commit=False):
        #self._payment_account = value
        from civicboom.lib.database.actions import set_payment_account
        return set_payment_account(self, value, delay_commit)

    # @property  # AllanC - TODO this needs to be a derived field
    #            # GregM  - Done, MapperExtension on PaymentAccount updates this field!
    # def account_type(self):
    #     if self.payment_account and self.payment_account.type:
    #         return self.payment_account.type
    #     return 'free'

    def delete(self):
        """
        Not to be called in normal operation - this is a convenience method for automated tests.
        """
        from civicboom.lib.database.actions import del_member
        del_member(self)

    def check_action_key(self, action, key):
        """
        Check that this member was the one who generated the key for the specified action.
        """
        return (key == self.get_action_key(action))

    def flag(self, **kargs):
        """
        Flag member as breaking T&C (can throw an exception if it fails).
        """
        from civicboom.lib.database.actions import flag
        flag(self, **kargs)

    def get_action_key(self, action):
        """
        Generate a key; anyone with this key is allowed to perform $action on
        behalf of this member.

        The key is the hash of (member.id, action, member.salt). Member.id is
        included because while the salt *should* be unique, the ID *is* unique
        by definition. The salt should be kept secret by the server, not even
        shown to the user who owns it -- thus when presented with a key, we can
        guarantee that this server is the one who generated it. If the key for
        a user/action pair is only given to that user after they've
        authenticated, then we can guarantee that anyone with that key has been
        given it by the user.

        Usage:
        ~~~~~~
        Alice:
            key = alice.get_action_key('read article 42')
            bob.send_message("Hey bob, if you want to read article 42, "+
                             "tell the system I gave you this key: "+key)
        Bob:
            api.content.show(42, auth=(alice, key))
        System:
            wanted_content = get_content(42)
            claimed_user = get_member(alice)
            if key == claimed_user.get_action_key('read article '+wanted_content.id):
                print(wanted_content)
        """
        return hashlib.sha1(str(self.id) + action + self.salt).hexdigest()
class Track(Base):
    """An object representing a track."""

    __tablename__ = 'tracks'

    key = Column(Integer(), primary_key=True)
    album = Column(String(200), nullable=False)
    album_art_url = Column(String(500), nullable=True, default=None)
    album_artist = Column(String(200), nullable=False)
    album_id = Column(String(30), nullable=False)
    artist = Column(String(200), nullable=False)
    artists = relationship('Artist', secondary=artist_tracks)
    composer = Column(String(200), nullable=False)
    deleted = Column(Boolean(), nullable=False)
    disc_number = Column(Integer(), nullable=False)
    duration = Column(Interval())
    filename = Column(String(500), nullable=True)
    genre = Column(String(100), nullable=False)
    id = Column(String(30), nullable=True)
    kind = Column(String(10), nullable=False)
    last_played = Column(DateTime(), nullable=True)
    lyrics = Column(String(100000), nullable=True)
    play_count = Column(Integer(), nullable=False)
    playlists = relationship('Playlist', secondary=playlist_tracks)
    store_id = Column(String(30), nullable=False)
    title = Column(String(200), nullable=False)
    track_number = Column(Integer(), nullable=False)
    track_type = Column(String(5), nullable=False)
    year = Column(Integer(), nullable=False)

    @property
    def in_library(self):
        """Return True if this track is in the google library."""
        return not self.id.startswith('T')

    @property
    def length(self):
        """Return the duration in the proper format."""
        return format_timedelta(self.duration)

    @property
    def path(self):
        """Return an appropriate path for this result."""
        return os.path.join(config.config.storage['media_dir'], self.id + '.mp3')

    @property
    def downloaded(self):
        """Return whether or not this track is downloaded."""
        return os.path.isfile(self.path)

    @property
    def number(self):
        """Return the track number, padded with 0's."""
        return '%s%s' % ('0' if self.track_number is not None and self.track_number < 10 else '', self.track_number)

    def populate(self, d):
        """Populate from a dictionary d."""
        self.album = d.get('album', 'Unknown Album')
        try:
            self.album_art_url = d['albumArtRef'][0]['url']
        except IndexError:
            pass  # There is no album art.
        self.album_artist = d.get('albumArtist', 'Unknown Album Artist')
        self.album_id = d['albumId']
        self.artist = d.get('artist', 'Unknown Artist')
        for id in d.get('artistId', []):
            try:
                artist = session.query(Artist).filter(Artist.id == id).one()
            except exc.NoResultFound:
                artist = Artist(id=id)
            self.artists.append(artist)
        self.composer = d.get('composer', 'Unknown Composer')
        self.deleted = d.get('deleted', False)
        self.disc_number = d.get('discNumber', 1)
        self.duration = timedelta(seconds=int(d.get('durationMillis', '0')) / 1000)
        self.genre = d.get('genre', 'No Genre')
        self.id = d.get('id', self.id)
        if self.id is None:
            self.id = get_id(d)
        self.kind = d['kind']
        self.play_count = d.get('playCount', 0)
        self.store_id = d.get('storeId', self.id)
        self.title = d.get('title', 'Untitled Track')
        self.track_number = d.get('trackNumber', 1)
        self.track_type = d['trackType']
        self.year = d.get('year', 1)

    def __str__(self):
        return '{0.artist} - {0.title}'.format(self)
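# A usage sketch of populate(), assumed to run in the same module as Track (so that
# timedelta is imported). The payload below is hypothetical but shaped like the track
# dicts populate() expects; 'id' is included so the module-level get_id() fallback
# and the artistId/session lookup are not exercised.
track_dict = {
    'id': 'T999',
    'album': 'Demo Album',
    'albumId': 'B123',
    'albumArtRef': [],
    'artist': 'Demo Artist',
    'durationMillis': '215000',
    'kind': 'sj#track',
    'title': 'Demo Title',
    'trackType': '8',
}

track = Track()
track.populate(track_dict)
assert track.duration == timedelta(seconds=215)  # Interval column holds a timedelta
print(track)  # "Demo Artist - Demo Title"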
class DBTimedBelief(Base, TimedBelief):
    """Database representation of TimedBelief"""

    __tablename__ = "timed_beliefs"
    __table_args__ = (
        UniqueConstraint(
            "event_start",
            "belief_horizon",
            "sensor_id",
            "source_id",
            name="_one_belief_by_one_source_uc",
        ),
    )

    # type is useful so we can use polymorphic inheritance
    # (https://docs.sqlalchemy.org/en/13/orm/inheritance.html#single-table-inheritance)
    type = Column(String(50), nullable=False)
    event_start = Column(DateTime(timezone=True), primary_key=True)
    belief_horizon = Column(Interval(), nullable=False, primary_key=True)
    cumulative_probability = Column(Float, nullable=False, primary_key=True)
    event_value = Column(Float, nullable=False)
    sensor_id = Column(Integer(), ForeignKey("sensor.id", ondelete="CASCADE"), primary_key=True)
    source_id = Column(Integer, ForeignKey("belief_source.id"), primary_key=True)
    sensor = relationship(
        "DBSensor",
        backref=backref("beliefs", lazy=True, cascade="all, delete-orphan", passive_deletes=True),
    )
    source = relationship(
        "DBBeliefSource",
        backref=backref("beliefs", lazy=True, cascade="all, delete-orphan", passive_deletes=True),
    )

    @declared_attr
    def __mapper_args__(cls):
        if cls.__name__ == "DBTimedBelief":
            return {
                "polymorphic_on": cls.type,
                "polymorphic_identity": "DBTimedBelief",
            }
        else:
            return {"polymorphic_identity": cls.__name__}

    def __init__(self, sensor: DBSensor, source: DBBeliefSource, value: float, **kwargs):
        TimedBelief.__init__(self, sensor, source, value, **kwargs)
        Base.__init__(self)

    @classmethod
    def query(
        cls,
        session: Session,
        sensor: DBSensor,
        event_before: datetime = None,
        event_not_before: datetime = None,
        belief_before: datetime = None,
        belief_not_before: datetime = None,
        source: Union[int, List[int], str, List[str]] = None,
    ) -> "BeliefsDataFrame":
        """Query beliefs about sensor events.

        :param session: the database session to use
        :param sensor: sensor to which the beliefs pertain
        :param event_before: only return beliefs about events that end before this datetime (inclusive)
        :param event_not_before: only return beliefs about events that start after this datetime (inclusive)
        :param belief_before: only return beliefs formed before this datetime (inclusive)
        :param belief_not_before: only return beliefs formed after this datetime (inclusive)
        :param source: only return beliefs formed by the given source or list of sources (pass their id or name)
        :returns: a multi-index DataFrame with all relevant beliefs

        TODO: rename params for clarity: event_finished_before, event_starts_not_before (or similar), same for beliefs
        """
        # Check for timezone-aware datetime input
        if event_before is not None:
            event_before = tb_utils.enforce_utc(event_before)
        if event_not_before is not None:
            event_not_before = tb_utils.enforce_utc(event_not_before)
        if belief_before is not None:
            belief_before = tb_utils.enforce_utc(belief_before)
        if belief_not_before is not None:
            belief_not_before = tb_utils.enforce_utc(belief_not_before)

        # Query the sensor for relevant timing properties
        event_resolution, knowledge_horizon_fnc, knowledge_horizon_par = (
            session.query(
                DBSensor.event_resolution,
                DBSensor.knowledge_horizon_fnc,
                DBSensor.knowledge_horizon_par,
            ).filter(DBSensor.id == sensor.id).one_or_none())

        # Get bounds on the knowledge horizon (so we can already roughly filter by belief time)
        knowledge_horizon_min, knowledge_horizon_max = sensor_utils.eval_verified_knowledge_horizon_fnc(
            knowledge_horizon_fnc, knowledge_horizon_par, None)

        # Query based on start_time_window
        q = session.query(cls).filter(cls.sensor_id == sensor.id)

        # Apply event time filter
        if event_before is not None:
            q = q.filter(cls.event_start + event_resolution <= event_before)
        if event_not_before is not None:
            q = q.filter(cls.event_start >= event_not_before)

        # Apply rough belief time filter
        if belief_before is not None:
            q = q.filter(cls.event_start <= belief_before + cls.belief_horizon + knowledge_horizon_max)
        if belief_not_before is not None:
            q = q.filter(cls.event_start >= belief_not_before + cls.belief_horizon + knowledge_horizon_min)

        # Apply source filter
        if source is not None:
            source_list = [source] if not isinstance(source, list) else source
            id_list = [s for s in source_list if isinstance(s, int)]
            name_list = [s for s in source_list if isinstance(s, str)]
            if len(id_list) + len(name_list) < len(source_list):
                unidentifiable_list = [
                    s for s in source_list
                    if not isinstance(s, int) and not isinstance(s, str)
                ]
                raise ValueError(
                    "Query by source failed: query only possible by integer id or string name. Failed sources: %s"
                    % unidentifiable_list)
            else:
                q = q.join(DBBeliefSource).filter(
                    (cls.source_id.in_(id_list)) | (DBBeliefSource.name.in_(name_list)))

        # Build our DataFrame of beliefs
        df = BeliefsDataFrame(sensor=sensor, beliefs=q.all())

        # Actually filter by belief time
        if belief_before is not None:
            df = df[df.index.get_level_values("belief_time") < belief_before]
        if belief_not_before is not None:
            df = df[df.index.get_level_values("belief_time") >= belief_not_before]

        return df
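# A usage sketch for DBTimedBelief.query(), assuming a configured session bound to
# the schema above and a persisted sensor row. The sensor name "wind_speed", the
# source name "forecaster_a" and the source id 3 are illustrative values only.
from datetime import datetime, timezone

sensor = session.query(DBSensor).filter(DBSensor.name == "wind_speed").one()

df = DBTimedBelief.query(
    session=session,
    sensor=sensor,
    # enforce_utc expects timezone-aware datetimes, so pass tz-aware values
    event_not_before=datetime(2024, 1, 1, tzinfo=timezone.utc),
    event_before=datetime(2024, 1, 2, tzinfo=timezone.utc),
    belief_before=datetime(2024, 1, 1, 12, tzinfo=timezone.utc),
    source=["forecaster_a", 3],  # mixing source names and integer ids is allowed
)
print(df.head())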
class UserBase(GeoReferenced):
    id = Column(Integer, primary_key=True)
    created = Column(DateTime(), server_default=FetchedValue())
    name = Column(String())
    email = Column(String(), nullable=False, unique=True)
    mobile = Column(String(), nullable=False, unique=True)
    password = Column(String(), nullable=False, server_default=FetchedValue())
    reset_password_token = Column(String(), nullable=False, server_default=FetchedValue())
    active = Column('is_active', Boolean(), nullable=False, server_default=FetchedValue())
    can_pickup = Column('can_pickup', Boolean(), nullable=False, server_default=FetchedValue())
    has_pickup = Column('has_pickup', Boolean(), nullable=False, server_default=FetchedValue())
    pickup_enabled = Column('pickup_enabled', Boolean(), nullable=False, server_default=FetchedValue())
    last_pickup_available_start = Column('last_pickup_available_start', DateTime(), server_default=FetchedValue())
    last_pickup_available_duration = Column('last_pickup_available_duration', Interval(), server_default=FetchedValue())

    def __repr__(self):
        return '<user id=%r email=%r>' % (self.id, self.email)

    def enable_pickup(self, duration_secs=0):
        self.last_pickup_available_start = "NOW()"
        delta = datetime.timedelta(0, duration_secs)
        self.last_pickup_available_duration = delta
        self.has_pickup = False
        self.can_pickup = True
        self.pickup_enabled = True
        db.session.commit()

    # requestor requests a pickup from self
    def request_pickup(self, requestor):
        if not self.can_pickup:
            return None
        if not self.pickup_enabled:
            return None
        request = PickupRequest(
            requestor_user_id=requestor.id,
            driver_user_id=self.id,
            location_accuracy_meters=requestor.location_accuracy_meters,
            use_user_location=True,
        )
        request.set_location(requestor.lng, requestor.lat)
        db.session.add(request)
        db.session.commit()
        return request
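# Illustrative flow only: `driver` and `rider` are assumed to be two persisted
# UserBase rows, and db.session the Flask-SQLAlchemy style session used above.
driver.enable_pickup(duration_secs=30 * 60)  # driver stays available for 30 minutes
request = driver.request_pickup(rider)
if request is None:
    print("driver is not accepting pickups right now")
else:
    print("pickup window:", driver.last_pickup_available_start,
          "+", driver.last_pickup_available_duration)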
class Collection(Base, ModelMixin):
    id = Column(Integer, primary_key=True)
    name = Column(String(255))
    description = Column(String(80), index=True)
    long_description = Column(Text)
    owner = Column(String(200), default=os.getlogin())
    owner_gpgid = Column(String(30))
    upstream_url = Column(Text(), doc="upstream url")
    uuid = Column(String(36), index=True, unique=True, default=suuid)
    created = Column(DateTime, default=datetime.datetime.now)
    updated = Column(DateTime, default=None)
    enabled = Column(Boolean, default=True)
    check_interval = Column(Interval(), default=datetime.timedelta(days=1))
    download_manager = Column(String(30))
    download_path = Column(String(255))
    #uname = Column(Collection.name.concat("-").concat(Collection.uuid).label("uname"))
    entries = relationship("Entry", backref="collection")

    __tablename__ = 'collection'

    EXPORT = (("size", "owner", "changed", "name"),
              ("id", "upstream_url", "uuid", "created", "updated", "enabled",
               "check_interval", "owner_gpgid", "download_manager"),
              ("description", "long_description"))

    @property
    def basename(self):
        return self.name.replace("/", "_").replace("\\", "_")

    @staticmethod
    def create(session, *args, **kwargs):
        nc = Collection(*args, **kwargs)
        session.add(nc)
        root = Entry(name="/", type=EntryType.root, parent_id=None, collection=nc)
        session.add(root)
        return nc

    def mkdir(self, name):
        session = object_session(self)
        if self.type != EntryType.directory:
            raise exc.EntryTypeError("can't create directory under non directory")
        if self.has_child(name):
            raise exc.EntryExistsError("file: %s already exists" % name)
        root = Entry(name=name, type=EntryType.directory, parent_id=self.id,
                     collection=self.collection.id)
        session.add(root)

    @staticmethod
    def lookup(session, path):
        sp = pathsplit(path)
        if len(sp) < 2:
            raise exc.ArgumentError("path needs to be absolute")
        # find collection
        colname = sp[1]
        col = Collection.lookup_collection(session, colname)
        entries = col.lookup_path(session, sp[2:])
        return entries

    @staticmethod
    def lookup_collection(session, colname):
        uname = Collection.name.concat("-").concat(Collection.uuid).label("uname")
        q = session.query(Collection).add_column(uname)
        q = q.filter(or_(uname.startswith("%s-" % colname), uname.startswith(colname)))
        q = q.order_by(asc("uname"))
        res = q.all()
        if len(res) == 0:
            raise exc.CollectionNotFound("could not find collection with name: %s" % colname)
        elif len(res) > 1:
            raise exc.CollectionMultipleChoices(
                "multiple collections match, add uuid: %s[-UUID]" % colname)
        return res[0][0]

    def lookup_path(self, session, chunks):
        root = self.get_root()
        cur = root
        chunks = chunks[:]
        while len(chunks):
            nname = chunks.pop(0)
            cur = cur.descent(nname)
        return cur

    def __repr__(self):
        return "<Collection('%s' owner='%s'>" % (self.name, self.owner)

    def get_root(self):
        """Return the root directory Entry of the collection."""
        return object_session(self).query(Entry).filter(
            Entry.collection == self,
            Entry.type.is_(EntryType.root),
            Entry.parent_id.is_(None)).one()

    @property
    def size(self):
        """Size of all entries that belong to the collection."""
        return len(self.entries)

    @property
    def changed(self):
        return self.updated or self.created

    def export(self, formatter=None):
        if not formatter:
            from .formatter import yaml
            formatter = yaml.Formatter()
        return formatter.export(self)
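# A usage sketch for Collection.create() and lookup_collection(), assuming the
# package's own create_session() helper (as used by Entry below). The collection
# name and upstream URL are made up for illustration.
session = create_session()
col = Collection.create(session, name="mirrors", upstream_url="https://example.org/pub")
session.commit()

# Collections are addressed as "name", or "name-UUID" when the name alone is ambiguous.
same = Collection.lookup_collection(session, "mirrors")
root = same.get_root()  # the "/" Entry created alongside the collection
print(same.basename, root.name, same.check_interval)  # check_interval defaults to 1 day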
class Entry(Base, ModelMixin):
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False, index=True, doc="name of directory")
    type = Column(EntryType.sql_type(), nullable=False, index=True, default=EntryType.single)
    plugin = Column(String(PLUGIN_NAME_LENGTH), doc="plugin handling the entry")
    uuid = Column(String(36), index=True, unique=True, default=suuid)
    url = Column(Text)
    arguments = Column(Text)
    state = Column(EntryState.sql_type())
    created = Column(DateTime, default=datetime.datetime.now)
    updated = Column(DateTime, default=None, index=True)
    enabled = Column(Boolean, nullable=True, default=None)
    last_success = Column(DateTime, nullable=True, default=None)
    last_failure = Column(DateTime, nullable=True, default=None, index=True)
    error_msg = Column(Text, nullable=True, default=None)
    success_msg = Column(Text, nullable=True, default=None)
    next_check = Column(DateTime, nullable=True, default=None, index=True, doc="used for querying jobs")
    job_started = Column(DateTime, nullable=True, default=None, index=True, doc="last time the job started")
    failure_count = Column(Integer, nullable=False, default=0, doc="failures since last success")
    size_is = Column(Integer, nullable=False, default=0, doc="size last time checked")
    size_should = Column(Integer, nullable=False, default=0, doc="size if known")
    priority = Column(Integer, nullable=False, default=0,
                      doc="priority for job scheduler. higher means more priority")
    collection_id = Column(Integer, ForeignKey('collection.id'), nullable=False, index=True)
    parent_id = Column(Integer, ForeignKey('entry.id'), index=True)
    check_interval = Column(Interval(), default=None)

    children = relationship("Entry")
    meta = relationship("Meta", backref="entry")

    __tablename__ = 'entry'
    __table_args__ = (
        UniqueConstraint(parent_id, name, name='uix_entry_unique_name'),
        # FIXME
        #UniqueConstraint(collection_id, type==EntryType.root, name='uix_entry_unique_name'),
    )

    EXPORT = (("plugin", "type", "last_success", "state", "size", "name"),
              ("id", "last_failure", "uuid", "url", "arguments", "created", "enabled", "size_should"),
              ("updated", "last_success", "last_failure", "next_check", "error_msg", "success_msg"))

    def __init__(self, *args, **kwargs):
        if not 'collection_id' in kwargs:
            if 'collection' in kwargs:
                kwargs['collection_id'] = kwargs['collection'].id
            elif 'parent' in kwargs:
                kwargs['collection_id'] = kwargs['parent'].collection_id
        if 'parent' in kwargs:
            kwargs['parent_id'] = kwargs['parent'].id
            del kwargs['parent']
        return super(Entry, self).__init__(*args, **kwargs)

    @classmethod
    def jobs_filter(cls, session, now, with_empty=False, exclude=()):
        q = session.query(cls)\
            .filter(or_(cls.next_check == None, cls.next_check < now))\
            .filter(or_(cls.type.is_(EntryType.single),
                        cls.type.is_(EntryType.collection_single),
                        cls.type.is_(EntryType.collection)))
        if exclude:
            q = q.filter(cls.id.notin_(exclude))
        if not with_empty:
            q = q.filter(cls.state != EntryState.empty)
        return q.order_by(desc(cls.priority))

    @property
    def size(self):
        # maybe do update here?
        return self.size_is

    def set_error(self, msg, unhandled=False):
        session = create_session()
        self.error_msg = msg
        if unhandled:
            self.state = EntryState.empty
            self.next_check = None
            self.failure_count = 0
        else:
            now = datetime.datetime.now()
            self.state = EntryState.failure
            self.last_failure = now
            self.failure_count += 1
            # back off along the Fibonacci sequence, capped at fib(10) minutes
            self.next_check = (now + datetime.timedelta(minutes=fib(min(self.failure_count, 10))))
        session.add(self)
        session.commit()

    def set_success(self, msg=None):
        session = create_session()
        self.last_failure = None
        now = datetime.datetime.now()
        self.last_success = now
        self.success_msg = msg
        self.failure_count = 0
        self.next_check = (now + self.get_first_set("check_interval"))
        self.state = EntryState.success
        session.add(self)
        session.commit()

    def restart(self):
        session = create_session()
        self.error_msg = None  # clear any previous error message
        now = datetime.datetime.now()
        self.next_check = now
        session.add(self)
        session.commit()

    def is_collection(self):
        return self.type != 'SINGLE'

    def is_single(self):
        return self.type == 'SINGLE'

    def get_first_set(self, key):
        session = create_session()
        cur = self
        while True:
            rv = getattr(cur, key, None)
            if rv:
                return rv
            if not cur.parent_id:
                # check collection
                cur = session.query(Collection).filter(Collection.id == cur.collection_id).one()
                return getattr(cur, key, None)
            cur = session.query(Entry).filter(Entry.id == cur.parent_id).one()

    @property
    def full_path(self):
        #session = object_session(self)
        session = create_session()
        parts = [self.name]
        cur = self
        while cur.parent_id != None:
            cur = session.query(Entry).filter(Entry.id == cur.parent_id).one()
            if cur.name != "/":
                parts.insert(0, cur.name)
            else:
                parts.insert(0, "")
        return pathjoin(parts)

    @property
    def system_path(self):
        #session = object_session(self)
        session = create_session()
        parts = [self.name]
        cur = self
        #embed()
        while cur.parent_id != None:
            cur = session.query(Entry).filter(Entry.id == cur.parent_id).one()
            if cur.name != "/":
                parts.insert(0, cur.name)
        return os.path.join(*parts)

    def __repr__(self):
        return "<Entry('%s' id='%s'>" % (self.name, self.id)

    def has_child(self, name):
        session = object_session(self)
        return session.query(Entry).filter(Entry.parent_id == self.id,
                                           Entry.name == name).count() > 0

    def get_child(self, name):
        session = object_session(self)
        return session.query(Entry).filter(Entry.parent_id == self.id,
                                           Entry.name == name).one()

    def get_or_create_child(self, name, args):
        session = object_session(self)
        try:
            rv = session.query(Entry).filter(Entry.parent_id == self.id,
                                             Entry.name == name).one()
            return rv, False
        except sqlexc.NoResultFound:
            rv = Entry(**args)
            session.add(rv)
            session.flush()
            return rv, True

    def descent(self, name):
        """
        Same as returning get_child(name), but may just change some internal
        structure of this Entry and return itself.

        Used for path traversal.
        """
        return self.get_child(name)

    def dump(self, filter_=None, details=False, all_=False, dict_=False):
        rv = ModelMixin.dump(self, filter_=filter_, all_=all_, details=details, dict_=dict_)
        #add_if(rv, 'last_success', self.last_success, filter_)
        #add_if(rv, 'last_failure', self.last_failure, filter_)
        #add_if(rv, 'failure_count', self.failure_count, filter_)
        #embed()
        if len(self.meta) and (filter_ == None or 'meta' in filter_):
            rv['meta'] = meta = {}
            for m in self.meta:
                name = m.name
                if m.plugin:
                    name = "%s::%s" % (m.plugin, m.name)
                rv['meta'][name] = m.value
        return rv

    @staticmethod
    def validate_name(target, value, oldvalue, initiator):
        #print(target)
        #embed()
        if value.find("/") != -1:
            initiator.root_name = True
            if hasattr(target, "is_root") and not target.is_root:
                raise exc.ValueError("name can't contain /")
        else:
            initiator.root_name = False

    @staticmethod
    def validate_type(target, value, oldvalue, initiator):
        #print(target)
        #embed()
        initiator.is_root = value == EntryType.root
        if hasattr(target, "root_name") and target.root_name:
            raise exc.ValueError("name can't contain /")

    def set_meta(self, name, value, plugin=None):
        session = object_session(self)
        try:
            meta = session.query(Meta).filter(Meta.entry_id.is_(self.id),
                                              Meta.name.is_(name),
                                              Meta.plugin.is_(plugin)).one()
        except sqlexc.NoResultFound:
            meta = Meta(entry_id=self.id, name=name, plugin=plugin)
            session.add(meta)
            session.flush()
        meta.value = value
        session.flush()

    def get_meta(self, name, plugin=None, default=None):
        session = object_session(self)
        try:
            return session.query(Meta).filter(
                Meta.entry_id.is_(self.id),
                Meta.name.is_(name),
                Meta.plugin.is_(plugin)).one().value
        except sqlexc.NoResultFound:
            return default
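# Entry.set_error() relies on a fib() helper that is not shown in this snippet.
# A minimal sketch of what it is assumed to look like, plus the retry spacing that
# results from capping the argument at 10 as set_error() does.
def fib(n: int) -> int:
    """Assumed helper: n-th Fibonacci number, used as a backoff in minutes."""
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a

# Successive failures push next_check out along 1, 1, 2, 3, 5, ... minutes,
# capped at fib(10) = 55 minutes.
print([fib(min(k, 10)) for k in range(1, 13)])
# [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 55, 55]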
class DBRecipe(Base):
    __tablename__ = 'recipe'

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, unique=True, nullable=False)
    name = Column(String, nullable=False)
    description = Column(Text, server_default='')
    author = Column(String(50))
    prep_time = Column(Interval())
    cook_time = Column(Interval())
    total_time = Column(Interval())
    date_published = Column(DateTime())
    url = Column(String(200))
    rating_count = Column(Integer())
    rating_value = Column(Float())
    fk_category = Column(UUID(as_uuid=True), ForeignKey('category.id'))
    category = relationship("DBCategory")
    fk_language = Column(UUID(as_uuid=True), ForeignKey('language.id'))
    language = relationship("DBLanguage")
    fk_vendor = Column(UUID(as_uuid=True), ForeignKey('vendor.id'))
    vendor = relationship("DBVendor")
    image = Column(String(200))
    ingredients = relationship("DBIngredient")

    @classmethod
    def from_entity(cls, recipe: Recipe):
        """Creates a DBRecipe instance from the entity class Recipe.

        Important: the relationships of the DBRecipe class are not parsed.
        They have to be added manually.
        """
        return cls(
            id=recipe.id,
            name=recipe.name,
            description=recipe.description,
            author=recipe.author.name,
            prep_time=recipe.prep_time,
            cook_time=recipe.cook_time,
            total_time=recipe.total_time,
            date_published=recipe.date_published,
            url=recipe.url.__str__(),
            image=recipe.image.__str__(),
            rating_count=recipe.aggregate_rating.rating_count if recipe.aggregate_rating is not None else None,
            rating_value=recipe.aggregate_rating.rating_value if recipe.aggregate_rating is not None else None,
            fk_category=recipe.category.id,
            fk_language=recipe.language.id,
            fk_vendor=recipe.vendor.id,
            ingredients=[DBIngredient.from_entity(ingredient, recipe.id) for ingredient in recipe.ingredients],
        )

    def to_entity(self) -> Recipe:
        return create_recipe(
            recipe_id=self.id,
            name=self.name,
            description=self.description,
            author=self.author,
            prep_time=self.prep_time,
            cook_time=self.cook_time,
            total_time=self.total_time,
            date_published=self.date_published,
            url=self.url,
            rating_value=self.rating_value,
            rating_count=self.rating_count,
            image_url=self.image,
            ingredients=[db_ingredient.to_entity() for db_ingredient in self.ingredients],
            category=self.category.to_entity(),
            vendor=self.vendor.to_entity(),
            language=self.language.to_entity(),
        )
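# Sketch of the mapping round trip, assuming a populated `recipe` entity from the
# domain layer (Recipe and create_recipe belong to that layer and are not shown),
# a configured `session`, and already-persisted category/language/vendor rows.
from datetime import timedelta

db_recipe = DBRecipe.from_entity(recipe)
session.add(db_recipe)
session.commit()

restored = db_recipe.to_entity()
# The three duration columns are Interval(), so they come back as timedelta (or None).
assert restored.total_time == recipe.total_time
assert isinstance(restored.prep_time, timedelta) or restored.prep_time is None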