import transaction from Products.SQLAlchemyDA.da import SAWrapper from Products.ZSQLMethods.SQL import manage_addZSQLMethod from z3c.sqlalchemy import createSAWrapper from z3c.sqlalchemy.mapper import MappedClassBase from sqlalchemy import MetaData, Table, Column, Integer, String, Unicode from sqlalchemy.orm import mapper ZopeTestCase.installProduct('SQLAlchemyDA', 1) metadata = MetaData() test_table = Table('test', metadata, Column('id', Integer, primary_key=True), Column('utext', Unicode(255)), Column('text', String(255))) class Test(MappedClassBase): pass mapper(Test, test_table) class TestBase(ZopeTestCase.ZopeTestCase): def createDA(self, **kw): factory = self.app.manage_addProduct['SQLAlchemyDA'] factory.manage_addSAWrapper(id='da', title='da', dsn=self.dsn,
class Movie(Base):
    """ORM model for one row of the ``movies`` table."""

    __tablename__ = 'movies'

    # Surrogate primary key.
    movie_id = Column(Integer, primary_key=True)
    # Movie title.
    title = Column(Unicode(500))
    # Genre labels stored as a single string (exact format not shown here).
    genres = Column(Unicode(500))
def chat_list(request):
    """List the current user's chats, filtered by status and/or label.

    Serves the ``chat_list*`` family of routes: optional status filter
    (unanswered / ongoing / ended), optional label filter with a redirect to
    the canonical label form, pagination at 25 per page, and both JSON and
    HTML responses.

    Raises:
        HTTPFound: redirect when the label in the URL is not normalized.
        HTTPNotFound: empty page other than page 1.
    """
    current_page = int(request.GET.get("page", 1))

    if request.matched_route.name.startswith("chat_list_unanswered"):
        current_status = "unanswered"
    elif request.matched_route.name.startswith("chat_list_ongoing"):
        current_status = "ongoing"
    elif request.matched_route.name.startswith("chat_list_ended"):
        current_status = "ended"
    else:
        current_status = None

    if request.matched_route.name.startswith("chat_list_label"):
        current_label = request.matchdict["label"].lower().strip().replace(" ", "_")
        # Redirect to the canonical (normalized) form of the label URL.
        if current_label != request.matchdict["label"]:
            raise HTTPFound(
                request.route_path("chat_list_label", label=current_label))
    else:
        current_label = None

    # Each row: (ChatUser, Chat, first Message of the chat or None).
    chats = Session.query(ChatUser, Chat, Message).join(Chat).outerjoin(
        Message,
        Message.id == Session.query(func.min(Message.id)).filter(
            Message.chat_id == Chat.id,
        ).correlate(Chat),
    ).filter(ChatUser.user_id == request.user.id)

    chat_count = Session.query(func.count('*')).select_from(ChatUser).filter(
        ChatUser.user_id == request.user.id,
    )

    if current_status == "unanswered":
        # BUG FIX: the original used ``Chat.last_user_id is not None``, which
        # Python evaluates immediately (a Column object is never None, so it
        # was always True) instead of compiling to SQL.  ``isnot(None)``
        # renders the intended ``IS NOT NULL``.
        chats = chats.filter(and_(
            Chat.last_user_id.isnot(None),
            Chat.last_user_id != request.user.id,
        ))
        chat_count = chat_count.join(Chat).filter(and_(
            Chat.last_user_id.isnot(None),
            Chat.last_user_id != request.user.id,
        ))
    elif current_status is not None:
        chats = chats.filter(Chat.status == current_status)
        chat_count = chat_count.join(Chat).filter(Chat.status == current_status)

    if current_label is not None:
        label_array = cast([current_label], ARRAY(Unicode(500)))
        chats = chats.filter(ChatUser.labels.contains(label_array))
        chat_count = chat_count.filter(ChatUser.labels.contains(label_array))

    chats = chats.order_by(Chat.updated.desc()).limit(25).offset(
        (current_page - 1) * 25).all()

    # 404 on empty pages, unless it's the first page.
    if current_page != 1 and len(chats) == 0:
        raise HTTPNotFound

    chat_count = chat_count.scalar()

    if request.matchdict.get("fmt") == "json":
        return render_to_response("json", {
            "chats": [{
                "chat_user": chat_user,
                "chat": chat,
                "prompt": prompt,
            } for chat_user, chat, prompt in chats],
            "chat_count": chat_count,
        }, request=request)

    paginator = paginate.Page(
        [],
        page=current_page,
        items_per_page=25,
        item_count=chat_count,
        url=paginate.PageURL(
            request.route_path(request.matched_route.name, label=current_label),
            {"page": current_page}),
    )

    # Per-label chat counts for the label sidebar, most used first.
    labels = (Session.query(func.unnest(ChatUser.labels), func.count("*"))
              .filter(ChatUser.user_id == request.user.id)
              .group_by(func.unnest(ChatUser.labels))
              .order_by(func.count("*").desc(),
                        func.unnest(ChatUser.labels).asc())
              .all())

    template = "layout2/chat_list.mako" if request.user.layout_version == 2 else "chat_list.mako"
    return render_to_response(template, {
        "chats": chats,
        "paginator": paginator,
        "labels": labels,
        "current_status": current_status,
        "current_label": current_label,
        "symbols": symbols,
    }, request=request)
class User(SQLAlchemyBase, JSONModel):
    """Application user account (credentials, profile and API tokens)."""

    __tablename__ = "users"

    # --- identity / credentials ---
    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, default=datetime.datetime.now, nullable=False)
    username = Column(Unicode(50), nullable=False, unique=True)
    password = Column(UnicodeText, nullable=False)  # pbkdf2_sha256 hash, never plaintext
    email = Column(Unicode(255), nullable=False)
    # API tokens; deleted along with the user (delete-orphan cascade).
    tokens = relationship("UserToken", back_populates="user",
                          cascade="all, delete-orphan")

    # --- profile fields ---
    name = Column(Unicode(50))
    surname = Column(Unicode(50))
    birthdate = Column(Date)
    genere = Column(Enum(GenereEnum), nullable=False)
    rol = Column(Enum(RolEnum), nullable=False)
    position = Column(Enum(PositionEnum))  # nullable
    phone = Column(Unicode(50))
    photo = Column(Unicode(255))
    license = Column(Enum(LicenseEnum))  # nullable
    matchname = Column(Unicode(50))
    prefsmash = Column(Enum(SmashEnum))  # nullable
    club = Column(Unicode(50))
    timeplay = Column(Unicode(50))

    # Tournaments this user owns / is inscribed in.
    tournament_owner = relationship("Tournament", back_populates="owner")
    tournament_inscriptions = relationship("Tournament",
                                           back_populates="inscriptions")

    @hybrid_property
    def public_profile(self):
        """Public-facing profile dict (no password, no surname/phone).

        NOTE(review): reads ``self.position.value`` and
        ``self.prefsmash.value`` without a None check even though those
        columns are nullable — this raises AttributeError for users that
        never set them; confirm all rows carry values.
        """
        return {
            "created_at": self.created_at.strftime(settings.DATETIME_DEFAULT_FORMAT),
            "username": self.username,
            "name": self.name,
            "email": self.email,
            "genere": self.genere.value,
            "photo": self.photo,
            "rol": self.rol.value,
            "position": self.position.value,
            "matchname": self.matchname,
            "timeplay": self.timeplay,
            "prefsmash": self.prefsmash.value,
            "club": self.club
        }

    @hybrid_property
    def photo_url(self):
        # Public URL of the stored photo; falls back to a default image.
        return _generate_media_url(self, "photo", default_image=True)

    @hybrid_property
    def photo_path(self):
        # Filesystem path of the stored photo.
        return _generate_media_path(self, "photo")

    @hybrid_method
    def set_password(self, password_string):
        # Store only the pbkdf2_sha256 hash of the given password.
        self.password = pbkdf2_sha256.hash(password_string)

    @hybrid_method
    def check_password(self, password_string):
        # Verify a candidate password against the stored hash.
        return pbkdf2_sha256.verify(password_string, self.password)

    @hybrid_method
    def create_token(self):
        """Create and return a new API token, enforcing the per-user quota.

        Raises:
            falcon.HTTPBadRequest: when the user already holds
                ``settings.MAX_USER_TOKENS`` tokens.
        """
        if len(self.tokens) < settings.MAX_USER_TOKENS:
            # 25 random bytes -> 50 hex characters.
            token_string = binascii.hexlify(os.urandom(25)).decode("utf-8")
            aux_token = UserToken(token=token_string, user=self)
            return aux_token
        else:
            raise falcon.HTTPBadRequest(title=messages.quota_exceded,
                                        description=messages.maximum_tokens_exceded)

    @hybrid_property
    def json_model(self):
        """Full serialization of the account.

        NOTE(review): includes the password hash and, unlike the other enum
        fields, returns ``self.license`` unconverted (not ``.value``) —
        confirm both are intentional.  Also reads ``position.value`` /
        ``prefsmash.value`` without a None check (nullable columns).
        """
        return {
            "created_at": self.created_at.strftime(settings.DATETIME_DEFAULT_FORMAT),
            "username": self.username,
            "email": self.email,
            "password": self.password,
            "name": self.name,
            "surname": self.surname,
            "birthdate": self.birthdate.strftime(
                settings.DATE_DEFAULT_FORMAT) if self.birthdate is not None else self.birthdate,
            "genere": self.genere.value,
            "rol": self.rol.value,
            "position": self.position.value,
            "phone": self.phone,
            "photo": self.photo_url,
            "matchname": self.matchname,
            "timeplay": self.timeplay,
            "prefsmash": self.prefsmash.value,
            "club": self.club,
            "license": self.license
        }
from sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy import Table from sqlalchemy import Unicode from sqlalchemy.dialects.postgresql import BYTEA from sqlalchemy.schema import ForeignKey from . import metadata from . import now_func from ...utils import GUIDFactory from .utc_dt import UTCDateTime breaks = Table( 'breaks', metadata, Column('guid', Unicode(64), primary_key=True, default=GUIDFactory('BK')), Column('session_guid', Unicode(64), ForeignKey( 'sessions.guid', ondelete='CASCADE', onupdate='CASCADE', ), nullable=False, index=True), Column('file_guid', Unicode(64), ForeignKey( 'files.guid', ondelete='CASCADE', onupdate='CASCADE',
class ExperimentV1(Base):
    """Persisted experiment run: metadata, per-trial timing data, and the
    pickled configuration of every collaborating component (stimulus,
    handler, window, monitor, clock, projection)."""

    @classmethod
    def todays(cls, s):
        """Return experiments created today, using session *s*."""
        # NOTE(review): relies on the backend coercing the literal 'today'
        # in the comparison — confirm the configured database supports it.
        return s.query(cls).filter(cls.created_at > 'today').all()

    @classmethod
    def query_by_date(cls, s, date):  # "20171231"
        """Return experiments created on *date*."""
        return s.query(m.ExperimentV1).filter(
            func.date(m.ExperimentV1.created_at) == date).all()

    @classmethod
    def query(cls):
        """Query this model through the global ``glab`` session."""
        return glab.query(cls)

    @classmethod
    def get_by_id(cls, id):
        """Fetch a single experiment by primary key."""
        return cls.query().get(id)

    @classmethod
    def find_keyword(cls, keyword):
        """Return (id, creation date, keyword) rows whose keyword contains
        *keyword*."""
        return glab.query(ExperimentV1.id, func.date(ExperimentV1.created_at),
                          ExperimentV1.keyword).filter(
                              ExperimentV1.keyword.contains(keyword)).all()

    __tablename__ = 'experiment_v1'

    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, default=datetime.utcnow)
    clsname = Column(Unicode(256))
    pkgname = Column(Unicode(256))
    keyword = Column(Unicode(256))
    duration = Column(Float)
    message = Column(UnicodeText)
    # BUG FIX: these columns previously used ``default={}`` — a single
    # mutable dict created once at class-definition time and shared by every
    # insert.  Passing the ``dict`` callable yields a fresh container per row.
    on_time = Column(PickleType, default=dict)
    off_time = Column(PickleType, default=dict)
    sequence = Column(PickleType, default=dict)
    ran = Column(PickleType, default=dict)
    order = Column(PickleType, default=dict)
    payload = Column(PickleType, default=dict)
    trial_list = Column(PickleType, default=dict)
    projection_clsname = Column(Unicode(256))
    projection_pkgname = Column(Unicode(256))
    projection_kwargs = Column(PickleType, default=dict)
    clock_clsname = Column(Unicode(256))
    clock_pkgname = Column(Unicode(256))
    clock_kwargs = Column(PickleType, default=dict)
    stimulus_clsname = Column(Unicode(256))
    stimulus_pkgname = Column(Unicode(256))
    stimulus_kwargs = Column(PickleType, default=dict)
    window_clsname = Column(Unicode(256))
    window_pkgname = Column(Unicode(256))
    window_kwargs = Column(PickleType, default=dict)
    handler_clsname = Column(Unicode(256))
    handler_pkgname = Column(Unicode(256))
    handler_kwargs = Column(PickleType, default=dict)
    monitor_clsname = Column(Unicode(256))
    monitor_pkgname = Column(Unicode(256))
    monitor_kwargs = Column(PickleType, default=dict)

    def __iter__(self):
        # Iterating the experiment yields its trials in presentation order.
        return iter(self.ordered_trials)

    @property
    def ordered_trials(self):
        """Trials in presentation order, each merged with its timing data."""
        seq = np.array(self.sequence)
        # assumes trial_list is indexable by an integer ndarray — TODO confirm
        trials = self.trial_list[seq].T.flatten()
        default_contrast = self.stimulus_kwargs.get('contrast', 1)
        for index, trial in enumerate(trials):
            if 'contrast' not in trial:
                trial['contrast'] = default_contrast
                # print 'trial #', index, 'default contrast was assigned', default_contrast
        on_time, off_time, sequence, ran, order = [
            np.concatenate([
                data[indice] for data, indice in zip(
                    np.array(getattr(self, attr)).T, seq.T)
            ]) for attr in 'on_time off_time sequence ran order'.split()
        ]
        return [
            dict(on_time=float(on_time), off_time=float(off_time),
                 sequence=int(sequence), ran=int(ran), order=int(order),
                 **condition)
            for on_time, off_time, sequence, ran, order, condition in zip(
                on_time, off_time, sequence, ran, order, trials)
        ]

    def export_as_matlab(self):
        """Dump this experiment (trials + component configs) to a MATLAB
        file named ``<id>-<keyword>``."""
        trials = self.ordered_trials
        conditions = self.trial_list
        stimulus_params = dict(self.stimulus_kwargs,
                               clsname=self.stimulus_clsname,
                               pkgname=self.stimulus_pkgname)
        handler_params = dict(self.handler_kwargs,
                              clsname=self.handler_clsname,
                              pkgname=self.handler_pkgname)
        window_params = dict(self.window_kwargs,
                             clsname=self.window_clsname,
                             pkgname=self.window_pkgname)
        monitor_params = dict(self.monitor_kwargs,
                              clsname=self.monitor_clsname,
                              pkgname=self.monitor_pkgname)
        clock_params = dict(self.clock_kwargs,
                            clsname=self.clock_clsname,
                            pkgname=self.clock_pkgname)
        projection_params = dict(self.projection_kwargs,
                                 clsname=self.projection_clsname,
                                 pkgname=self.projection_pkgname)
        payload = dict(
            trials=trials,
            conditions=conditions,
            id=self.id,
            keyword=self.keyword,
            duration=self.duration,
            stimulus_params=stimulus_params,
            handler_params=handler_params,
            window_params=window_params,
            monitor_params=monitor_params,
            clock_params=clock_params,
            projection_params=projection_params,
        )
        # '/' is not a valid filename character; flatten it first.
        key = self.keyword.replace('/', '_')
        io.savemat('{}-{}'.format(self.id, key), payload)
class User(Base):
    """Registered account: credentials, profile info, login history and
    owned collections."""

    __tablename__ = "user"

    # Primary key is a random UUID4 string.
    id = Column(Unicode(36), primary_key=True,
                default=lambda: unicode(uuid.uuid4()))
    email_address = Column(Unicode(50), unique=True, nullable=False)
    # Todo: Turn on strict mode for Mysql
    password = Column(Text, nullable=False)
    full_name = Column(Unicode(50))
    organization = Column(Unicode(50))
    active = Column(Boolean(), nullable=False, default=True)
    registration_info = Column(Text)  # json detailing when they were registered, etc.
    confirmed = Column(Text)  # json detailing when they confirmed, etc.
    # json detailing when they became a superuser, otherwise empty if not
    # superuser
    superuser = Column(Text)
    logins = relationship("Login",
                          order_by="desc(Login.timestamp)",
                          lazy='dynamic',  # Necessary for filter in login_count
                          foreign_keys="Login.user",
                          )
    user_collections = relationship("UserCollection",
                                    order_by="asc(UserCollection.name)",
                                    lazy='dynamic',
                                    )

    def display_num_collections(self):
        """
        Returns the number of collections or a blank string if there are zero.

        Because this is so unimportant we wrap the whole thing in a
        try/except — the last thing we want is a webpage failing to display
        because of an error here.

        Important TODO: use redis to cache this, don't want to be constantly
        computing it
        """
        try:
            num = len(list(self.user_collections))
            return display_collapsible(num)
        except Exception as why:
            print("Couldn't display_num_collections:", why)
            return ""

    def get_collection_by_name(self, collection_name):
        """Return this user's collection named *collection_name*, or None.

        BUG FIX: the original wrapped this in ``try/except NoResultFound``,
        but ``Query.first()`` never raises NoResultFound — it returns None —
        so the handler was dead code.  Behavior is unchanged.
        """
        return self.user_collections.filter_by(name=collection_name).first()

    @property
    def name_and_org(self):
        """Nice shortcut for printing out who the user is"""
        if self.organization:
            return "{} from {}".format(self.full_name, self.organization)
        else:
            return self.full_name

    @property
    def login_count(self):
        """Number of successful logins on record."""
        return self.logins.filter_by(successful=True).count()

    @property
    def confirmed_at(self):
        """Confirmation timestamp, or None when unconfirmed."""
        if self.confirmed:
            confirmed_info = json.loads(self.confirmed)
            return confirmed_info['timestamp']
        else:
            return None

    @property
    def superuser_info(self):
        """Parsed superuser metadata dict, or None for regular users."""
        if self.superuser:
            return json.loads(self.superuser)
        else:
            return None

    @property
    def crowner(self):
        """If made superuser, returns object of person who did the crowning"""
        if self.superuser:
            superuser_info = json.loads(self.superuser)
            crowner = User.query.get(superuser_info['crowned_by'])
            return crowner
        else:
            return None

    @property
    def most_recent_login(self):
        """Latest Login row (relationship is ordered newest-first), or None."""
        try:
            return self.logins[0]
        except IndexError:
            return None
server_default=npbool(False), info={ 'header_string' : _('Unique') } ) def __str__(self): req = getattr(self, '__req__', None) if req: return req.localizer.translate(_(self.name)) return str(self.name) DomainGetFullFunction = SQLFunction( 'domain_get_full', args=(SQLFunctionArgument('did', UInt32()),), returns=Unicode(255), comment='Get fully qualified name of a domain', writes_sql=False ) DomainsBaseView = View( 'domains_base', DBSession.query( Domain.id.label('domainid'), literal_column('NULL').label('parentid'), Domain.name.label('name'), Domain.enabled.label('enabled'), Domain.public.label('public'), Domain.signed.label('signed'), Domain.soa_refresh.label('soa_refresh'), Domain.soa_retry.label('soa_retry'),
class Domain(Base): """ Domain object. """ __tablename__ = 'domains_def' __table_args__ = ( Comment('Domains'), Index('domains_def_u_domain', 'parentid', 'name', unique=True), Trigger('after', 'insert', 't_domains_def_ai'), Trigger('after', 'update', 't_domains_def_au'), Trigger('after', 'delete', 't_domains_def_ad'), { 'mysql_engine' : 'InnoDB', 'mysql_charset' : 'utf8', 'info' : { 'cap_menu' : 'BASE_DOMAINS', 'cap_read' : 'DOMAINS_LIST', 'cap_create' : 'DOMAINS_CREATE', 'cap_edit' : 'DOMAINS_EDIT', 'cap_delete' : 'DOMAINS_DELETE', 'show_in_menu' : 'modules', 'menu_name' : _('Domains'), 'menu_main' : True, 'default_sort' : ({ 'property': 'name' ,'direction': 'ASC' },), 'grid_view' : ( 'domainid', MarkupColumn( name='name', header_string=_('Name'), template='{__str__}', column_flex=1, sortable=True ), 'parent', MarkupColumn( name='state', header_string=_('State'), template=TemplateObject('netprofile_domains:templates/domain_icons.mak'), cell_class='np-nopad', column_width=60, column_resizable=False ) ), 'grid_hidden' : ('domainid',), 'form_view' : ( 'name', 'parent', 'enabled', 'public', 'signed', 'soa_refresh', 'soa_retry', 'soa_expire', 'soa_minimum', 'spf_gen', 'spf_rule', 'spf_errmsg', 'dkim_name', 'dkim_data', 'dkim_test', 'dkim_subdomains', 'dkim_strict', 'dmarc_trailer', 'descr' ), 'easy_search' : ('name', 'descr'), 'detail_pane' : ('netprofile_core.views', 'dpane_simple'), 'create_wizard' : Wizard( Step('name', 'parent', 'enabled', 'public', 'signed', 'descr', title=_('Domain info')), Step('soa_refresh', 'soa_retry', 'soa_expire', 'soa_minimum', 'dkim_name', 'dkim_data', title=_('DNS options')), title=_('Add new domain') ) } } ) id = Column( 'domainid', UInt32(), Sequence('domains_def_domainid_seq'), Comment('Domain ID'), primary_key=True, nullable=False, info={ 'header_string' : _('ID') } ) parent_id = Column( 'parentid', UInt32(), ForeignKey('domains_def.domainid', name='domains_def_fk_parentid', onupdate='CASCADE'), Comment('Parent domain ID'), 
nullable=True, default=None, server_default=text('NULL'), info={ 'header_string' : _('Parent'), 'column_flex' : 1 } ) name = Column( Unicode(255), Comment('Domain name'), nullable=False, info={ 'header_string' : _('Name') } ) enabled = Column( NPBoolean(), Comment('Is domain enabled?'), nullable=False, default=True, server_default=npbool(True), info={ 'header_string' : _('Enabled') } ) public = Column( NPBoolean(), Comment('Is domain visible to outsiders?'), nullable=False, default=False, server_default=npbool(False), info={ 'header_string' : _('Public') } ) signed = Column( NPBoolean(), Comment('Needs DNSSEC signing?'), nullable=False, default=False, server_default=npbool(False), info={ 'header_string' : _('Signed') } ) soa_refresh = Column( UInt32(), Comment('SOA refresh field'), nullable=False, default=3600, info={ 'header_string' : _('SOA Refresh') } ) soa_retry = Column( UInt32(), Comment('SOA retry field'), nullable=False, default=300, info={ 'header_string' : _('SOA Retry') } ) soa_expire = Column( UInt32(), Comment('SOA expire field'), nullable=False, default=1814400, info={ 'header_string' : _('SOA Expire') } ) soa_minimum = Column( UInt32(), Comment('SOA minimum field'), nullable=False, default=3600, info={ 'header_string' : _('SOA Minimum') } ) serial_date = Column( Date(), Comment('Domain serial date'), nullable=False, info={ 'header_string' : _('Serial Date'), 'secret_value' : True } ) serial_revision = Column( 'serial_rev', UInt8(), Comment('Domain serial revision'), nullable=False, default=1, info={ 'header_string' : _('Serial Revision'), 'secret_value' : True } ) dkim_name = Column( ASCIIString(255), Comment('DKIM public key name'), nullable=True, default=None, server_default=text('NULL'), info={ 'header_string' : _('DKIM Name') } ) dkim_data = Column( ASCIIText(), Comment('DKIM public key body'), nullable=True, default=None, server_default=text('NULL'), info={ 'header_string' : _('DKIM Key') } ) dkim_test = Column( NPBoolean(), Comment('Use DKIM in 
test mode'), nullable=False, default=False, server_default=npbool(False), info={ 'header_string' : _('DKIM Test') } ) dkim_subdomains = Column( NPBoolean(), Comment('Propagate DKIM rules to subdomains'), nullable=False, default=False, server_default=npbool(False), info={ 'header_string' : _('DKIM in Subdomains') } ) dkim_strict = Column( NPBoolean(), Comment('Use DKIM strict check and discard'), nullable=False, default=False, server_default=npbool(False), info={ 'header_string' : _('DKIM Strict') } ) spf_generate = Column( 'spf_gen', NPBoolean(), Comment('Generate SPF record'), nullable=False, default=True, server_default=npbool(True), info={ 'header_string' : _('Use SPF') } ) spf_rule = Column( ASCIIText(), Comment('Custom SPF rule'), nullable=True, default=None, server_default=text('NULL'), info={ 'header_string' : _('Custom SPF Rule') } ) spf_error_message = Column( 'spf_errmsg', UnicodeText(), Comment('Custom SPF error explanation string'), nullable=True, default=None, server_default=text('NULL'), info={ 'header_string' : _('SPF Error') } ) dmarc_trailer = Column( ASCIIString(255), Comment('DMARC record trailer'), nullable=True, default=None, server_default=text('NULL'), info={ 'header_string' : _('DMARC Trailer') } ) description = Column( 'descr', UnicodeText(), Comment('Domain description'), nullable=True, default=None, server_default=text('NULL'), info={ 'header_string' : _('Description') } ) children = relationship( 'Domain', backref=backref('parent', remote_side=[id]) ) @property def serial(self): if not self.serial_date: return str(self.serial_revision % 100) return '%s%02d' % ( self.serial_date.strftime('%Y%m%d'), (self.serial_revision % 100) ) def __str__(self): if self.parent: return '%s.%s' % ( str(self.name), str(self.parent) ) return str(self.name) @classmethod def resolve(cls, name, domain_aliases=True): name = name.strip('.') sess = DBSession() candidates = [(None, domain_candidates(name), None)] while len(candidates) > 0: old_candidates = 
candidates candidates = [] domain_cond = [] da_cond = [] for domain, names, suffix in old_candidates: if domain is None or isinstance(domain, Domain): domain_cond.append(and_( Domain.parent_id == (domain.id if domain else None), Domain.name.in_(names) )) if not domain_aliases: continue if domain is None or isinstance(domain, DomainAlias): da_cond.append(and_( DomainAlias.parent_id == (domain.id if domain else None), DomainAlias.name.in_(names) )) if len(domain_cond) > 0: for domain in sess.query(Domain).filter(or_(*domain_cond)): if suffix is None: domain_name = str(domain) else: domain_name = '.'.join((domain.name, suffix)) if name == domain_name: return domain offset = name.find('.' + domain_name) if offset > 0: left_part = name[:offset] candidates.append((domain, domain_candidates(left_part), domain_name)) if domain_aliases and len(da_cond) > 0: for da in sess.query(DomainAlias).filter(or_(*da_cond)): if suffix is None: domain_name = str(da) else: domain_name = '.'.join((da.name, suffix)) if name == domain_name: return da.domain offset = name.find('.' + domain_name) if offset > 0: left_part = name[:offset] dc = domain_candidates(left_part) candidates.extend(( (da, dc, domain_name), (da.domain, dc, domain_name) ))
class Project(db.Model, DomainObject):
    '''A microtasking Project to which Tasks are associated.
    '''

    __tablename__ = 'project'

    #: ID of the project
    id = Column(Integer, primary_key=True)
    #: UTC timestamp when the project is created
    created = Column(Text, default=make_timestamp)
    #: UTC timestamp when the project is updated (or any of its relationships)
    updated = Column(Text, default=make_timestamp, onupdate=make_timestamp)
    #: Project name
    name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project slug for the URL
    short_name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project description
    description = Column(Unicode(length=255), nullable=False)
    #: Project long description
    long_description = Column(UnicodeText)
    #: Project webhook
    webhook = Column(Text)
    #: If the project allows anonymous contributions
    allow_anonymous_contributors = Column(Boolean, default=True)
    #: If the project is published
    published = Column(Boolean, nullable=False, default=False)
    # If the project is featured
    featured = Column(Boolean, nullable=False, default=False)
    # Secret key for project
    secret_key = Column(Text, default=make_uuid)
    # Zip download
    zip_download = Column(Boolean, default=True)
    # If the project owner has been emailed
    contacted = Column(Boolean, nullable=False, default=False)
    #: Project owner_id
    owner_id = Column(Integer, ForeignKey('user.id'), nullable=False)
    #: Project Category
    category_id = Column(Integer, ForeignKey('category.id'), nullable=False)
    #: Project info field formatted as JSON
    # BUG FIX: ``default=dict()`` / ``default=list()`` were evaluated once at
    # class definition, so every new row shared one mutable instance.
    # Passing the callable creates a fresh container per row.
    info = Column(MutableDict.as_mutable(JSON), default=dict)
    tasks = relationship(Task, cascade='all, delete, delete-orphan',
                         backref='project')
    task_runs = relationship(TaskRun, backref='project',
                             cascade='all, delete-orphan',
                             order_by='TaskRun.finish_time.desc()')
    category = relationship(Category)
    blogposts = relationship(Blogpost, cascade='all, delete-orphan',
                             backref='project')
    owners_ids = Column(MutableList.as_mutable(ARRAY(Integer)), default=list)

    def needs_password(self):
        """Return True when the project is password-protected."""
        return self.get_passwd_hash() is not None

    def get_passwd_hash(self):
        """Return the stored (signed) password blob, or None."""
        return self.info.get('passwd_hash')

    def get_passwd(self):
        """Return the plaintext password, or None when not protected."""
        if self.needs_password():
            return signer.loads(self.get_passwd_hash())
        return None

    def set_password(self, password):
        """Store *password* (signed); too-short passwords clear protection."""
        if len(password) > 1:
            self.info['passwd_hash'] = signer.dumps(password)
            return True
        self.info['passwd_hash'] = None
        return False

    def check_password(self, password):
        """Compare *password* against the stored one."""
        if self.needs_password():
            return self.get_passwd() == password
        return False

    def has_autoimporter(self):
        """Return True when an autoimporter is configured."""
        return self.get_autoimporter() is not None

    def get_autoimporter(self):
        """Return the autoimporter config, or None."""
        return self.info.get('autoimporter')

    def set_autoimporter(self, new=None):
        """Replace (or clear) the autoimporter config."""
        self.info['autoimporter'] = new

    def delete_autoimporter(self):
        """Remove the autoimporter config (KeyError when absent)."""
        del self.info['autoimporter']

    def has_presenter(self):
        """Return True when a task presenter exists (always True when the
        presenter feature is disabled app-wide)."""
        if current_app.config.get('DISABLE_TASK_PRESENTER') is True:
            return True
        else:
            return self.info.get('task_presenter') not in ('', None)

    @classmethod
    def public_attributes(cls):
        # NOTE: first parameter renamed from the misleading ``self`` —
        # these are classmethods, so positional call sites are unaffected.
        """Return a list of public attributes."""
        return ['id', 'description', 'info', 'n_tasks', 'n_volunteers',
                'name', 'overall_progress', 'short_name', 'created',
                'category_id', 'long_description', 'last_activity',
                'last_activity_raw', 'n_task_runs', 'n_results', 'owner',
                'updated', 'featured', 'owner_id', 'n_completed_tasks',
                'n_blogposts', 'owners_ids']

    @classmethod
    def public_info_keys(cls):
        """Return a list of public info keys (defaults plus any configured
        in ``PROJECT_INFO_PUBLIC_FIELDS``)."""
        default = ['container', 'thumbnail', 'thumbnail_url',
                   'task_presenter', 'tutorial', 'sched']
        extra = current_app.config.get('PROJECT_INFO_PUBLIC_FIELDS')
        if extra:
            return list(set(default).union(set(extra)))
        else:
            return default
def __init__(self, addr, cache_size, logger=null_logger):
    """Connect to the archive database at *addr*, define the full schema,
    set up LRU caches, and create all tables.

    Args:
        addr: SQLAlchemy connection URL.
        cache_size: dict with "event-size" and "lookup-size" entries, or
            None to skip cache creation.  NOTE(review): when None, the
            ``*_cache`` attributes are never assigned — confirm callers
            never touch them in that configuration.
        logger: destination for progress messages.
    """
    logger.info(f"Opening database: '{addr}'")
    self.db = create_engine(addr)
    self.conn = self.db.connect()
    meta = MetaData(self.db)
    self.logger = logger

    # --- message events -------------------------------------------------
    self.tb_messages = Table(
        "messages",
        meta,
        Column("message_id", BigInteger, primary_key=True),
        Column("created_at", DateTime),
        Column("edited_at", DateTime, nullable=True),
        Column("deleted_at", DateTime, nullable=True),
        Column("message_type", Enum(discord.MessageType)),
        Column("system_content", UnicodeText),
        Column("content", UnicodeText),
        Column("embeds", JSON),
        # Count of attachments, not the attachments themselves.
        Column("attachments", SmallInteger),
        Column("webhook_id", BigInteger, nullable=True),
        Column("int_user_id", BigInteger),
        Column("channel_id", BigInteger, ForeignKey("channels.channel_id")),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
    )
    # Either emoji_id (custom emoji) or emoji_unicode is meaningful per row.
    self.tb_reactions = Table(
        "reactions",
        meta,
        Column("message_id", BigInteger),
        Column("emoji_id", BigInteger),
        Column("emoji_unicode", Unicode(7)),
        Column("int_user_id", BigInteger, ForeignKey("users.int_user_id")),
        Column("created_at", DateTime, nullable=True),
        Column("deleted_at", DateTime, nullable=True),
        Column("channel_id", BigInteger, ForeignKey("channels.channel_id")),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
        UniqueConstraint(
            "message_id",
            "emoji_id",
            "emoji_unicode",
            "int_user_id",
            "created_at",
            name="uq_reactions",
        ),
    )
    self.tb_typing = Table(
        "typing",
        meta,
        Column("timestamp", DateTime),
        Column("int_user_id", BigInteger, ForeignKey("users.int_user_id")),
        Column("channel_id", BigInteger, ForeignKey("channels.channel_id")),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
        UniqueConstraint("timestamp", "int_user_id", "channel_id", "guild_id",
                         name="uq_typing"),
    )
    self.tb_pins = Table(
        "pins",
        meta,
        Column("pin_id", BigInteger, primary_key=True),
        Column(
            "message_id",
            BigInteger,
            ForeignKey("messages.message_id"),
            primary_key=True,
        ),
        # pinner_id: who pinned it; int_user_id: who authored the message.
        Column("pinner_id", BigInteger, ForeignKey("users.int_user_id")),
        Column("int_user_id", BigInteger, ForeignKey("users.int_user_id")),
        Column("channel_id", BigInteger, ForeignKey("channels.channel_id")),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
    )
    self.tb_mentions = Table(
        "mentions",
        meta,
        Column("mentioned_id", BigInteger, primary_key=True),
        Column("type", Enum(MentionType), primary_key=True),
        Column(
            "message_id",
            BigInteger,
            ForeignKey("messages.message_id"),
            primary_key=True,
        ),
        Column("channel_id", BigInteger, ForeignKey("channels.channel_id")),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
        UniqueConstraint("mentioned_id", "type", "message_id",
                         name="uq_mention"),
    )

    # --- guild / channel structure --------------------------------------
    self.tb_guilds = Table(
        "guilds",
        meta,
        Column("guild_id", BigInteger, primary_key=True),
        Column("int_owner_id", BigInteger, ForeignKey("users.int_user_id")),
        Column("name", Unicode),
        Column("icon", String),
        Column("voice_region", Enum(discord.VoiceRegion)),
        Column("afk_channel_id", BigInteger, nullable=True),
        Column("afk_timeout", Integer),
        Column("mfa", Boolean),
        Column("verification_level", Enum(discord.VerificationLevel)),
        Column("explicit_content_filter", Enum(discord.ContentFilter)),
        Column("features", ARRAY(String)),
        Column("splash", String, nullable=True),
    )
    self.tb_channels = Table(
        "channels",
        meta,
        Column("channel_id", BigInteger, primary_key=True),
        Column("name", String),
        Column("is_nsfw", Boolean),
        Column("is_deleted", Boolean),
        Column("position", SmallInteger),
        Column("topic", UnicodeText, nullable=True),
        Column("changed_roles", ARRAY(BigInteger)),
        Column(
            "category_id",
            BigInteger,
            ForeignKey("channel_categories.category_id"),
            nullable=True,
        ),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
    )
    self.tb_voice_channels = Table(
        "voice_channels",
        meta,
        Column("voice_channel_id", BigInteger, primary_key=True),
        Column("name", Unicode),
        Column("is_deleted", Boolean),
        Column("position", SmallInteger),
        Column("bitrate", Integer),
        Column("user_limit", SmallInteger),
        Column("changed_roles", ARRAY(BigInteger)),
        Column(
            "category_id",
            BigInteger,
            ForeignKey("channel_categories.category_id"),
            nullable=True,
        ),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
    )
    self.tb_channel_categories = Table(
        "channel_categories",
        meta,
        Column("category_id", BigInteger, primary_key=True),
        Column("name", Unicode),
        Column("position", SmallInteger),
        Column("is_deleted", Boolean),
        Column("is_nsfw", Boolean),
        Column("changed_roles", ARRAY(BigInteger)),
        # Self-referential: categories may nest.
        Column(
            "parent_category_id",
            BigInteger,
            ForeignKey("channel_categories.category_id"),
            nullable=True,
        ),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
    )

    # --- users and membership -------------------------------------------
    self.tb_users = Table(
        "users",
        meta,
        # Internal surrogate id; real_user_id is the platform's user id.
        Column("int_user_id", BigInteger, primary_key=True),
        Column("real_user_id", BigInteger),
        Column("name", Unicode),
        Column("discriminator", SmallInteger),
        Column("avatar", String, nullable=True),
        Column("is_deleted", Boolean),
        Column("is_bot", Boolean),
    )
    self.tb_guild_membership = Table(
        "guild_membership",
        meta,
        Column(
            "int_user_id",
            BigInteger,
            ForeignKey("users.int_user_id"),
            primary_key=True,
        ),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id"),
               primary_key=True),
        Column("is_member", Boolean),
        Column("joined_at", DateTime, nullable=True),
        Column("nick", Unicode(32), nullable=True),
        UniqueConstraint("int_user_id", "guild_id",
                         name="uq_guild_membership"),
    )
    self.tb_role_membership = Table(
        "role_membership",
        meta,
        Column("role_id", BigInteger, ForeignKey("roles.role_id")),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
        Column("int_user_id", BigInteger, ForeignKey("users.int_user_id")),
        UniqueConstraint("role_id", "int_user_id",
                         name="uq_role_membership"),
    )

    # --- per-user history tables ----------------------------------------
    self.tb_avatar_history = Table(
        "avatar_history",
        meta,
        Column("user_id", BigInteger, primary_key=True),
        Column("timestamp", DateTime, primary_key=True),
        Column("avatar", LargeBinary),
        Column("avatar_ext", String),
    )
    self.tb_username_history = Table(
        "username_history",
        meta,
        Column("user_id", BigInteger, primary_key=True),
        Column("timestamp", DateTime, primary_key=True),
        Column("username", Unicode),
    )
    self.tb_nickname_history = Table(
        "nickname_history",
        meta,
        Column("user_id", BigInteger, primary_key=True),
        Column("timestamp", DateTime, primary_key=True),
        Column("nickname", Unicode),
    )

    # --- emojis, roles, audit log ---------------------------------------
    self.tb_emojis = Table(
        "emojis",
        meta,
        Column("emoji_id", BigInteger),
        Column("emoji_unicode", Unicode(7)),
        Column("is_custom", Boolean),
        Column("is_managed", Boolean, nullable=True),
        Column("is_deleted", Boolean),
        Column("name", ARRAY(String)),
        Column("category", ARRAY(String)),
        Column("roles", ARRAY(BigInteger), nullable=True),
        Column("guild_id", BigInteger, nullable=True),
        UniqueConstraint("emoji_id", "emoji_unicode", name="uq_emoji"),
    )
    self.tb_roles = Table(
        "roles",
        meta,
        Column("role_id", BigInteger, primary_key=True),
        Column("name", Unicode),
        Column("color", Integer),
        Column("raw_permissions", BigInteger),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
        Column("is_hoisted", Boolean),
        Column("is_managed", Boolean),
        Column("is_mentionable", Boolean),
        Column("is_deleted", Boolean),
        Column("position", SmallInteger),
    )
    self.tb_audit_log = Table(
        "audit_log",
        meta,
        Column("audit_entry_id", BigInteger, primary_key=True),
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id")),
        Column("action", Enum(discord.AuditLogAction)),
        Column("int_user_id", BigInteger, ForeignKey("users.int_user_id")),
        Column("reason", Unicode, nullable=True),
        Column("category", Enum(discord.AuditLogActionCategory),
               nullable=True),
        Column("before", JSON),
        Column("after", JSON),
        UniqueConstraint("audit_entry_id", "guild_id", name="uq_audit_log"),
    )

    # --- crawl bookmarks (resume points for the scrapers) ----------------
    self.tb_channel_crawl = Table(
        "channel_crawl",
        meta,
        Column(
            "channel_id",
            BigInteger,
            ForeignKey("channels.channel_id"),
            primary_key=True,
        ),
        Column("last_message_id", BigInteger),
    )
    self.tb_audit_log_crawl = Table(
        "audit_log_crawl",
        meta,
        Column("guild_id", BigInteger, ForeignKey("guilds.guild_id"),
               primary_key=True),
        Column("last_audit_entry_id", BigInteger),
    )

    # Caches
    if cache_size is not None:
        self.message_cache = LruCache(cache_size["event-size"])
        self.typing_cache = LruCache(cache_size["event-size"])
        self.guild_cache = LruCache(cache_size["lookup-size"])
        self.channel_cache = LruCache(cache_size["lookup-size"])
        self.voice_channel_cache = LruCache(cache_size["lookup-size"])
        self.channel_category_cache = LruCache(cache_size["lookup-size"])
        self.user_cache = LruCache(cache_size["lookup-size"])
        self.emoji_cache = LruCache(cache_size["lookup-size"])
        self.role_cache = LruCache(cache_size["lookup-size"])

    # Create tables
    meta.create_all(self.db)
    self.logger.info("Created all tables.")
class Task(TimestampMixin, DeclarativeBase):
    """Polymorphic base row for queued background tasks.

    Subclasses override :meth:`do_` and are dispatched via the
    ``type`` column (single-table inheritance, see ``__mapper_args__``).
    """

    __tablename__ = 'task'

    id = Field(Integer, primary_key=True, json='id')
    # Higher priority pops first (see pop()'s ORDER BY priority DESC).
    priority = Field(Integer, nullable=False, default=50, json='priority')
    status = Field(
        Enum('new', 'success', 'in-progress', 'failed',
             name='task_status_enum'),
        default='new', nullable=True, json='status')
    fail_reason = Field(Unicode(2048), nullable=True, json='reason')
    started_at = Field(DateTime, nullable=True, json='startedAt')
    terminated_at = Field(DateTime, nullable=True, json='terminatedAt')
    # Polymorphic discriminator column.
    type = Field(Unicode(50))

    __mapper_args__ = {
        'polymorphic_identity': __tablename__,
        'polymorphic_on': type
    }

    def do_(self, context):  # pragma: no cover
        """Execute the task's work. Subclasses must implement this."""
        raise NotImplementedError

    @classmethod
    def pop(cls, statuses=('new',), filters=None, session=DBSession):
        """Atomically claim the next task and mark it 'in-progress'.

        :param statuses: iterable of statuses eligible for popping
            (immutable default — never share a mutable default argument).
        :param filters: optional SQLAlchemy filter or raw SQL string.
        :raises TaskPopError: when no eligible task exists.
        :returns: the claimed task instance.
        """
        find_query = session.query(cls.id.label('id'), cls.created_at,
                                   cls.status, cls.type, cls.priority)
        if filters is not None:
            find_query = find_query.filter(
                text(filters) if isinstance(filters, str) else filters)

        # Lock a single candidate row so concurrent workers cannot claim
        # the same task; the CTE feeds the UPDATE below.
        find_query = find_query \
            .filter(cls.status.in_(statuses)) \
            .order_by(cls.priority.desc()) \
            .order_by(cls.created_at) \
            .limit(1) \
            .with_for_update()

        cte = find_query.cte('find_query')
        update_query = Task.__table__.update() \
            .where(Task.id == cte.c.id) \
            .values(status='in-progress') \
            .returning(Task.__table__.c.id)
        task_id = session.execute(update_query).fetchone()
        session.commit()

        if not task_id:
            raise TaskPopError('There is no task to pop')

        task_id = task_id[0]
        task = session.query(cls).filter(cls.id == task_id).one()
        return task

    def execute(self, context, session=DBSession):
        """Run this task in a fresh session-bound instance and commit.

        Rolls back on any failure (including KeyboardInterrupt, hence
        BaseException) and re-raises so the caller sees the error.
        """
        try:
            isolated_task = session.query(Task).filter(
                Task.id == self.id).one()
            isolated_task.do_(context)
            session.commit()
        except BaseException:
            session.rollback()
            raise

    @classmethod
    def cleanup(cls, session=DBSession, statuses=('in-progress',)):
        """Reset tasks stuck in *statuses* back to 'new' (e.g. after a crash)."""
        # NOTE(review): with_lockmode() is deprecated in modern SQLAlchemy
        # in favor of with_for_update() — confirm the pinned version.
        session.query(Task) \
            .filter(Task.status.in_(statuses)) \
            .with_lockmode('update') \
            .update(
                {'status': 'new', 'started_at': None, 'terminated_at': None},
                synchronize_session='fetch')

    @classmethod
    def reset_status(cls, task_id, session=DBSession,
                     statuses=('in-progress',)):
        """Reset a single task (by id) back to 'new' if in *statuses*."""
        session.query(Task) \
            .filter(Task.status.in_(statuses)) \
            .filter(Task.id == task_id) \
            .with_lockmode('update') \
            .update(
                {'status': 'new', 'started_at': None, 'terminated_at': None},
                synchronize_session='fetch')
class Cluster(Base):
    """An OpenStack environment: nodes, network settings and pending changes."""

    __tablename__ = 'clusters'

    MODES = ('multinode', 'ha_full', 'ha_compact')
    STATUSES = ('new', 'deployment', 'operational', 'error', 'remove')
    NET_MANAGERS = ('FlatDHCPManager', 'VlanManager')
    GROUPING = ('roles', 'hardware', 'both')
    # Neutron-related
    NET_PROVIDERS = ('nova_network', 'neutron')
    NET_L23_PROVIDERS = ('ovs', )
    NET_SEGMENT_TYPES = ('none', 'vlan', 'gre')

    id = Column(Integer, primary_key=True)
    mode = Column(Enum(*MODES, name='cluster_mode'),
                  nullable=False, default='multinode')
    status = Column(Enum(*STATUSES, name='cluster_status'),
                    nullable=False, default='new')
    net_provider = Column(Enum(*NET_PROVIDERS, name='net_provider'),
                          nullable=False, default='nova_network')
    net_l23_provider = Column(Enum(*NET_L23_PROVIDERS,
                                   name='net_l23_provider'),
                              nullable=False, default='ovs')
    net_segment_type = Column(Enum(*NET_SEGMENT_TYPES,
                                   name='net_segment_type'),
                              nullable=False, default='vlan')
    net_manager = Column(Enum(*NET_MANAGERS, name='cluster_net_manager'),
                         nullable=False, default='FlatDHCPManager')
    grouping = Column(Enum(*GROUPING, name='cluster_grouping'),
                      nullable=False, default='roles')
    name = Column(Unicode(50), unique=True, nullable=False)
    release_id = Column(Integer, ForeignKey('releases.id'), nullable=False)
    nodes = relationship("Node", backref="cluster", cascade="delete",
                         order_by='Node.id')
    tasks = relationship("Task", backref="cluster", cascade="delete")
    attributes = relationship("Attributes", uselist=False,
                              backref="cluster", cascade="delete")
    changes_list = relationship("ClusterChanges", backref="cluster",
                                cascade="delete")
    # We must keep all notifications even if cluster is removed.
    # It is because we want user to be able to see
    # the notification history so that is why we don't use
    # cascade="delete" in this relationship
    # During cluster deletion sqlalchemy engine will set null
    # into cluster foreign key column of notification entity
    notifications = relationship("Notification", backref="cluster")
    network_groups = relationship("NetworkGroup", backref="cluster",
                                  cascade="delete",
                                  order_by="NetworkGroup.id")
    # Callable defaults: a bare mutable literal (list/dict) would be a single
    # shared object reused for every inserted row.
    dns_nameservers = Column(JSON, default=lambda: ["8.8.8.8", "8.8.4.4"])
    replaced_deployment_info = Column(JSON, default=dict)
    replaced_provisioning_info = Column(JSON, default=dict)
    is_customized = Column(Boolean, default=False)
    neutron_config = relationship("NeutronConfig",
                                  backref=backref("cluster"),
                                  cascade="all,delete",
                                  uselist=False)

    def replace_provisioning_info(self, data):
        """Store user-supplied provisioning info and flag customization."""
        self.replaced_provisioning_info = data
        self.is_customized = True
        return self.replaced_provisioning_info

    def replace_deployment_info(self, data):
        """Store user-supplied deployment info and flag customization."""
        self.replaced_deployment_info = data
        self.is_customized = True
        return self.replaced_deployment_info

    @property
    def changes(self):
        """Pending changes as a list of {"name", "node_id"} dicts."""
        return [{"name": i.name, "node_id": i.node_id}
                for i in self.changes_list]

    @changes.setter
    def changes(self, value):
        self.changes_list = value

    @property
    def is_ha_mode(self):
        return self.mode in ('ha_full', 'ha_compact')

    @property
    def full_name(self):
        return '%s (id=%s, mode=%s)' % (self.name, self.id, self.mode)

    @property
    def are_attributes_locked(self):
        """Attributes are locked once deployment has started or is running."""
        return self.status != "new" or any(
            t.name == "deploy" and t.status == "running"
            for t in self.tasks)

    @classmethod
    def validate(cls, data):
        """Validate creation payload; raise webapi errors on conflicts.

        :raises web.webapi.conflict: duplicate environment name.
        :raises web.webapi.badrequest: unknown release id.
        """
        d = cls.validate_json(data)
        if d.get("name"):
            if db().query(Cluster).filter_by(name=d["name"]).first():
                c = web.webapi.conflict
                c.message = "Environment with this name already exists"
                raise c()
        if d.get("release"):
            release = db().query(Release).get(d.get("release"))
            if not release:
                raise web.webapi.badrequest(message="Invalid release id")
        return d

    def add_pending_changes(self, changes_type, node_id=None):
        """Record a pending change unless an identical one already exists."""
        ex_chs = db().query(ClusterChanges).filter_by(cluster=self,
                                                      name=changes_type)
        if not node_id:
            ex_chs = ex_chs.first()
        else:
            ex_chs = ex_chs.filter_by(node_id=node_id).first()
        # do nothing if changes with the same name already pending
        if ex_chs:
            return
        ch = ClusterChanges(cluster_id=self.id, name=changes_type)
        if node_id:
            ch.node_id = node_id
        db().add(ch)
        db().commit()

    def clear_pending_changes(self, node_id=None):
        """Delete pending changes, optionally scoped to one node."""
        chs = db().query(ClusterChanges).filter_by(cluster_id=self.id)
        if node_id:
            chs = chs.filter_by(node_id=node_id)
        # Explicit loop: map() is lazy on Python 3, so the previous
        # map(db().delete, chs.all()) would never actually delete anything.
        for change in chs.all():
            db().delete(change)
        db().commit()

    @property
    def network_manager(self):
        """Return the network manager class matching the net provider."""
        if self.net_provider == 'neutron':
            from nailgun.network.neutron import NeutronManager
            return NeutronManager
        else:
            from nailgun.network.nova_network import NovaNetworkManager
            return NovaNetworkManager
class Release(Base):
    """An OpenStack release: version metadata plus per-release defaults."""

    __tablename__ = 'releases'
    # A release is identified by (name, version) pairs.
    __table_args__ = (UniqueConstraint('name', 'version'), )

    id = Column(Integer, primary_key=True)
    name = Column(Unicode(100), nullable=False)
    # Combined "OSt-MOS" version string, e.g. '2014.1.1-5.0.2'.
    version = Column(String(30), nullable=False)
    description = Column(Unicode)
    operating_system = Column(String(50), nullable=False)
    state = Column(Enum(*consts.RELEASE_STATES, name='release_state'),
                   nullable=False,
                   default=consts.RELEASE_STATES.unavailable)
    # Mutable* wrappers make in-place JSON edits visible to the session.
    networks_metadata = Column(MutableDict.as_mutable(JSON), default={})
    attributes_metadata = Column(MutableDict.as_mutable(JSON), default={})
    volumes_metadata = Column(MutableDict.as_mutable(JSON), default={})
    modes_metadata = Column(MutableDict.as_mutable(JSON), default={})
    roles_metadata = Column(MutableDict.as_mutable(JSON), default={})
    network_roles_metadata = Column(MutableList.as_mutable(JSON),
                                    default=[], server_default='[]')
    vmware_attributes_metadata = Column(MutableDict.as_mutable(JSON),
                                        default={})
    components_metadata = Column(MutableList.as_mutable(JSON),
                                 default=[], server_default='[]')
    modes = Column(MutableList.as_mutable(JSON), default=[])
    clusters = relationship("Cluster",
                            primaryjoin="Release.id==Cluster.release_id",
                            backref="release", cascade="all,delete")
    extensions = Column(psql.ARRAY(String(consts.EXTENSION_NAME_MAX_SIZE)),
                        default=[], nullable=False, server_default='{}')
    node_attributes = Column(MutableDict.as_mutable(JSON), default={},
                             server_default='{}', nullable=False)

    # TODO(enchantner): get rid of properties
    @property
    def openstack_version(self):
        # Left part of 'OSt-MOS', e.g. '2014.1.1' from '2014.1.1-5.0.2'.
        return self.version.split('-')[0]

    @property
    def environment_version(self):
        """Returns environment version based on release version.

        A release version consists of 'OSt' and 'MOS' versions:
        '2014.1.1-5.0.2'
        so we need to extract 'MOS' version and returns it as result.

        :returns: an environment version
        """
        # unfortunately, Fuel 5.0 didn't have an env version in release_version
        # so we need to handle that special case
        if self.version == '2014.1':
            version = '5.0'
        else:
            try:
                version = self.version.split('-')[1]
            except IndexError:
                # No '-' separator: env version is unknown.
                version = ''
        return version

    @property
    def os_weight(self):
        # Position of the OS in the (reversed) preference list;
        # -1 means the OS is not ranked.
        try:
            weight = consts.RELEASE_OS[::-1].index(self.operating_system)
        except ValueError:
            weight = -1
        return weight

    def __cmp__(self, other):
        """Allows to compare two releases

        :other: an instance of nailgun.db.sqlalchemy.models.release.Release
        """
        # NOTE(review): __cmp__ is honored by Python 2 only; under Python 3
        # ordering operators would need __lt__/__eq__ instead — confirm the
        # target interpreter. Also note the comparisons below are plain
        # string comparisons, so e.g. '10.0' sorts before '9.0'.
        # Order of precedence: environment version, then OpenStack version,
        # then operating system (by weight, or name when both unranked).
        if self.environment_version < other.environment_version:
            return -1
        if self.environment_version > other.environment_version:
            return 1
        if self.openstack_version < other.openstack_version:
            return -1
        if self.openstack_version > other.openstack_version:
            return 1
        if self.os_weight == other.os_weight == -1:
            # Both OSes unranked: fall back to reverse-alphabetical name order.
            if self.operating_system > other.operating_system:
                return -1
            if self.operating_system < other.operating_system:
                return 1
        else:
            if self.os_weight < other.os_weight:
                return -1
            if self.os_weight > other.os_weight:
                return 1
        return 0
# define metadata for table
from sqlalchemy import MetaData
from sqlalchemy import Table, Column, ForeignKey, ForeignKeyConstraint
from sqlalchemy import Integer, Unicode, UnicodeText

# metadata is collection of tables and can be traversed like XML DOM
metadata = MetaData()

# "city" has a composite primary key (id, version).
city_table = Table("city", metadata,
                   Column("id", Integer, primary_key=True),
                   Column("version", Integer, primary_key=True),
                   Column("name", UnicodeText))

# "user" references a city row through the composite key; the FK target
# columns must be the city PK (id, version) — referencing "city.name"
# here was a bug: city_version would have pointed at a non-key text column.
user_table = Table(
    "user", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", Unicode(100)),
    Column("city_id", Integer),
    Column("city_version", Integer),
    ForeignKeyConstraint(["city_id", "city_version"],
                         ["city.id", "city.version"]))

# init engine over database
from sqlalchemy import create_engine
engine = create_engine("sqlite://", echo=True)

# create table from metadata
metadata.create_all(engine)
class DomainAlias(Base):
	"""
	Domain alias object. Same contents, different name.
	"""
	__tablename__ = 'domains_aliases'
	# Table-level metadata: comment, indexes, and the UI/capability info
	# dict consumed by the NetProfile admin framework.
	__table_args__ = (
		Comment('Domains Aliases'),
		Index('domains_aliases_u_da', 'parentid', 'name', unique=True),
		Index('domains_aliases_i_domain', 'domainid'),
		{
			'mysql_engine'  : 'InnoDB',
			'mysql_charset' : 'utf8',
			'info'          : {
				'cap_menu'      : 'BASE_DOMAINS',
				'cap_read'      : 'DOMAINS_LIST',
				'cap_create'    : 'DOMAINS_CREATE',
				'cap_edit'      : 'DOMAINS_EDIT',
				'cap_delete'    : 'DOMAINS_DELETE',

				'menu_name'     : _('Aliases'),
				'default_sort'  : ({ 'property': 'name' ,'direction': 'ASC' },),
				'grid_view'     : (
					'daid',
					MarkupColumn(
						name='name',
						header_string=_('Name'),
						template='{__str__}',
						column_flex=1,
						sortable=True
					),
					'parent',
					'domain'
				),
				'grid_hidden'   : ('daid',),
				'easy_search'   : ('name',),
				'detail_pane'   : ('netprofile_core.views', 'dpane_simple'),
				'create_wizard' : SimpleWizard(title=_('Add new domain alias'))
			}
		}
	)
	# Surrogate primary key.
	id = Column(
		'daid',
		UInt32(),
		Sequence('domains_aliases_daid_seq'),
		Comment('Domain alias ID'),
		primary_key=True,
		nullable=False,
		info={
			'header_string' : _('ID')
		}
	)
	# Optional parent domain; cascades on delete/update.
	parent_id = Column(
		'parentid',
		UInt32(),
		ForeignKey('domains_def.domainid', name='domains_aliases_fk_parentid', ondelete='CASCADE', onupdate='CASCADE'),
		Comment('Parent domain ID'),
		nullable=True,
		default=None,
		server_default=text('NULL'),
		info={
			'header_string' : _('Parent'),
			'column_flex'   : 1
		}
	)
	# The domain this alias points at.
	domain_id = Column(
		'domainid',
		UInt32(),
		ForeignKey('domains_def.domainid', name='domains_aliases_fk_domainid', ondelete='CASCADE', onupdate='CASCADE'),
		Comment('Original domain ID'),
		nullable=True,
		default=None,
		server_default=text('NULL'),
		info={
			'header_string' : _('Origin')
		}
	)
	name = Column(
		Unicode(255),
		Comment('Alias name'),
		nullable=False,
		info={
			'header_string' : _('Name')
		}
	)
	# Both relationships target Domain; foreign_keys disambiguates them.
	domain = relationship(
		'Domain',
		backref='aliases',
		innerjoin=True,
		foreign_keys=domain_id
	)
	parent = relationship(
		'Domain',
		backref='children_aliases',
		foreign_keys=parent_id
	)

	def __str__(self):
		# Fully-qualified form "name.parent" when a parent domain exists.
		if self.parent:
			return '%s.%s' % (
				str(self.name),
				str(self.parent)
			)
		return str(self.name)
class App(db.Model, DomainObject):
    '''A microtasking Application to which Tasks are associated.
    '''

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.id)

    __tablename__ = 'app'

    #: Unique id for this app (autogenerated)
    id = Column(Integer, primary_key=True)
    #: created timestamp (automatically set)
    created = Column(Text, default=make_timestamp)
    #: Name / Title for this Application
    name = Column(Unicode(length=255), unique=True)
    #: slug used in urls etc
    short_name = Column(Unicode(length=255), unique=True)
    #: description
    description = Column(Unicode(length=255))
    #: long description
    long_description = Column(UnicodeText)
    #: Allow anonymous contributors to participate in the application tasks
    allow_anonymous_contributors = Column(Boolean, default=True)
    ## TODO: What is this?
    long_tasks = Column(Integer, default=0)
    #: Boolean integer (0,1) indicating that \
    #: this App should be hidden from everyone but Administrators
    hidden = Column(Integer, default=0)
    #: owner (id)
    owner_id = Column(Integer, ForeignKey('user.id'))
    ## Following may not be relevant ...
    ## TODO: ask about these
    #: estimate of time it should take for user
    time_estimate = Column(Integer, default=0)
    #: time limit for a task
    time_limit = Column(Integer, default=0)
    calibration_frac = Column(Float, default=0)
    bolt_course_id = Column(Integer, default=0)
    #: category(id)
    category_id = Column(Integer, ForeignKey('category.id'))

    #: Standard JSON blob for additional data. This field also
    #: stores information used by PyBossa, such as the app thumbnail,
    #: the schedule mode, and the task presenter.
    #:
    #: {
    #:     task_presenter: [html/javascript],
    #:     thumbnail: [url to the thumbnail image]
    #:     sched: [scheduling mode]
    #: }
    #:
    info = Column(JSONType, default=dict)

    ## Relationships
    #: `Task`s for this app.`
    tasks = relationship('Task', cascade='all, delete-orphan', backref='app')
    #: `TaskRun`s for this app.`
    task_runs = relationship('TaskRun', backref='app',
                             cascade='all, delete-orphan',
                             order_by='TaskRun.finish_time.desc()')
    #: `Featured` or not for this app
    featured = relationship('Featured', cascade='all, delete-orphan')
    #: `category` or not for this app
    category = relationship('Category')

    #: Percentage of completed tasks based on Task.state
    #: (0 not done, 1 completed)
    def completion_status(self):
        """Returns the percentage of submitted Tasks Runs done"""
        sql = text(
            '''SELECT COUNT(task_id) FROM task_run WHERE app_id=:app_id''')
        results = db.engine.execute(sql, app_id=self.id)
        for row in results:
            n_task_runs = float(row[0])
        sql = text('''SELECT SUM(n_answers) FROM task WHERE app_id=:app_id''')
        results = db.engine.execute(sql, app_id=self.id)
        for row in results:
            if row[0] is None:
                # NOTE(review): 30 looks like an assumed default answers-per-
                # task count when no task declares n_answers — confirm.
                n_expected_task_runs = float(30 * n_task_runs)
            else:
                n_expected_task_runs = float(row[0])
        pct = float(0)
        if n_expected_task_runs != 0:
            pct = n_task_runs / n_expected_task_runs
        return pct

    def n_completed_tasks(self):
        """Returns the number of Tasks that are completed"""
        # Generator expression replaces the manual counter loop.
        return sum(1 for t in self.tasks if t.state == "completed")

    def last_activity(self):
        """Return the most recent task-run finish time, pretty-printed.

        Returns None implicitly when this app has no task runs at all.
        """
        sql = text('''SELECT finish_time FROM task_run
                      WHERE app_id=:app_id
                      ORDER BY finish_time DESC LIMIT 1''')
        results = db.engine.execute(sql, app_id=self.id)
        # At most one row due to LIMIT 1; return on the first.
        for row in results:
            if row is not None:
                return pretty_date(row[0])
            else:
                return None
class DomainServiceType(Base):
	"""
	Domains-to-hosts linkage type.
	"""
	__tablename__ = 'domains_hltypes'
	# Table-level metadata: comment, unique-name index, and the
	# UI/capability info dict consumed by the NetProfile admin framework.
	__table_args__ = (
		Comment('Domains-hosts linkage types'),
		Index('domains_hltypes_u_name', 'name', unique=True),
		{
			'mysql_engine'  : 'InnoDB',
			'mysql_charset' : 'utf8',
			'info'          : {
				'cap_menu'      : 'BASE_DOMAINS',
				'cap_read'      : 'DOMAINS_LIST',
				'cap_create'    : 'DOMAINS_SERVICETYPES_CREATE',
				'cap_edit'      : 'DOMAINS_SERVICETYPES_EDIT',
				'cap_delete'    : 'DOMAINS_SERVICETYPES_DELETE',

				'show_in_menu'  : 'admin',
				'menu_name'     : _('Domain Service Types'),
				'default_sort'  : ({ 'property': 'name' ,'direction': 'ASC' },),
				'grid_view'     : ('hltypeid', 'name', 'unique'),
				'grid_hidden'   : ('hltypeid',),
				'easy_search'   : ('name',),

				'create_wizard' : SimpleWizard(title=_('Add new type'))
			}
		}
	)
	# Surrogate primary key; sequence starts at 101 to reserve low IDs.
	id = Column(
		'hltypeid',
		UInt32(),
		Sequence('domains_hltypes_hltypeid_seq', start=101, increment=1),
		Comment('Domains-hosts linkage type ID'),
		primary_key=True,
		nullable=False,
		info={
			'header_string' : _('ID')
		}
	)
	name = Column(
		Unicode(255),
		Comment('Domains-hosts linkage type name'),
		nullable=False,
		info={
			'header_string' : _('Name'),
			'column_flex'   : 1
		}
	)
	# Whether only one linkage of this type may exist per domain.
	unique = Column(
		NPBoolean(),
		Comment('Is unique per domain?'),
		nullable=False,
		default=False,
		server_default=npbool(False),
		info={
			'header_string' : _('Unique')
		}
	)

	def __str__(self):
		# Translate the name through the request localizer when available.
		req = getattr(self, '__req__', None)
		if req:
			return req.localizer.translate(_(self.name))
		return str(self.name)
from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, Unicode

log = logging.getLogger(__name__)
metadata = MetaData()

# Tag association for *stored* workflows: links a tag to a stored workflow
# and the tagging user, keeping both the raw and user-normalized name/value.
StoredWorkflowTagAssociation_table = Table(
    "stored_workflow_tag_association", metadata,
    Column("id", Integer, primary_key=True),
    Column("stored_workflow_id", Integer,
           ForeignKey("stored_workflow.id"), index=True),
    Column("tag_id", Integer, ForeignKey("tag.id"), index=True),
    Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True),
    Column("user_tname", Unicode(255), index=True),
    Column("value", Unicode(255), index=True),
    Column("user_value", Unicode(255), index=True))

# Same association shape, but for workflow versions ("workflow" table).
WorkflowTagAssociation_table = Table(
    "workflow_tag_association", metadata,
    Column("id", Integer, primary_key=True),
    Column("workflow_id", Integer, ForeignKey("workflow.id"), index=True),
    Column("tag_id", Integer, ForeignKey("tag.id"), index=True),
    Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True),
    Column("user_tname", Unicode(255), index=True),
    Column("value", Unicode(255), index=True),
    Column("user_value", Unicode(255), index=True))


# Migration entry point (body continues beyond this chunk).
def upgrade(migrate_engine):
class User(Base):
    """The User table

    Each user can have one or more single user notebook servers.

    Each single user notebook server will have a unique token for authorization.
    Therefore, a user with multiple notebook servers will have multiple tokens.

    API tokens grant access to the Hub's REST API.
    These are used by single-user servers to authenticate requests,
    and external services to manipulate the Hub.

    Cookies are set with a single ID.
    Resetting the Cookie ID invalidates all cookies, forcing user to login again.

    A `state` column contains a JSON dict,
    used for restoring state of a Spawner.


    `servers` is a list that contains a reference for each of the user's single user notebook servers.
    The method `server` returns the first entry in the user's `servers` list.
    """

    __tablename__ = 'users'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(Unicode(255), unique=True)

    # Spawner rows owned by this user; deleted along with the user.
    _orm_spawners = relationship(
        "Spawner",
        backref="user",
        cascade="all, delete-orphan",
    )

    @property
    def orm_spawners(self):
        # Map spawner name -> Spawner row for convenient lookup.
        return {s.name: s for s in self._orm_spawners}

    admin = Column(Boolean, default=False)
    created = Column(DateTime, default=datetime.utcnow)
    last_activity = Column(DateTime, nullable=True)

    # Token/oauth rows owned by this user; deleted along with the user.
    api_tokens = relationship(
        "APIToken",
        backref="user",
        cascade="all, delete-orphan",
    )
    oauth_tokens = relationship(
        "OAuthAccessToken",
        backref="user",
        cascade="all, delete-orphan",
    )
    oauth_codes = relationship(
        "OAuthCode",
        backref="user",
        cascade="all, delete-orphan",
    )
    # Single cookie ID per user; regenerating it invalidates all sessions.
    cookie_id = Column(Unicode(255),
                       default=new_token,
                       nullable=False,
                       unique=True)

    # User.state is actually Spawner state
    # We will need to figure something else out if/when we have multiple spawners per user
    state = Column(JSONDict)

    # Authenticators can store their state here:
    # Encryption is handled elsewhere
    encrypted_auth_state = Column(LargeBinary)

    def __repr__(self):
        return "<{cls}({name} {running}/{total} running)>".format(
            cls=self.__class__.__name__,
            name=self.name,
            total=len(self._orm_spawners),
            running=sum(bool(s.server) for s in self._orm_spawners),
        )

    def new_api_token(self, token=None, **kwargs):
        """Create a new API token

        If `token` is given, load that token.
        """
        return APIToken.new(token=token, user=self, **kwargs)

    @classmethod
    def find(cls, db, name):
        """Find a user by name.

        Returns None if not found.
        """
        return db.query(cls).filter(cls.name == name).first()
class Tournament(SQLAlchemyBase, JSONModel):
    """A padel tournament with registration windows, categories and rounds."""

    __tablename__ = "tournaments"

    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, default=datetime.datetime.now,
                        nullable=False)
    edited_at = Column(DateTime, default=None)
    name = Column(Unicode(255), nullable=False)
    start_date = Column(DateTime, nullable=False)
    finish_date = Column(DateTime, nullable=False)
    start_register_date = Column(DateTime, nullable=False)
    finish_register_date = Column(DateTime, nullable=False)
    limit_couples = Column(Integer)
    inscription_type = Column(Enum(TournamentPrivacyTypeEnum))  # Public o privat (requeix codi d'invitació)
    type = Column(Enum(TournamentTypeEnum), nullable=False)
    price_1 = Column(Float, nullable=False)
    price_2 = Column(Float, nullable=False)
    description = Column(UnicodeText)
    poster = Column(Unicode(255))

    # Relació (User-Tournament) per tenir l'organitzador.
    owner_id = Column(Integer, ForeignKey("users.id"), nullable=False)
    owner = relationship("User", back_populates="tournament_owner")

    # Relació inscripcions
    inscriptions = relationship("User",
                                secondary=TournamentInscriptionsAssociation,
                                back_populates="tournament_inscriptions")

    # Relació (Facility-Tournament) per tenir el club
    facility_id = Column(Integer, ForeignKey("facilities.id"),
                         nullable=False)
    facility = relationship("Facility", back_populates="tournaments")

    # Categories
    categories = relationship("Category",
                              secondary=TournamentCategoriesAssociation,
                              back_populates="tournament_categories")

    # Relació rondes
    rounds = relationship("Round", back_populates="tournament")

    @hybrid_property
    def status(self):
        """Derived status: open until registration closes, then playing
        until the finish date, then closed."""
        current_datetime = datetime.datetime.now()
        if current_datetime < self.finish_register_date:
            return TournamentStatusEnum.open
        elif (current_datetime > self.finish_register_date) and \
                (current_datetime < self.finish_date):
            return TournamentStatusEnum.playing
        else:
            return TournamentStatusEnum.closed

    @status.expression
    def status(cls):
        # NOTE(review): now() is evaluated when the SQL expression is built,
        # not by the database — acceptable for short-lived queries, but
        # consider func.now() for server-side time. Also the 'playing'
        # branch above maps to 'in_game' here — confirm both enum members
        # are intended to represent the same state.
        current_datetime = datetime.datetime.now()
        return case(
            [
                (current_datetime < cls.finish_register_date,
                 type_coerce(TournamentStatusEnum.open,
                             Enum(TournamentStatusEnum))),
                (and_(current_datetime > cls.finish_register_date,
                      current_datetime < cls.finish_date),
                 type_coerce(TournamentStatusEnum.in_game,
                             Enum(TournamentStatusEnum)))
            ],
            else_=type_coerce(TournamentStatusEnum.closed,
                              Enum(TournamentStatusEnum))
        )

    @hybrid_property
    def json_model(self):
        """Serializable dict representation used by the API layer."""
        return {
            "id": self.id,
            "price_1": self.price_1,
            "finish_date": self.finish_date.strftime(
                settings.DATETIME_DEFAULT_FORMAT),
            "finish_register_date": self.finish_register_date.strftime(
                settings.DATETIME_DEFAULT_FORMAT),
            "description": self.description,
            "created_at": self.created_at.strftime(
                settings.DATETIME_DEFAULT_FORMAT),
            "name": self.name,
            "inscription_type": self.inscription_type.value,
            # Bug fix: previously serialized created_at under "start_date".
            "start_date": self.start_date.strftime(
                settings.DATETIME_DEFAULT_FORMAT),
            "status": self.status.value,
            "type": self.type.value,
            "facility": self.facility.to_json_model(
                id="id", name="name", provincia="provincia", town="town",
                latitude="latitude", longitude="longitude"),
            "categories": [category.json_model
                           for category in self.categories],
            "rounds": [round.json_model for round in self.rounds],
        }

    @hybrid_property
    def poster_url(self):
        """Public URL for the poster image (falls back to a default)."""
        return _generate_media_url(self, "poster", default_image=True)
class APIToken(Hashed, Base):
    """An API token"""

    __tablename__ = 'api_tokens'

    # Exactly one of user_id / service_id should be set: a token belongs
    # either to a user or to a service.
    user_id = Column(Integer,
                     ForeignKey('users.id', ondelete="CASCADE"),
                     nullable=True)
    service_id = Column(Integer,
                        ForeignKey('services.id', ondelete="CASCADE"),
                        nullable=True)

    id = Column(Integer, primary_key=True)
    # Only the hash is stored; `prefix` allows indexed candidate lookup
    # before the (slow) hash comparison in Hashed.match().
    hashed = Column(Unicode(255), unique=True)
    prefix = Column(Unicode(16), index=True)

    @property
    def api_id(self):
        # Stable string id for the REST API, e.g. "a42".
        return 'a%i' % self.id

    # token metadata for bookkeeping
    created = Column(DateTime, default=datetime.utcnow)
    expires_at = Column(DateTime, default=None, nullable=True)
    last_activity = Column(DateTime)
    note = Column(Unicode(1023))

    def __repr__(self):
        if self.user is not None:
            kind = 'user'
            name = self.user.name
        elif self.service is not None:
            kind = 'service'
            name = self.service.name
        else:
            # this shouldn't happen
            kind = 'owner'
            name = 'unknown'
        return "<{cls}('{pre}...', {kind}='{name}')>".format(
            cls=self.__class__.__name__,
            pre=self.prefix,
            kind=kind,
            name=name,
        )

    @classmethod
    def purge_expired(cls, db):
        """Purge expired API Tokens from the database"""
        now = utcnow()
        deleted = False
        # `!= None` (not `is not None`) is required for SQLAlchemy filters.
        for token in (db.query(cls).filter(cls.expires_at != None).filter(
                cls.expires_at < now)):
            app_log.debug("Purging expired %s", token)
            deleted = True
            db.delete(token)
        if deleted:
            db.commit()

    @classmethod
    def find(cls, db, token, *, kind=None):
        """Find a token object by value.

        Returns None if not found.
        `kind='user'` only returns API tokens for users
        `kind='service'` only returns API tokens for services
        """
        prefix_match = cls.find_prefix(db, token)
        # Ignore expired tokens.
        prefix_match = prefix_match.filter(
            or_(cls.expires_at == None, cls.expires_at >= utcnow()))
        if kind == 'user':
            prefix_match = prefix_match.filter(cls.user_id != None)
        elif kind == 'service':
            prefix_match = prefix_match.filter(cls.service_id != None)
        elif kind is not None:
            raise ValueError(
                "kind must be 'user', 'service', or None, not %r" % kind)
        # Hash-compare each prefix candidate; returns None if none match.
        for orm_token in prefix_match:
            if orm_token.match(token):
                return orm_token

    @classmethod
    def new(cls,
            token=None,
            user=None,
            service=None,
            note='',
            generated=True,
            expires_in=None):
        """Generate a new API token for a user or service"""
        assert user or service
        assert not (user and service)
        db = inspect(user or service).session
        if token is None:
            token = new_token()
            # Don't need hash + salt rounds on generated tokens,
            # which already have good entropy
            generated = True
        else:
            cls.check_token(db, token)
        # two stages to ensure orm_token.generated has been set
        # before token setter is called
        orm_token = cls(generated=generated, note=note or '')
        orm_token.token = token
        if user:
            assert user.id is not None
            orm_token.user = user
        else:
            assert service.id is not None
            orm_token.service = service
        if expires_in is not None:
            orm_token.expires_at = utcnow() + timedelta(seconds=expires_in)
        db.add(orm_token)
        db.commit()
        # Returns the *plaintext* token string, not the ORM row.
        return token
class Member(OrderingMixin, FilteringMixin, PaginationMixin,
             DeclarativeBase):
    """A chat member: profile fields plus room/contact relationships."""

    __tablename__ = 'member'

    id = Field(Integer, primary_key=True)
    # Identity in the external CAS/SSO system.
    reference_id = Field(Integer, unique=True)
    email = Field(
        Unicode(100),
        unique=True,
        index=True,
        not_none=False,
        required=True,
        min_length=7,
        max_length=100,
        message='Loerm Ipsum',
        label='Email address',
        example='*****@*****.**',
        watermark='*****@*****.**',
        pattern=r'(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)',
        pattern_description='Valid email format, example: [email protected]',
    )
    access_token = Field(Unicode(512), protected=True)
    # FIXME: What is this?
    add_to_room = Field(Boolean, default=True)
    title = Field(
        Unicode(50),
        unique=True,
        index=True,
        nullable=True,
        max_length=20,
        min_length=3,
        label='Username',
        required=True,
        message='Loerm Ipsum',
        not_none=True,
        watermark='John_Doe',
        example='John_Doe',
    )
    first_name = Field(
        Unicode(20),
        nullable=False,
        not_none=True,
        python_type=str,
        min_length=3,
        max_length=20,
        required=True,
        pattern=r'^[a-zA-Z]{1}[a-z-A-Z ,.\'-]{2,19}$',
        # Bug fix: implicit concatenation was missing a space ("arevalid").
        pattern_description='Only alphabetical characters, ., \' and space '
                            'are valid',
        example='John',
        label='First Name',
        message='Loerm Ipsum',
        watermark='Loerm Ipsum',
    )
    last_name = Field(
        Unicode(20),
        nullable=False,
        not_none=True,
        python_type=str,
        min_length=3,
        max_length=20,
        required=True,
        pattern=r'^[a-zA-Z]{1}[a-z-A-Z ,.\'-]{2,19}$',
        # Bug fix: implicit concatenation was missing a space ("arevalid").
        pattern_description='Only alphabetical characters, ., \' and space '
                            'are valid',
        example='Doe',
        label='Last Name',
        message='Loerm Ipsum',
        watermark='Loerm Ipsum',
    )
    phone = Field(
        Unicode(50),
        nullable=True,
        min_length=8,
        max_length=16,
        not_none=False,
        required=False,
        message='Loerm Ipsum',
        label='Phone',
        watermark='Enter your phone number',
        example='734 555 1212',
        pattern=r'\d{3}[-\.\s]??\d{3}[-\.\s]??\d{4}|\(\d{3}\)\s*\d{3}'
                r'[-\.\s]??\d{4}|\d{3}[-\.\s]??\d{4}',
    )
    # Privacy toggles: expose email/phone in to_dict() only when enabled.
    show_email = Field(Boolean, default=False)
    show_phone = Field(Boolean, default=False)
    avatar = Field(Unicode(200),
                   label='Avatar',
                   nullable=True,
                   unique=False,
                   not_none=False,
                   required=False,
                   example='Lorem Ipsum')
    messages = relationship('Envelop')
    contacts = relationship(
        'Member',
        secondary='member_contact',
        primaryjoin=id == MemberContact.member_id,
        secondaryjoin=id == MemberContact.contact_member_id,
        lazy='selectin')
    room = relationship('Room', back_populates='owner')
    blocked_members = relationship(
        'Member',
        secondary=member_block,
        primaryjoin=id == member_block.c.member_id,
        secondaryjoin=id == member_block.c.blocked_member_id,
        lazy='selectin')
    rooms = relationship(
        'Target',
        secondary='target_member',
        back_populates='members',
        lazy='selectin',
        protected=True,
    )
    administrator_of = relationship('Room',
                                    secondary='room_administrator',
                                    back_populates='administrators',
                                    protected=True,
                                    lazy='selectin')

    def create_jwt_principal(self, session_id=None):
        """Build a CAS JWT principal for this member.

        :param session_id: reuse an existing session id, or generate a
            fresh UUID4 when omitted.
        """
        if session_id is None:
            session_id = str(uuid.uuid4())

        return CASPrincipal(
            dict(
                id=self.id,
                roles=self.roles,
                email=self.email,
                title=self.title,
                firstName=self.first_name,
                lastName=self.last_name,
                referenceId=self.reference_id,
                sessionId=session_id,
                avatar=self.avatar,
            ))

    def create_refresh_principal(self):
        """Build a refresh-token principal carrying only the member id."""
        return JWTRefreshToken(dict(id=self.id))

    @classmethod
    def current(cls):
        """Return the member matching the authenticated identity."""
        return DBSession.query(cls) \
            .filter(cls.reference_id == context.identity.reference_id).one()

    def is_member(self, target_id):
        """Return a truthy count if this member belongs to *target_id*."""
        return DBSession.query(Member).join(TargetMember) \
            .filter(
                TargetMember.target_id == target_id,
                TargetMember.member_id == self.id
            ) \
            .count()

    def to_dict(self):
        """Serialize, hiding phone/email unless the member opted in."""
        member_dict = super().to_dict()
        member_dict['phone'] = self.phone if self.show_phone else None
        member_dict['email'] = self.email if self.show_email else None
        return member_dict

    @property
    def roles(self):
        return ['member']

    def __repr__(self):
        return f'Member: {self.id} {self.reference_id} {self.title} {self.email}'
class ConvertibleCalendar(utils.Base):
    """User-Defined Calendars

    Describes a calendar's weeks, months, leap-year rules and eras as JSON
    columns, with SQLite-style CHECK constraints keeping the pieces mutually
    consistent.  Python-side @validates hooks sanitize values on assignment.
    """

    __tablename__ = "convertible_calendar"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode(255), nullable=False)

    #
    # Weeks
    #
    # Either the calendar has no weeks at all (empty weekday_names) or it
    # must also define epoch_weekday and weekday_start.
    weekday_names = Column(
        JSON,
        CheckConstraint(
            """
            json_array_length(weekday_names) == 0
            OR (
                epoch_weekday IS NOT NULL
                AND weekday_start IS NOT NULL
            )
            """,
            name=f"ck_{__tablename__}_weekday_names",
        ),
        default=list,
        nullable=False,
    )

    @validates("weekday_names")
    def _sanitize_weekday_names(self, _, weekday_names: list) -> list:
        return list(utils.string_sanitization(weekday_names))

    days_in_weeks = column_property(func.json_array_length(weekday_names))

    epoch_weekday = Column(  # index into weekday names
        Integer,
        CheckConstraint(
            """
            (   -- calendar has weeks
                epoch_weekday >= 0
                AND epoch_weekday < json_array_length(weekday_names)
            ) OR (
                -- weekless calendar
                json_array_length(weekday_names) == 0
                AND epoch_weekday IS NULL
            )
            """,
            name=f"ck_{__tablename__}_epoch_weekday",
        ),
    )
    weekday_start = Column(
        Integer,
        CheckConstraint(
            """
            (   -- calendar has weeks
                weekday_start >= 0
                AND weekday_start < json_array_length(weekday_names)
            ) OR (
                -- weekless calendar
                json_array_length(weekday_names) == 0
                AND weekday_start IS NULL
            )
            """,
            name=f"ck_{__tablename__}_weekday_start",
        ),
    )
    # Indices into weekday_names that count as weekend days.
    weekends = Column(JSON, default=list, nullable=False)

    @validates("weekends")
    def _validate_weekends(self, _, weekends: list) -> list:
        # A weekless calendar cannot have weekends.
        if len(self.weekday_names) == 0:
            assert len(weekends) == 0
            return weekends
        for idx in weekends:
            assert 0 <= idx < len(self.weekday_names), "invalid weekend index"
        return utils.integer_sanitization(weekends)

    #
    # Common years
    #
    # Month names and per-month day counts must be parallel arrays; a
    # monthless calendar stores its whole year length as a single entry in
    # days_in_common_year_months.
    common_year_month_names = Column(
        JSON,
        CheckConstraint(
            """
            (   -- a calendar with months
                json_array_length(common_year_month_names)
                == json_array_length(days_in_common_year_months)
            ) OR (
                -- a monthless calendar
                json_array_length(common_year_month_names) == 0
                AND json_array_length(days_in_common_year_months) == 1
            )
            """,
            name=f"ck_{__tablename__}_common_months",
        ),
        default=list,
        nullable=False,
    )
    months_in_common_year = column_property(
        func.json_array_length(common_year_month_names)
    )
    days_in_common_year_months = Column(JSON, nullable=False)

    @hybrid_property
    def days_in_common_year(self):
        # Total length of a common (non-leap) year.
        return sum(self.days_in_common_year_months)

    #
    # Leap years
    #
    """
    There are two main ways calendars determine leap years:
    cycles and divisibility.

    For example, the Hebrew calendar has a 19-year cycle where the cycle
    years 3, 6, 8, 11, 14, 17, and 19 are leap years.
    In contrast, for the Gregorian Calendar, leap years are divisible by
    400 or divisible by 4 but not 100.

    Instead of having two separate systems, this class represents
    divisible "leap year rules" using "leap year cycles".
    The Gregorian calendar's divisibility rules can be represented as a
    400 year cycle with the following cycle years being leap years:
    4, 8, 12, 16, 20, ..., 96, 104, ..., 196, 204, ..., 296, 304, ...,
    396, 400
    """
    has_leap_year = Column(Boolean(create_constraint=True), default=False)
    leap_year_month_names = Column(
        JSON,
        CheckConstraint(
            """
            (   -- calendar with months and leap years
                has_leap_year == 1
                AND (
                    json_array_length(leap_year_month_names)
                    == json_array_length(days_in_leap_year_months)
                )
            ) OR (
                -- calendar has leap years, but no months
                has_leap_year == 1
                AND json_array_length(leap_year_month_names) == 0
                AND json_array_length(days_in_leap_year_months) == 1
            ) OR (
                -- calendar has no leap years
                has_leap_year == 0
                AND json_array_length(leap_year_month_names) == 0
                AND json_array_length(days_in_leap_year_months) == 0
            )
            """,
            name=f"ck_{__tablename__}_leap_months",
        ),
        default=list,
        nullable=False,
    )
    months_in_leap_year = column_property(
        func.json_array_length(leap_year_month_names)
    )
    days_in_leap_year_months = Column(JSON, default=list, nullable=False)

    @hybrid_property
    def days_in_leap_year(self) -> int:
        """:raises AssertionError: if leap year is not longer than a common
        year"""
        # A calendar without leap years has zero-length "leap years".
        if not self.has_leap_year:
            return 0
        days_in_leap_year = sum(self.days_in_leap_year_months)
        err_msg = "Leap year must be longer than common year"
        assert days_in_leap_year > self.days_in_common_year, err_msg
        return days_in_leap_year

    @validates("common_year_month_names", "leap_year_month_names")
    def _sanitize_month_names(self, _, month_names: list) -> list:
        return utils.string_sanitization(month_names)

    @validates("days_in_common_year_months", "days_in_leap_year_months")
    def _validate_days_in_months(self, key, days_in_months: list) -> list:
        assert all([days > 0 for days in days_in_months]), "days must be > 0"
        if days_in_months:  # skip if days_in_months is empty
            assert sum(days_in_months) > 0, f"{key} can not have zero days"
        return utils.integer_sanitization(days_in_months)

    # Cycle lengths; either all leap-cycle columns are set (has_leap_year)
    # or none are.
    leap_year_cycles = Column(
        JSON,
        CheckConstraint(
            """
            (
                has_leap_year == 1
                AND json_array_length(leap_year_cycles) != 0
                AND json_array_length(leap_year_cycle_ordinals) != 0
                AND leap_year_cycle_start IS NOT NULL
            ) OR (
                has_leap_year == 0
                AND json_array_length(leap_year_cycles) == 0
                AND json_array_length(leap_year_cycle_ordinals) == 0
                AND leap_year_cycle_start IS NULL
            )
            """,
            name=f"ck_{__tablename__}_leap_year_cycles",
        ),
        default=list,
        nullable=False,
    )

    @validates("leap_year_cycles")
    def _validate_leap_cycles(self, _, leap_year_cycles: list) -> list:
        sanitized_cycles = utils.integer_sanitization(leap_year_cycles)
        for cycle in sanitized_cycles:
            assert cycle >= 1, "leap year cycles must be positive"
        return sanitized_cycles

    @hybrid_property
    def leap_year_cycle_length(self):
        """length of all the cycles combined"""
        return sum(self.leap_year_cycles)

    leap_year_cycle_start = Column(
        Integer,
        CheckConstraint(
            "leap_year_cycle_start >=0",
            name=f"ck_{__tablename__}_leap_year_cycle_start",
        ),
    )
    # Which positions within a cycle are leap years.
    leap_year_cycle_ordinals = Column(JSON, default=list, nullable=False)

    @validates("leap_year_cycle_ordinals")
    def _validate_cycle_ordinals(self, _, cycle_ordinals: list) -> list:
        sanitized_cycle_ordinals = utils.integer_sanitization(cycle_ordinals)
        for cycle_ordinal in sanitized_cycle_ordinals:
            assert cycle_ordinal >= 0, "cycle ordinals must be non-negative"
        return sanitized_cycle_ordinals

    leap_years_in_normal_cycle = column_property(
        func.json_array_length(leap_year_cycle_ordinals)
    )
    special_common_years = Column(  # common years no matter leap year rules
        JSON,
        CheckConstraint(
            """
            has_leap_year == 1
            OR json_array_length(special_common_years) == 0
            """,
            name=f"ck_{__tablename__}_leap_year_exceptions",
        ),
        default=list,
        nullable=False,
    )
    special_leap_years = Column(  # leap years no matter leap year rules
        JSON,
        CheckConstraint(
            """
            has_leap_year == 1
            OR json_array_length(special_leap_years) == 0
            """,
            name=f"ck_{__tablename__}_leap_year_overrules",
        ),
        default=list,
        nullable=False,
    )
    leap_year_offset = Column(
        Integer,
        CheckConstraint(
            "has_leap_year == 1 OR leap_year_offset IS NULL",
            name=f"ck_{__tablename__}_leap_year_offset",
        ),
    )

    @staticmethod
    def validate_disjoint_special_years(_, __, target: "ConvertibleCalendar"):
        """
        :raises AssertionError: if special_common_years and
        special_leap_years are **not** mutually exclusive
        """
        # NOTE: staticmethod signature matches a SQLAlchemy event-listener
        # shape (mapper, connection, target) — presumably wired up elsewhere.
        if target.special_common_years and target.special_leap_years:
            special_leap = set(target.special_leap_years)
            special_commons = set(target.special_common_years)
            disjoint_special_years = special_leap.isdisjoint(special_commons)
            assert disjoint_special_years, "special years must be disjoint"

    #
    # Eras, assumes human-readable years
    #
    eras = Column(
        JSON,
        CheckConstraint(
            """
            json_array_length(eras) >= 2
            AND json_array_length(eras) == json_array_length(era_ranges)
            """,
            name=f"ck_{__tablename__}_eras",
        ),
        default=default_eras,
        nullable=False,
    )
    era_ranges = Column(JSON, default=default_era_ranges, nullable=False)

    @validates("era_ranges")
    def _validate_era_ranges(self, _, era_ranges: list) -> list:
        # First and last eras must be unbounded; all interior boundaries are
        # non-negative human-readable years.
        flat_era_ranges = list(itertools.chain.from_iterable(era_ranges))
        assert flat_era_ranges[0] == "-inf", "first era must be infinite"
        assert flat_era_ranges[-1] == "inf", "last era must be infinite"
        finite_era_ranges = utils.integer_sanitization(flat_era_ranges[1:-1])
        for hr_year in finite_era_ranges:
            assert hr_year >= 0, "human-readable years must be non-negative"
        return era_ranges

    jd_epoch = Column(Integer, default=1721426)  # Julian Day of the epoch

    # should just be used to convert calendars
    _target_conversions = relationship(  # from self to target
        "CalendarConversion",
        primaryjoin="ConvertibleCalendar.id==CalendarConversion.source_calendar_id",  # noqa: E501
        back_populates="source_calendar",
    )
    _source_conversions = relationship(  # from source to self
        "CalendarConversion",
        primaryjoin="ConvertibleCalendar.id==CalendarConversion.target_calendar_id",  # noqa: E501
        back_populates="target_calendar",
    )

    def __repr__(self):
        return f"{self.name}(Epoch: {self.jd_epoch})"

    def calendars(self) -> list:
        """All the calendars this one can be converted to and from"""
        target_calendars = [
            conversion.target_calendar
            for conversion in self._target_conversions
        ]
        source_calendars = [
            conversion.source_calendar
            for conversion in self._source_conversions
        ]
        calendars = list(itertools.chain(target_calendars, source_calendars))
        return calendars

    def conversions(self) -> list:
        """All conversions this calendar is involved in"""
        conversions = list(
            itertools.chain(self._source_conversions, self._target_conversions)
        )
        return conversions

    def sync_ordinal(self, cal: "ConvertibleCalendar") -> Union[int, None]:
        """:returns: this calendar's sync ordinal for the given calendar"""
        conversion = self.conversion(cal)
        if conversion is None:
            return None
        # Pick whichever side of the conversion refers to *this* calendar.
        if conversion.target_calendar is self:
            return conversion.target_sync_ordinal
        return conversion.source_sync_ordinal

    def conversion(
        self, calendar: "ConvertibleCalendar"
    ) -> Union["CalendarConversion", None]:
        """:returns: CalendarConversion for this and the given calendar"""
        conversions = self.conversions()
        for conversion in conversions:
            # Order-insensitive match: {source, target} == {self, calendar}.
            if {conversion.target_calendar, conversion.source_calendar} == {
                self,
                calendar,
            }:
                return conversion
        return None
class Licitation(Base):
    """A public-procurement tender (licitation) and its optional award.

    ``contractor`` is the party offering the contract; ``contracted`` is the
    party awarded it (may be absent while the tender is open).
    """

    __tablename__ = "licitations"

    id = Column(Integer(), primary_key=True)
    uuid = Column(Unicode(), unique=True)
    file = Column(Unicode())
    type = Column(Unicode())
    subtype = Column(Unicode())
    result_code = Column(Unicode())
    uri = Column(Unicode())
    title = Column(Unicode())
    amount = Column(Float())
    payable_amount = Column(Float())
    budget_amount = Column(Float())
    budget_payable_amount = Column(Float())
    issued_at = Column(DateTime())
    awarded_at = Column(DateTime())

    contractor_id = Column(Integer(), ForeignKey('parties.id'))
    contractor = relationship("Party", backref='offered_licitations',
                              primaryjoin="Licitation.contractor_id==Party.id")
    contracted_id = Column(Integer(), ForeignKey('parties.id'))
    contracted = relationship("Party", backref='taken_licitations',
                              primaryjoin="Licitation.contracted_id==Party.id")

    @staticmethod
    def count(session):
        """Return the total number of stored licitations."""
        return session.query(Licitation).count()

    @staticmethod
    def get_by_uuid(session, uuid):
        """Return the licitation with the given uuid, or None."""
        return session.query(Licitation).filter_by(uuid=uuid).first()

    @staticmethod
    def exists(session, uuid):
        """Return True if a licitation with the given uuid exists."""
        by_uuid = session.query(Licitation).filter_by(uuid=uuid)
        return session.query(by_uuid.exists()).scalar()

    @staticmethod
    def create(session, data):
        """Create and commit a licitation from a raw data dict.

        :returns: the new Licitation, False if its uuid already exists, or
                  None when persisting fails (the error is logged and the
                  session rolled back).  ``data`` is mutated in place.
        """
        if Licitation.exists(session, data.get('uuid')):
            return False
        try:
            data['issued_at'] = parse_date(data['issued_at'])
            data['awarded_at'] = parse_date(data['awarded_at'])
            data['contractor'] = Party.get_or_create(session,
                                                     data['contractor'])
            if data['contracted'] is not None:
                data['contracted'] = Party.get_or_create(session,
                                                         data['contracted'])
            licitation = Licitation(**data)
            session.add(licitation)
            session.commit()
            return licitation
        except Exception as error:
            logger.error(error, exc_info=1)
            session.rollback()
            # Explicit: callers get None on failure.  (The original had a
            # dead `else: return True` that could never be reached because
            # the try-block always returns.)
            return None

    def to_dict(self):
        """Serialize to a plain dict; datetimes become ISO-8601 strings."""
        return {
            'id': self.id,
            'uuid': self.uuid,
            'file': self.file,
            'result_code': self.result_code,
            'uri': self.uri,
            'title': self.title,
            'amount': self.amount,
            'payable_amount': self.payable_amount,
            'issued_at': self.issued_at.isoformat('T')
                if self.issued_at is not None else None,
            # BUGFIX: these three guards checked the wrong attribute
            # (copy-paste of issued_at / contractor), which raised
            # AttributeError on None awarded_at/contracted.
            'awarded_at': self.awarded_at.isoformat('T')
                if self.awarded_at is not None else None,
            'contractor': self.contractor.to_dict()
                if self.contractor is not None else None,
            'contracted': self.contracted.to_dict()
                if self.contracted is not None else None,
        }
from datetime import datetime from sqlalchemy import MetaData, Column, ForeignKey, Table from sqlalchemy import Boolean, DateTime, Integer, Unicode metadata = MetaData() badge_table = Table( 'badge', metadata, Column('id', Integer, primary_key=True), Column('create_time', DateTime, default=datetime.utcnow), Column('title', Unicode(40), nullable=False), Column('color', Unicode(7), nullable=False), Column('description', Unicode(255), default=u'', nullable=False), Column('group_id', Integer, ForeignKey('group.id', ondelete="CASCADE")), Column('display_group', Boolean, default=False), #Column('badge_delegateable', Boolean, default=False) ) delegateable_badge_table = Table( 'delegateable_badges', metadata, Column('id', Integer, primary_key=True), Column('badge_id', Integer, ForeignKey('badge.id'), nullable=False), Column('delegateable_id', Integer, ForeignKey('delegateable.id'), nullable=False), Column('create_time', DateTime, default=datetime.utcnow), Column('creator_id', Integer, ForeignKey('user.id'), nullable=False))
class company(Base): __tablename__ = "azienda" azienda_id = Column(Integer, primary_key=True, autoincrement=True) nome_azienda = Column(Unicode(20))
class Entity(EntityCore, Base):
    """A project-scoped entity record imported from some origin.

    Rows are grouped by ``canonical_uid`` into composite entities; the
    optional (query_uid, match_uid) pair scopes an entity to one match
    result.
    """

    __tablename__ = 'entity'

    id = Column(Integer, primary_key=True)
    project = Column(Unicode(255), index=True, nullable=False)
    origin = Column(Unicode(255), index=True, nullable=False)
    uid = Column(Unicode(UID_LENGTH), index=True, nullable=False)
    canonical_uid = Column(Unicode(UID_LENGTH), index=True, nullable=True)
    query_uid = Column(Unicode(UID_LENGTH), index=True, nullable=True)
    match_uid = Column(Unicode(UID_LENGTH), index=True, nullable=True)
    schema = Column(Unicode(255), nullable=True)
    tasked = Column(Boolean, default=False)
    active = Column(Boolean, default=True)
    # BUGFIX: default was the literal {} — a single mutable dict shared by
    # every row using the default.  A callable yields a fresh dict per row.
    data = Column(JSONB, default=dict)

    def delete(self):
        """Delete this row. Keeping the mappings."""
        session.delete(self)
        # TODO: links

    @classmethod
    def save(cls, data, origin, query_uid=None, match_uid=None):
        """Create or update the entity described by ``data`` (mutated).

        :raises ValueError: when ``data`` has no uid or an unknown schema.
        """
        uid = data.pop('uid', None)
        if uid is None:
            raise ValueError("No UID on entity: %r" % data)
        obj = cls.get(uid, query_uid=query_uid, match_uid=match_uid)
        if obj is None:
            obj = cls()
            obj.project = project.name
            obj.uid = uid
            obj.canonical_uid = uid
            obj.query_uid = query_uid
            obj.match_uid = match_uid
        obj.origin = origin
        obj.schema = data.pop('schema', None)
        if obj.schema not in TYPES:
            # BUGFIX: was ValueError("...%r", data) — a two-arg tuple that
            # never interpolated the data into the message.
            raise ValueError("Invalid entity type: %r" % data)
        obj.tasked = parse_boolean(data.pop('tasked', None), default=False)
        obj.active = parse_boolean(data.pop('active', None), default=True)
        obj.data = obj.parse_data(data)
        session.add(obj)
        # Re-index the entity's address.
        Address.delete_by_entity(uid)
        Address.save(uid, obj.data.get('address'), origin)
        return obj

    @classmethod
    def get(cls, uid, query_uid=None, match_uid=None):
        """Return the entity with this uid in the result scope, or None."""
        q = cls.find_by_result(query_uid=query_uid, match_uid=match_uid)
        q = q.filter(cls.uid == uid)
        return q.first()

    @classmethod
    def find_by_result(cls, query_uid=None, match_uid=None):
        """Query entities of the current project, optionally scoped to a
        (query_uid, match_uid) result pair — both must be given to filter."""
        q = session.query(cls)
        q = q.filter(cls.project == project.name)
        if query_uid is not None and match_uid is not None:
            q = q.filter(cls.query_uid == query_uid)
            q = q.filter(cls.match_uid == match_uid)
        return q

    @classmethod
    def find_by_origins(cls, origins):
        """Query the project's entities restricted to the given origins
        (no origin filter when ``origins`` is empty)."""
        q = session.query(cls)
        q = q.filter(cls.project == project.name)
        if len(origins):
            q = q.filter(cls.origin.in_(origins))
        return q

    @classmethod
    def delete_by_origin(cls, origin, query_uid=None, match_uid=None):
        """Delete all entities from one origin, optionally result-scoped."""
        q = cls.find_by_origins([origin])
        if query_uid is not None and match_uid is not None:
            q = q.filter(cls.query_uid == query_uid)
            q = q.filter(cls.match_uid == match_uid)
        for entity in q:
            entity.delete()

    @classmethod
    def iter_composite(cls, origins=(), tasked=None):
        """Yield CompositeEntity groups of active entities sharing a
        canonical_uid, optionally filtered by origins and tasked flag.

        BUGFIX: ``origins`` default was the mutable ``[]``; an immutable
        tuple avoids the shared-default pitfall and behaves identically
        for the ``len()`` / ``in_()`` uses below.
        """
        sq = session.query(cls.canonical_uid.distinct())
        sq = sq.filter(cls.project == project.name)
        sq = sq.filter(cls.active == True)  # noqa
        if len(origins):
            sq = sq.filter(cls.origin.in_(origins))
        if tasked is not None:
            sq = sq.filter(cls.tasked == tasked)
        q = session.query(cls)
        q = q.filter(cls.project == project.name)
        q = q.filter(cls.active == True)  # noqa
        q = q.filter(cls.canonical_uid.in_(sq))
        # Ordering by canonical_uid lets us group with a single pass.
        q = q.order_by(cls.canonical_uid.asc())
        entities = []
        canonical_uid = None
        for entity in q:
            if entity.canonical_uid != canonical_uid:
                if len(entities):
                    yield CompositeEntity(entities)
                entities = []
            entities.append(entity)
            canonical_uid = entity.canonical_uid
        # Flush the final group.
        if len(entities):
            yield CompositeEntity(entities)

    def __repr__(self):
        return '<Entity(%r)>' % self.uid
class User(db.Model, DomainObject, UserMixin):
    '''A registered user of the PYBOSSA system'''

    __tablename__ = 'user'

    id = Column(Integer, primary_key=True)
    #: UTC timestamp of the user when it's created.
    created = Column(Text, default=make_timestamp)
    email_addr = Column(Unicode(length=254), unique=True, nullable=False)
    #: Name of the user (this is used as the nickname).
    name = Column(Unicode(length=254), unique=True, nullable=False)
    #: Fullname of the user.
    fullname = Column(Unicode(length=500), nullable=False)
    #: Language used by the user in the PYBOSSA server.
    locale = Column(Unicode(length=254), default=u'en', nullable=False)
    api_key = Column(String(length=36), default=make_uuid, unique=True)
    passwd_hash = Column(Unicode(length=254), unique=True)
    ldap = Column(Unicode, unique=True)
    admin = Column(Boolean, default=False)
    pro = Column(Boolean, default=False)
    privacy_mode = Column(Boolean, default=True, nullable=False)
    category = Column(Integer)
    flags = Column(Integer)
    twitter_user_id = Column(BigInteger, unique=True)
    facebook_user_id = Column(BigInteger, unique=True)
    google_user_id = Column(String, unique=True)
    ckan_api = Column(String, unique=True)
    newsletter_prompted = Column(Boolean, default=False)
    valid_email = Column(Boolean, default=False)
    confirmation_email_sent = Column(Boolean, default=False)
    subscribed = Column(Boolean, default=True)
    consent = Column(Boolean, default=False)
    # BUGFIX: default was `dict()` — a single dict instance shared as the
    # column default.  Passing the callable `dict` gives each row its own
    # fresh dict (important with MutableDict tracking).
    info = Column(MutableDict.as_mutable(JSONB), default=dict)
    user_pref = Column(JSONB)

    ## Relationships
    task_runs = relationship(TaskRun, backref='user')
    projects = relationship(Project, backref='owner')
    blogposts = relationship(Blogpost, backref='owner')

    def get_id(self):
        '''id for login system. equates to name'''
        return self.name

    def set_password(self, password):
        '''Hash and store the given plaintext password.'''
        self.passwd_hash = signer.generate_password_hash(password)

    def check_password(self, password):
        '''Return True if the password matches the stored hash.'''
        # OAuth users do not have a password
        if self.passwd_hash:
            return signer.check_password_hash(self.passwd_hash, password)
        return False

    @classmethod
    def public_attributes(cls):
        """Return a list of public attributes."""
        # FIX: first parameter renamed self -> cls; these are classmethods.
        return ['created', 'name', 'fullname', 'info', 'n_answers',
                'registered_ago', 'rank', 'score', 'locale']

    @classmethod
    def public_info_keys(cls):
        """Return a list of public info keys."""
        default = ['avatar', 'container', 'extra', 'avatar_url']
        # Deployments may extend the public keys via app config.
        extra = current_app.config.get('USER_INFO_PUBLIC_FIELDS')
        if extra:
            return list(set(default).union(set(extra)))
        else:
            return default
class Role(Base): __tablename__ = "roles" id = Column(Integer, primary_key=True) name = Column(Unicode(60), nullable=False)