def tasks_query(self, q, marker=None, offset=None, limit=None, current_user=None, **kwargs):
    """Run a fulltext search over tasks, hiding tasks of stories the
    requesting user is not allowed to see.

    :param q: Fulltext search string.
    :param marker: Pagination marker.
    :param offset: Pagination offset.
    :param limit: Maximum number of results.
    :param current_user: ID of the requesting user, or None for anonymous.
    :return: List of matching Task rows.
    """
    session = api_base.get_session()
    task_query = api_base.model_query(models.Task, session)

    # Join out through the story-permission chain so private stories
    # can be filtered below.
    task_query = task_query.outerjoin(models.Story,
                                      models.story_permissions,
                                      models.Permission,
                                      models.user_permissions,
                                      models.User)

    if current_user is None:
        # Anonymous callers only see tasks of public stories.
        task_query = task_query.filter(models.Story.private == false())
    else:
        # Authenticated users additionally see private stories on which
        # they hold an explicit permission.
        task_query = task_query.filter(
            or_(
                and_(models.User.id == current_user,
                     models.Story.private == true()),
                models.Story.private == false()
            )
        )

    task_query = self._build_fulltext_search(models.Task, task_query, q)
    task_query = self._apply_pagination(models.Task, task_query,
                                        marker, offset, limit)
    return task_query.all()
def downgrade():
    """Revert to per-row 'primary' flags on useremail/userphone.

    Moves primary-address bookkeeping back from the user_*_primary link
    tables into boolean columns, then drops the link tables and their
    validation triggers.
    """
    # 1. Add primary columns
    # A temporary server default of false() lets the NOT NULL columns be
    # added to populated tables; the default is removed right after.
    op.add_column('userphone',
                  sa.Column('primary', sa.BOOLEAN(), autoincrement=False,
                            nullable=False, server_default=expression.false()))
    op.add_column('useremail',
                  sa.Column('primary', sa.BOOLEAN(), autoincrement=False,
                            nullable=False, server_default=expression.false()))
    op.alter_column('userphone', 'primary', server_default=None)
    op.alter_column('useremail', 'primary', server_default=None)
    # 2. Update primary flags
    # Backfill from the link tables before they are dropped.
    op.execute(sa.DDL(''' UPDATE useremail SET "primary" = true FROM user_useremail_primary WHERE useremail.id = user_useremail_primary.useremail_id; '''))
    op.execute(sa.DDL(''' UPDATE userphone SET "primary" = true FROM user_userphone_primary WHERE userphone.id = user_userphone_primary.userphone_id; '''))
    # 3. Drop primary tables
    # Triggers/functions must go before their tables.
    op.execute(sa.DDL(''' DROP TRIGGER user_userphone_primary_trigger ON user_userphone_primary; DROP FUNCTION user_userphone_primary_validate(); '''))
    op.execute(sa.DDL(''' DROP TRIGGER user_useremail_primary_trigger ON user_useremail_primary; DROP FUNCTION user_useremail_primary_validate(); '''))
    op.drop_table('user_userphone_primary')
    op.drop_table('user_useremail_primary')
def task_build_query(project_group_id, current_user=None, **kwargs):
    """Construct a Task query, optionally scoped to a project group and
    restricted to stories visible to ``current_user``.
    """
    task_query = api_base.model_query(models.Task)

    if project_group_id:
        # Restrict to tasks whose project belongs to the given group.
        task_query = task_query.join(models.Project,
                                     models.project_group_mapping,
                                     models.ProjectGroup) \
            .filter(models.ProjectGroup.id == project_group_id)

    # Sanity check on input parameters
    task_query = api_base.apply_query_filters(query=task_query,
                                              model=models.Task,
                                              **kwargs)

    # Filter out tasks or stories that the current user can't see.
    task_query = task_query.outerjoin(models.Story,
                                      models.story_permissions,
                                      models.Permission,
                                      models.user_permissions,
                                      models.User)
    if current_user is None:
        task_query = task_query.filter(models.Story.private == false())
    else:
        task_query = task_query.filter(
            or_(
                and_(models.User.id == current_user,
                     models.Story.private == true()),
                models.Story.private == false()
            )
        )
    return task_query
def filter_private_stories(query, current_user, story_model=models.Story): """Takes a query and filters out stories the user shouldn't see. :param query: The query to be filtered. :param current_user: The ID of the user requesting the result. :param story_model: The database model used for stories in the query. """ # First filter based on users with permissions set directly query = query.outerjoin(models.story_permissions, models.Permission, models.user_permissions, models.User) if current_user: visible_to_users = query.filter( or_( and_( models.User.id == current_user, story_model.private == true() ), story_model.private == false(), story_model.id.is_(None) ) ) else: visible_to_users = query.filter( or_( story_model.private == false(), story_model.id.is_(None) ) ) # Now filter based on membership of teams with permissions users = aliased(models.User, name="story_users") query = query.outerjoin(models.team_permissions, models.Team, models.team_membership, (users, users.id == models.team_membership.c.user_id)) if current_user: visible_to_teams = query.filter( or_( and_( users.id == current_user, story_model.private == true() ), story_model.private == false(), story_model.id.is_(None) ) ) else: visible_to_teams = query.filter( or_( story_model.private == false(), story_model.id.is_(None) ) ) return visible_to_users.union(visible_to_teams)
def create_spam_checks():
    """Generate file based spam checks ruleset."""
    # Users are streamed in windows of 100 rows to bound memory use.
    pending_users_q = Session.query(User).filter(User.spam_checks == false())
    pending_users = windowed_query(pending_users_q, User.id, 100)
    pending_domains = Session.query(Domain).filter(
        Domain.spam_checks == false()).all()
    write_ruleset('spam.checks.rules',
                  dict(users=pending_users, domains=pending_domains))
    Session.close()
def test_malformed_range(self):
    # missing the "TO" separator
    # The whole term fails to parse as a range, so it is treated as a plain
    # term against the default fields (fqdn matches the literal text; the
    # numeric memory field contributes false()).
    clause = lucene_to_sqlalchemy(u'[what]',
                                  {'fqdn': System.fqdn, 'memory': System.memory},
                                  [System.fqdn, System.memory])
    self.assert_clause_equals(clause, or_(System.fqdn == u'[what]', false()))
    # A comma instead of "TO" on a numeric field matches nothing.
    clause = lucene_to_sqlalchemy(u'memory:[1024, 2048]',
                                  {'fqdn': System.fqdn, 'memory': System.memory},
                                  [System.fqdn, System.memory])
    self.assert_clause_equals(clause, and_(false()))
def get_history_datasets(self, trans, history):
    """
    Returns history's datasets.
    """
    hda_model = trans.model.HistoryDatasetAssociation
    query = trans.sa_session.query(hda_model)
    query = query.filter(hda_model.history == history)
    # Eager-load dataset actions to avoid follow-up queries per row.
    query = query.join("dataset")
    query = query.options(eagerload_all("dataset.actions"))
    query = query.order_by(hda_model.hid)
    # Skip deleted associations and purged datasets.
    query = query.filter(hda_model.deleted == expression.false())
    query = query.filter(trans.model.Dataset.purged == expression.false())
    return query.all()
def read(self):
    """Mark badge notifications from all activities read up to a timestamp.

    Takes in an unixtime timestamp. Fetches the IDs for all all unread
    badge notifications for the current user, which come from activities
    created at up to the passed in timestamp. Marks those notifications
    as read.
    """
    raw_timestamp = self.request.get('timestamp')
    if not raw_timestamp:
        raise Exception('Missing parameter `timestamp`')
    # Convert the unixtime parameter exactly once (the previous version
    # converted it to int twice).
    timestamp = datetime.fromtimestamp(int(raw_timestamp), pytz.UTC)

    userid = api.user.get_current().getId()
    # Unread badge notifications for this user whose activity predates
    # the cutoff.
    notifications = (
        Notification.query
        .join(Activity)
        .filter(
            Notification.userid == userid,
            Notification.is_badge == true(),
            Notification.is_read == false(),
            Activity.created < timestamp,
        )
        .all()
    )
    notification_ids = [n.notification_id for n in notifications]
    return notification_center().mark_notifications_as_read(
        notification_ids)
def test_assigns_boolean_server_defaults(self, User):
    # Columns under test on the mapped User table.
    is_admin = User.__table__.c.is_admin
    is_active = User.__table__.c.is_active
    assert is_admin.default.arg is True
    # Compare classes: true()/false() construct fresh expression objects
    # that do not compare equal by value.
    assert is_admin.server_default.arg.__class__ == true().__class__
    assert is_active.server_default.arg.__class__ == false().__class__
def category(name):
    """Render the index page for one tag category, or all books for "all"."""
    # filter(false()) yields an intentionally empty result set for the
    # 'random' template slot.
    empty_selection = db.session.query(db.Books).filter(false())
    if name == "all":
        entries = db.session.query(db.Books).all()
    else:
        entries = (db.session.query(db.Books)
                   .filter(db.Books.tags.any(
                       db.Tags.name.like("%" + name + "%")))
                   .order_by(db.Books.last_modified.desc())
                   .all())
    return render_template('index.html', random=empty_selection,
                           entries=entries, title="Category: %s" % name)
def test_assigns_boolean_server_defaults(self):
    # Columns under test, exposed by the fixture as self.columns.
    is_admin = self.columns.is_admin
    is_active = self.columns.is_active
    assert is_admin.default.arg is True
    # Compare classes: true()/false() construct fresh expression objects
    # that do not compare equal by value.
    assert is_admin.server_default.arg.__class__ == true().__class__
    assert is_active.server_default.arg.__class__ == false().__class__
def _get_group_project_roles(self, session, groups, project_id,
                             project_domain_id):
    """Return distinct role IDs that ``groups`` hold on ``project_id``.

    Covers direct assignments and, when OS-Inherit is enabled, roles
    inherited from the owning domain and from parent projects.
    """
    # NOTE(rodrigods): First, we always include projects with
    # non-inherited assignments
    sql_constraints = sqlalchemy.and_(
        RoleAssignment.type == AssignmentType.GROUP_PROJECT,
        RoleAssignment.inherited == false(),
        RoleAssignment.target_id == project_id)
    if CONF.os_inherit.enabled:
        # Inherited roles from domains
        sql_constraints = sqlalchemy.or_(
            sql_constraints,
            sqlalchemy.and_(
                RoleAssignment.type == AssignmentType.GROUP_DOMAIN,
                RoleAssignment.inherited,
                RoleAssignment.target_id == project_domain_id))
        # Inherited roles from projects
        project_parents = [x['id'] for x in
                           self.list_project_parents(project_id)]
        sql_constraints = sqlalchemy.or_(
            sql_constraints,
            sqlalchemy.and_(
                RoleAssignment.type == AssignmentType.GROUP_PROJECT,
                RoleAssignment.inherited,
                RoleAssignment.target_id.in_(project_parents)))
    # Restrict all of the above to the requesting groups.
    sql_constraints = sqlalchemy.and_(sql_constraints,
                                      RoleAssignment.actor_id.in_(groups))
    # NOTE(morganfainberg): Only select the columns we actually care about
    # here, in this case role_id.
    query = session.query(RoleAssignment.role_id).filter(
        sql_constraints).distinct()
    return [result.role_id for result in query.all()]
def manage_users_and_groups_for_quota( self, trans, **kwd ):
    """Render the quota membership editor.

    Partitions all non-deleted users and groups into members / non-members
    of the quota and feeds both lists to the template.
    """
    quota, params = self._quota_op( trans, 'quota_members_edit_button', self._manage_users_and_groups_for_quota, kwd )
    if not quota:
        # _quota_op already handled the response (e.g. error redirect).
        return
    in_users = []
    out_users = []
    in_groups = []
    out_groups = []
    # Partition all non-deleted users by current quota membership.
    for user in trans.sa_session.query( trans.app.model.User ) \
                                .filter( trans.app.model.User.table.c.deleted == expression.false() ) \
                                .order_by( trans.app.model.User.table.c.email ):
        if user in [ x.user for x in quota.users ]:
            in_users.append( ( user.id, user.email ) )
        else:
            out_users.append( ( user.id, user.email ) )
    # Same partitioning for non-deleted groups, ordered by name.
    for group in trans.sa_session.query( trans.app.model.Group ) \
                                 .filter( trans.app.model.Group.table.c.deleted == expression.false()) \
                                 .order_by( trans.app.model.Group.table.c.name ):
        if group in [ x.group for x in quota.groups ]:
            in_groups.append( ( group.id, group.name ) )
        else:
            out_groups.append( ( group.id, group.name ) )
    return trans.fill_template( '/admin/quota/quota.mako',
                                id=params.id,
                                name=quota.name,
                                in_users=in_users,
                                out_users=out_users,
                                in_groups=in_groups,
                                out_groups=out_groups,
                                webapp=params.webapp,
                                message=params.message,
                                status=params.status )
def get_summary(value_class):
    # NOTE(review): Python 2 module — print statements below. The two
    # print calls look like leftover debug output; confirm before removal.
    # Daily sums for accumulating value classes (accum_flag == true),
    # year-to-date.
    q = db_session.query(
        func.date(SR_Values.datetime).label("date"),
        func.sum(SR_Values.value).label("daily_value")
    ).filter(SR_Classes.id == SR_Values.value_class_id
    ).filter(SR_Classes.accum_flag == true()
    ).filter(SR_Classes.value_class == value_class
    ).filter(SR_Values.datetime > datetime.datetime(datetime.datetime.now().year, 1, 1)
    ).group_by(SR_Classes.value_class, func.month(SR_Values.datetime)
    ).order_by(SR_Classes.value_class, func.date(SR_Values.datetime))
    print q
    rows = [{"name": x.date, "value": x.daily_value} for x in q.all()]
    # Daily averages for non-accumulating value classes (accum_flag ==
    # false), appended to the same result list.
    q = db_session.query(
        func.date(SR_Values.datetime).label("date"),
        func.avg(SR_Values.value).label("daily_value")
    ).filter(SR_Classes.id == SR_Values.value_class_id
    ).filter(SR_Classes.accum_flag == false()
    ).filter(SR_Classes.value_class == value_class
    ).filter(SR_Values.datetime > datetime.datetime(datetime.datetime.now().year, 1, 1)
    ).group_by(SR_Classes.value_class, func.month(SR_Values.datetime)
    ).order_by(SR_Classes.value_class, func.date(SR_Values.datetime))
    rows.extend([{"name": x.date, "value": x.daily_value} for x in q.all()])
    print rows
    return rows
def get_active_column(tables, active=True):
    """Return a labelled boolean 'active' column expression.

    ``active=None`` defers to the per-table activity filter; any other
    value produces a constant true/false column.
    """
    if active is None:
        return active_filter(tables).label('active')
    constant = true() if active else false()
    return constant.label('active')
def share(self, trans, id, email="", use_panels=False): msg = mtype = None # Load workflow from database stored = self.get_stored_workflow(trans, id) if email: other = trans.sa_session.query(model.User) \ .filter(and_(model.User.table.c.email == email, model.User.table.c.deleted == expression.false())) \ .first() if not other: mtype = "error" msg = ("User '%s' does not exist" % escape(email)) elif other == trans.get_user(): mtype = "error" msg = ("You cannot share a workflow with yourself") elif trans.sa_session.query(model.StoredWorkflowUserShareAssociation) \ .filter_by(user=other, stored_workflow=stored).count() > 0: mtype = "error" msg = ("Workflow already shared with '%s'" % escape(email)) else: share = model.StoredWorkflowUserShareAssociation() share.stored_workflow = stored share.user = other session = trans.sa_session session.add(share) session.flush() trans.set_message("Workflow '%s' shared with user '%s'" % (escape(stored.name), escape(other.email))) return trans.response.send_redirect(url_for(controller='workflow', action='sharing', id=id)) return trans.fill_template("/ind_share_base.mako", message=msg, messagetype=mtype, item=stored, email=email, use_panels=use_panels)
def upgrade():
    """Create the initial users / canvases / collaborations tables."""
    create_table('users',
                 Column('id', Integer, primary_key=True),
                 Column('screen_name', String, nullable=False, unique=True),
                 Column('twitter_user_id', BigInteger, nullable=False),
                 Column('twitter_oauth_token', String, nullable=False),
                 Column('twitter_oauth_token_secret', String, nullable=False),
                 # Non-admin by default at the database level.
                 Column('admin', Boolean, server_default=false(), nullable=False),
                 Column('created_at', DateTime(timezone=True), nullable=False),
                 )
    create_table('canvases',
                 Column('id', Integer, primary_key=True),
                 Column('artist_id', Integer, ForeignKey('users.id'), nullable=False),
                 Column('title', UnicodeText),
                 Column('description', UnicodeText),
                 Column('strokes', JSON),
                 Column('width', Integer, nullable=False),
                 Column('height', Integer, nullable=False),
                 Column('broadcast', Boolean, nullable=False),
                 Column('replay', Boolean, nullable=False),
                 Column('created_at', DateTime(timezone=True), nullable=False),
                 )
    create_table('collaborations',
                 # Composite primary key: one row per (canvas, artist) pair.
                 Column('canvas_id', Integer, ForeignKey('canvases.id'), primary_key=True),
                 Column('artist_id', Integer, ForeignKey('users.id'), primary_key=True),
                 Column('created_at', DateTime(timezone=True), nullable=False),
                 )
def all_by_account(cls, account):
    """ Query available datasets based on dataset visibility. """
    # Public datasets are always visible.
    criteria = [cls.private == false()]
    if account is not None:
        # Raw SQL shortcut inside the OR: "1=1" is always true for admins
        # (they see everything); "1=2" is never true, so non-admins fall
        # through to the manager-membership check below.
        criteria += ["1=1" if account.admin else "1=2",
                     cls.managers.any(type(account).id == account.id)]
    q = db.session.query(cls).filter(db.or_(*criteria))
    q = q.order_by(cls.label.asc())
    return q
def get_all_sub_categories(self):
    """Return every active leaf category (no children), ordered by name."""
    leaf_categories = (self.dsession.query(Categories)
                       .filter(Categories.hasChildren == false(),
                               Categories.isActive == true())
                       .order_by(Categories.name))
    return leaf_categories.all()
def get_users_notifications(self, userid, only_unread=False, limit=None):
    """Return a user's notifications, newest underlying activity first.

    :param userid: User to fetch notifications for.
    :param only_unread: When True, skip notifications already read.
    :param limit: Optional cap on the number of rows returned.
    """
    query = Notification.query.by_user(userid)
    if only_unread:
        query = query.filter(Notification.is_read == false())
    # Sort by the creation time of the activity each notification wraps.
    query = query.join(Notification.activity)
    query = query.order_by(desc(Activity.created))
    return query.limit(limit).all()
def test_integer_column(self):
    # A valid integer term translates to a plain equality clause.
    clause = lucene_to_sqlalchemy(u'memory:1024',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, System.memory == 1024)
    # searching invalid numbers against a numeric column is just False
    clause = lucene_to_sqlalchemy(u'memory:much',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(false()))
def upgrade():
    """Create the b2share_community table."""
    op.create_table(
        'b2share_community',
        # Timestamps; values are supplied by the application layer.
        sa.Column('created', TIMESTAMP, nullable=False),
        sa.Column('updated', TIMESTAMP, nullable=False),
        sa.Column('id', UUIDType, default=uuid.uuid4, nullable=False),
        sa.Column('name', sa.String(80), unique=True, nullable=False),
        sa.Column('description', sa.String(2000), nullable=False),
        sa.Column('logo', sa.String(300), nullable=True),
        # Soft-delete flag, false by default at the database level.
        sa.Column('deleted', sa.Boolean, nullable=False,
                  server_default=expression.false()),
        sa.Column('publication_workflow', sa.String(80), nullable=False,
                  default='direct_publish'),
        # Restrict record creation to community members when true.
        sa.Column('restricted_submission', sa.Boolean, nullable=False,
                  server_default=expression.false(), default=False),
        sa.PrimaryKeyConstraint('id')
    )
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Soft-delete flag; server default keeps existing rows valid under
    # NOT NULL.
    op.add_column('vacancies',
                  sa.Column('deleted', sa.Boolean(), nullable=False,
                            server_default=sa.DefaultClause(false())))
    # NOTE(review): this bakes the wall-clock time at migration import into
    # the schema default, not row insertion time — confirm that is intended
    # (sa.func.now() would give per-insert timestamps).
    op.add_column('vacancies',
                  sa.Column('updated_at', sa.DateTime(), nullable=True,
                            server_default=sa.DefaultClause(str(datetime.datetime.now()))))
    op.add_column('vacancies',
                  sa.Column('user_id', sa.Integer(), nullable=True))
    op.create_foreign_key(None, 'vacancies', 'users', ['user_id'], ['id'])
def create_virus_checks():
    """Generate file based virus checks ruleset."""
    # Only domains that scan for viruses but not already at SMTP time.
    scanned_domains = (Session.query(Domain)
                       .filter(and_(Domain.virus_checks == true(),
                                    Domain.virus_checks_at_smtp == false()))
                       .all())
    write_ruleset('virus.checks.rules', dict(domains=scanned_domains))
    Session.close()
def get_service(service_id):
    """Return the serialized service for ``service_id``.

    Responds 400 for a malformed ID and 404 when the service is missing
    or its framework has expired.
    """
    is_valid_service_id_or_400(service_id)
    service = (Service.query
               .filter(Service.service_id == service_id)
               .filter(Service.framework.has(Framework.expired == false()))
               .first_or_404())
    return jsonify(services=service.serialize())
def test_numeric_column(self):
    # A valid decimal term translates to a plain equality clause.
    clause = lucene_to_sqlalchemy(u'weight:1.2',
                                  {'weight': LabInfo.weight}, [LabInfo.weight])
    self.assert_clause_equals(clause, LabInfo.weight == Decimal('1.2'))
    # searching invalid numbers against a numeric column is just False
    clause = lucene_to_sqlalchemy(u'weight:heavy',
                                  {'weight': LabInfo.weight}, [LabInfo.weight])
    self.assert_clause_equals(clause, and_(false()))
def test_integer_range(self):
    # A bounded range becomes two comparisons ANDed together.
    clause = lucene_to_sqlalchemy(u'memory:[1024 TO 2048]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause,
                              and_(System.memory >= 1024, System.memory <= 2048))
    # "*" means unbounded on that side; it contributes a true() placeholder.
    clause = lucene_to_sqlalchemy(u'memory:[1024 TO *]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(System.memory >= 1024, true()))
    clause = lucene_to_sqlalchemy(u'memory:[* TO 2048]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(true(), System.memory <= 2048))
    clause = lucene_to_sqlalchemy(u'memory:[* TO *]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(true(), true()))
    # Unparseable bounds collapse to an always-false clause.
    clause = lucene_to_sqlalchemy(u'memory:[fnord TO blorch]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(false(), false()))
def get_visible_items(worklist, current_user=None):
    """Return the worklist's story and task items visible to ``current_user``."""
    # Story items: join out through story permissions to evaluate visibility.
    stories = worklist.items.filter(models.WorklistItem.item_type == 'story')
    stories = stories.join(
        (models.Story, models.Story.id == models.WorklistItem.item_id))
    stories = stories.outerjoin(models.story_permissions,
                                models.Permission,
                                models.user_permissions,
                                models.User)
    if current_user is not None:
        # Public stories, plus private ones the user is permitted on.
        stories = stories.filter(
            or_(
                and_(
                    models.User.id == current_user,
                    models.Story.private == true()
                ),
                models.Story.private == false()
            )
        )
    else:
        stories = stories.filter(models.Story.private == false())

    # Task items: visibility is taken from the task's parent story.
    tasks = worklist.items.filter(models.WorklistItem.item_type == 'task')
    tasks = tasks.join(
        (models.Task, models.Task.id == models.WorklistItem.item_id))
    tasks = tasks.outerjoin(models.Story,
                            models.story_permissions,
                            models.Permission,
                            models.user_permissions,
                            models.User)
    if current_user is not None:
        tasks = tasks.filter(
            or_(
                and_(
                    models.User.id == current_user,
                    models.Story.private == true()
                ),
                models.Story.private == false()
            )
        )
    else:
        tasks = tasks.filter(models.Story.private == false())

    # Merge both item kinds back into one result set.
    return stories.union(tasks)
def filter_private_boards(query, current_user):
    """Takes a query and filters out the boards that the user should not see.

    :param query: The query to be filtered.
    :param current_user: The ID of the user requesting the result.
    """
    # Alias each association so this filter composes with other joins the
    # caller may have added.
    board_permissions = aliased(models.board_permissions,
                                name="board_boardpermissions")
    permissions = aliased(models.Permission, name="board_permissions")
    user_permissions = aliased(models.user_permissions,
                               name="board_userpermissions")
    users = aliased(models.User, name="board_users")
    query = (query
             .outerjoin((board_permissions,
                         models.Board.id == board_permissions.c.board_id))
             .outerjoin((permissions,
                         board_permissions.c.permission_id == permissions.id))
             .outerjoin((user_permissions,
                         permissions.id == user_permissions.c.permission_id))
             .outerjoin((users, user_permissions.c.user_id == users.id)))
    if current_user:
        # Visible: public boards, private boards the user is permitted on,
        # or rows with no board at all (outer-join miss keeps Board.id NULL).
        query = query.filter(
            or_(
                and_(
                    users.id == current_user,
                    models.Board.private == true()
                ),
                models.Board.private == false(),
                models.Board.id.is_(None)
            )
        )
    else:
        query = query.filter(
            or_(
                models.Board.private == false(),
                models.Board.id.is_(None)
            )
        )
    return query
def test_datetime_column(self):
    # A bare date expands to the whole day: [00:00, 23:59:59].
    clause = lucene_to_sqlalchemy(u'date_added:2014-09-08',
                                  {'date_added': System.date_added},
                                  [System.date_added])
    self.assert_clause_equals(
        clause,
        and_(System.date_added >= datetime.datetime(2014, 9, 8, 0, 0),
             System.date_added <= datetime.datetime(2014, 9, 8, 23, 59, 59)))
    # searching invalid dates against a datetime column is just False
    clause = lucene_to_sqlalchemy(u'date_added:fnord',
                                  {'date_added': System.date_added},
                                  [System.date_added])
    self.assert_clause_equals(clause, and_(false()))
def col_download_unread(cols_selected):
    """Download all unread submissions from all selected sources.

    :param cols_selected: Iterable of source filesystem IDs.
    :return: A flask response — either the bundled download or a redirect
             back to the index when nothing is unread.
    """
    submissions = []
    for filesystem_id in cols_selected:
        # Renamed from ``id``, which shadowed the builtin.
        source_id = Source.query.filter(
            Source.filesystem_id == filesystem_id).one().id
        submissions += Submission.query.filter(
            Submission.downloaded == false(),
            Submission.source_id == source_id).all()
    if not submissions:
        flash("No unread submissions in collections selected!", "error")
        return redirect(url_for('index'))
    return download("unread", submissions)
def test_datetime_range(self):
    # A bounded date range expands each end to day precision.
    clause = lucene_to_sqlalchemy(u'date_added:[2014-08-01 TO 2014-08-31]',
                                  {'date_added': System.date_added},
                                  [System.date_added])
    self.assert_clause_equals(
        clause,
        and_(System.date_added >= datetime.datetime(2014, 8, 1, 0, 0),
             System.date_added <= datetime.datetime(2014, 8, 31, 23, 59, 59)))
    # "*" means unbounded on that side; it contributes a true() placeholder.
    clause = lucene_to_sqlalchemy(u'date_added:[2014-08-01 TO *]',
                                  {'date_added': System.date_added},
                                  [System.date_added])
    self.assert_clause_equals(
        clause,
        and_(System.date_added >= datetime.datetime(2014, 8, 1, 0, 0), true()))
    clause = lucene_to_sqlalchemy(u'date_added:[* TO 2014-08-31]',
                                  {'date_added': System.date_added},
                                  [System.date_added])
    self.assert_clause_equals(
        clause,
        and_(true(),
             System.date_added <= datetime.datetime(2014, 8, 31, 23, 59, 59)))
    clause = lucene_to_sqlalchemy(u'date_added:[* TO *]',
                                  {'date_added': System.date_added},
                                  [System.date_added])
    self.assert_clause_equals(clause, and_(true(), true()))
    # Unparseable bounds collapse to an always-false clause.
    clause = lucene_to_sqlalchemy(u'date_added:[fnord TO blorch]',
                                  {'date_added': System.date_added},
                                  [System.date_added])
    self.assert_clause_equals(clause, and_(false(), false()))
class Community(db.Model, Timestamp):
    """Represent a community metadata inside the SQL database.

    Additionally it contains two columns ``created`` and ``updated`` with
    automatically managed timestamps.
    """

    __tablename__ = 'b2share_community'

    id = db.Column(
        UUIDType,
        primary_key=True,
        default=uuid.uuid4,
    )
    """Community identifier.""" # noqa

    # community name
    name = db.Column(db.String(80), unique=True, nullable=False)
    # community description
    description = db.Column(db.String(2000), nullable=False)
    # link to the logo
    logo = db.Column(db.String(300), nullable=True)
    # Flag marking the community as deleted (soft delete; rows are kept)
    deleted = db.Column(db.Boolean, nullable=False,
                        server_default=expression.false())
    # Publication workflow used in this community
    publication_workflow = db.Column(db.String(80), nullable=False,
                                     default='direct_publish')
    # Restrict record creation (to community members when true)
    restricted_submission = db.Column(db.Boolean, nullable=False,
                                      server_default=expression.false(),
                                      default=False)
def get_all_certs_attached_to_endpoint_without_autorotate():
    """
    Retrieves all certificates that are attached to an endpoint, but that do not have autorotate enabled.

    :return: list of certificates attached to an endpoint without autorotate
    """
    # Built incrementally instead of as one chained expression.
    query = Certificate.query.filter(Certificate.endpoints.any())
    query = query.filter(Certificate.rotation == false())
    # Skip certificates that have already expired.
    query = query.filter(Certificate.not_after >= arrow.now())
    # Skip certificates that were already replaced.
    query = query.filter(not_(Certificate.replaced.any()))
    return query.all()  # noqa
def downgrade():
    """Rename needsToFillCulturalSurvey back to hasFilledCulturalSurvey."""
    op.alter_column(
        "user",
        "needsToFillCulturalSurvey",
        new_column_name="hasFilledCulturalSurvey",
        server_default=expression.false(),
    )
    # NOTE(review): marks every user as having filled the survey; the
    # rename alone would leave inverted semantics, but this backfill is
    # lossy — confirm it mirrors the upgrade path.
    op.execute(
        """ UPDATE "user" SET "hasFilledCulturalSurvey" = True """
    )
def common_filters(allow_show_archived=False):
    """Build the SQLAlchemy filter applied to all book listings.

    Combines the current user's archive, language, allowed/denied tag and
    restricted custom-column settings into one AND clause.
    """
    if not allow_show_archived:
        archived_books = (ub.session.query(ub.ArchivedBook).filter(
            ub.ArchivedBook.user_id == int(current_user.id)).filter(
            ub.ArchivedBook.is_archived == True).all())
        archived_book_ids = [archived_book.book_id
                             for archived_book in archived_books]
        # Hide every book this user has archived.
        archived_filter = db.Books.id.notin_(archived_book_ids)
    else:
        archived_filter = true()
    if current_user.filter_language() != "all":
        lang_filter = db.Books.languages.any(
            db.Languages.lang_code == current_user.filter_language())
    else:
        lang_filter = true()
    negtags_list = current_user.list_denied_tags()
    postags_list = current_user.list_allowed_tags()
    # A list of [''] means "no restriction configured" for that direction.
    neg_content_tags_filter = false() if negtags_list == [''] else \
        db.Books.tags.any(db.Tags.name.in_(negtags_list))
    pos_content_tags_filter = true() if postags_list == [''] else \
        db.Books.tags.any(db.Tags.name.in_(postags_list))
    if config.config_restricted_column:
        # Same allow/deny logic applied to a configurable custom column.
        pos_cc_list = current_user.allowed_column_value.split(',')
        pos_content_cc_filter = true() if pos_cc_list == [''] else \
            getattr(db.Books, 'custom_column_' + str(config.config_restricted_column)).\
            any(db.cc_classes[config.config_restricted_column].value.in_(pos_cc_list))
        neg_cc_list = current_user.denied_column_value.split(',')
        neg_content_cc_filter = false() if neg_cc_list == [''] else \
            getattr(db.Books, 'custom_column_' + str(config.config_restricted_column)).\
            any(db.cc_classes[config.config_restricted_column].value.in_(neg_cc_list))
    else:
        pos_content_cc_filter = true()
        neg_content_cc_filter = false()
    # Denied filters are negated (~) so matching books are excluded.
    return and_(lang_filter, pos_content_tags_filter,
                ~neg_content_tags_filter,
                pos_content_cc_filter, ~neg_content_cc_filter,
                archived_filter)
def get_all_certs_attached_to_destination_without_autorotate(plugin_name=None):
    """
    Retrieves all certificates that are attached to a destination, but that do not have autorotate enabled.

    :param plugin_name: Optional destination plugin name to query. Queries certificates attached to any
                        destination if not provided.
    :return: list of certificates attached to a destination without autorotate
    """
    # The previous version duplicated the entire filter chain in both
    # branches; only the destination predicate actually differs.
    if plugin_name:
        destination_filter = Certificate.destinations.any(plugin_name=plugin_name)
    else:
        destination_filter = Certificate.destinations.any()
    return (
        Certificate.query.filter(destination_filter)
        .filter(Certificate.rotation == false())
        .filter(Certificate.revoked == false())
        # Skip expired and already-replaced certificates.
        .filter(Certificate.not_after >= arrow.now())
        .filter(not_(Certificate.replaced.any()))
        .all()  # noqa
    )
class Menu(db.Model):
    """A named menu that groups daily menus and can be marked favourite."""

    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=True)
    # One menu -> many daily menus (reverse accessor: DailyMenu.menu).
    daily_menus = db.relationship('DailyMenu', backref='menu', lazy='select')
    # False by default at the database level.
    favourite = db.Column(db.Boolean, server_default=expression.false())
    # Set by the database at insert time.
    created_at = db.Column(db.DateTime(timezone=True), server_default=func.now())

    def __init__(self, name=None, favourite=False):
        self.name = name
        self.favourite = favourite

    def __repr__(self):
        return 'Menu {0}, name {1}, created at {2}'.format(self.id, self.name, self.created_at)
class User(db.Model):
    """ Stores base client data """
    __tablename__ = "users"

    id = Column(Integer, primary_key=True, unique=True)
    # Telegram account identifier; one row per Telegram user.
    telegram_id = Column(Integer, unique=True, nullable=False)
    # Both flags default to false at the database level.
    is_superuser = Column(Boolean, server_default=expression.false(), nullable=False)
    is_active = Column(Boolean, server_default=expression.false(), nullable=False)
    created_dt = Column(DateTime, server_default=func.now(), nullable=False)
    # NOTE(review): no onupdate here — updated_dt only gets the insert-time
    # default unless callers update it explicitly; confirm intended.
    updated_dt = Column(DateTime, server_default=func.now(), nullable=False)

    def __repr__(self):
        return (
            f"<User (telegram_id={self.telegram_id}, is_active={self.is_active}, "
            f"updated_dt={self.updated_dt.strftime(TIME_FORMAT)})> ")
class Utterance(Base, BasicMixin):
    """A piece of utterance text linked to nodes via node_utterance rows."""

    __tablename__ = "utterance"

    # Indexed for fast text lookup.
    utterance_text = Column(String, index=True)
    node_utterances = relationship("NodeUtterance", lazy="select")
    # Many-to-many to Node through the node_utterance association table.
    nodes = relationship("Node", secondary="node_utterance", lazy="select")
    # Default false both at ORM level and in the database schema.
    amazon_anonymous = Column(
        Boolean,
        server_default=expression.false(),
        default=False,
        nullable=False,
        index=True,
    )
    # Nullable tri-state: NULL means "not yet checked".
    is_spellchecked = Column(Boolean)
class Puppet(Base): query = None # type: Query __tablename__ = "puppet" id = Column(Integer, primary_key=True) # type: TelegramID custom_mxid = Column(String, nullable=True) # type: Optional[MatrixUserID] access_token = Column(String, nullable=True) displayname = Column(String, nullable=True) displayname_source = Column(Integer, nullable=True) # type: Optional[TelegramID] username = Column(String, nullable=True) photo_id = Column(String, nullable=True) is_bot = Column(Boolean, nullable=True) matrix_registered = Column(Boolean, nullable=False, server_default=expression.false())
def get_resource_types(meta, project_id):
    """Return a list of all resource types"""
    # NOTE(review): ``project_id`` is unused; the parameter is kept for
    # interface compatibility with existing callers.
    quota_usages_t = Table('quota_usages', meta, autoload=True)
    resource_types_q = select(
        columns=[quota_usages_t.c.resource, func.count()],
        whereclause=quota_usages_t.c.deleted == false(),
        group_by=quota_usages_t.c.resource)
    # Collect the distinct resource names; the count column is discarded
    # (comprehension replaces the previous manual append loop).
    return [resource for (resource, _) in resource_types_q.execute()]
def upgrade():
    """Add two NOT NULL boolean flags to the provider table."""
    # Server defaults keep existing rows valid while the columns are added
    # as NOT NULL.
    op.add_column(
        'provider',
        sa.Column('enabledForPro', sa.Boolean(), nullable=False,
                  server_default=expression.false()))
    op.add_column(
        'provider',
        sa.Column('requireProviderIdentifier', sa.Boolean(), nullable=False,
                  server_default=expression.true()))
class Task(Base):
    """A user-owned to-do item."""

    __tablename__ = "tasks"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String(100), unique=True, index=True, nullable=False)
    comment = Column(String(500), index=True)
    # False both at ORM level and as a database default for existing rows.
    done = Column(Boolean, server_default=expression.false(), default=False,
                  nullable=False)
    owner_id = Column(Integer, ForeignKey("users.id"), nullable=False)
    # Paired with User.tasks on the other side.
    owner = relationship("User", back_populates="tasks")
def test_integer_range(self):
    # A bounded range becomes two comparisons ANDed together.
    clause = lucene_to_sqlalchemy(u'memory:[1024 TO 2048]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(
        clause, and_(System.memory >= 1024, System.memory <= 2048))
    # "*" means unbounded on that side; it contributes a true() placeholder.
    clause = lucene_to_sqlalchemy(u'memory:[1024 TO *]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(System.memory >= 1024, true()))
    clause = lucene_to_sqlalchemy(u'memory:[* TO 2048]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(true(), System.memory <= 2048))
    clause = lucene_to_sqlalchemy(u'memory:[* TO *]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(true(), true()))
    # Unparseable bounds collapse to an always-false clause.
    clause = lucene_to_sqlalchemy(u'memory:[fnord TO blorch]',
                                  {'memory': System.memory}, [System.memory])
    self.assert_clause_equals(clause, and_(false(), false()))
class Puppet(Base):
    """Bridge puppet row mirroring a single Telegram user."""

    __tablename__ = "puppet"

    # Telegram user ID this puppet represents.
    id: TelegramID = Column(Integer, primary_key=True)
    # Matrix user ID when double-puppeting is configured.
    custom_mxid: UserID = Column(String, nullable=True)
    access_token: str = Column(String, nullable=True)
    next_batch: SyncToken = Column(String, nullable=True)
    base_url: str = Column(Text, nullable=True)
    displayname: str = Column(String, nullable=True)
    displayname_source: TelegramID = Column(Integer, nullable=True)
    username: str = Column(String, nullable=True)
    photo_id: str = Column(String, nullable=True)
    is_bot: bool = Column(Boolean, nullable=True)
    # Both flags default to false at the database level.
    matrix_registered: bool = Column(Boolean, nullable=False,
                                     server_default=expression.false())
    disable_updates: bool = Column(Boolean, nullable=False,
                                   server_default=expression.false())

    @classmethod
    def all_with_custom_mxid(cls) -> Iterable['Puppet']:
        # Puppets with a custom mxid configured (double-puppeted users).
        yield from cls._select_all(cls.c.custom_mxid != None)

    @classmethod
    def get_by_tgid(cls, tgid: TelegramID) -> Optional['Puppet']:
        return cls._select_one_or_none(cls.c.id == tgid)

    @classmethod
    def get_by_custom_mxid(cls, mxid: UserID) -> Optional['Puppet']:
        return cls._select_one_or_none(cls.c.custom_mxid == mxid)

    @classmethod
    def get_by_username(cls, username: str) -> Optional['Puppet']:
        # Case-insensitive: the stored username is lowercased for comparison.
        return cls._select_one_or_none(func.lower(cls.c.username) == username)

    @classmethod
    def get_by_displayname(cls, displayname: str) -> Optional['Puppet']:
        return cls._select_one_or_none(cls.c.displayname == displayname)
def get_top_existing(limit=39):
    """Return up to ``limit`` places with the densest item counts.

    Only places in an active state, with positive area, not hidden from
    the index, with more than four candidates and no changesets yet,
    ordered by items per unit area (descending).
    """
    cols = [Place.place_id, Place.display_name, Place.area, Place.state,
            Place.candidate_count, Place.item_count]
    # Count of changesets per place; HAVING c == 0 keeps untouched places.
    c = func.count(Changeset.place_id)
    q = (Place.query.filter(
            Place.state.in_(['ready', 'load_isa', 'refresh']),
            Place.area > 0,
            Place.index_hide == false(),
            Place.candidate_count > 4)
         .options(load_only(*cols))
         .outerjoin(Changeset)
         .group_by(*cols)
         .having(c == 0)
         .order_by((Place.item_count / Place.area).desc()))
    # Query slicing emits LIMIT in SQL rather than fetching everything.
    return q[:limit]
def passwdreset(self):
    """Render the password-reset request page and, on a valid POST, email
    the requesting user a reset link.

    Only local accounts get a token; external accounts are told to use
    their own systems.  Always falls through to re-rendering the login page.
    """
    c.came_from = '/'
    c.login_counter = 0
    c.form = ResetPwForm(request.POST, csrf_context=session)
    if request.method == 'POST' and c.form.validate():
        # Derive a one-off token from the email plus the current timestamp.
        # NOTE(review): hashlib.sha1() requires bytes on Python 3 -- as
        # written this only runs on Python 2; confirm target interpreter.
        key_seed = '%s%s' % (c.form.email.data, arrow.utcnow().ctime())
        token = hashlib.sha1(key_seed).hexdigest()
        # NOTE(review): .one() raises if the address is unknown -- presumably
        # surfaces as a 500; confirm intended handling of unknown emails.
        user = Session.query(User)\
            .filter(User.email == c.form.email.data)\
            .one()
        if not user.local:
            # External accounts: no local password to reset.
            # (NOTE(review): 'adminstrator' typo in the user-facing string.)
            flash(
                _('The account %s is an external account, use your'
                  ' External systems to change the password. '
                  'Contact your system adminstrator if you do not '
                  'know which external systems you authenticate to')
                % user.email)
            redirect(url('/accounts/login'))
        # Check for an existing, still-unused token for this user.
        rtoken = Session\
            .query(ResetToken.used)\
            .filter(ResetToken.used == false())\
            .filter(ResetToken.user_id == user.id)\
            .all()
        if not rtoken:
            rtoken = ResetToken(token, user.id)
            Session.add(rtoken)
            Session.commit()
        # NOTE(review): when an unused token already exists, the freshly
        # generated `token` above is never persisted, yet reset_url below
        # still embeds it -- that emailed link can never validate. Confirm
        # whether the existing token should be reused instead.
        host = URL_PREFIX_RE.sub('', request.host_url)
        c.username = user.username
        c.firstname = user.firstname or user.username
        c.reset_url = url('accounts-pw-token-reset', token=token, host=host)
        # Render the email body and send it through the configured mailer.
        text = self.render('/email/pwreset.txt')
        mailer = Mailer(get_conf_options(config))
        mailer.start()
        sdrnme = config.get('baruwa.custom.name', 'Baruwa')
        email = Msg(author=[(sdrnme, config.get('baruwa.reports.sender'))],
                    to=[('', c.form.email.data)],
                    subject=_("[%s] Password reset request") % sdrnme)
        email.plain = text
        mailer.send(email)
        mailer.stop()
        flash(
            _('An email has been sent to the address provided, '
              'please follow the instructions in that email to '
              'reset your password.'))
        redirect(url('/accounts/login'))
    return self.render('/accounts/login.html')
class GiftCode(Base):
    """A redeemable code that grants ``time`` of paid service to a user
    (see :meth:`use`)."""
    __tablename__ = 'giftcodes'

    id = Column(Integer, primary_key=True, nullable=False, doc='ID')
    code = Column(String(16), unique=True, nullable=False, doc='Code',
                  default=random_gift_code)
    time = Column(Interval, default=timedelta(days=30), nullable=False,
                  doc='Time')
    # Codes flagged free_only may not be redeemed by users who already paid.
    free_only = Column(Boolean, default=False, nullable=False,
                       server_default=false())
    # ID of the user who redeemed the code; NULL while unused.
    used = Column(ForeignKey('users.id'), nullable=True)

    def __init__(self, time=None, code=None, used=None):
        """Create a gift code.

        :param time: service time granted (defaults to 30 days)
        :param code: explicit code string; random when omitted
        :param used: user (or user id) the code is pre-assigned to, if any
        """
        # Accept either a User instance or a raw user id.
        if isinstance(used, User):
            used = used.id
        self.time = time or timedelta(days=30)
        self.used = used
        self.code = code or random_gift_code()

    @property
    def username_if_used(self):
        """Username of the redeeming user, or False while unused.

        NOTE(review): relies on a ``user`` relationship not defined in this
        block -- confirm it maps the ``used`` foreign key.
        """
        if self.used and self.user:
            return self.user.username
        else:
            return False

    def use(self, user, reuse=False):
        """Use this GiftCode on user

        :param user: User
        :param reuse: bool allow to reuse a code?
        :raises AlreadyUsedGiftCode: when already redeemed (unless ``reuse``),
            or when a free-only code is applied to a paying user
        """
        if self.used and not reuse:
            raise AlreadyUsedGiftCode()
        if self.free_only and user.is_paid:
            raise AlreadyUsedGiftCode()
        self.used = user.id
        user.add_paid_time(self.time)

    def __str__(self):
        # The string form of a gift code is the code itself.
        return self.code
class Player(Base):
    """Declarative model for a game player: identity/placement, activity
    tracking, moderation flags, per-turn resources and RPG-style stats."""
    __tablename__ = 'players'

    # Identity / placement
    id = Column(Integer, primary_key=True)
    username = Column(String(20), unique=True, nullable=False)
    squad_type = Column(String(20), nullable=False)
    team = Column(String(20), nullable=False)
    troops = Column(Integer, default=50)
    location = Column(String(20), nullable=False)

    # Activity tracking; last_active refreshes on every UPDATE.
    is_active = Column(Boolean, server_default=expression.true())
    last_active = Column(DateTime, server_default=func.now(),
                         onupdate=func.current_timestamp())
    is_new = Column(Boolean, server_default=expression.true())
    uses_ip = Column(Boolean, server_default=expression.false())

    # Moderation
    banned = Column(Boolean, server_default=expression.false())
    banned_by = Column(String(20))
    time_banned = Column(DateTime)
    reason_banned = Column(String(1000))

    # Per-turn resources
    actions = Column(Integer, default=10)
    ammo = Column(Integer, default=200)
    morale = Column(Integer, default=100)
    dug_in = Column(Integer, default=0)

    # Progression / stats
    level = Column(Integer, default=1)
    experience = Column(Integer, default=0)
    management = Column(Integer, default=1)
    # NOTE(review): the comments on attack/defense below look swapped
    # (attack says "better defense" and vice versa) -- confirm intent.
    attack = Column(Integer, default=1)  # For better defense
    defense = Column(Integer, default=1)  # For better attack
    charisma = Column(Integer, default=1)  # For more troops gained per recruit
    rallying = Column(Integer, default=1)  # For increasing morale
    pathfinder = Column(
        Integer, default=1)  # For the amount of action you use per movement
    logistics = Column(Integer, default=1)  # For less ammo used per attack
    development = Column(Integer, default=1)  # For less actions used per upgrade
def col_download_unread(cols_selected: List[str]) -> werkzeug.Response:
    """Download all unread submissions from all selected sources.

    :param cols_selected: filesystem IDs of the selected source collections
    :returns: a zip download of unread submissions, or a redirect back to
        the index with a flashed error when nothing is unread
    """
    submissions = []  # type: List[Union[Source, Submission]]
    for filesystem_id in cols_selected:
        # Resolve the (non-deleted) source; .one() raises if it is missing.
        source_id = Source.query.filter(Source.filesystem_id == filesystem_id) \
                        .filter_by(deleted_at=None).one().id
        submissions += Submission.query.filter(
            Submission.downloaded == false(),
            Submission.source_id == source_id).all()
    if not submissions:
        flash(gettext("No unread submissions in selected collections."),
              "error")
        return redirect(url_for('main.index'))
    return download("unread", submissions)
def fill_indexpage(page, database, db_filter, order, *join):
    """Assemble one page of an index view.

    :param page: 1-based page number
    :param database: model class to query
    :param db_filter: extra filter clause applied on top of common_filters()
    :param order: iterable of ORDER BY clauses
    :param join: optional relationship targets for an outer join
    :returns: tuple of (entries, random_books, pagination)
    """
    # Sidebar of random books, only when the user has the feature enabled;
    # false() keeps the template condition cheap otherwise.
    if current_user.show_detail_random():
        randm = db.session.query(db.Books).filter(common_filters())\
            .order_by(func.random()).limit(config.config_random_books)
    else:
        randm = false()
    off = int(int(config.config_books_per_page) * (page - 1))
    # Count in SQL instead of fetching every matching row just to len() it.
    pagination = Pagination(page, config.config_books_per_page,
                            db.session.query(database)
                            .filter(db_filter).filter(common_filters()).count())
    entries = db.session.query(database).join(*join, isouter=True)\
        .filter(db_filter).filter(common_filters())\
        .order_by(*order).offset(off).limit(config.config_books_per_page).all()
    # Write the normalized book back into the list; the original rebound only
    # the loop variable, discarding order_authors()'s return value.
    for index, book in enumerate(entries):
        entries[index] = order_authors(book)
    return entries, randm, pagination
def translate_isa(expression: Expression, session: Session, model, get_model):
    """Translate an oso ``Isa`` expression into a constant SQL boolean.

    Walks the dotted path on the left-hand side through the model's
    relationships, then checks whether the resulting mapped class is a
    subclass of the constraint type named on the right-hand side.
    """
    assert expression.operator == "Isa"
    left, right = expression.args

    path = dot_path(left)
    if not path:
        # Bare reference: must be the implicit `_this` variable.
        assert left == Variable("_this")
    else:
        # Follow each relationship hop to the target model.
        for field_name in path:
            _, model, __ = get_relationship(model, field_name)

    assert not right.fields, "Unexpected fields in isa expression"
    constraint_type = get_model(right.tag)
    model_type = inspect(model, raiseerr=True).class_

    is_subclass = issubclass(model_type, constraint_type)
    return sql.true() if is_subclass else sql.false()
class Item(db.Model):
    """An entry belonging to a list (foreign key to ``list.id``)."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100), nullable=False)
    description = db.Column(db.Text, nullable=True)
    # Client-side default; datetime.now yields naive local time.
    # NOTE(review): confirm whether UTC (datetime.utcnow) was intended.
    date_posted = db.Column(db.DateTime(), nullable=False,
                            default=datetime.now)
    # Database-level FALSE default keeps rows inserted outside the ORM
    # consistent with ORM-created ones.
    favorite = db.Column(db.Boolean, nullable=False,
                         server_default=expression.false())
    list_id = db.Column(db.Integer, db.ForeignKey('list.id'), nullable=False)

    def __repr__(self):
        """Debug representation echoing the key display fields."""
        return f"<Item('{self.title}', '{self.date_posted}', '{self.favorite}', '{self.list_id}')>"
class Channel(TimedBaseModel):
    """Declarative model for a publishing channel: identity, publication
    counters and the daily posting time window."""
    __tablename__ = "channels"

    id = db.Column(db.Integer, primary_key=True, index=True, unique=True)
    # Chat identifier stored as text.  NOTE(review): presumably a Telegram
    # chat id -- confirm against callers.
    chat_id = db.Column(db.String)
    name = db.Column(db.String)
    # Lifetime vs. per-day publication counters.
    publications_counter_total = db.Column(db.Integer, default=0)
    publications_counter_day = db.Column(db.Integer, default=0)
    last_publication_datetime = db.Column(db.DateTime,
                                          server_default=db.func.now())
    count_of_publications = db.Column(db.Integer, default=1)
    # Daily posting window boundaries (time-of-day only).
    start_time_publications = db.Column(db.Time)
    end_time_publications = db.Column(db.Time)
    # Disabled by default at the database level.
    is_enable = db.Column(db.Boolean, server_default=expression.false())
class ActionLog(MailSyncBase):
    """Work-queue row recording a named ``action`` against a generic
    (``table_name``, ``record_id``) pair within a namespace; ``executed``
    presumably flips to True once the action has been carried out."""
    # STOPSHIP(emfree) should we set ondelete='CASCADE' here?
    namespace_id = Column(ForeignKey(Namespace.id), nullable=False,
                          index=True)
    # The join additionally requires the namespace not to be soft-deleted.
    namespace = relationship(
        'Namespace',
        primaryjoin='and_(ActionLog.namespace_id==Namespace.id, '
                    'Namespace.deleted_at.is_(None))')

    # Action name plus the row it targets (generic table/id pair, so any
    # table can be referenced without a real foreign key).
    action = Column(Text(40), nullable=False)
    record_id = Column(Integer, nullable=False)
    table_name = Column(Text(40), nullable=False)
    # Database-level FALSE default: new log entries start unexecuted.
    executed = Column(Boolean, server_default=false(), nullable=False)

    extra_args = Column(JSON, nullable=True)
def next_dagruns_to_examine(
    cls,
    state: DagRunState,
    session: Session,
    max_number: Optional[int] = None,
):
    """
    Return the next DagRuns that the scheduler should attempt to schedule.

    This will return zero or more DagRun rows that are row-level-locked with a "SELECT ... FOR UPDATE"
    query, you should ensure that any scheduling decisions are made in a single transaction -- as soon as the
    transaction is committed it will be unlocked.

    :param state: only runs currently in this state are considered
    :param session: database session used for the locked query
    :param max_number: cap on rows returned; defaults to
        ``cls.DEFAULT_DAGRUNS_TO_EXAMINE``
    :rtype: list[airflow.models.DagRun]
    """
    # Imported here to avoid a circular import with airflow.models.dag.
    from airflow.models.dag import DagModel

    if max_number is None:
        max_number = cls.DEFAULT_DAGRUNS_TO_EXAMINE

    # TODO: Bake this query, it is run _A lot_
    # Candidate runs: in the requested state, not backfills, and belonging
    # to DAGs that are active and not paused.
    query = (session.query(cls).filter(
        cls.state == state,
        cls.run_type != DagRunType.BACKFILL_JOB).join(
            DagModel, DagModel.dag_id == cls.dag_id).filter(
                DagModel.is_paused == false(),
                DagModel.is_active == true()))
    if state == State.QUEUED:
        # For dag runs in the queued state, we check if they have reached the max_active_runs limit
        # and if so we drop them
        running_drs = (session.query(
            DagRun.dag_id,
            func.count(DagRun.state).label('num_running')).filter(
                DagRun.state == DagRunState.RUNNING).group_by(
                    DagRun.dag_id).subquery())
        # coalesce() treats DAGs with no running runs as zero.
        query = query.outerjoin(
            running_drs, running_drs.c.dag_id == DagRun.dag_id).filter(
                func.coalesce(running_drs.c.num_running, 0) <
                DagModel.max_active_runs)
    # Runs never examined (NULL decision) come first, then oldest first.
    query = query.order_by(
        nulls_first(cls.last_scheduling_decision, session=session),
        cls.execution_date,
    )

    if not settings.ALLOW_FUTURE_EXEC_DATES:
        query = query.filter(DagRun.execution_date <= func.now())

    # Row-lock the selected runs (skipping rows locked by other schedulers
    # where the database supports it) so concurrent schedulers don't race.
    return with_row_locks(query.limit(max_number), of=cls, session=session,
                          **skip_locked(session=session))
class Note(Base):
    """Free-text note owned by a user, linked many-to-many to tweets and
    categories via association tables."""
    __tablename__ = 'note'

    id = Column(Integer, primary_key=True)
    text = Column(String(10000))
    # Presumably flags notes whose text originated from a tweet -- confirm
    # against callers.  Database-level FALSE default.
    is_tweet = Column(Boolean, server_default=expression.false())
    # Timestamps are maintained by the database/ORM, not by callers.
    created_at = Column(DateTime, server_default=func.now())
    updated_at = Column(DateTime, server_default=func.now(),
                        onupdate=func.now())
    # Owning user; notes are removed with their user (ON DELETE CASCADE).
    created_by_id = Column(BigInteger,
                           ForeignKey('user.id', ondelete='CASCADE'))

    # NOTE(review): the singular backref names 'tweet'/'category' read oddly
    # for many-to-many collections -- confirm nothing depends on them before
    # any rename.
    tweets = relationship(Tweet, secondary=tweet_note_assc,
                          backref=backref('tweet', lazy='dynamic'))
    categories = relationship(Category, secondary=category_note_assc,
                              backref=backref('category', lazy='dynamic'))
class Queue(db.Model):
    """Queue Model

    Extends:
        db.Model

    Attributes:
        id (int): Primary Key.
        ticket (str): The ticket assigned to the request.
        idempotency_key (str): An idempotency key sent along with the request.
        request (str): The request type.
        initiated (datetime): The timestamp of the request.
        execution_time (float): The execution time in seconds.
        completed (bool): Whether the process has been completed.
        success (bool): The status of the process.
        error_msg (str): The error message in case of failure.
        result (str): The path of the result.
    """
    __tablename__ = 'ipr_queue'

    id = db.Column(db.BigInteger(), primary_key=True)
    # Random unique ticket: md5 hex digest of a fresh UUID4.
    ticket = db.Column(
        db.String(511),
        default=lambda: md5(str(uuid.uuid4()).encode()).hexdigest(),
        nullable=False,
        unique=True)
    idempotency_key = db.Column(db.String(511), nullable=True, unique=True)
    request = db.Column(db.String(511), nullable=False)
    # Database-side timestamp so it is set even for non-ORM inserts.
    initiated = db.Column(db.DateTime(timezone=True),
                          server_default=func.now(),
                          nullable=False)
    execution_time = db.Column(db.Float(), nullable=True)
    # New rows start uncompleted at the database level.
    completed = db.Column(db.Boolean(), server_default=expression.false(),
                          nullable=False)
    # Nullable tri-state: None while pending, then True/False.
    success = db.Column(db.Boolean(), nullable=True)
    error_msg = db.Column(db.Text(), nullable=True)
    result = db.Column(db.Text(), nullable=True)

    def __iter__(self):
        """Yield (column_name, value) pairs; this is what makes
        ``dict(queue)`` work (see :meth:`get`)."""
        for key in [
                'ticket', 'idempotency_key', 'request', 'initiated',
                'execution_time', 'completed', 'success', 'error_msg',
                'result'
        ]:
            yield (key, getattr(self, key))

    def get(self, **kwargs):
        """Return the first row matching ``kwargs`` as a plain dict,
        or None when no row matches."""
        queue = self.query.filter_by(**kwargs).first()
        if queue is None:
            return None
        return dict(queue)
async def disable_key(kid: int) -> bool:
    """Mark the TrxKey with id ``kid`` as disabled and persist the change.

    :param kid: primary key of the TrxKey row
    :returns: True when the key exists and the commit succeeds, else False
    """
    key = session.query(TrxKey).filter(TrxKey.id == kid).one_or_none()
    if key is None:
        # The original fell off the end here, implicitly returning None
        # despite the declared bool return -- make the miss explicit.
        return False
    # Assign the SQL FALSE expression; SQLAlchemy renders it in the UPDATE.
    key.status = false()
    session.add(key)
    try:
        session.commit()
        # (The original's post-commit flush() was a no-op and is dropped.)
        return True
    except exc.SQLAlchemyError as err:
        # A failed commit leaves the session in an aborted state; roll back
        # so later operations on the shared session can proceed.
        session.rollback()
        print(err)
        return False