def get_all_users(when):
    hour = when.hour
    return Session.query(User).filter(
        User.send_passwords_periodically == true(),
        User.email_verified == true(),
        extract('hour', User.creation) == hour,
    ).order_by(User.creation)

def filter_private_stories(query, current_user, story_model=models.Story):
    """Takes a query and filters out stories the user shouldn't see.

    :param query: The query to be filtered.
    :param current_user: The ID of the user requesting the result.
    :param story_model: The database model used for stories in the query.
    """
    # First filter based on users with permissions set directly
    query = query.outerjoin(models.story_permissions,
                            models.Permission,
                            models.user_permissions,
                            models.User)
    if current_user:
        visible_to_users = query.filter(
            or_(
                and_(
                    models.User.id == current_user,
                    story_model.private == true()
                ),
                story_model.private == false(),
                story_model.id.is_(None)
            )
        )
    else:
        visible_to_users = query.filter(
            or_(
                story_model.private == false(),
                story_model.id.is_(None)
            )
        )

    # Now filter based on membership of teams with permissions
    users = aliased(models.User, name="story_users")
    query = query.outerjoin(models.team_permissions,
                            models.Team,
                            models.team_membership,
                            (users,
                             users.id == models.team_membership.c.user_id))
    if current_user:
        visible_to_teams = query.filter(
            or_(
                and_(
                    users.id == current_user,
                    story_model.private == true()
                ),
                story_model.private == false(),
                story_model.id.is_(None)
            )
        )
    else:
        visible_to_teams = query.filter(
            or_(
                story_model.private == false(),
                story_model.id.is_(None)
            )
        )
    return visible_to_users.union(visible_to_teams)

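# A hedged usage sketch for filter_private_stories(): api_base.model_query and
# the way the caller obtains the requesting user's ID are assumptions, modeled
# on the task_build_query snippet later in this collection.
def visible_stories(current_user_id):
    query = api_base.model_query(models.Story)
    return filter_private_stories(query, current_user_id).all()
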
def get_all_parent_categories(self):
    """Returns a list of all parent categories (those with children) from the DB."""
    result = self.dsession.query(Categories).filter(
        Categories.hasChildren == true(),
        Categories.isActive == true()).order_by(Categories.name).all()
    return result

def _get_default_service_type(self, context):
    try:
        query = context.session.query(ServiceType)
        return query.filter(ServiceType.default == expr.true()).one()
    except orm_exc.NoResultFound:
        return
    except orm_exc.MultipleResultsFound:
        # This should never happen. If it does, take the first instance
        query2 = context.session.query(ServiceType)
        results = query2.filter(ServiceType.default == expr.true()).all()
        LOG.warning(_("Multiple default service type instances found. "
                      "Will use instance '%s'"), results[0]['id'])
        return results[0]

def test_string_range(self):
    clause = lucene_to_sqlalchemy(u'fqdn:[aaa TO zzz]',
                                  {'fqdn': System.fqdn}, [System.fqdn])
    self.assert_clause_equals(clause,
                              and_(System.fqdn >= u'aaa', System.fqdn <= u'zzz'))
    clause = lucene_to_sqlalchemy(u'fqdn:[aaa TO *]',
                                  {'fqdn': System.fqdn}, [System.fqdn])
    self.assert_clause_equals(clause, and_(System.fqdn >= u'aaa', true()))
    clause = lucene_to_sqlalchemy(u'fqdn:[* TO zzz]',
                                  {'fqdn': System.fqdn}, [System.fqdn])
    self.assert_clause_equals(clause, and_(true(), System.fqdn <= u'zzz'))
    clause = lucene_to_sqlalchemy(u'fqdn:[* TO *]',
                                  {'fqdn': System.fqdn}, [System.fqdn])
    self.assert_clause_equals(clause, and_(true(), true()))

def _delete_idle_service_vm_hosting_devices(self, context, num, template):
    """Deletes <num> or fewer unused <template>-based service VM instances.

    The number of deleted service vm instances is returned.
    """
    # Delete the "youngest" hosting devices since they are more likely
    # not to have finished booting
    num_deleted = 0
    plugging_drv = self.get_hosting_device_plugging_driver(context,
                                                           template['id'])
    hosting_device_drv = self.get_hosting_device_driver(context,
                                                        template['id'])
    if plugging_drv is None or hosting_device_drv is None or num <= 0:
        return num_deleted
    query = context.session.query(hd_models.HostingDevice)
    query = query.outerjoin(
        hd_models.SlotAllocation,
        hd_models.HostingDevice.id ==
        hd_models.SlotAllocation.hosting_device_id)
    query = query.filter(
        hd_models.HostingDevice.template_id == template['id'],
        hd_models.HostingDevice.admin_state_up == expr.true(),
        hd_models.HostingDevice.tenant_bound == expr.null(),
        hd_models.HostingDevice.auto_delete == expr.true())
    query = query.group_by(hd_models.HostingDevice.id).having(
        func.count(hd_models.SlotAllocation.logical_resource_id) == 0)
    query = query.order_by(
        hd_models.HostingDevice.created_at.desc(),
        func.count(hd_models.SlotAllocation.logical_resource_id))
    hd_candidates = query.all()
    num_possible_to_delete = min(len(hd_candidates), num)
    for i in range(num_possible_to_delete):
        res = plugging_drv.get_hosting_device_resources(
            context, hd_candidates[i]['id'],
            hd_candidates[i]['complementary_id'],
            self.l3_tenant_id(), self.mgmt_nw_id())
        if self.svc_vm_mgr.delete_service_vm(context,
                                             hd_candidates[i]['id']):
            with context.session.begin(subtransactions=True):
                context.session.delete(hd_candidates[i])
            plugging_drv.delete_hosting_device_resources(
                context, self.l3_tenant_id(), **res)
            num_deleted += 1
    LOG.info(_LI('Deleted %(num)d hosting devices based on template '
                 '%(t_id)s'),
             {'num': num_deleted, 't_id': template['id']})
    return num_deleted

def filter( self, trans, user, query, column_filter ):
    """Modify query to filter histories by sharing status."""
    if column_filter == "All":
        pass
    elif column_filter:
        if column_filter == "private":
            query = query.filter( self.model_class.users_shared_with == null() )
            query = query.filter( self.model_class.importable == false() )
        elif column_filter == "shared":
            query = query.filter( self.model_class.users_shared_with != null() )
        elif column_filter == "accessible":
            query = query.filter( self.model_class.importable == true() )
        elif column_filter == "published":
            query = query.filter( self.model_class.published == true() )
    return query

def search(what, **conditions):
    if what == 'codename':
        columns = [db.codesTable.c.name]
    elif what == 'point':
        columns = [simTable.c.identifier, db.codesTable.c.name,
                   db.decodersTable.c.name, simTable.c.channel_json,
                   simTable.c.wordSeed, simTable.c.samples, simTable.c.errors,
                   simTable.c.cputime, simTable.c.date_start,
                   simTable.c.date_end, simTable.c.machine,
                   simTable.c.program_name, simTable.c.program_version,
                   simTable.c.stats]
    else:
        raise ValueError('unknown search: "{}"'.format(what))
    condition = expression.true()
    for key, val in conditions.items():
        if key == 'identifier':
            condition &= simTable.c.identifier.in_(val)
        elif key == 'code':
            condition &= db.codesTable.c.name.in_(val)
        else:
            raise ValueError()
    s = sqla.select(columns, whereclause=condition, from_obj=joinTable,
                    distinct=True, use_labels=True)\
        .order_by(db.codesTable.c.name)
    ans = db.engine.execute(s).fetchall()
    if what == 'point':
        return [dataPointFromRow(row) for row in ans]
    # The rows are already fetched; returning them avoids re-executing the query.
    return ans

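# Minimal sketch of the neutral-element idiom used above (assuming plain
# SQLAlchemy Core): start from expression.true() and AND conditions on as they
# appear, so a call with no conditions still yields a valid WHERE clause.
from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.sql import expression

metadata = MetaData()
items = Table('items', metadata,
              Column('id', Integer),
              Column('owner_id', Integer))

def build_where(owner_ids=None, item_ids=None):
    cond = expression.true()
    if owner_ids:
        cond &= items.c.owner_id.in_(owner_ids)
    if item_ids:
        cond &= items.c.id.in_(item_ids)
    return cond

# With no arguments the WHERE clause compiles to a bare "true".
print(items.select().where(build_where(owner_ids=[1, 2])))
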
def modify_cycle_task_notification(obj, notification_name):
    notif_type = get_notification_type(notification_name)
    notif = Notification.query.filter(
        Notification.object_id == obj.id,
        Notification.object_type == obj.type,
        Notification.notification_type == notif_type,
        or_(Notification.sent_at.is_(None), Notification.repeating == true()),
    ).first()
    send_on = datetime.combine(obj.end_date, datetime.min.time()) - timedelta(
        notif_type.advance_notice)
    today = datetime.combine(date.today(), datetime.min.time())
    if send_on < today:
        # this should not be allowed, but if a cycle task is changed to a past
        # date, we remove the current pending notification if it exists
        if notif:
            db.session.delete(notif)
    elif notif:
        # when the cycle date is moved into the future and a notification
        # exists, update it
        notif.send_on = send_on
        db.session.add(notif)
    else:
        # when the cycle date is moved into the future and no notification
        # exists, create a new one
        add_notif(obj, notif_type, send_on)

def test_assigns_boolean_server_defaults(self, User):
    is_admin = User.__table__.c.is_admin
    is_active = User.__table__.c.is_active
    assert is_admin.default.arg is True
    assert is_admin.server_default.arg.__class__ == true().__class__
    assert is_active.server_default.arg.__class__ == false().__class__

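# A model like the one this test presumably runs against (a sketch, not the
# project's actual fixture; assumes SQLAlchemy 1.4+): default= sets the
# Python-side value, while server_default=true()/false() emits a DDL DEFAULT.
from sqlalchemy import Boolean, Column, Integer
from sqlalchemy.orm import declarative_base
from sqlalchemy.sql.expression import false, true

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    is_admin = Column(Boolean, default=True, server_default=true())
    is_active = Column(Boolean, server_default=false())
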
def task_build_query(project_group_id, current_user=None, **kwargs):
    # Construct the query
    query = api_base.model_query(models.Task)

    if project_group_id:
        query = query.join(models.Project,
                           models.project_group_mapping,
                           models.ProjectGroup) \
            .filter(models.ProjectGroup.id == project_group_id)

    # Sanity check on input parameters
    query = api_base.apply_query_filters(query=query, model=models.Task,
                                         **kwargs)

    # Filter out tasks or stories that the current user can't see
    query = query.outerjoin(models.Story,
                            models.story_permissions,
                            models.Permission,
                            models.user_permissions,
                            models.User)
    if current_user is not None:
        query = query.filter(
            or_(
                and_(
                    models.User.id == current_user,
                    models.Story.private == true()
                ),
                models.Story.private == false()
            )
        )
    else:
        query = query.filter(models.Story.private == false())

    return query

def test_assigns_boolean_server_defaults(self):
    is_admin = self.columns.is_admin
    is_active = self.columns.is_active
    assert is_admin.default.arg is True
    assert is_admin.server_default.arg.__class__ == true().__class__
    assert is_active.server_default.arg.__class__ == false().__class__

def get_active_column(tables, active=True):
    if active is None:
        return active_filter(tables).label('active')
    elif active:
        return true().label('active')
    else:
        return false().label('active')

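# Quick illustration of the labelled constants returned above (assuming the
# SQLAlchemy 1.4+ select() call style): a labelled true() can stand in for a
# real column in a SELECT list.
from sqlalchemy import select, true
print(select(true().label('active')))  # SELECT true AS active
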
def tasks_query(self, q, marker=None, offset=None, limit=None,
                current_user=None, **kwargs):
    session = api_base.get_session()
    query = api_base.model_query(models.Task, session)

    # Filter out tasks or stories that the current user can't see
    query = query.outerjoin(models.Story,
                            models.story_permissions,
                            models.Permission,
                            models.user_permissions,
                            models.User)
    if current_user is not None:
        query = query.filter(
            or_(
                and_(
                    models.User.id == current_user,
                    models.Story.private == true()
                ),
                models.Story.private == false()
            )
        )
    else:
        query = query.filter(models.Story.private == false())

    query = self._build_fulltext_search(models.Task, query, q)
    query = self._apply_pagination(
        models.Task, query, marker, offset, limit)

    return query.all()

def get_summary(value_class):
    q = db_session.query(
        func.date(SR_Values.datetime).label("date"),
        func.sum(SR_Values.value).label("daily_value")
    ).filter(SR_Classes.id == SR_Values.value_class_id
    ).filter(SR_Classes.accum_flag == true()
    ).filter(SR_Classes.value_class == value_class
    ).filter(SR_Values.datetime >
             datetime.datetime(datetime.datetime.now().year, 1, 1)
    ).group_by(SR_Classes.value_class, func.month(SR_Values.datetime)
    ).order_by(SR_Classes.value_class, func.date(SR_Values.datetime))
    print q
    rows = [{"name": x.date, "value": x.daily_value} for x in q.all()]

    q = db_session.query(
        func.date(SR_Values.datetime).label("date"),
        func.avg(SR_Values.value).label("daily_value")
    ).filter(SR_Classes.id == SR_Values.value_class_id
    ).filter(SR_Classes.accum_flag == false()
    ).filter(SR_Classes.value_class == value_class
    ).filter(SR_Values.datetime >
             datetime.datetime(datetime.datetime.now().year, 1, 1)
    ).group_by(SR_Classes.value_class, func.month(SR_Values.datetime)
    ).order_by(SR_Classes.value_class, func.date(SR_Values.datetime))
    rows.extend([{"name": x.date, "value": x.daily_value} for x in q.all()])
    print rows
    return rows

def get_form_data(config):
    q = db.session.query(Entry)
    q = q.filter(Patient.test != true(), Entry.patient_id == Patient.id)
    q = q.order_by(Entry.patient_id, Entry.id)
    q = _patient_filter(q, Entry.patient_id, config['user'],
                        config['patient_group'])
    q = q.filter(Entry.form.has(slug=config['name']))
    return q

def _has_unsent_notifications(notif_type, obj):
    """Helper for searching unsent notifications.

    Args:
      notif_type (NotificationType): type of the notifications we're looking
        for.
      obj (sqlalchemy model): Object for which we're looking for
        notifications.

    Returns:
      The first unsent notification of notif_type for the given object, or
      None if no such notification exists.
    """
    obj_key = (obj.id, obj.type, notif_type.id)
    # A matching notification pending in the current session takes priority.
    for notification in db.session:
        if not isinstance(notification, models.Notification):
            continue
        notif_key = (notification.object_id, notification.object_type,
                     notification.notification_type.id)
        if obj_key == notif_key:
            return notification
    return models.Notification.query.filter(
        models.Notification.notification_type_id == notif_type.id,
        models.Notification.object_id == obj.id,
        models.Notification.object_type == obj.type,
        (
            models.Notification.sent_at.is_(None) |
            (models.Notification.repeating == true())
        )
    ).first()

def apply_rules_blueprints(user, args=None):
    q = Blueprint.query
    if not user.is_admin:
        group_user_objs = GroupUserAssociation.query.filter_by(
            user_id=user.id, manager=False).all()
        user_group_ids = [group_user_obj.group.id
                          for group_user_obj in group_user_objs]
        banned_group_ids = [banned_group_item.id
                            for banned_group_item in user.banned_groups.all()]
        # do not allow the banned users
        allowed_group_ids = set(user_group_ids) - set(banned_group_ids)
        # Start building query expressions based on the condition that:
        # a group manager can see all of his blueprints and only enabled
        # ones of other groups
        query_exp = Blueprint.is_enabled == true()
        allowed_group_ids_exp = None
        if allowed_group_ids:
            allowed_group_ids_exp = Blueprint.group_id.in_(allowed_group_ids)
            query_exp = and_(allowed_group_ids_exp, query_exp)
        manager_group_ids = get_manager_group_ids(user)
        manager_group_ids_exp = None
        if manager_group_ids:
            manager_group_ids_exp = Blueprint.group_id.in_(manager_group_ids)
            query_exp = or_(query_exp, manager_group_ids_exp)
        q = q.filter(query_exp)
    if args is not None and 'blueprint_id' in args:
        q = q.filter_by(id=args.get('blueprint_id'))
    return q

def domain_object_list(self, domain, paths, cluster=None):
    """Return a list of (path, property list, attribute dictionary)
    for the objects in the specific domain and cluster.
    """
    v = self.versions.alias('v')
    n = self.nodes.alias('n')
    a = self.attributes.alias('a')
    s = select([n.c.path, v.c.serial, v.c.node, v.c.hash, v.c.size,
                v.c.type, v.c.source, v.c.mtime, v.c.muser, v.c.uuid,
                v.c.checksum, v.c.cluster, a.c.key, a.c.value])
    if cluster:
        s = s.where(v.c.cluster == cluster)
    s = s.where(v.c.serial == a.c.serial)
    s = s.where(a.c.domain == domain)
    s = s.where(a.c.node == n.c.node)
    s = s.where(a.c.is_latest == true())
    if paths:
        s = s.where(n.c.path.in_(paths))
    r = self.conn.execute(s)
    rows = r.fetchall()
    r.close()
    group_by = itemgetter(slice(12))
    rows.sort(key=group_by)
    groups = groupby(rows, group_by)
    return [(k[0], k[1:], dict([i[12:] for i in data]))
            for (k, data) in groups]

def modify_cycle_task_overdue_notification(task):
    """Add or update the task's overdue notification.

    If an overdue notification already exists for the task, its date of
    sending is adjusted as needed. If such notification does not exist yet,
    it gets created.

    Args:
      task: The CycleTaskGroupObjectTask instance for which to update the
        overdue notifications.
    """
    notif_type = get_notification_type(u"cycle_task_overdue")
    send_on = datetime.combine(task.end_date, datetime.min.time()) + \
        timedelta(1)
    notif = Notification.query.filter(
        Notification.object_id == task.id,
        Notification.object_type == task.type,
        (Notification.sent_at.is_(None) |
         (Notification.repeating == true())),
        Notification.notification_type == notif_type,
    ).first()
    if notif:
        if notif.send_on != send_on:
            notif.send_on = send_on
            db.session.add(notif)
        return
    # NOTE: The "task.id" check is to assure a notification is created for
    # existing task instances only, avoiding DB errors. Overdue notifications
    # for new tasks are handled and added elsewhere.
    if all([task.id,
            task.status in task.active_states,
            task.cycle.is_current]):
        add_notif(task, notif_type, send_on, repeating=True)

def _collect_assessment_issues():
    """Returns issue infos associated with Assessments."""
    issue_params = {}
    issuetracker_cls = models.IssuetrackerIssue
    issue_objects = issuetracker_cls.query.filter(
        issuetracker_cls.object_type == 'Assessment',
        issuetracker_cls.enabled == expression.true(),
        issuetracker_cls.issue_id.isnot(None),
    ).order_by(issuetracker_cls.object_id).all()
    for iti in issue_objects:
        asmt = iti.issue_tracked_obj
        if not asmt:
            logger.error(
                'The Assessment corresponding to the Issue Tracker Issue '
                'ID=%s does not exist.', iti.issue_id)
            continue
        status_value = issues.STATUSES.get(asmt.status)
        if not status_value:
            logger.error(
                'No Issue Tracker status mapping for assessment ID=%d '
                'with status: %s.', asmt.id, asmt.status)
            continue
        issue_params[iti.issue_id] = {
            'assessment_id': asmt.id,
            'state': {
                'status': status_value,
                'type': iti.issue_type,
                'priority': iti.issue_priority,
                'severity': iti.issue_severity,
            },
        }
    return issue_params

def create_content_ruleset():
    """Create content ruleset"""
    def set_attrs(obj, dom=None):
        """Set attrs"""
        for key in POLICY_SETTINGS_MAP:
            attr = POLICY_SETTINGS_MAP[key]
            value = getattr(obj, attr)
            if value != 0:
                policy = Session.query(Policy.name)\
                    .filter(Policy.id == value)\
                    .one()
                setattr(obj, '%s-name' % attr, "%s.conf" % policy.name)
        if dom:
            setattr(obj, 'domain_name', dom.name)
            setattr(obj, 'domain_aliases', dom.aliases)
        return obj

    global_policy = Session.query(PolicySettings).get(1)
    set_attrs(global_policy)
    dpsq = Session.query(DomainPolicy, Domain)\
        .filter(DomainPolicy.domain_id == Domain.id)\
        .filter(Domain.status == true())
    domain_policies = [set_attrs(dps[0], dps[1]) for dps in dpsq]
    for policy_type in [1, 2, 3, 4]:
        kwargs = dict(gps=global_policy,
                      dps=domain_policies,
                      policy_type=POLICY_SETTINGS_MAP[policy_type],
                      default="%s.conf" % POLICY_FILE_MAP[policy_type])
        write_ruleset(POLICY_FILE_MAP[policy_type], kwargs,
                      'content.protection.ruleset')

def read(self):
    """Mark badge notifications from all activities read up to a timestamp.

    Takes in a unixtime timestamp. Fetches the IDs of all unread badge
    notifications for the current user, which come from activities created
    up to the passed in timestamp. Marks those notifications as read.
    """
    timestamp = self.request.get('timestamp')
    if not timestamp:
        raise Exception('Missing parameter `timestamp`')
    timestamp = datetime.fromtimestamp(int(timestamp), pytz.UTC)

    userid = api.user.get_current().getId()
    notifications = (
        Notification.query
        .join(Activity)
        .filter(
            Notification.userid == userid,
            Notification.is_badge == true(),
            Notification.is_read == false(),
            Activity.created < timestamp,
        )
        .all()
    )
    notification_ids = [n.notification_id for n in notifications]
    return notification_center().mark_notifications_as_read(
        notification_ids)

def get_pending_notifications():
    """Get notification data for all future notifications.

    The data dict that gets returned here contains notification data grouped
    by the dates on which the notifications should be received.

    Returns:
      list of Notifications, data: a tuple of notifications that were handled
      and corresponding data for those notifications.
    """
    notifications = db.session.query(Notification).filter(
        (Notification.sent_at.is_(None)) |
        (Notification.repeating == true())
    ).all()

    notif_by_day = defaultdict(list)
    for notification in notifications:
        notif_by_day[notification.send_on.date()].append(notification)

    data = defaultdict(dict)
    today = date.today()
    for day, notif in notif_by_day.iteritems():
        current_day = max(day, today)
        data[current_day] = merge_dict(data[current_day],
                                       get_notification_data(notif))

    return notifications, data

def command(self):
    "run command"
    self.init()
    if self.options.username is None:
        print >> sys.stderr, "\nProvide a username\n"
        print self.parser.print_help()
        sys.exit(126)
    try:
        user = Session\
            .query(User)\
            .filter(User.username == self.options.username)\
            .filter(User.local == true())\
            .one()
        if user.validate_password(os.environ['BARUWA_ADMIN_PASSWD']):
            print >> sys.stderr, "The account password is valid"
            sys.exit(0)
        else:
            print >> sys.stderr, "The account password is invalid"
            sys.exit(2)
    except KeyError:
        print >> sys.stderr, "BARUWA_ADMIN_PASSWD env variable not set"
        sys.exit(126)
    except NoResultFound:
        print >> sys.stderr, ("No local user found with username %s"
                              % self.options.username)
        sys.exit(126)
    finally:
        Session.close()

def filter(self):
    "Set filters"
    if self.user.is_domain_admin:
        dquery = self.dbsession.query(Domain)\
            .join(downs,
                  (oa, downs.c.organization_id == oa.c.organization_id))\
            .filter(Domain.status == true())\
            .filter(oa.c.user_id == self.user.id).all()
        domains = []
        for domain in dquery:
            domains.append(domain.name)
            for domain_alias in domain.aliases:
                if domain_alias.status:
                    domains.append(domain_alias.name)
        if not domains:
            domains.append('xx')
        if self.direction and self.direction == 'in':
            self.query = self.query\
                .filter(self.model.to_domain.in_(domains))
        elif self.direction and self.direction == 'out':
            self.query = self.query\
                .filter(self.model.from_domain.in_(domains))
        else:
            self.query = self.query.filter(
                func._(or_(self.model.to_domain.in_(domains),
                           self.model.from_domain.in_(domains))))
    if self.user.is_peleb:
        self._build_user_filter()
    return self.query

def __init__(self, user, reportid, filters=None):
    "Init"
    self.dbsession = Session
    self.user = user
    self.reportid = reportid
    self.model = None
    self.isaggr = False
    self.filters = filters
    queryfield = getattr(Message, REPORTS[self.reportid]['address'])
    orderby = REPORTS[reportid]['sort']
    if (self.reportid in ['3', '4', '7', '8'] and
            self.user.is_superadmin and not self.filters):
        # domains
        self.isaggr = True
        if self.reportid in ['3', '4']:
            # src
            self.model = SrcMessageTotals
            self.query = self.dbsession\
                .query(SrcMessageTotals.id.label('address'),
                       SrcMessageTotals.total.label('count'),
                       SrcMessageTotals.volume.label('size'))\
                .order_by(desc(orderby))
        else:
            # dst
            self.model = DstMessageTotals
            self.query = self.dbsession\
                .query(DstMessageTotals.id.label('address'),
                       DstMessageTotals.total.label('count'),
                       DstMessageTotals.volume.label('size'))\
                .order_by(desc(orderby))
    else:
        # emails & relays
        self.query = self.dbsession.query(
            queryfield.label('address'),
            func.count(queryfield).label('count'),
            func.sum(Message.size).label('size'))
        if self.reportid != '10':
            self.query = self.query.filter(queryfield != u'')\
                .group_by(queryfield).order_by(desc(orderby))
        else:
            self.query = self.query.filter(queryfield != u'127.0.0.1')\
                .group_by(queryfield).order_by(desc(orderby))
    if self.isaggr:
        uquery = AggrFilter(self.query)
    else:
        uquery = UserFilter(self.dbsession, self.user, self.query)
    if self.reportid not in ['5', '6', '7', '8']:
        self.query = uquery()
    if self.reportid in ['5', '6', '7', '8']:
        if not self.user.is_superadmin:
            uquery.setdirection('in')
            self.query = uquery()
        else:
            flf = self.model.id if self.isaggr else Message.to_domain
            self.query = self.query.filter(
                flf.in_(self.dbsession.query(Domain.name)
                        .filter(Domain.status == true())))

def content():
    sess = None
    try:
        sess = session()
        sites = sess.query(Site).join(SiteEra).join(Era).filter(
            SiteEra.is_physical == true(),
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date)
        bffr = StringIO.StringIO()
        # Open the archive for writing; ZipFile defaults to read mode.
        zf = zipfile.ZipFile(bffr, 'w')
        for site in sites:
            for group in site.groups(sess, start_date, finish_date, True):
                outs = []
                outs.append(
                    "Site Code, Site Name, Associated Site Codes, "
                    "Sources, Generator Types, From, To,Type,Date," +
                    ','.join(map(str, range(1, 49))))
                associates = ' '.join(
                    site.code for site in group.sites[1:])
                source_codes = ' '.join(
                    sorted(set(sup.source.code for sup in group.supplies)))
                gen_types = ' '.join(
                    sorted(
                        set(
                            sup.generator_type.code for sup in group.supplies
                            if sup.generator_type is not None)))
                group_start_str = hh_format(group.start_date)
                group_finish_str = hh_format(group.finish_date)
                for hh in group.hh_data(sess):
                    hh_start = hh['start_date']
                    if hh_start.hour == 0 and hh_start.minute == 0:
                        outs.append(
                            "\r\n" + ','.join(
                                '"' + str(val) + '"' for val in [
                                    site.code, site.name, associates,
                                    source_codes, gen_types,
                                    group_start_str, group_finish_str,
                                    'used',
                                    hh_start.strftime('%Y-%m-%d')]))
                    used_gen_kwh = hh['imp_gen'] - hh['exp_net'] - \
                        hh['exp_gen']
                    used_3p_kwh = hh['imp_3p'] - hh['exp_3p']
                    used_kwh = hh['imp_net'] + used_gen_kwh + used_3p_kwh
                    outs.append(',' + str(round(used_kwh, 2)))
                zf.writestr(
                    site.code + '_' +
                    group.finish_date.strftime('%Y%m%d%M%H') + '.csv',
                    ''.join(outs))
                # StringIO's accessor is getvalue(), not getValue().
                yield bffr.getvalue()
                bffr.truncate()
    except:
        yield traceback.format_exc()
    finally:
        if sess is not None:
            sess.close()

def last_complete_date(cls):
    last_complete = DBSession.query(cls) \
        .filter(cls.complete == true()) \
        .order_by(cls.date.desc()) \
        .first()
    if last_complete is None:
        return None
    return last_complete.date

def get_active_issue_info(model_name):
    """Returns the issue tracker info stored in GGRC for the given model."""
    issuetracker_cls = models.IssuetrackerIssue
    return issuetracker_cls.query.filter(
        issuetracker_cls.object_type == model_name,
        issuetracker_cls.enabled == expression.true(),
        issuetracker_cls.issue_id.isnot(None),
    ).order_by(issuetracker_cls.object_id).all()

def feed_letter_series(book_id):
    off = request.args.get("offset") or 0
    letter = true() if book_id == "00" \
        else func.upper(db.Series.sort).startswith(book_id)
    entries = calibre_db.session.query(db.Series)\
        .join(db.books_series_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()).filter(letter)\
        .group_by(text('books_series_link.series'))\
        .order_by(db.Series.sort)
    pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1),
                            config.config_books_per_page, entries.count())
    entries = entries.offset(off).limit(config.config_books_per_page).all()
    return render_xml_template('feed.xml', listelements=entries,
                               folder='opds.feed_series', pagination=pagination)

def process_fields(self, fields):
    """Processes all fields in `fields`."""
    models = set()
    for i, field in enumerate(fields.keys()):
        if i == 0:
            models.update(searchable_fields[field].models)
        else:
            models.intersection_update(searchable_fields[field].models)
    for model in models:
        for field, value in fields.items():
            current_filter = self.filters.setdefault(model, true())
            new_filter = self.process_model(model, field, value)
            self.filters[model] = current_filter & new_filter

class NodeConfig(db.Model):
    is_effective = db.Column(db.BOOLEAN, primary_key=True, default=True)
    min_debtor_id = db.Column(db.BigInteger, nullable=False)
    max_debtor_id = db.Column(db.BigInteger, nullable=False)
    __table_args__ = (
        db.CheckConstraint(is_effective == true()),
        db.CheckConstraint(min_debtor_id <= max_debtor_id),
        {
            'comment': 'Represents the global node configuration (a '
                       'singleton). The node is responsible only for debtor '
                       'IDs that are within the interval '
                       '[min_debtor_id, max_debtor_id].',
        }
    )

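# Hedged usage sketch: the CHECK constraint plus the Boolean primary key make
# this a one-row table, so creating the singleton might look like this (the
# debtor ID bounds below are placeholder values).
db.session.add(NodeConfig(min_debtor_id=1, max_debtor_id=1 << 40))
db.session.commit()
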
def common_filters(allow_show_archived=False):
    if not allow_show_archived:
        archived_books = (ub.session.query(ub.ArchivedBook)
                          .filter(ub.ArchivedBook.user_id ==
                                  int(current_user.id))
                          .filter(ub.ArchivedBook.is_archived == True)
                          .all())
        archived_book_ids = [archived_book.book_id
                             for archived_book in archived_books]
        archived_filter = db.Books.id.notin_(archived_book_ids)
    else:
        archived_filter = true()

    if current_user.filter_language() != "all":
        lang_filter = db.Books.languages.any(
            db.Languages.lang_code == current_user.filter_language())
    else:
        lang_filter = true()
    negtags_list = current_user.list_denied_tags()
    postags_list = current_user.list_allowed_tags()
    neg_content_tags_filter = false() if negtags_list == [''] else \
        db.Books.tags.any(db.Tags.name.in_(negtags_list))
    pos_content_tags_filter = true() if postags_list == [''] else \
        db.Books.tags.any(db.Tags.name.in_(postags_list))
    if config.config_restricted_column:
        pos_cc_list = current_user.allowed_column_value.split(',')
        pos_content_cc_filter = true() if pos_cc_list == [''] else \
            getattr(db.Books, 'custom_column_' + str(config.config_restricted_column)).\
            any(db.cc_classes[config.config_restricted_column].value.in_(pos_cc_list))
        neg_cc_list = current_user.denied_column_value.split(',')
        neg_content_cc_filter = false() if neg_cc_list == [''] else \
            getattr(db.Books, 'custom_column_' + str(config.config_restricted_column)).\
            any(db.cc_classes[config.config_restricted_column].value.in_(neg_cc_list))
    else:
        pos_content_cc_filter = true()
        neg_content_cc_filter = false()
    return and_(lang_filter, pos_content_tags_filter,
                ~neg_content_tags_filter,
                pos_content_cc_filter, ~neg_content_cc_filter,
                archived_filter)

def get_typeahead(self, database, query, replace=('', ''), tag_filter=true()):
    query = query or ''
    self.session.connection().connection.connection.create_function(
        "lower", 1, lcase)
    entries = self.session.query(database).filter(tag_filter).\
        filter(func.lower(database.name).ilike("%" + query + "%")).all()
    json_dumps = json.dumps(
        [dict(name=r.name.replace(*replace)) for r in entries])
    return json_dumps

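# Hedged usage sketch: the true() default turns the tag filter into a no-op,
# so callers narrow results only when needed. `helper`, db.Tags and
# allowed_tags are assumptions standing in for the owning object and models.
matches = helper.get_typeahead(db.Tags, 'fict')
restricted = helper.get_typeahead(
    db.Tags, 'fict', tag_filter=db.Tags.name.in_(allowed_tags))
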
def get_daily_notifications():
    """Get notification data for all future notifications.

    Returns:
      list of Notifications, data: a tuple of notifications that were handled
      and corresponding data for those notifications.
    """
    notifications = db.session.query(Notification).filter(
        (Notification.send_on <= datetime.today()) &
        ((Notification.sent_at.is_(None)) |
         (Notification.repeating == true()))).all()
    return notifications, get_notification_data(notifications)

class ProgramsModel(TimestampMixinModel, db.Model):
    """GeoNature-citizen programs table."""

    __tablename__ = "t_programs"
    __table_args__ = {"schema": "gnc_core"}
    id_program = db.Column(db.Integer, primary_key=True)
    unique_id_program = db.Column(
        UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False)
    id_project = db.Column(
        db.Integer, db.ForeignKey(ProjectModel.id_project), nullable=False)
    title = db.Column(db.String(50), nullable=False)
    short_desc = db.Column(db.String(200), nullable=False)
    long_desc = db.Column(db.Text(), nullable=False)
    form_message = db.Column(db.String(500))
    image = db.Column(db.String(250))
    logo = db.Column(db.String(250))
    id_module = db.Column(
        db.Integer,
        ForeignKey(TModules.id_module),
        nullable=False,
        default=1,
    )
    module = relationship("TModules")
    taxonomy_list = db.Column(db.Integer, nullable=True)
    is_active = db.Column(
        db.Boolean(), server_default=expression.true(), default=True)
    id_geom = db.Column(
        db.Integer, db.ForeignKey(GeometryModel.id_geom), nullable=False)
    id_form = db.Column(
        db.Integer, db.ForeignKey(CustomFormModel.id_form), nullable=True)
    custom_form = relationship("CustomFormModel")
    geometry = relationship("GeometryModel")
    project = relationship("ProjectModel")

    def get_geofeature(self, recursif=True, columns=None):
        geometry = to_shape(self.geometry.geom)
        feature = Feature(
            id=self.id_program,
            geometry=geometry,
            properties=self.as_dict(True, exclude=["t_obstax"]),
        )
        return feature

    def __repr__(self):
        return self.title

class Player(Base):
    __tablename__ = 'players'
    id = Column(Integer, primary_key=True)
    username = Column(String(20), unique=True, nullable=False)
    squad_type = Column(String(20), nullable=False)
    team = Column(String(20), nullable=False)
    troops = Column(Integer, default=50)
    location = Column(String(20), nullable=False)
    is_active = Column(Boolean, server_default=expression.true())
    last_active = Column(DateTime, server_default=func.now(),
                         onupdate=func.current_timestamp())
    is_new = Column(Boolean, server_default=expression.true())
    uses_ip = Column(Boolean, server_default=expression.false())
    banned = Column(Boolean, server_default=expression.false())
    banned_by = Column(String(20))
    time_banned = Column(DateTime)
    reason_banned = Column(String(1000))
    actions = Column(Integer, default=10)
    ammo = Column(Integer, default=200)
    morale = Column(Integer, default=100)
    dug_in = Column(Integer, default=0)
    level = Column(Integer, default=1)
    experience = Column(Integer, default=0)
    management = Column(Integer, default=1)
    attack = Column(Integer, default=1)  # For better defense
    defense = Column(Integer, default=1)  # For better attack
    charisma = Column(Integer, default=1)  # For more troops gained per recruit
    rallying = Column(Integer, default=1)  # For increasing morale
    pathfinder = Column(
        Integer, default=1)  # For the amount of action you use per movement
    logistics = Column(Integer, default=1)  # For less ammo used per attack
    development = Column(Integer, default=1)  # For less actions used per upgrade

def select(self):
    from sqlalchemy.sql.expression import case, literal, true
    return (self.db.query(
        SEPASammler.id.label('#'),
        BookingKind.title.label('Art'),
        SEPASammler.booking_day.label('Datum'),
        SEPASammler.pmtinfid.label('Sparkassen-ID'),
        case([
            (SEPASammler.is_ueberweisung == true(),
             literal('Sammelüberweisung')),
        ], else_=literal('Sammellastschrift')).label('Typ'),
    ).select_from(SEPASammler).outerjoin(BookingKind).filter(
        SEPASammler.accounting_year == get_selected_year()))

def get_reply(request, reply_id):
    query = request.dbsession.query(Reply)\
        .filter(Reply.uid == reply_id)\
        .filter(Reply.enabled == true())\
        .options(subqueryload(Reply.user))\
        .options(subqueryload(Reply.article)
                 .subqueryload(Article.board))
    result = query.all()
    if not result:
        raise PageNotFound()
    reply = result[0]
    if not reply.article.enabled or not reply.article.board.enabled:
        raise PageNotFound()
    return reply

def performance(performance_id):
    shows = db.session.query(Event).filter_by(parent_id=None).filter_by(
        is_active=true()).all()
    p = db.session.query(Event).filter_by(id=performance_id).first()
    # importlib.reload(juniper.app.blackfedora.forms)
    # ticket_selection_form = juniper.app.blackfedora.forms.TicketSelectionForm.load_form(p)
    ticket_selection_form = get_ticket_selection_form(event=p)
    return render_template('blackfedora/performance.html', performance=p,
                           shows=shows,
                           ticket_selection_form=ticket_selection_form)

def get_active_user(email: str) -> Any:
    """Get user data based on email and active status."""
    try:
        with session_scope() as db:
            statement = select(models.User)\
                .where(models.User.email == email)\
                .where(models.User.is_active == expression.true())\
                .options(defer('password'))
            results = db.exec(statement)
            data = results.one()
            return data
    except SQLAlchemyError:
        fastapi_logger.exception("get_active_user")
        return None

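# Hedged usage sketch: a None result covers both "no such user" and "user is
# inactive", so a caller needs only one check (the email is a placeholder).
user = get_active_user('alice@example.com')
if user is None:
    pass  # e.g. reject the login attempt or return a 404
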
def _revoke_other_default(target, value, oldvalue, initiator):
    """Removes the previous default when a new one is set."""
    session = object_session(target)
    if session is None:
        return
    if value:
        previous_default = (
            session.query(IncidentType)
            .filter(IncidentType.default == true())
            .one_or_none()
        )
        if previous_default:
            previous_default.default = False
            session.commit()

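# Sketch of how such a hook is typically attached (an assumption; the source
# project may wire it differently). SQLAlchemy's attribute 'set' event passes
# exactly the (target, value, oldvalue, initiator) signature used above.
from sqlalchemy import event

event.listen(IncidentType.default, 'set', _revoke_other_default)
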
def next_dagruns_to_examine(
    cls,
    state: DagRunState,
    session: Session,
    max_number: Optional[int] = None,
):
    """
    Return the next DagRuns that the scheduler should attempt to schedule.

    This will return zero or more DagRun rows that are row-level-locked with a
    "SELECT ... FOR UPDATE" query, so you should ensure that any scheduling
    decisions are made in a single transaction -- as soon as the transaction
    is committed it will be unlocked.

    :rtype: list[airflow.models.DagRun]
    """
    from airflow.models.dag import DagModel

    if max_number is None:
        max_number = cls.DEFAULT_DAGRUNS_TO_EXAMINE

    # TODO: Bake this query, it is run _A lot_
    query = (
        session.query(cls)
        .filter(cls.state == state, cls.run_type != DagRunType.BACKFILL_JOB)
        .join(DagModel, DagModel.dag_id == cls.dag_id)
        .filter(DagModel.is_paused == false(), DagModel.is_active == true())
    )
    if state == State.QUEUED:
        # For dag runs in the queued state, we check if they have reached the
        # max_active_runs limit and if so we drop them
        running_drs = (
            session.query(DagRun.dag_id,
                          func.count(DagRun.state).label('num_running'))
            .filter(DagRun.state == DagRunState.RUNNING)
            .group_by(DagRun.dag_id)
            .subquery()
        )
        query = query.outerjoin(
            running_drs, running_drs.c.dag_id == DagRun.dag_id
        ).filter(func.coalesce(running_drs.c.num_running, 0) <
                 DagModel.max_active_runs)
    query = query.order_by(
        nulls_first(cls.last_scheduling_decision, session=session),
        cls.execution_date,
    )

    if not settings.ALLOW_FUTURE_EXEC_DATES:
        query = query.filter(DagRun.execution_date <= func.now())

    return with_row_locks(query.limit(max_number), of=cls, session=session,
                          **skip_locked(session=session))

class PlayerTeam(DB.Model):
    """Table for multiple player / team associations"""

    __tablename__ = 'player_team'
    player_id = Column(Integer, ForeignKey('player.id'), primary_key=True)
    team_id = Column(Integer, ForeignKey('team.id'), primary_key=True)
    active = Column(Boolean, default=True, server_default=expression.true())
    join_date = Column(Date)

    @hybrid_property
    def years_active(self):
        """Return the years of experience at a grade."""
        if self.join_date:
            return relativedelta(date.today(), self.join_date).years
        return 0

def get_num_responses_per_characteristic_xform_id(self, period):
    clinic_submissions_table = Base.metadata.tables['clinic_submissions']
    result = DBSession.execute(
        select([
            'COUNT(*)',
            clinic_submissions_table.c.characteristic,
            clinic_submissions_table.c.xform_id
        ]).select_from(clinic_submissions_table).where(
            and_(clinic_submissions_table.c.clinic_id == self.id,
                 clinic_submissions_table.c.valid == true(),
                 clinic_submissions_table.c.period == period)).group_by(
            clinic_submissions_table.c.characteristic,
            clinic_submissions_table.c.xform_id)).fetchall()
    return tuple_to_dict_list(('count', 'characteristic', 'xform_id'), result)

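# Plausible shape of the tuple_to_dict_list helper used above (a sketch under
# that assumption, not the project's actual implementation): zip each result
# row with the given keys.
def tuple_to_dict_list(keys, rows):
    return [dict(zip(keys, row)) for row in rows]
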
class Collection(Base):
    __table_args__ = (
        CheckConstraint(
            '(latest_repo_resolved IS NULL) = (latest_repo_id IS NULL)',
            name='collection_latest_repo_id_check'),
    )

    id = Column(Integer, primary_key=True)
    order = Column(Integer, nullable=False, server_default="100")
    # name used in machine context (urls, fedmsg), e.g. "f24"
    name = Column(String, nullable=False, unique=True)
    # name for ordinary people, e.g. "Fedora 24"
    display_name = Column(String, nullable=False)
    # whether this collection is in secondary or primary mode
    secondary_mode = Column(Boolean, nullable=False, server_default=false())
    # Koji configuration
    target = Column(String, nullable=False)
    dest_tag = Column(String, nullable=False)
    build_tag = Column(String, nullable=False)
    # bugzilla template fields. If null, bug filing will be disabled
    bugzilla_product = Column(String)
    bugzilla_version = Column(String)
    # priority of packages in given collection is multiplied by this
    priority_coefficient = Column(Float, nullable=False, server_default='1')
    # build group name
    build_group = Column(String, nullable=False, server_default='build')
    latest_repo_id = Column(Integer)
    latest_repo_resolved = Column(Boolean)
    # whether to poll builds for untracked packages
    poll_untracked = Column(Boolean, nullable=False, server_default=true())

    packages = relationship('Package', backref='collection',
                            passive_deletes=True)

    @property
    def state_string(self):
        return {
            True: 'ok',
            False: 'unresolved',
            None: 'unknown'
        }[self.latest_repo_resolved]

    def __str__(self):
        return self.display_name

class Pclause(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    clause = db.Column(LONGTEXT())
    label = db.Column(db.Boolean, default=True,
                      server_default=expression.true(), nullable=False)
    response_id = db.Column(db.Integer,
                            db.ForeignKey('response.id', ondelete='CASCADE'))
    # sentence = db.relationship('Psentence', backref='source')

    def __repr__(self):
        return f'<Source ID: {self.sentence_id}; Content: {self.clause}; Label: {self.label}>'

def new_tokens_ok(account):
    session = get_session()
    result = session.query(models.Token).filter_by(
        account=account, refresh=true()).all()  # pylint: disable=no-member
    token_names_expected = [
        "10_original_refreshed_and_deleted",
        "11_to_be_kept_and_refreshed",
        "14_original_refreshed_and_deleted",
        "17_to_be_kept_and_refreshed",
    ]
    selection = []
    for elem in result:
        if elem.refresh_token is not None:
            if elem.refresh_token in str(elem.oidc_scope):
                selection.append(elem.refresh_token)
    return all(item in token_names_expected for item in selection)

def upgrade(): op.add_column( "provider", sa.Column("enabledForPro", sa.Boolean(), nullable=False, server_default=expression.false())) op.add_column( "provider", sa.Column("requireProviderIdentifier", sa.Boolean(), nullable=False, server_default=expression.true()), )
def rbac_filter(cls, query, mode, user):
    public_objects = query.filter(cls.public == true())
    user_objects = (
        query.join(cls.pools)
        .join(models["access"], models["pool"].access)
        .join(models["user"], models["access"].users)
        .filter(models["access"].pools_access.contains(mode))
        .filter(models["user"].name == user.name)
    )
    user_group_objects = (
        query.join(cls.pools)
        .join(models["access"], models["pool"].access)
        .join(models["group"], models["access"].groups)
        .join(models["user"], models["group"].users)
        .filter(models["access"].pools_access.contains(mode))
        .filter(models["user"].name == user.name)
    )
    return public_objects.union(user_objects, user_group_objects)

def count(self):
    "Get the count"
    if self.user.is_domain_admin:
        dquery = self.dbsession.query(Domain)\
            .join(downs,
                  (oa, downs.c.organization_id == oa.c.organization_id))\
            .filter(Domain.status == true())\
            .filter(oa.c.user_id == self.user.id).all()
        # domains = [domain.name for domain in dquery]
        domains = []
        for domain in dquery:
            domains.append(domain.name)
            for domain_alias in domain.aliases:
                if domain_alias.status:
                    domains.append(domain_alias.name)
        self.query = self.query.filter(MessageTotals.id.in_(domains))
    elif self.user.is_peleb:
        addrs = [addr.address for addr in self.user.addresses
                 if '+*' not in addr.address and '-*' not in addr.address]
        tagged_addrs = [addr.address for addr in self.user.addresses
                        if '+*' in addr.address or '-*' in addr.address]
        addrs.append(self.user.email)
        if tagged_addrs:
            tagged_to = func._(
                or_(*[Message.to_address.like(
                        TAGGED_RE.sub(r'\g<one>%', taddr))
                      for taddr in tagged_addrs]))
            tagged_from = func._(
                or_(*[Message.from_address.like(
                        TAGGED_RE.sub(r'\g<one>%', taddr))
                      for taddr in tagged_addrs]))
            self.query = self.query.filter(
                func._(or_(tagged_to,
                           tagged_from,
                           Message.to_address.in_(addrs),
                           Message.from_address.in_(addrs))))
        else:
            self.query = self.query.filter(
                func._(or_(Message.to_address.in_(addrs),
                           Message.from_address.in_(addrs))))
    value = self.query.one()
    return int(value.total or 0)

def translate_isa(expression: Expression, session: Session, model, get_model):
    assert expression.operator == "Isa"
    left, right = expression.args
    if dot_path(left) == ():
        assert left == Variable("_this")
    else:
        for field_name in dot_path(left):
            _, model, __ = get_relationship(model, field_name)

    assert not right.fields, "Unexpected fields in isa expression"
    constraint_type = get_model(right.tag)
    model_type = inspect(model, raiseerr=True).class_
    return sql.true() if issubclass(model_type, constraint_type) else sql.false()

def ads_list():
    ads = Advertisement.query.filter(Advertisement.active == true())
    oblast_district = request.args.get('oblast_district')
    min_price = request.args.get('min_price', None, type=int)
    max_price = request.args.get('max_price', None, type=int)
    new_building = request.args.get('new_building')
    page = request.args.get('page', 1, type=int)
    if oblast_district:
        ads = ads.filter(Advertisement.oblast_district == oblast_district)
    if min_price:
        ads = ads.filter(Advertisement.price >= min_price)
    if max_price:
        ads = ads.filter(Advertisement.price <= max_price)
    if new_building:
        ads = ads.filter(
            or_(Advertisement.under_construction == true(),
                Advertisement.construction_year >= date.today().year - 2))
    per_page = 5
    ads = ads.paginate(page, per_page, False)
    return render_template('ads_list.html', ads=ads,
                           oblast_district=oblast_district,
                           min_price=min_price, max_price=max_price,
                           new_building=new_building)

def _get_unsent_notification(notif_type, obj, for_update=True):
    """Get object's first unsent notification of specified type.

    Get first `obj` object's unsent notification of type `notif_type`. Note
    that a notification is considered unsent if it was not sent yet or if it
    is repeating. An unsent notification present in the current DB session
    has priority over notifications stored in the DB. If there are no unsent
    notifications, None will be returned.

    Args:
      notif_type (NotificationType): type of the notifications to look for.
      obj (db.Model): Object for which to look for notifications.
      for_update (bool): Flag indicating the way notification should be
        selected from the DB. If True, `FOR UPDATE` SQL clause will be
        applied. Defaults to True.

    Returns:
      Notification object or None.
    """
    def _get_notif_key(notif):
        """Return key to use during notification comparison."""
        return (notif.object_id,
                notif.object_type,
                notif.notification_type_id or notif.notification_type.id)

    obj_key = (obj.id, obj.type, notif_type.id)
    notifs = (o for o in db.session if isinstance(o, models.Notification))
    for notification in notifs:
        if obj_key == _get_notif_key(notification):
            return notification

    notif_q = db.session.query(
        models.Notification,
    ).filter(
        models.Notification.notification_type_id == notif_type.id,
        models.Notification.object_id == obj.id,
        models.Notification.object_type == obj.type,
        (
            models.Notification.sent_at.is_(None) |
            (models.Notification.repeating == true())
        )
    )
    if for_update:
        notif_q = notif_q.with_for_update()

    return notif_q.first()

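# Hedged usage sketch mirroring modify_cycle_task_notification earlier in this
# collection: look up a pending notification first and only create a new one
# if none exists (notif_type, obj and send_on come from the caller).
notif = _get_unsent_notification(notif_type, obj, for_update=True)
if notif is None:
    add_notif(obj, notif_type, send_on)
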
def assigned_org_units(self, userid=None, omit_current=False):
    if userid is None:
        userid = self._get_current_user_id()

    query = self._query_org_units().join(OrgUnit.users_group)
    query = query.join(Group.users).filter(User.userid == userid)
    query = query.filter(OrgUnit.enabled == true())
    org_units = query.all()
    if omit_current:
        current_org_unit = get_current_org_unit()
        org_units = [each for each in org_units
                     if each != current_org_unit]
    return org_units

def get_certificates_with_same_prefix_with_rotate_on(prefix):
    """
    Find certificates with given prefix that are still valid, not replaced
    and marked for auto-rotate.

    :param prefix: prefix to match
    :return:
    """
    now = arrow.now().format("YYYY-MM-DD")
    return (
        Certificate.query.filter(Certificate.name.like(prefix))
        .filter(Certificate.rotation == true())
        .filter(Certificate.not_after >= now)
        .filter(not_(Certificate.replaced.any()))
        .all()
    )

def get_accounts_in_creation_order(self, current_user=None):
    """
    Get accounts in order of creation for deactivation under license limits.

    Build the list as follows: put the currently logged-in user at position
    0, then all super-admins in order of creation date, then all other
    accounts in order of creation. Based on that list, the accounts at the
    end can be disabled, since they were created last and the protected
    super admins and the current user sit at the front.

    :param current_user: optionally current user running this operation
    """
    if not current_user:
        current_user = get_current_rhodecode_user()
    active_super_admins = [
        x.user_id for x in User.query()
        .filter(User.user_id != current_user.user_id)
        .filter(User.active == true())
        .filter(User.admin == true())
        .order_by(User.created_on.asc())]

    active_regular_users = [
        x.user_id for x in User.query()
        .filter(User.user_id != current_user.user_id)
        .filter(User.active == true())
        .filter(User.admin == false())
        .order_by(User.created_on.asc())]

    list_of_accounts = [current_user.user_id]
    list_of_accounts += active_super_admins
    list_of_accounts += active_regular_users

    return list_of_accounts

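# Hedged usage sketch: because the current user and the super-admins come
# first, trimming from the end enforces a license limit without touching them
# (model and license_limit are placeholder names).
ordered_ids = model.get_accounts_in_creation_order()
over_limit_ids = ordered_ids[license_limit:]
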
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("incident_cost_type",
                  sa.Column("category", sa.String(), nullable=True))

    # we set the category of the default response incident cost type
    bind = op.get_bind()
    session = Session(bind=bind)

    incident_cost_type = (
        session.query(IncidentCostType)
        .filter(IncidentCostType.default == true())
        .one_or_none()
    )
    if incident_cost_type:
        incident_cost_type.category = "Primary"
        session.commit()

def get_samples(session, sample_ids=None):
    query = session.query(Sample, SampleStats).outerjoin(
        SampleStats,
        and_(SampleStats.sample_id == Sample.id,
             SampleStats.outliers == true(),
             SampleStats.full_reads == false(),
             SampleStats.filter_type == 'all'))
    if sample_ids is not None:
        query = query.filter(SampleStats.sample_id.in_(sample_ids))
    query = query.order_by(Sample.subject_id).options(
        Load(SampleStats).load_only('sequence_cnt', 'in_frame_cnt',
                                    'stop_cnt', 'functional_cnt',
                                    'no_result_cnt'))

    clone_counts = session.query(
        SampleStats.sample_id, SampleStats.sequence_cnt,
        SampleStats.functional_cnt).filter(
            SampleStats.outliers == true(),
            SampleStats.full_reads == false(),
            SampleStats.filter_type == 'clones_all')
    clone_counts = {s.sample_id: s for s in clone_counts}

    result = []
    for sample, stats in query:
        sample_dict = _sample_to_dict(sample)
        if stats is not None:
            clones = clone_counts[sample.id]
            sample_dict['sequence_cnt'] = stats.sequence_cnt
            sample_dict['functional_cnt'] = stats.functional_cnt
            sample_dict['no_result_cnt'] = stats.no_result_cnt
            sample_dict['total_cnt'] = (stats.sequence_cnt +
                                        stats.no_result_cnt)
            sample_dict['clone_cnt'] = clones.sequence_cnt
            sample_dict['functional_clone_cnt'] = clones.functional_cnt
        result.append(sample_dict)
    return result