def get_schema_names(self, connection, **kw):
    sysschema = self.sys_schemas
    # Exclude the SYS% and Q% system schemas. The two conditions must be
    # combined with and_(): old-style select() takes a single whereclause
    # positional argument, so passing two clauses would misuse the second
    # as from_obj.
    query = sql.select(
        [sysschema.c.schemaname],
        sql.and_(
            sql.not_(sysschema.c.schemaname.like('SYS%')),
            sql.not_(sysschema.c.schemaname.like('Q%'))),
        order_by=[sysschema.c.schemaname])
    return [self.normalize_name(r[0]) for r in connection.execute(query)]
def get_schultyp_all():
    """ Returns all school maintaining bodies (Schultraeger) """
    return session.query(KeySchultyp).\
        filter(and_(not_(KeySchultyp.Gruppe == 'HKM'),
                    not_(KeySchultyp.Gruppe == 'SONS'),
                    not_(KeySchultyp.Gruppe == 'ST'))).\
        order_by(KeySchultyp.TextKey)
def tasks(self):
    """ List of tasks that support this distro """
    # Delayed import to avoid circular dependency
    from . import Task, TaskExcludeArch, TaskExcludeOSMajor
    return Task.query\
        .filter(not_(Task.excluded_arch.any(
            TaskExcludeArch.arch == self.arch)))\
        .filter(not_(Task.excluded_osmajor.any(
            TaskExcludeOSMajor.osmajor == self.distro.osversion.osmajor)))
def myfavorite(Session, msmgroup):
    """ An example """
    logger.info("Running myfavorite")

    def activation_response(x, k):
        """ Curve from [0,1] -> [0,1] """
        if k < 0:
            k = k / (1 - k)
        return x / (1 - k * (x - 1))

    k_factor = -100

    prev = Session.query(MSMGroup).get(msmgroup.id - 1)
    if prev is None:
        return default(Session, msmgroup)

    # number of new states discovered
    n_new = msmgroup.n_states - prev.n_states
    if n_new < 0:
        return default(Session, msmgroup)

    q = Session.query(Trajectory)
    new_trajectories = q.filter(Trajectory.msm_groups.contains(msmgroup)).filter(
        not_(Trajectory.msm_groups.contains(prev)))

    # sum of the number of steps in the new trajectories
    sum_op = Session.query(func.sum(Trajectory.length))
    sum_op = sum_op.filter(Trajectory.msm_groups.contains(msmgroup))
    sum_op = sum_op.filter(not_(Trajectory.msm_groups.contains(prev)))
    n_steps = float(sum_op.scalar())

    p_explore = activation_response(n_new / n_steps, k_factor)

    if len(msmgroup.markov_models) != 2:
        raise ValueError("Only 2 models")
    if not [False, True] == sorted([msm.forcefield.true_kinetics
                                    for msm in msmgroup.markov_models]):
        raise ValueError("one needs to be true_kinetics, the other not")

    for msm in msmgroup.markov_models:
        if msm.forcefield.true_kinetics:
            msm.model_selection_weight = 1 - p_explore
            even_sampling(Session, msm)
        else:
            msm.model_selection_weight = p_explore
            even_sampling(Session, msm)
        logger.info("%s selection weight: %f", msm.forcefield.name,
                    msm.model_selection_weight)
def get_pixbuf(self, attr, val):
    if attr == 'category':
        tbl = self.rd.recipe_table.join(self.rd.categories_table)
        col = self.rd.categories_table.c.category
        if hasattr(self, 'category_images'):
            stment = and_(col == val,
                          self.rd.recipe_table.c.image != None,
                          self.rd.recipe_table.c.image != '',
                          not_(self.rd.recipe_table.c.title.in_(self.category_images)))
        else:
            stment = and_(col == val,
                          self.rd.recipe_table.c.image != None,
                          self.rd.recipe_table.c.image != '')
        result = tbl.select(stment, limit=1).execute().fetchone()
        if not hasattr(self, 'category_images'):
            self.category_images = []
        if result:
            self.category_images.append(result.title)
    elif attr == 'rating':
        return star_generator.get_pixbuf(val)
    elif attr in ['preptime', 'cooktime']:
        return get_time_slice(val)
    else:
        tbl = self.rd.recipe_table
        col = getattr(self.rd.recipe_table.c, attr)
        stment = and_(col == val,
                      self.rd.recipe_table.c.image != None,
                      self.rd.recipe_table.c.image != '')
        result = tbl.select(stment, limit=1).execute().fetchone()
    if result and result.thumb:
        return scale_pb(get_pixbuf_from_jpg(result.image))
    else:
        return self.get_base_icon(attr) or self.get_base_icon('category')
def initialize(app):
    # Initialize the sources
    for source_name, source_details in app.config.get('REVERE_SOURCES', {}).items():
        if source_details.get('enabled') is False:
            continue
        app.sources[source_name] = get_klass(source_details['type'])(
            description=source_details.get('description'),
            config=source_details['config'])

    # Initialize the alerts
    for alert_name, alert_details in app.config.get('REVERE_ALERTS', {}).items():
        app.alerts[alert_name] = get_klass(alert_details['type'])(
            description=alert_details.get('description'),
            config=alert_details['config'],
            enabled_in_config=alert_details.get('enabled', True))
        alert = Alert.query.filter_by(key=alert_name).first()
        if not alert:
            alert = Alert(key=alert_name)
            db.session.add(alert)

    Alert.query.filter(not_(Alert.key.in_(app.alerts.keys()))).delete(
        synchronize_session='fetch')
    db.session.commit()

    # Run the maintenance routine hourly
    scheduler.add_cron_job(monitor_maintenance, year="*", month="*", day="*",
                           hour="*", minute="0")

    for monitor in Monitor.query.filter_by(active=True):
        update_monitor_scheduler(monitor)

    scheduler.start()
def edit(self, *args, **kw):
    user = handler.user.get_user_in_session(request)
    if request.method == 'GET':
        project_id = args[0]
    else:
        project_id = kw.get('pid')
    debug("check permission", 1)
    if not checker.check_permission(user=user, project_id=project_id,
                                    right_id=constants.right_upload_id) \
            and not checker.is_admin(user=user):
        flash('You must have %s permission to edit the project.'
              % constants.right_upload, 'error')
        raise redirect('/tracks/', {'pid': project_id})
    #if checker.is_admin(user=user):
    #    user = DBSession.query(User).join(Project).filter(Project.id == project_id).first()
    widget = form.EditProject(action=url('/projects/edit/%s' % project_id)).req()
    widget.value = {'pid': project_id}
    project = DBSession.query(Project).filter(Project.id == project_id).first()
    # take the user's tracks with the same sequence id
    tracks = DBSession.query(Track).join(User.tracks).filter(
        and_(User.id == user.id,
             Track.sequence_id == project.sequence_id,
             not_(Track.id.in_([t.id for t in project.tracks])))).all()
    # take the shared tracks with the same sequence id
    shared_tracks = handler.user.shared_tracks(user.id, constants.rights['download']['id'])
    shared_tracks = [t for t in shared_tracks
                     if (t.sequence_id == project.sequence_id
                         and t.id not in [tr.id for tr in project.tracks])]
    tracks.extend(shared_tracks)
    if request.method == 'GET':
        debug("GET", 2)
        widget.child.children[1].value = project.name
        widget.child.children[2].options = [('', '')] \
            + [(t.id, t.name) for t in tracks] \
            + [(t.id, t.name, {'selected': True}) for t in project.tracks]
        return dict(page='tracks', widget=widget, project_id=project_id)
    debug("POST", 2)
    try:
        debug("validate post", 2)
        widget.validate(kw)
    except twc.ValidationError as e:
        debug("error", 2)
        w = e.widget
        w.child.children[1].value = project.name
        w.child.children[2].options = [(t.id, t.name) for t in tracks] \
            + [(t.id, t.name, {'selected': True}) for t in project.tracks]
        return dict(page='tracks', widget=w, project_id=project_id)
    debug("validation passed")
    track_ids = kw.get('tracks', [])
    if not track_ids:
        track_ids = []
    if not isinstance(track_ids, list):
        track_ids = [track_ids]
    if len(track_ids) > 0 and '' in track_ids:
        track_ids.remove('')
    # if the project is shared, some tracks cannot be removed
    for t in project.tracks:
        if not checker.user_own_track(user.id, track=t) and t.id not in track_ids \
                and t.id in [s.id for s in shared_tracks]:
            track_ids.append(t.id)
    handler.project.e(project_id=project_id, name=kw.get('name'), track_ids=track_ids)
    raise redirect('/tracks/', {'pid': project_id})
def visit_conditional_insert(element, compiler, **kwargs):
    # magic copied from sqlalchemy.sql.compiler.SQLCompiler.visit_insert
    compiler.isinsert = True
    try:
        # pylint: disable=E0611
        from sqlalchemy.sql import crud
        colparams = crud._get_crud_params(compiler, element)
    except ImportError:
        # SQLAlchemy <= 1.0
        colparams = compiler._get_colparams(element)
    text = 'INSERT INTO %s' % compiler.process(element.table, asfrom=True)
    text += ' (%s)\n' % ', '.join(compiler.preparer.format_column(c[0])
                                  for c in colparams)
    text += 'SELECT %s\n' % ', '.join(c[1] for c in colparams)
    text += compiler.default_from()
    # default_from() returns '' for MySQL but that's wrong, MySQL requires
    # FROM DUAL if there is a following WHERE clause.
    if isinstance(compiler.dialect, MySQLDialect):
        text += 'FROM DUAL\n'
    # We need FOR UPDATE in the inner SELECT for MySQL, to ensure we acquire an
    # exclusive lock immediately, instead of acquiring a shared lock and then
    # subsequently upgrading it to an exclusive lock, which is subject to
    # deadlocks if another transaction is doing the same thing.
    nonexistence_clause = not_(exists(Select(
        columns=[sqltext('1')],
        from_obj=[element.table],
        whereclause=element.unique_condition,
        for_update=True)))
    text += 'WHERE ' + compiler.process(nonexistence_clause)
    return text
def has_member(cls, user):  # pylint: disable=E0213
    return or_(
        and_(cls.membership_type != GroupMembershipType.inverted,
             cls.user_group_assocs.any(UserGroup.user == user)),
        and_(cls.membership_type == GroupMembershipType.inverted,
             not_(cls.excluded_user_group_assocs.any(
                 ExcludedUserGroup.user == user))))
def public_wall_for_contact(cls, contact):
    return and_(
        Share.contact_id == contact.id,
        Share.public,
        not_(Share.hidden),
        Post.parent_id == None)
def test_single_query(self):
    search = Search(self.Donkey, "people", self.session)
    session = self.Donkey.Session()
    people_class = self.Donkey.get_class("people")
    email_class = self.Donkey.get_class("email")

    assert set(QueryFromStringParam(
        search, 'name < ?',
        pos_args=["popp02"]).add_conditions(base_query).all()
    ).symmetric_difference(
        set(session.query(people_class.id).filter(
            people_class.name < u"popp02").all())) == set()

    assert set(QueryFromStringParam(
        search, 'name < ? and email.email like ?',
        pos_args=["popp02", "popi%"]).add_conditions(base_query).all()
    ).symmetric_difference(
        set(session.query(people_class.id).join(["email"]).filter(
            and_(people_class.name < u"popp02",
                 email_class.email.like(u"popi%"))).all())) == set()

    assert set(QueryFromStringParam(
        search, "name < ? and not email.email like ?",
        pos_args=["popp02", "popi%"]).add_conditions(base_query).all()
    ).symmetric_difference(
        set(session.query(people_class.id).outerjoin(["email"]).
            filter(and_(people_class.name < u"popp02",
                        or_(email_class.email == None,
                            not_(email_class.email.like(u"popi%"))))).all())) == set()

    assert set(QueryFromStringParam(
        search, "name < ? or not email.email like ?",
        pos_args=["popp02", "popi%"]).add_conditions(base_query).all()
    ).symmetric_difference(
        set(session.query(people_class.id).outerjoin(["email"]).
            filter(or_(people_class.name < u"popp02",
                       or_(email_class.email == None,
                           not_(email_class.email.like(u"popi%"))))).all())) == set()

    assert set(QueryFromStringParam(
        search, "not (name < ? or not email.email like ?) ",
        pos_args=["popp02", "popi%"]).add_conditions(base_query).all()
    ).symmetric_difference(
        set(session.query(people_class.id).outerjoin(["email"]).
            filter(not_(or_(people_class.name < u"popp02",
                            or_(email_class.email == None,
                                not_(email_class.email.like(u"popi%")))))).all())) == set()
def exclude_topic_mutes(conditions, user_profile, stream_id):
    # type: (List[Selectable], UserProfile, Optional[int]) -> List[Selectable]
    query = MutedTopic.objects.filter(
        user_profile=user_profile,
    )

    if stream_id is not None:
        # If we are narrowed to a stream, we can optimize the query
        # by not considering topic mutes outside the stream.
        query = query.filter(stream_id=stream_id)

    query = query.values(
        'recipient_id',
        'topic_name'
    )
    rows = list(query)

    if not rows:
        return conditions

    def mute_cond(row):
        # type: (Dict[str, Any]) -> Selectable
        recipient_id = row['recipient_id']
        topic_name = row['topic_name']
        stream_cond = column("recipient_id") == recipient_id
        topic_cond = func.upper(column("subject")) == func.upper(topic_name)
        return and_(stream_cond, topic_cond)

    condition = not_(or_(*list(map(mute_cond, rows))))
    return conditions + [condition]
def exclude(self, **terms):
    q = self._clone()
    query = not_(self._filter(**terms))
    if self.query is not None:
        query = and_(self.query, query)
    q.query = query
    return q
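A minimal standalone sketch of the same exclude() pattern, assuming a hypothetical users table (the table and column names are illustrative): each excluded term is negated with not_() and chained onto the accumulated condition with and_().

from sqlalchemy import Boolean, Column, Integer, MetaData, String, Table
from sqlalchemy.sql import and_, not_, select

metadata = MetaData()
users = Table(
    "users", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
    Column("active", Boolean),
)

condition = None
for term in (users.c.name == "bob", users.c.active == False):  # noqa: E712
    negated = not_(term)
    # first exclusion starts the condition; later ones are AND-ed on
    condition = negated if condition is None else and_(condition, negated)

print(select([users]).where(condition))
# SELECT ... WHERE users.name != :name_1 AND users.active = false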
def run(self):
    while not self.stoprequest.isSet():
        self.db.expire_all()
        for node_db in self.db.query(Node).filter(
            # nodes may become unresponsive while provisioning
            not_(Node.status == 'provisioning')
        ):
            timedelta = (datetime.now() - node_db.timestamp).seconds
            if timedelta > self.timeout:
                logger.warning(
                    u"Node '{0}' seems to be offline "
                    "for {1} seconds...".format(
                        node_db.name,
                        timedelta))
                if node_db.online:
                    node_db.online = False
                    self.db.add(node_db)
                    self.db.commit()
                    notifier.notify(
                        "error",
                        u"Node '{0}' has gone away".format(
                            node_db.name or node_db.mac),
                        node_id=node_db.id)
        self.sleep()
def validate(self):
    print "\n\n" + "-" * 70 + "\n\n"
    print "Validation of data"
    print "Unused Event Types: {0}".format(', '.join(
        [et.display_name for et in
         model.DBSession.query(model.EventType).
         filter(not_(model.EventType.id.in_(self.used_event_types))).
         all()]))
def admin_user(userid):
    user = Users.query.filter_by(id=userid).first()

    if request.method == 'GET':
        solves = Solves.query.filter_by(userid=userid).all()
        solve_ids = [s.chalid for s in solves]
        missing = Challenges.query.filter(
            not_(Challenges.id.in_(solve_ids))).all()
        last_seen = db.func.max(Tracking.date).label('last_seen')
        addrs = db.session.query(Tracking.ip, last_seen) \
            .filter_by(user=userid) \
            .group_by(Tracking.ip) \
            .order_by(last_seen.desc()).all()
        wrong_keys = WrongKeys.query.filter_by(userid=userid).order_by(
            WrongKeys.date.asc()).all()
        awards = Awards.query.filter_by(userid=userid).order_by(
            Awards.date.asc()).all()
        score = user.score()
        place = user.place()
        return render_template('admin/user.html', solves=solves, user=user,
                               addrs=addrs, score=score, missing=missing,
                               place=place, wrong_keys=wrong_keys,
                               awards=awards)
    elif request.method == 'POST':
        admin_user = request.form.get('admin', None)
        if admin_user:
            admin_user = True if admin_user == 'true' else False
            user.admin = admin_user
            # Set user.banned to hide admins from scoreboard
            user.banned = admin_user
            db.session.commit()
            db.session.close()
            return jsonify({'data': ['success']})

        name = request.form.get('name', None)
        password = request.form.get('password', None)
        email = request.form.get('email', None)
        website = request.form.get('website', None)
        affiliation = request.form.get('affiliation', None)
        country = request.form.get('country', None)

        errors = []

        name_used = Users.query.filter(Users.name == name).first()
        if name_used and int(name_used.id) != int(userid):
            errors.append('That name is taken')

        email_used = Users.query.filter(Users.email == email).first()
        if email_used and int(email_used.id) != int(userid):
            errors.append('That email is taken')

        if errors:
            db.session.close()
            return jsonify({'data': errors})
        else:
            user.name = name
            user.email = email
            if password:
                user.password = bcrypt_sha256.encrypt(password)
            user.website = website
            user.affiliation = affiliation
            user.country = country
            db.session.commit()
            db.session.close()
            return jsonify({'data': ['success']})
def author_shared_with(cls, author, target):
    return and_(
        Post.author_id == author.id,
        Share.contact_id == target.contact.id,
        not_(Share.hidden),
        Post.parent_id == None)
def _add_networks_wo_ips(cls, cluster_db, network_ids, node_db):
    add_net_data = []
    # And now let's add networks w/o IP addresses
    nets = db().query(NetworkGroup).\
        filter(NetworkGroup.cluster_id == cluster_db.id)
    if network_ids:
        nets = nets.filter(not_(NetworkGroup.id.in_(network_ids)))
    # For now, we pass information about all networks,
    # so these vlans will be created on every node we call this func for.
    # However it will end up with errors if we precreate vlans in VLAN mode
    # in fixed network. We are skipping fixed nets in VLAN mode.
    for net in nets.order_by(NetworkGroup.id).all():
        interface = cls._get_interface_by_network_name(
            node_db,
            net.name)
        if net.name == 'fixed' and cluster_db.net_manager == 'VlanManager':
            continue
        add_net_data.append({
            'name': net.name,
            'vlan': net.vlan_start,
            'dev': interface.name})
    add_net_data.append(cls._get_admin_network(node_db))
    return add_net_data
def get_sightings(session):
    query = session.query(Sighting) \
        .filter(Sighting.expire_timestamp > time.time())
    trash_list = getattr(config, 'TRASH_IDS', None)
    if trash_list:
        query = query.filter(not_(Sighting.pokemon_id.in_(config.TRASH_IDS)))
    return query.all()
def test_network_group_creates_several_networks(self):
    cluster = self.env.create_cluster(api=False)
    kw = {'release': cluster.release_id,
          'cidr': '10.0.0.0/8',
          'netmask': '255.0.0.0',
          'network_size': 256,
          'name': 'fixed',
          'access': 'private',
          'vlan_start': 200,
          'amount': 25,
          'cluster_id': cluster.id}
    ng = NetworkGroup(**kw)
    self.db.add(ng)
    self.db.commit()
    self.env.network_manager.create_networks(ng)
    nets_db = self.db.query(Network).filter(
        not_(Network.name == "fuelweb_admin")
    ).all()
    self.assertEquals(len(nets_db), kw['amount'])
    self.assertEquals(nets_db[0].vlan_id, kw['vlan_start'])
    self.assertEquals(nets_db[kw['amount'] - 1].vlan_id,
                      kw['vlan_start'] + kw['amount'] - 1)
    self.assertEquals(all(x.name == kw['name'] for x in nets_db), True)
    self.assertEquals(all(x.access == kw['access'] for x in nets_db), True)
    vlans_db = self.db.query(Vlan).all()
    self.assertEquals(len(vlans_db), kw['amount'] + 1)  # + 1 for admin net
def trash(self, flag=True):
    filter = sql.and_(Comment.reviewed == True,
                      Comment.publishable == False)
    if flag:
        return self.filter(filter)
    else:
        return self.filter(sql.not_(filter))
def _get_ips_except_admin(cls, node_id=None, network_id=None, joined=False):
    """Method for receiving IP addresses for node or network
    excluding Admin Network IP address.

    :param node_id: Node database ID.
    :type  node_id: int
    :param network_id: Network database ID.
    :type  network_id: int
    :returns: List of free IP addresses as SQLAlchemy objects.
    """
    ips = db().query(IPAddr).order_by(IPAddr.id)
    if joined:
        ips = ips.options(joinedload('network_data'))
    if node_id:
        ips = ips.filter_by(node=node_id)
    if network_id:
        ips = ips.filter_by(network=network_id)

    try:
        admin_net_id = cls.get_admin_network_group_id()
    except errors.AdminNetworkNotFound:
        admin_net_id = None
    if admin_net_id:
        ips = ips.filter(
            not_(IPAddr.network == admin_net_id)
        )

    return ips.all()
def test_negate(self):
    sat = self.sa_alltypes
    sd = sat.c.double_col
    d = self.alltypes.double_col
    cases = [(-(d > 0), sql.not_(sd > L(0)))]
    self._check_expr_cases(cases)
def filterQueryByDateRestrictor(query, dateRestrictor, tableName):
    """Returns the query filtered by the date restrictor, e.g.,
    'date elicited is earlier than 2011-11-11'.
    """
    location = dateRestrictor['location']
    relation = dateRestrictor['relation']
    date = dateRestrictor['date']
    date = datetime.datetime.combine(date, datetime.time())
    tbl = getattr(model, tableName)
    col = getattr(tbl, location)
    if relation == '' or relation == 'not_':
        nextDay = date + datetime.timedelta(1)
        previousDay = date - datetime.timedelta(1)
        if relation == '':
            filterCondition = and_(col > previousDay, col < nextDay)
        else:
            filterCondition = not_(and_(col > previousDay, col < nextDay))
    elif relation == 'earlier_than':
        filterCondition = col < date
    else:
        filterCondition = col > date
    return query.filter(filterCondition)
def _get_ips_except_admin(self, node_id=None, network_id=None):
    """
    Method for receiving IP addresses for node or network
    excluding Admin Network IP address.

    :param node_id: Node database ID.
    :type  node_id: int
    :param network_id: Network database ID.
    :type  network_id: int
    :returns: List of free IP addresses as SQLAlchemy objects.
    """
    node_db = db().query(Node).get(node_id)
    ips = db().query(IPAddr).order_by(IPAddr.id)
    if node_id:
        ips = ips.filter_by(node=node_id)
    if network_id:
        ips = ips.filter_by(network=network_id)

    admin_net_id = self.get_admin_network_id(False)
    if admin_net_id:
        ips = ips.filter(
            not_(IPAddr.network == admin_net_id)
        )

    return ips.all()
def test_do_not_update_net_manager_if_validation_is_failed(self):
    self.db.query(NetworkGroup).filter(
        not_(NetworkGroup.name == "fuelweb_admin")).first()
    new_net_manager = {"net_manager": "VlanManager",
                       "networks": [{"id": 500, "vlan_start": 500}]}
    self.put(self.cluster.id, new_net_manager, expect_errors=True)

    self.db.refresh(self.cluster)
    self.assertNotEquals(self.cluster.net_manager,
                         new_net_manager["net_manager"])
def get_schulen_all():
    """ Returns all schools """
    query = session.query(Schulstelle).add_entity(Schulstamm).join('rel_schulstamm')
    # Exclude teacher training seminars ("Studienseminare") etc.
    query = query.order_by(Schulstamm.NameSchule).filter(not_(Schulstamm.Schulamt == u'LH'))
    query = query.reset_joinpoint().filter_by(Standort_Kz=0).filter_by(Loesch_Datum='')
    return query.all()
def test_filter_ne_nul_nul(self):
    Keyword = self.classes.Keyword

    self._equivalent(
        self.session.query(Keyword).filter(Keyword.user != self.u),
        self.session.query(Keyword).filter(
            not_(Keyword.user_keyword.has(user=self.u))))
def find_duplicate(field=["email", "displayname", "user_id"], details=False):
    """Find duplicate users in RCShibboleth

    :param field: The field to choose
    """
    db = connect()
    fields = {"email": users.c.email,
              "displayname": users.c.displayname,
              "user_id": users.c.user_id}

    sql = select([users.c.user_id])
    sql = sql.group_by(users.c.user_id)
    sql = sql.having(func.count(users.c.user_id) > 1)
    result = db.execute(sql)
    ignored_ids = [row[0] for row in result]

    field = fields[field]
    sql = select([field, func.count(field)])
    sql = sql.group_by(field)
    sql = sql.having(func.count(field) > 1)
    result = db.execute(sql)

    user_list = []
    for row in result:
        filter = row[0]
        sql = select([users])
        # NOTE: the original wrote `users.c.user_id is None`, a Python identity
        # test that is always False on a Column; a SQL NULL test needs == None.
        sql = sql.where(and_(field == filter,
                             or_(not_(users.c.user_id.in_(ignored_ids)),
                                 users.c.user_id == None)))
        sql = sql.where(field == filter)
        users1 = db.execute(sql)
        user_list.extend(users1)
    print_users(user_list)
def get_measures(self):
    """Find all data that should be included in the report.

    The data is returned as a list of tuples containing a
    :py:class:`Module <euphorie.client.model.Module>`,
    :py:class:`Risk <euphorie.client.model.Risk>` and
    :py:class:`ActionPlan <euphorie.client.model.ActionPlan>`.
    Each entry in the list will correspond to a row in the generated
    Excel file.

    This implementation differs from Euphorie in its ordering:
    it sorts on risk priority instead of start date.
    """
    query = (
        Session.query(model.Module, model.Risk, model.ActionPlan)
        .filter(sql.and_(model.Module.session == self.session,
                         model.Module.profile_index > -1))
        .filter(sql.not_(model.SKIPPED_PARENTS))
        .filter(sql.or_(model.MODULE_WITH_RISK_OR_TOP5_FILTER,
                        model.RISK_PRESENT_OR_TOP5_FILTER))
        .join(
            (
                model.Risk,
                sql.and_(
                    model.Risk.path.startswith(model.Module.path),
                    model.Risk.depth == model.Module.depth + 1,
                    model.Risk.session == self.session,
                ),
            )
        )
        .join((model.ActionPlan, model.ActionPlan.risk_id == model.Risk.id))
        .order_by(
            sql.case(value=model.Risk.priority,
                     whens={"high": 0, "medium": 1}, else_=2),
            model.Risk.path)
    )
    return query.all()
"""Example of using logical operators to combine conditions.""" from datetime import datetime from sqlalchemy import create_engine, MetaData from sqlalchemy.sql import select, or_, and_, not_ if __name__ == '__main__': engine = create_engine('sqlite:///../chinook.db', echo=False) meta = MetaData() meta.reflect(bind=engine) connection = engine.connect() media_types = meta.tables['media_types'] artists = meta.tables['artists'] invoices = meta.tables['invoices'] # or_ (alternative) - select artists named James OR Peter query = select([artists]).where( or_(artists.c.Name.like('James%'), artists.c.Name.like('Peter%'))) for idx, name in connection.execute(query): print(f'{idx:3}: {name}') # and_ (conjunction) - select invoices with total cost between 10.0 AND 13.0 (exclusive) query = select([invoices]).where( and_(invoices.c.Total > 10.0, invoices.c.Total < 13.0)) for row in connection.execute(query): print(f'{row[invoices.c.Total]}') # not_ (negation) - select media types that don't start on P query = select([media_types]).where(not_(media_types.c.Name.like('P%'))) for row in connection.execute(query): print(f'{row[media_types.c.Name]}')
def __ne__(self, obj):
    return not_(self.__eq__(obj))
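A hedged sketch of where a __ne__ like this typically lives: a custom hybrid comparator that derives != from == by letting SQL negate the reused equality clause. The CaseInsensitiveComparator name and the lower-casing behaviour are illustrative assumptions, not taken from the snippet above.

from sqlalchemy import func
from sqlalchemy.ext.hybrid import Comparator
from sqlalchemy.sql import not_

class CaseInsensitiveComparator(Comparator):
    def __eq__(self, other):
        # case-insensitive equality, evaluated in SQL
        return func.lower(self.__clause_element__()) == func.lower(other)

    def __ne__(self, other):
        # reuse the == logic and negate it server-side with not_()
        return not_(self.__eq__(other))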
def author_shared_with(cls, author, target):
    return and_(Post.author_id == author.id,
                Share.contact_id == target.contact.id,
                not_(Share.hidden),
                Post.parent_id == None)
def _not_spec_to_condition(cls, not_spec):
    return not_(cls._specification_to_condition(not_spec.spec))
def __ne__(self, other):
    return sql.not_(self.__eq__(other))
def apply(self, query, value, alias=None):
    start, end = value
    return query.filter(not_(self.get_column(alias).between(start, end)))
class ClauseVisitor(TreeVisitor):
    """Implements TreeVisitor to convert the tree into a SQLAlchemy WHERE
    clause.

    Parameters
    ----------
    universe : `DimensionUniverse`
        All known dimensions.
    columns : `QueryColumns`
        Struct that organizes the special columns known to the query
        under construction.
    elements : `NamedKeyDict`
        `DimensionElement` instances and their associated tables.
    """

    unaryOps = {"NOT": lambda x: not_(x),
                "+": lambda x: +x,
                "-": lambda x: -x}
    """Mapping of unary operator names to corresponding functions"""

    binaryOps = {"OR": lambda x, y: or_(x, y),
                 "AND": lambda x, y: and_(x, y),
                 "=": lambda x, y: x == y,
                 "!=": lambda x, y: x != y,
                 "<": lambda x, y: x < y,
                 "<=": lambda x, y: x <= y,
                 ">": lambda x, y: x > y,
                 ">=": lambda x, y: x >= y,
                 "+": lambda x, y: x + y,
                 "-": lambda x, y: x - y,
                 "*": lambda x, y: x * y,
                 "/": lambda x, y: x / y,
                 "%": lambda x, y: x % y}
    """Mapping of binary operator names to corresponding functions"""

    def __init__(self, universe: DimensionUniverse,
                 columns: QueryColumns,
                 elements: NamedKeyDict[DimensionElement, FromClause]):
        self.universe = universe
        self.columns = columns
        self.elements = elements

    def visitNumericLiteral(self, value, node):
        # Docstring inherited from TreeVisitor.visitNumericLiteral
        # Convert string value into float or int
        try:
            value = int(value)
        except ValueError:
            value = float(value)
        return literal(value)

    def visitStringLiteral(self, value, node):
        # Docstring inherited from TreeVisitor.visitStringLiteral
        return literal(value)

    def visitIdentifier(self, name, node):
        # Docstring inherited from TreeVisitor.visitIdentifier
        element, column = categorizeIdentifier(self.universe, name)
        if column is not None:
            return self.elements[element].columns[column]
        else:
            return self.columns.getKeyColumn(element)

    def visitUnaryOp(self, operator, operand, node):
        # Docstring inherited from TreeVisitor.visitUnaryOp
        func = self.unaryOps.get(operator)
        if func:
            return func(operand)
        else:
            raise ValueError(
                f"Unexpected unary operator `{operator}' in `{node}'.")

    def visitBinaryOp(self, operator, lhs, rhs, node):
        # Docstring inherited from TreeVisitor.visitBinaryOp
        func = self.binaryOps.get(operator)
        if func:
            return func(lhs, rhs)
        else:
            raise ValueError(
                f"Unexpected binary operator `{operator}' in `{node}'.")

    def visitIsIn(self, lhs, values, not_in, node):
        # Docstring inherited from TreeVisitor.visitIsIn
        # `values` is a list of literals and ranges, range is represented
        # by a tuple (start, stop, stride). We need to transform range into
        # some SQL construct, simplest would be to generate a set of literals
        # and add it to the same list but it could become too long. What we
        # do here is to introduce some large limit on the total number of
        # items in IN() and if range exceeds that limit then we do something
        # like:
        #
        #    X IN (1, 2, 3)
        #    OR
        #    (X BETWEEN START AND STOP AND MOD(X, STRIDE) = MOD(START, STRIDE))
        #
        # or for NOT IN case
        #
        #    NOT (X IN (1, 2, 3)
        #         OR
        #         (X BETWEEN START AND STOP
        #          AND MOD(X, STRIDE) = MOD(START, STRIDE)))
        max_in_items = 1000

        # split the list into literals and ranges
        literals, ranges = [], []
        for item in values:
            if isinstance(item, tuple):
                ranges.append(item)
            else:
                literals.append(item)

        clauses = []
        for start, stop, stride in ranges:
            count = (stop - start + 1) // stride
            if len(literals) + count > max_in_items:
                # X BETWEEN START AND STOP
                #    AND MOD(X, STRIDE) = MOD(START, STRIDE)
                expr = lhs.between(start, stop)
                if stride != 1:
                    expr = and_(expr, (lhs % stride) == (start % stride))
                clauses.append(expr)
            else:
                # add all values to literal list, stop is inclusive
                literals += [literal(value)
                             for value in range(start, stop + 1, stride)]

        if literals:
            # add IN() in front of BETWEENs
            clauses.insert(0, lhs.in_(literals))

        expr = or_(*clauses)
        if not_in:
            expr = not_(expr)

        return expr

    def visitParens(self, expression, node):
        # Docstring inherited from TreeVisitor.visitParens
        return expression.self_group()

    def visitRangeLiteral(self, start, stop, stride, node):
        # Docstring inherited from TreeVisitor.visitRangeLiteral
        # Just return a triple and let parent clauses handle it,
        # stride can be None which means the same as 1.
        return (start, stop, stride or 1)
def get_schema_names(self, connection, **kw):
    sysschema = self.sys_schemas
    query = sql.select([sysschema.c.schemaname],
                       sql.not_(sysschema.c.schemaname.like('SYS%')),
                       order_by=[sysschema.c.schemaname])
    return [self.normalize_name(r[0]) for r in connection.execute(query)]
print "Calculating sentiment for {} from {}".format(url, last_month[0]) #last_month = None self.pool.map(partial(self.process_sentiment, url=url, NaiveBayes=NaiveBayes,\ Vader=Vader, st=st), self.iter_posts(url, start_date=last_month)) def start(self, classifier, site): NaiveBayes = Vader = st = None if 'NaiveBayes' in classifier: NaiveBayes = self.NaiveBayes_load() if 'Vader' in classifier: Vader = SentimentIntensityAnalyzer() if 'Stanford' in classifier: st = StanfordCoreNLPPLUS('http://localhost') print '{}Classify posts from {} using {}{}'.format(seperator, site, classifier, seperator) self.classify_posts(site, NaiveBayes=NaiveBayes, Vader=Vader, st=st) if __name__ == '__main__': s = sentiment_analysis() NaiveBayes = s.NaiveBayes_load() Vader = SentimentIntensityAnalyzer() sites = ['BiggerPockets', 'activerain'] print '{}Classify posts from {}{}'.format(seperator, sites[0], seperator) url_like = '%biggerpockets%' occupation_like = '%agent%' posts = s.session.query(Posts.URL, Posts.replyid, Posts.body, Posts.city, Posts.state).\ join(Users, Posts.uid==Users.uid).filter(not_(Users.occupation.like(occupation_like))).\ filter(Posts.URL.like(url_like)).filter(func.length(Posts.state)==2) # self.classify_posts(site[0], NaiveBayes=NaiveBayes, Vader=Vader) print str(posts) print str(posts.statement.compile(dialect=mysql.dialect()))
def admin_team(teamid):
    user = Teams.query.filter_by(id=teamid).first()

    if request.method == 'GET':
        solves = Solves.query.filter_by(teamid=teamid).all()
        solve_ids = [s.chalid for s in solves]
        missing = Challenges.query.filter(not_(
            Challenges.id.in_(solve_ids))).all()
        addrs = Tracking.query.filter_by(team=teamid).order_by(
            Tracking.date.desc()).group_by(Tracking.ip).all()
        wrong_keys = WrongKeys.query.filter_by(team=teamid).order_by(
            WrongKeys.date.desc()).all()
        score = user.score()
        place = user.place()
        return render_template('admin/team.html', solves=solves, team=user,
                               addrs=addrs, score=score, missing=missing,
                               place=place, wrong_keys=wrong_keys)
    elif request.method == 'POST':
        admin_user = request.form.get('admin', None)
        if admin_user:
            admin_user = 1 if admin_user == "true" else 0
            user.admin = admin_user
            user.banned = admin_user
            db.session.commit()
            return jsonify({'data': ['success']})

        name = request.form.get('name', None)
        password = request.form.get('password', None)
        email = request.form.get('email', None)
        website = request.form.get('website', None)
        affiliation = request.form.get('affiliation', None)
        country = request.form.get('country', None)

        errors = []

        name_used = Teams.query.filter(Teams.name == name).first()
        if name_used and int(name_used.id) != int(teamid):
            errors.append('That name is taken')

        email_used = Teams.query.filter(Teams.email == email).first()
        if email_used and int(email_used.id) != int(teamid):
            errors.append('That email is taken')

        if errors:
            db.session.close()
            return jsonify({'data': errors})
        else:
            user.name = name
            user.email = email
            if password:
                user.password = bcrypt_sha256.encrypt(password)
            user.website = website
            user.affiliation = affiliation
            user.country = country
            db.session.commit()
            db.session.close()
            return jsonify({'data': ['success']})
def enabled(cls):
    return not_(cast(cls.commented, Boolean))
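A minimal sketch of this cast-then-negate idiom in context, assuming a hypothetical Line model whose commented column stores 0/1 integers; the model, table and column names are illustrative.

from sqlalchemy import Boolean, Column, Integer, cast
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.sql import not_

Base = declarative_base()

class Line(Base):
    __tablename__ = "line"
    id = Column(Integer, primary_key=True)
    commented = Column(Integer, nullable=False, default=0)

    @hybrid_property
    def enabled(self):
        # Python-side evaluation on an instance
        return not self.commented

    @enabled.expression
    def enabled(cls):
        # SQL-side: cast the integer flag to Boolean so not_() can
        # emit NOT line.commented in the WHERE clause
        return not_(cast(cls.commented, Boolean))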
def admin_team(teamid):
    user = Teams.query.filter_by(id=teamid).first_or_404()

    if request.method == 'GET':
        solves = Solves.query.filter_by(teamid=teamid).all()
        solve_ids = [s.chalid for s in solves]
        missing = Challenges.query.filter(not_(
            Challenges.id.in_(solve_ids))).all()
        last_seen = db.func.max(Tracking.date).label('last_seen')
        addrs = db.session.query(Tracking.ip, last_seen) \
            .filter_by(team=teamid) \
            .group_by(Tracking.ip) \
            .order_by(last_seen.desc()).all()
        wrong_keys = WrongKeys.query.filter_by(teamid=teamid).order_by(
            WrongKeys.date.asc()).all()
        awards = Awards.query.filter_by(teamid=teamid).order_by(
            Awards.date.asc()).all()
        score = user.score(admin=True)
        place = user.place(admin=True)
        return render_template('admin/team.html', solves=solves, team=user,
                               addrs=addrs, score=score, missing=missing,
                               place=place, wrong_keys=wrong_keys,
                               awards=awards)
    elif request.method == 'POST':
        admin_user = request.form.get('admin', None)
        if admin_user:
            admin_user = True if admin_user == 'true' else False
            user.admin = admin_user
            # Set user.banned to hide admins from scoreboard
            user.banned = admin_user
            db.session.commit()
            db.session.close()
            return jsonify({'data': ['success']})

        verified = request.form.get('verified', None)
        if verified:
            verified = True if verified == 'true' else False
            user.verified = verified
            db.session.commit()
            db.session.close()
            return jsonify({'data': ['success']})

        name = request.form.get('name', None)
        password = request.form.get('password', None)
        email = request.form.get('email', None)
        website = request.form.get('website', None)
        affiliation = request.form.get('affiliation', None)
        country = request.form.get('country', None)

        errors = []

        name_used = Teams.query.filter(Teams.name == name).first()
        if name_used and int(name_used.id) != int(teamid):
            errors.append('That name is taken')

        email_used = Teams.query.filter(Teams.email == email).first()
        if email_used and int(email_used.id) != int(teamid):
            errors.append('That email is taken')

        if errors:
            db.session.close()
            return jsonify({'data': errors})
        else:
            user.name = name
            if email:
                user.email = email
            if password:
                user.password = bcrypt_sha256.encrypt(password)
            user.website = website
            user.affiliation = affiliation
            user.country = country
            db.session.commit()
            db.session.close()
            return jsonify({'data': ['success']})
def apply(self, query, value, alias=None):
    start, end = value
    # ~between() isn't possible until sqlalchemy 1.0.0
    return query.filter(not_(self.get_column(alias).between(start, end)))
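On SQLAlchemy 1.0 and later the same clause can be spelled with the unary ~ operator; a small sketch against a hypothetical events table (the table and column names are assumptions). Both spellings funnel through the expression's negation, so they render identical SQL.

from sqlalchemy import Column, Date, Integer, MetaData, Table
from sqlalchemy.sql import not_

metadata = MetaData()
events = Table(
    "events", metadata,
    Column("id", Integer, primary_key=True),
    Column("start", Date),
)

clause_a = not_(events.c.start.between("2020-01-01", "2020-12-31"))
clause_b = ~events.c.start.between("2020-01-01", "2020-12-31")
# both render as: events.start NOT BETWEEN :start_1 AND :start_2
print(str(clause_a) == str(clause_b))  # True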
def admin_team(teamid):
    user = Teams.query.filter_by(id=teamid).first_or_404()

    if request.method == 'GET':
        solves = Solves.query.filter_by(teamid=teamid).all()
        solve_ids = [s.chalid for s in solves]
        missing = Challenges.query.filter(
            not_(Challenges.id.in_(solve_ids))).all()
        last_seen = db.func.max(Tracking.date).label('last_seen')
        addrs = db.session.query(Tracking.ip, last_seen) \
            .filter_by(team=teamid) \
            .group_by(Tracking.ip) \
            .order_by(last_seen.desc()).all()
        wrong_keys = WrongKeys.query.filter_by(teamid=teamid).order_by(
            WrongKeys.date.asc()).all()
        awards = Awards.query.filter_by(teamid=teamid).order_by(
            Awards.date.asc()).all()
        score = user.score(admin=True)
        place = user.place(admin=True)
        return render_template('admin/team.html', solves=solves, team=user,
                               addrs=addrs, score=score, missing=missing,
                               place=place, wrong_keys=wrong_keys,
                               awards=awards)
    elif request.method == 'POST':
        name = request.form.get('name', None)
        password = request.form.get('password', None)
        email = request.form.get('email', None)
        website = request.form.get('website', None)
        affiliation = request.form.get('affiliation', None)
        country = request.form.get('country', None)
        admin_user = True if request.form.get('admin', None) == 'on' else False
        verified = True if request.form.get('verified', None) == 'on' else False
        hidden = True if request.form.get('hidden', None) == 'on' else False

        errors = []

        # if email:
        #     valid_email = utils.check_email_format(email)
        #     if not valid_email:
        #         errors.append("That email address is invalid")
        if email:
            valid_email = utils.check_id_format(email)
            if not valid_email:
                errors.append("That student id is invalid")

        name_used = Teams.query.filter(Teams.name == name).first()
        if name_used and int(name_used.id) != int(teamid):
            errors.append('That name is taken')

        if utils.check_email_format(name) is True:
            errors.append('Team name cannot be an email address')

        email_used = Teams.query.filter(Teams.email == email).first()
        if email_used and int(email_used.id) != int(teamid):
            errors.append('That student id is taken')

        if website and (website.startswith('http://') or
                        website.startswith('https://')) is False:
            errors.append('Websites must start with http:// or https://')

        if errors:
            db.session.close()
            return jsonify({'data': errors})
        else:
            user.name = name
            if email:
                user.email = email
            if password:
                user.password = bcrypt_sha256.encrypt(password)
            user.website = website
            user.affiliation = affiliation
            user.country = country
            user.admin = admin_user
            user.verified = verified
            user.banned = hidden
            db.session.commit()
            db.session.close()
            return jsonify({'data': ['success']})
def process_send_message_notifications(payload):
    """
    Sends out email notifications for messages that have been unseen for a
    long enough time
    """
    # very crude and dumb algorithm
    logger.info("Sending out email notifications for unseen messages")

    with session_scope() as session:
        # users who have unnotified messages older than 5 minutes in any group chat
        users = (
            session.execute(
                (
                    select(User)
                    .join(GroupChatSubscription, GroupChatSubscription.user_id == User.id)
                    .join(Message, Message.conversation_id == GroupChatSubscription.group_chat_id)
                    .where(not_(GroupChatSubscription.is_muted))
                    .where(User.is_visible)
                    .where(Message.time >= GroupChatSubscription.joined)
                    .where(or_(Message.time <= GroupChatSubscription.left,
                               GroupChatSubscription.left == None))
                    .where(Message.id > User.last_notified_message_id)
                    .where(Message.id > GroupChatSubscription.last_seen_message_id)
                    .where(Message.time < now() - timedelta(minutes=5))
                    .where(Message.message_type == MessageType.text)  # TODO: only text messages for now
                )
            )
            .scalars()
            .unique()
        )

        for user in users:
            # now actually grab all the group chats, not just less than 5 min old
            subquery = (
                select(
                    GroupChatSubscription.group_chat_id.label("group_chat_id"),
                    func.max(GroupChatSubscription.id).label("group_chat_subscriptions_id"),
                    func.max(Message.id).label("message_id"),
                    func.count(Message.id).label("count_unseen"),
                )
                .join(Message, Message.conversation_id == GroupChatSubscription.group_chat_id)
                .where(GroupChatSubscription.user_id == user.id)
                .where(not_(GroupChatSubscription.is_muted))
                .where(Message.id > user.last_notified_message_id)
                .where(Message.id > GroupChatSubscription.last_seen_message_id)
                .where(Message.time >= GroupChatSubscription.joined)
                .where(Message.message_type == MessageType.text)  # TODO: only text messages for now
                .where(or_(Message.time <= GroupChatSubscription.left,
                           GroupChatSubscription.left == None))
                .group_by(GroupChatSubscription.group_chat_id)
                .order_by(func.max(Message.id).desc())
                .subquery()
            )

            unseen_messages = session.execute(
                select(GroupChat, Message, subquery.c.count_unseen)
                .join(subquery, subquery.c.message_id == Message.id)
                .join(GroupChat, GroupChat.conversation_id == subquery.c.group_chat_id)
                .order_by(subquery.c.message_id.desc())
            ).all()

            user.last_notified_message_id = max(message.id for _, message, _ in unseen_messages)
            session.commit()

            total_unseen_message_count = sum(count for _, _, count in unseen_messages)

            email.enqueue_email_from_template(
                user.email,
                "unseen_messages",
                template_args={
                    "user": user,
                    "total_unseen_message_count": total_unseen_message_count,
                    "unseen_messages": [
                        (group_chat, latest_message, count)
                        for group_chat, latest_message, count in unseen_messages
                    ],
                    "group_chats_link": urls.messages_link(),
                },
            )
def notin_op(self, query, column, value):
    return query.filter(not_(column.in_(value)))
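For comparison, a hedged sketch: column elements also expose notin_() directly (available since SQLAlchemy 1.0), which renders the same NOT IN clause as wrapping in_() with not_(). The pokemon table here is an illustrative assumption.

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.sql import not_

metadata = MetaData()
pokemon = Table("pokemon", metadata, Column("pokemon_id", Integer))

ids = [13, 16, 19]
print(not_(pokemon.c.pokemon_id.in_(ids)))  # pokemon.pokemon_id NOT IN (...)
print(pokemon.c.pokemon_id.notin_(ids))     # same rendered SQL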
def make_admin_trash_panel(self):
    board = self.board
    table = model.backup
    session = model.Session()
    template_kwargs = {}

    # List of current threads *and* orphaned posts.
    threads = []

    if str(self.page).startswith('t'):
        self.page = self.page[1:]
        sql = table.select().where(
            and_(or_(table.c.postnum == self.page,
                     table.c.parent == self.page),
                 table.c.board_name == board.name))\
            .order_by(table.c.timestampofarchival.desc(),
                      table.c.postnum.asc())
        thread = [dict(x.items()) for x in session.execute(sql).fetchall()]

        if not thread:
            raise WakaError('Thread not found.')

        threads = [{'posts': thread}]

        template_kwargs = {
            'postform': board.options['ALLOW_TEXTONLY']
                or board.options['ALLOW_IMAGES'],
            'image_inp': board.options['ALLOW_IMAGES'],
            'textonly_inp': 0,
            'threads': threads,
            'thread': self.page,
            'parent': self.page
        }

    elif config.POST_BACKUP:
        max_res = board.options['IMAGES_PER_PAGE']

        sqlcond = and_(
            or_(table.c.parent == 0,
                and_(table.c.parent > 0,
                     not_(exists([table.c.num],
                                 table.c.parent == table.c.postnum)))),
            table.c.board_name == board.name)

        # Acquire the number of full threads *and* orphaned posts.
        sql = select([func.count()], sqlcond, table)\
            .order_by(table.c.timestampofarchival.desc(),
                      table.c.postnum.asc())
        thread_ct = session.execute(sql).fetchone()[0]

        total = int(thread_ct + max_res - 1) / max_res
        offset = self.page * max_res

        (pages, prevpage, nextpage) \
            = board.get_board_page_data(self.page, total,
                                        admin_page='postbackups')

        last_page = len(pages) - 1
        if self.page > last_page and last_page > 0:
            self.page = last_page

        sql = table.select().where(sqlcond)\
            .order_by(table.c.timestampofarchival.desc(),
                      table.c.num.asc())\
            .limit(board.options['IMAGES_PER_PAGE'])\
            .offset(offset)

        threads = [{'posts': [dict(x.items())]}
                   for x in session.execute(sql)]

        # Loop through 'posts' key in each dictionary in the threads list.
        for item in threads:
            thread = item['posts']
            threadnum = thread[0]['postnum']
            postcount = imgcount = shownimages = 0

            # Orphaned?
            item['standalone'] = 0

            if not thread[0]['parent']:
                sql = select([func.count(), func.count(table.c.image)],
                             table.c.parent == threadnum,
                             table)
                (postcount, imgcount) = session.execute(sql).fetchone()

                max_res = board.options['REPLIES_PER_THREAD']
                offset = postcount - imgcount if postcount > max_res \
                    else 0

                sql = table.select().where(table.c.parent == threadnum)\
                    .order_by(table.c.timestampofarchival.desc(),
                              table.c.postnum.asc())\
                    .limit(max_res)\
                    .offset(offset)
                thread.extend([dict(x.items())
                               for x in session.execute(sql)])
            else:
                item['standalone'] = 1

            for post in thread:
                image_dir \
                    = os.path.join(board.path, board.options['IMG_DIR'])
                thumb_dir \
                    = os.path.join(board.path, board.options['THUMB_DIR'])

                base_thumb = os.path.basename(post['thumbnail'] or '')
                base_image = os.path.basename(post['image'] or '')
                base_filename = (post['image'] or '')\
                    .replace(image_dir, '').lstrip('/')

                backup_dir = os.path.join(board.url,
                                          board.options['ARCHIVE_DIR'],
                                          board.options['BACKUP_DIR'])
                if post['image']:
                    post['image'] = os.path.join(backup_dir, base_image)
                    shownimages += 1
                if re.match(board.options['THUMB_DIR'],
                            post['thumbnail'] or ''):
                    post['thumbnail'] \
                        = os.path.join(backup_dir, base_thumb)

            item['omit'] = postcount - max_res if postcount > max_res \
                else 0
            item['omitimages'] = imgcount - shownimages \
                if imgcount > shownimages else 0

        template_kwargs = {
            'postform': board.options['ALLOW_TEXTONLY']
                or board.options['ALLOW_IMAGES'],
            'image_inp': board.options['ALLOW_IMAGES'],
            'textonly_inp': board.options['ALLOW_IMAGES']
                and board.options['ALLOW_TEXTONLY'],
            'nextpage': nextpage,
            'prevpage': prevpage,
            'threads': threads,
            'pages': pages}

    Template.__init__(self, 'backup_panel_template', **template_kwargs)
def in_group(cls, group):  # pylint: disable=E0213
    if group.membership_type == GroupMembershipType.inverted:
        return not_(cls.excluded_group_user_assocs.any(
            ExcludedUserGroup.group == group))
    else:
        return cls.group_user_assocs.any(UserGroup.group == group)
def browse(self, id=None):
    """Generates page for browsing Forms as dictionary entries.

    Id variable (regex '[0-9]+_(ol|ml)') encodes both the index of the
    first letter of the words being browsed and the language (object or
    meta-) being browsed.  A first letter index of 1000000 means browse
    everything.
    """
    # Get OL orthography as an HTML table of links to browse actions
    OLOrthography = app_globals.defaultOutputOrthography[1]
    OLOrthographyAsList = OLOrthography.orthographyAsList
    OLOrthographyX = [
        '<a href="%s" %s>%s</a>' % (
            url(controller='dictionary', action='browse',
                id=str(OLOrthographyAsList.index(x)) + '_ol', anchor='hl'),
            'title="browse by %s character \'%s\'"' % (
                app_globals.objectLanguageName,
                h.storageToOutputTranslate(x[0])),
            h.storageToOutputTranslate(x[0]))
        for x in OLOrthographyAsList
    ]
    c.OLOrthographyTable = h.literal(
        h.tablify(OLOrthographyX, 14, 'orthographyAsLinks'))

    # Get ML orthography as an HTML table of links to browse actions
    MLOrthography = app_globals.metaLanguageOrthography
    MLOrthographyAsList = MLOrthography.orthographyAsList
    MLOrthographyX = [
        '<a href="%s" %s>%s</a>' % (
            url(controller='dictionary', action='browse',
                id=str(MLOrthographyAsList.index(x)) + '_ml', anchor='hl'),
            'title="browse by %s character \'%s\'"' % (
                app_globals.metaLanguageName, x[0]),
            x[0])
        for x in MLOrthographyAsList
    ]
    c.MLOrthographyTable = h.literal(
        h.tablify(MLOrthographyX, 14, 'orthographyAsLinks'))

    # If there is a valid first-letter index,
    # build a query and return the appropriate variables.
    patt = re.compile('^[0-9]+_(ol|ml)$')
    if id and patt.search(id):
        headCharIndex = id.split('_')[0]
        c.languageToSortBy = id.split('_')[1]
        langToOrth = {
            'ol': [app_globals.storageOrthography[1],
                   OLOrthographyAsList, 'transcription'],
            'ml': [MLOrthography, MLOrthographyAsList, 'gloss']
        }
        orthography = langToOrth[c.languageToSortBy][0]
        orthographyAsList = langToOrth[c.languageToSortBy][1]
        try:
            c.headChar = orthographyAsList[int(headCharIndex)]
        except IndexError:
            c.headChar = None

        wordList_q = meta.Session.query(model.Form)
        wordList_q = wordList_q.filter(
            not_(model.Form.transcription.like(u'% %')))

        # The default case
        # Non-empty headChar means a letter was clicked on
        if id and c.headChar:
            # filter and sort wordList for object-language-to-metalanguage view
            if c.languageToSortBy == 'ol':
                wordList_q = wordList_q.filter(
                    model.Form.transcription.op('regexp')(
                        '^(%s)' % '|'.join(c.headChar)))
                # existence of supergraphs means we have to filter the query
                # of Forms whose transcription/gloss begins with a supergraph
                superGraphs = getSuperGraphs(c.headChar, OLOrthographyAsList)
                if superGraphs:
                    wordList_q = wordList_q.filter(
                        not_(model.Form.transcription.op('regexp')(
                            '^(%s)' % '|'.join(superGraphs))))
                # sort wordList using functions.CustomSorter class
                c.wordList = wordList_q.all()
                if c.wordList:
                    cs = h.CustomSorter(orthography)
                    c.wordList = cs.sort(c.wordList)
            # filter and sort wordList for metalanguage-to-object-language view
            elif c.languageToSortBy == 'ml':
                wordList_q = wordList_q.outerjoin(model.Form.glosses)
                wordList_q = wordList_q.filter(
                    model.Gloss.gloss.op('regexp')(
                        '^(%s)' % '|'.join(c.headChar)))
                # existence of supergraphs means we have to filter the query
                # of Forms whose transcription/gloss begins with a supergraph
                superGraphs = getSuperGraphs(c.headChar, MLOrthographyAsList)
                if superGraphs:
                    wordList_q = wordList_q.filter(
                        not_(model.Gloss.gloss.op('regexp')(
                            '^(%s)' % '|'.join(superGraphs))))
                wordList = wordList_q.all()
                if wordList:
                    patt = re.compile('^%s' % c.headChar)
                    newWordList = []
                    for form in wordList:
                        goodGlosses = [gloss.gloss for gloss in form.glosses
                                       if patt.match(gloss.gloss)]
                        for gg in goodGlosses:
                            newForm = ThinForm(form.id, gg,
                                               form.transcription,
                                               form.keywords)
                            newWordList.append(newForm)
                    cs = h.CustomSorter(orthography)
                    c.wordList = cs.sort(newWordList)
        # The special case
        # id of a million means we are browsing everything!
        elif id and int(headCharIndex) == 1000000:
            wordList = wordList_q.all()
            if c.languageToSortBy == 'ml':
                newWordList = []
                for form in wordList:
                    goodGlosses = [gloss.gloss for gloss in form.glosses]
                    for gg in goodGlosses:
                        newForm = ThinForm(form.id, gg, form.transcription,
                                           form.keywords)
                        newWordList.append(newForm)
                cs = h.CustomSorter(orthography)
                c.wordList = cs.sort(newWordList)
            else:
                cs = h.CustomSorter(orthography)
                c.wordList = cs.sort(wordList)

    return render('/derived/dictionary/browse.html')
def build_expression(self, table):
    return not_(self.filter.build_expression(table))
def test_insert_tables(engine_testaccount):
    """
    Inserts data into tables
    """
    metadata = MetaData()
    users, addresses = _create_users_addresses_tables(
        engine_testaccount, metadata)

    conn = engine_testaccount.connect()
    try:
        # inserts data with an implicitly generated id
        ins = users.insert().values(name='jack', fullname='Jack Jones')
        results = engine_testaccount.execute(ins)
        # Note: SQLAlchemy 1.4 changed what ``inserted_primary_key`` returns
        # a cast is here to make sure the test works with both older and newer
        # versions
        assert list(results.inserted_primary_key) == [1, ], 'sequence value'
        results.close()

        # inserts data with the given id
        ins = users.insert()
        conn.execute(ins, id=2, name='wendy', fullname='Wendy Williams')

        # verify the results
        s = select([users])
        results = conn.execute(s)
        assert len([row for row in results]) == 2, \
            'number of rows from users table'
        results.close()

        # fetchone
        s = select([users]).order_by('id')
        results = conn.execute(s)
        row = results.fetchone()
        results.close()
        assert row[2] == 'Jack Jones', 'user name'
        assert row['fullname'] == 'Jack Jones', "user name by dict"
        assert row[users.c.fullname] == 'Jack Jones', \
            'user name by Column object'

        conn.execute(addresses.insert(), [
            {'user_id': 1, 'email_address': '*****@*****.**'},
            {'user_id': 1, 'email_address': '*****@*****.**'},
            {'user_id': 2, 'email_address': '*****@*****.**'},
            {'user_id': 2, 'email_address': '*****@*****.**'},
        ])

        # more records
        s = select([addresses])
        results = conn.execute(s)
        assert len([row for row in results]) == 4, \
            'number of rows from addresses table'
        results.close()

        # select specified column names
        s = select([users.c.name, users.c.fullname]).order_by('name')
        results = conn.execute(s)
        results.fetchone()
        row = results.fetchone()
        assert row['name'] == 'wendy', 'name'

        # join
        s = select([users, addresses]).where(users.c.id == addresses.c.user_id)
        results = conn.execute(s)
        results.fetchone()
        results.fetchone()
        results.fetchone()
        row = results.fetchone()
        assert row['email_address'] == '*****@*****.**', 'email address'

        # Operator
        assert str(users.c.id == addresses.c.user_id) == \
            'users.id = addresses.user_id', 'equal operator'
        assert str(users.c.id == 7) == 'users.id = :id_1', \
            'equal to a static number'
        assert str(users.c.name == None)  # NOQA
        assert str(users.c.id + addresses.c.id) == 'users.id + addresses.id', \
            'number + number'
        assert str(users.c.name + users.c.fullname) == \
            'users.name || users.fullname', 'str + str'

        # Conjunctions
        # example 1
        obj = and_(
            users.c.name.like('j%'),
            users.c.id == addresses.c.user_id,
            or_(
                addresses.c.email_address == '*****@*****.**',
                addresses.c.email_address == '*****@*****.**'
            ),
            not_(users.c.id > 5)
        )
        expected_sql = """users.name LIKE :name_1
 AND users.id = addresses.user_id
 AND (addresses.email_address = :email_address_1
 OR addresses.email_address = :email_address_2)
 AND users.id <= :id_1"""
        assert str(obj) == ''.join(expected_sql.split('\n')), \
            "complex condition"

        # example 2
        obj = users.c.name.like('j%') & (users.c.id == addresses.c.user_id) & \
            (
                (addresses.c.email_address == '*****@*****.**') |
                (addresses.c.email_address == '*****@*****.**')
            ) \
            & ~(users.c.id > 5)
        assert str(obj) == ''.join(expected_sql.split('\n')), \
            "complex condition using python operators"

        # example 3
        s = select([(users.c.fullname +
                     ", " + addresses.c.email_address).label('title')]). \
            where(
                and_(
                    users.c.id == addresses.c.user_id,
                    users.c.name.between('m', 'z'),
                    or_(
                        addresses.c.email_address.like('*****@*****.**'),
                        addresses.c.email_address.like('*****@*****.**')
                    )
                )
            )
        results = engine_testaccount.execute(s).fetchall()
        assert results[0][0] == 'Wendy Williams, [email protected]'

        # Aliases
        a1 = addresses.alias()
        a2 = addresses.alias()
        s = select([users]).where(and_(
            users.c.id == a1.c.user_id,
            users.c.id == a2.c.user_id,
            a1.c.email_address == '*****@*****.**',
            a2.c.email_address == '*****@*****.**'))
        results = engine_testaccount.execute(s).fetchone()
        assert results == (1, 'jack', 'Jack Jones')

        # Joins
        assert str(users.join(addresses)) == 'users JOIN addresses ON ' \
                                             'users.id = addresses.user_id'
        assert str(users.join(
            addresses,
            addresses.c.email_address.like(users.c.name + '%'))) == \
            'users JOIN addresses ' \
            'ON addresses.email_address LIKE users.name || :name_1'

        s = select([users.c.fullname]).select_from(
            users.join(addresses,
                       addresses.c.email_address.like(users.c.name + '%')))
        results = engine_testaccount.execute(s).fetchall()
        assert results[1] == ('Jack Jones',)

        s = select([users.c.fullname]).select_from(
            users.outerjoin(addresses)).order_by(users.c.fullname)
        results = engine_testaccount.execute(s).fetchall()
        assert results[-1] == ('Wendy Williams',)
    finally:
        conn.close()
        # drop tables
        addresses.drop(engine_testaccount)
        users.drop(engine_testaccount)
def upgrade():
    metadata = sa.MetaData()
    history_entry = sa.Table(
        'history_entry', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('user_id', sa.Integer, nullable=False),
        sa.Column('app_id', sa.Integer, nullable=False),
        sa.Column('name', sa.String(1024), nullable=False),
        sa.Column('last_played', sa.DateTime, nullable=True),
        sa.Column('total_hours', sa.Float, nullable=False),
        sa.Column('session_id', sa.INTEGER))

    connection = op.get_bind()

    to_keep = select([
        history_entry.c.user_id,
        history_entry.c.app_id,
        history_entry.c.last_played,
        history_entry.c.total_hours,
        sa.func.max(history_entry.c.session_id).label('most_recent_session')
    ]).select_from(
        history_entry
    ).group_by(
        history_entry.c.user_id,
        history_entry.c.app_id,
        history_entry.c.last_played,
        history_entry.c.total_hours
    ).alias('to_keep')

    to_delete = history_entry

    full_query = delete(to_delete).\
        where(not_(exists(
            select([to_keep.c.user_id]).select_from(
                to_keep
            ).where(
                and_(
                    to_keep.c.user_id == to_delete.c.user_id,
                    to_keep.c.app_id == to_delete.c.app_id,
                    to_keep.c.last_played == to_delete.c.last_played,
                    to_keep.c.total_hours == to_delete.c.total_hours,
                    to_keep.c.most_recent_session == to_delete.c.session_id
                )
            )
        )))

    connection.execute(full_query)

    # Now clean up some duplicates within each session, from before the
    # update_session table was introduced.
    to_keep = select([
        history_entry.c.user_id,
        history_entry.c.app_id,
        history_entry.c.last_played,
        history_entry.c.total_hours,
        history_entry.c.session_id,
        sa.func.max(history_entry.c.id).label('max_id')
    ]).select_from(
        history_entry
    ).group_by(
        history_entry.c.user_id,
        history_entry.c.app_id,
        history_entry.c.last_played,
        history_entry.c.total_hours,
        history_entry.c.session_id
    ).alias('to_keep')

    full_query = delete(to_delete).\
        where(not_(exists(
            select([to_keep.c.user_id]).select_from(
                to_keep
            ).where(
                and_(
                    to_keep.c.user_id == to_delete.c.user_id,
                    to_keep.c.app_id == to_delete.c.app_id,
                    to_keep.c.last_played == to_delete.c.last_played,
                    to_keep.c.total_hours == to_delete.c.total_hours,
                    to_keep.c.session_id == to_delete.c.session_id,
                    to_keep.c.max_id == to_delete.c.id
                )
            )
        )))

    connection.execute(full_query)
SKIPPED_PARENTS = sql.exists().where(
    sql.and_(
        parent.session_id == SurveyTreeItem.session_id,
        SurveyTreeItem.depth > parent.depth,
        SurveyTreeItem.path.like(parent.path + "%"),
        parent.skip_children == True,  # noqa: E712
    ))
del parent

child_node = orm.aliased(SurveyTreeItem)

NO_CUSTOM_RISKS_FILTER = sql.not_(
    sql.and_(
        SurveyTreeItem.type == "risk",
        sql.exists(
            sql.select([Risk.sql_risk_id]).where(
                sql.and_(
                    Risk.sql_risk_id == SurveyTreeItem.id,
                    Risk.is_custom_risk == True,  # noqa: E712
                ))),
    ))

RISK_OR_MODULE_WITH_DESCRIPTION_FILTER = sql.or_(
    SurveyTreeItem.type != "module", SurveyTreeItem.has_description)

# Used by tno.euphorie
MODULE_WITH_RISK_FILTER = sql.and_(
    SurveyTreeItem.type == "module",
    SurveyTreeItem.skip_children == False,  # noqa: E712
    sql.exists(
        sql.select([child_node.id]).where(
            sql.and_(
def filterSearchQuery(searchValuesDict, query, table):
    """This function takes an SQLAlchemy ORM query object and applies filters
    to it using the keys and values from the searchValuesDict.  The newly
    filtered query is returned.
    """
    searchTerm1 = escapeUnderscores(
        removeWhiteSpace(searchValuesDict['searchTerm1']))
    searchType1 = searchValuesDict['searchType1']
    searchLocation1 = searchValuesDict['searchLocation1']
    searchTerm2 = escapeUnderscores(
        removeWhiteSpace(searchValuesDict['searchTerm2']))
    searchType2 = searchValuesDict['searchType2']
    searchLocation2 = searchValuesDict['searchLocation2']

    # Translate the search terms into the storage orthography, if necessary
    searchTerm1 = orthoTranslateSearchTerm(searchTerm1, searchLocation1)
    searchTerm2 = orthoTranslateSearchTerm(searchTerm2, searchLocation2)

    andOrNot = searchValuesDict['andOrNot']
    restrictors = searchValuesDict['restrictors']
    dateRestrictors = searchValuesDict['dateRestrictors']
    if 'integerRestrictors' in searchValuesDict:
        integerRestrictors = searchValuesDict['integerRestrictors']
    else:
        integerRestrictors = []
    if 'emptyRestrictors' in searchValuesDict:
        emptyRestrictors = searchValuesDict['emptyRestrictors']
    else:
        emptyRestrictors = []
    orderByColumn = searchValuesDict['orderByColumn']
    orderByDirection = searchValuesDict['orderByDirection']

    # Modify the query object by adding a left outer join to the keyword and
    # formkeyword tables if appropriate.
    kwRestrictors = [r for r in restrictors if r['location'] == 'keywords']
    if sum([len(r['options']) for r in kwRestrictors]):
        query = query.outerjoin(model.formkeyword_table, model.Keyword)

    # Modify the query object by adding a left outer join to the Gloss table
    # if appropriate.
    ggRestrictors = [r for r in restrictors
                     if r['location'] == 'glossGrammaticality']
    if sum([len(r['options']) for r in ggRestrictors]) or \
            (searchTerm1 and searchLocation1 == 'gloss') or \
            (searchTerm2 and searchLocation2 == 'gloss'):
        query = query.outerjoin(model.Form.glosses)

    # Get the filter condition of the first search statement
    filterCondition1 = getFilterCondition(searchTerm1, searchType1,
                                          searchLocation1, table)

    # Get the filter condition by coordinating the filter conditions of the
    # two search statements, if there is a second such statement
    if searchTerm2:
        filterCondition2 = getFilterCondition(searchTerm2, searchType2,
                                              searchLocation2, table)
        if andOrNot == 'and_':
            filterCondition = and_(filterCondition1, filterCondition2)
        elif andOrNot == 'or_':
            filterCondition = or_(filterCondition1, filterCondition2)
        else:
            filterCondition = and_(filterCondition1, not_(filterCondition2))
    else:
        filterCondition = filterCondition1

    query = query.filter(filterCondition)

    # General restrictors
    for restrictor in restrictors:
        if restrictor['options']:
            query = filterQueryByRestrictor(query, restrictor, table)

    # Date restrictors
    for dateRestrictor in dateRestrictors:
        if dateRestrictor['date']:
            query = filterQueryByDateRestrictor(query, dateRestrictor, table)

    # Integer restrictors
    for integerRestrictor in integerRestrictors:
        if integerRestrictor['integer']:
            query = filterQueryByIntegerRestrictor(query, integerRestrictor,
                                                   table)

    # Empty restrictors
    for emptyRestrictor in emptyRestrictors:
        if emptyRestrictor['relation']:
            query = filterQueryByEmptyRestrictor(query, emptyRestrictor, table)

    # Order by
    query = orderQuery(query, orderByColumn, orderByDirection, table)

    return query
def test_if_cluster_creates_correct_networks(self):
    release = Release()
    release.version = "1111-6.0"
    release.name = u"release_name_" + str(release.version)
    release.description = u"release_desc" + str(release.version)
    release.operating_system = consts.RELEASE_OS.ubuntu
    release.state = consts.RELEASE_STATES.available
    release.networks_metadata = self.env.get_default_networks_metadata()
    release.attributes_metadata = {
        "editable": {
            "keystone": {
                "admin_tenant": "admin"
            }
        },
        "generated": {
            "mysql": {
                "root_password": ""
            }
        }
    }
    self.db.add(release)
    self.db.commit()

    resp = self.app.post(
        reverse('ClusterCollectionHandler'),
        jsonutils.dumps({
            'name': 'cluster-name',
            'release': release.id,
            'net_provider': consts.CLUSTER_NET_PROVIDERS.nova_network,
        }),
        headers=self.default_headers
    )
    self.assertEqual(201, resp.status_code)
    nets = self.db.query(NetworkGroup).filter(
        not_(NetworkGroup.name == "fuelweb_admin")
    ).all()

    obtained = []
    for net in nets:
        obtained.append({
            'release': net.release,
            'name': net.name,
            'vlan_id': net.vlan_start,
            'cidr': net.cidr,
            'gateway': net.gateway
        })
    expected = [
        {
            'release': release.id,
            'name': u'public',
            'vlan_id': None,
            'cidr': '172.16.0.0/24',
            'gateway': '172.16.0.1'
        },
        {
            'release': release.id,
            'name': u'fixed',
            'vlan_id': None,
            'cidr': None,
            'gateway': None
        },
        {
            'release': release.id,
            'name': u'storage',
            'vlan_id': 102,
            'cidr': '192.168.1.0/24',
            'gateway': None
        },
        {
            'release': release.id,
            'name': u'management',
            'vlan_id': 101,
            'cidr': '192.168.0.0/24',
            'gateway': None
        }
    ]
    self.assertItemsEqual(expected, obtained)
def authored_by_contacts_and_public(cls, contact_ids):
    return and_(Share.contact_id.in_(contact_ids),
                not_(Share.hidden),
                Share.public)
def select_player_profiles_whitelisted() -> Select: return select(PlayerProfile).join(User).where(not_(PlayerProfile.banned))
def select_player_profiles_by_did_whitelisted(did: int) -> Select:
    # Python's `and` would short-circuit here and silently drop the first
    # condition from the emitted SQL; the conjunction must be built with
    # and_() (or two separate where() calls).
    return select(PlayerProfile).join(User).where(
        and_(User.did == did, not_(PlayerProfile.banned)))
def _sanitize_schedule_path():
    valid_schedules = sql.select([schedule.c.id])
    query = (schedule_path.delete().where(
        sql.not_(schedule_path.c.schedule_id.in_(valid_schedules))))
    op.get_bind().execute(query)
def public_wall_for_contact(cls, contact):
    return and_(Share.contact_id == contact.id,
                Share.public,
                not_(Share.hidden),
                Post.parent_id == None)