def __init__(self, *args, **kwargs):
    """Build the university/session filter choices, restricted to the
    sessions the given user may access (staff see everything).

    Expects ``user`` in ``kwargs``; it is popped before the parent init.
    """
    user = kwargs.pop('user')
    super(FilterForm, self).__init__(*args, **kwargs)
    univs = University.objects.order_by('title')
    sessions = CourseSession.objects.order_by(
        'course__university__title', 'course__title', 'slug')
    if not user.is_staff:
        filter_sessions, filter_universities = [], []
        # Each access tuple is (university_slug, course_slug, session_slug).
        for course_key in user.get_access_course_tuples():
            filter_sessions.append(Q(
                course__university__slug=course_key[0],
                course__slug=course_key[1],
                slug=course_key[2],
            ))
            q = Q(slug=course_key[0])
            if q not in filter_universities:
                filter_universities.append(q)
        # BUG FIX: reduce() was seeded with an (empty) QuerySet, which
        # cannot be OR-ed with Q objects and broke .filter().
        # Q(pk__in=[]) is an always-false Q — the correct identity for OR,
        # yielding empty choices when the user has no access tuples.
        filter_universities = reduce(or_, filter_universities, Q(pk__in=[]))
        filter_sessions = reduce(or_, filter_sessions, Q(pk__in=[]))
        univs = univs.filter(filter_universities)
        sessions = sessions.filter(filter_sessions)
    self.UNIV_CHOICES = [('', _(u'Все'))] + list(
        univs.values_list('slug', 'title'))
    self.SESSION_CHOICES = [('', _(u'Все'))] + [
        (i[0], u'%(univ)s %(course)s %(session)s' % {
            'univ': i[1], 'course': i[2], 'session': i[3]})
        for i in sessions.values_list(
            'id', 'course__university__title', 'course__title', 'slug')
    ]
    self.fields['university'].choices = self.UNIV_CHOICES
    self.fields['session'].choices = self.SESSION_CHOICES
def sync_exceeded_traffic_limits():
    """Adds and removes memberships of the 'traffic_limit_exceeded group.'

    Users with a negative current traffic balance are added to the group
    unless they are exempt (``traffic_limit_disabled``) or already
    flagged; flagged users whose balance is non-negative again (or who
    became exempt) are removed.
    """
    # NOTE(review): assumes user id 0 is the system account recorded as
    # the author of the membership changes — confirm.
    processor = User.q.get(0)
    # Add memberships: negative balance, minus (EXCEPT) users who are
    # exempt or already carry the 'exceeded' property.
    users = User.q.join(User._current_traffic_balance) \
        .filter(CurrentTrafficBalance.amount < 0) \
        .except_(User.q.join(User.current_properties)
                 .filter(or_(CurrentProperty.property_name == 'traffic_limit_disabled',
                             CurrentProperty.property_name == 'traffic_limit_exceeded'))) \
        .all()
    for user in users:
        # Open-ended membership interval starting now.
        make_member_of(user, config.traffic_limit_exceeded_group, processor,
                       closed(session.utcnow(), None))
        print("Traffic-Limit exceeded: " + user.name)
    # End memberships: currently flagged users whose balance recovered or
    # who are now exempt.
    users = User.q.join(User.current_properties) \
        .filter(CurrentProperty.property_name == 'traffic_limit_exceeded') \
        .join(User._current_traffic_balance) \
        .filter(or_(CurrentTrafficBalance.amount >= 0,
                    CurrentProperty.property_name == 'traffic_limit_disabled')) \
        .all()
    for user in users:
        remove_member_of(user, config.traffic_limit_exceeded_group, processor,
                         closed(session.utcnow(), None))
        print("Traffic-Limit no longer exceeded: " + user.name)
def get_queryset(self):
    """Ratings published on the requested date, limited to sessions the
    teacher named in the URL has access to; 404 when they have none."""
    requested_date = self.get_requested_date()
    teacher = get_object_or_404(User, username=self.kwargs['username'])
    # One Q per (university, course, session) access tuple.
    access_filters = [
        Q(
            course__university__slug=univ_slug,
            course__slug=course_slug,
            slug=session_slug,
        )
        for univ_slug, course_slug, session_slug
        in teacher.get_access_course_tuples()
    ]
    if not access_filters:
        raise Http404
    combined = reduce(lambda x, y: or_(x, y), access_filters)
    session_ids = CourseSession.objects.filter(combined).values_list(
        'id', flat=True)
    day_start = timezone.datetime.combine(
        requested_date, timezone.datetime.min.time())
    day_end = timezone.datetime.combine(
        requested_date, timezone.datetime.max.time())
    queryset = super(TeacherCoursesRatingByDateView, self).get_queryset()
    return queryset.filter(
        declined=False,
        status='published',
        session__id__in=session_ids,
        session__rating_enabled=True,
        updated_at__gte=day_start,
        updated_at__lte=day_end,
    )
def validate_ordinal_id_suffix(number, suffix, lineno=None):
    '''Check if the given number and suffix can be combined to a
    *senseful* ordinal number.

    Examples: `1th` is not a senseful ordinal identifier, so
    ``validate_ordinal_id_suffix('1', 'th')`` raises, whereas `2nd` is a
    senseful ordinal identifier, so
    ``validate_ordinal_id_suffix('2', 'nd')`` returns ``None``.

    Keep in mind that if you use these parameters to call the function
    ``validate_ordinal_identifier``, it will return ``True`` for both
    versions.

    Raises ``syntax_errors.NonMatchingSuffixError`` on a mismatch.
    '''
    num = int(number)
    # English ordinal rule: 11, 12, 13 — and 111, 212, ... — always take
    # 'th'; otherwise the suffix follows the last digit.  The previous
    # implementation only excluded exactly 11/12/13, so e.g. '111st' was
    # accepted and '111th' rejected; compare modulo 100 instead.
    is_teen = num % 100 in (11, 12, 13)
    last_digit = number[-1]
    if suffix == 'st':
        if is_teen or last_digit != '1':
            raise syntax_errors.NonMatchingSuffixError(num, suffix)
    elif suffix == 'nd':
        if is_teen or last_digit != '2':
            raise syntax_errors.NonMatchingSuffixError(num, suffix)
    elif suffix == 'rd':
        if is_teen or last_digit != '3':
            raise syntax_errors.NonMatchingSuffixError(num, suffix)
    else:
        assert suffix == 'th'
        if not is_teen and last_digit in ('1', '2', '3'):
            raise syntax_errors.NonMatchingSuffixError(num, suffix)
def build_condition_list(self, materialVariable): self.condition_list = [] #empty the condition list dm = 1e-6 for node in self.nodes(): #Loop through nodes of graph for otherNode in self[node].keys(): #loop through all egdes from a given node #if node < otherNode: #this returns true for all particles with materialIndex == node (direct comparison isn't supported) checkFrom = operator.and_((materialVariable > (node - dm) ), (materialVariable < (node + dm) )) condIt = 0 for cond in self[node][otherNode].keys(): #loop through all conditions attached to the graph edge op = self[node][otherNode][cond]['operator'] # fun = self[node][otherNode][cond]['function'] #{extract function, operator, value} val = self[node][otherNode][cond]['value'] # condExp = op(fun, val) #Now provide the function & value to the operator, return result as a variable if condIt == 0: totCond = condExp #if this is the first condition, assign to totCond else: #if this is the NOT first condition, combine conditin with previous totCond (using AND or OR) if self[node][otherNode].values()[0]['combineby'] == 'or': totCond = operator.or_(totCond, condExp) else: totCond = operator.and_(totCond, condExp) condIt += 1 #When we pass this on to fn.branching.conditional, we only want to apply it to paticles where # matIndex == node, which occurs where checkFrom == True, 1 combCond = operator.and_(totCond, checkFrom) #combCond = totCond self.condition_list.append(((combCond), otherNode)) self.condition_list.append((True , materialVariable)) #if no conditions are true, return current matId
def buildSWField(self, pSheet, pRow):
    # Looks through the row given in pRow and constructs a
    # XLSToSWField object that contains the data, returns the field record.
    # Raises XLSToSWExceptions.FieldNotMapped for rows marked unmapped.
    lUsed = pSheet.cell_value(pRow, SSHEETCOLUMN_MAPFIELD_P)
    # Rows explicitly marked 'no' / 'no-temporary' are rejected up front.
    if operator.or_(lUsed.lower() == 'no', lUsed.lower() == 'no-temporary'):
        raise XLSToSWExceptions.FieldNotMapped(
            repr(pSheet) + ':' + repr(pRow))
    lFieldDefaultValue = ''
    # Pull every column of interest out of the spreadsheet row.
    lClassName = pSheet.cell_value(pRow, SSHEETCOLUMN_PNITABLENAME).strip()
    lFieldName = pSheet.cell_value(pRow, SSHEETCOLUMN_PNIATTRIBUTENAME).strip()
    lFieldExternalName = pSheet.cell_value(
        pRow, SSHEETCOLUMN_PNITABLEEXTERNALNAME).strip()
    lFieldType = pSheet.cell_value(pRow, SSHEETCOLUMN_PNIATTRIBUTETYPE).strip()
    lFieldDefaultValue = pSheet.cell_value(
        pRow, SSHEETCOLUMN_PNIATTRIBUTEDEFAULTVALUE)
    lFieldLength = pSheet.cell_value(pRow, SSHEETCOLUMN_PNIATTRIBUTELENGTH)
    lFieldPriority = pSheet.cell_value(pRow, SSHEETCOLUMN_PNIATTRIBUTEPRIORITY)
    lFieldText = self.buildSWFieldComment(pSheet, pRow)
    lFieldFromTable = pSheet.cell_value(
        pRow, SSHEETCOLUMN_FOREIGNTABLENAME).strip()
    lFieldFromField = pSheet.cell_value(
        pRow, SSHEETCOLUMN_FOREIGNATTRIBUTENAME).strip()
    lFeaturePoint = pSheet.cell_value(
        pRow, SSHEETCOLUMN_FEATUREPOINTDESCRIPTION)
    # Optionally trace rows that document a feature point.
    if operator.and_(lFeaturePoint != "", self.s_show_features_p == True):
        print("----------Feature " + repr(lFeaturePoint))
    lField = XLSToSWField.XLSToSWField(lClassName, lFieldName, lFieldType)
    lField.s_field_external_name = lFieldExternalName
    # Optional attributes are only set when the cell is non-empty.
    if lFieldLength != '':
        lField.s_field_length = lFieldLength
    if lFieldPriority != '':
        lField.s_field_priority = lFieldPriority
    if lFieldText != '':
        lField.s_field_comment = lFieldText
    if lFieldDefaultValue != '':
        lField.s_field_default_value = lFieldDefaultValue
    if lField.fieldType().lower() == "join":
        # Join fields carry extra join metadata.
        lField.s_field_join_type = pSheet.cell_value(
            pRow, SSHEETCOLUMN_PNIJOINTYPE)
        lField.s_field_join_to = pSheet.cell_value(pRow, SSHEETCOLUMN_PNIJOINTO)
        print("is a valid join " + repr(lField.isValidJoin()))
        if lField.isValidJoin() == False:
            print("found an invalid join ")
    if lFieldFromTable != '':
        lField.s_field_from_table = lFieldFromTable
    if lFieldFromField != '':
        lField.s_field_from_field = lFieldFromField
    lField.showMe()
    return lField
def get_all_data(group, discipline):
    """Collect everything needed to render a gradebook for *group* in
    *discipline*: its lessons, students (with marks eagerly loaded),
    the discipline's labs, and a per-student points/percents summary."""
    lessons = (
        Lesson.query.filter(Lesson.group_id == group.id)
        .filter(Lesson.discipline_id == discipline.id)
        .order_by(Lesson.date, Lesson.id)
        .all()
    )
    # Outer join keeps students without marks; the Mark.lesson_id == None
    # disjunct lets those rows survive the lesson filter.
    students = (
        group.students.outerjoin(Student.marks)
        .options(contains_eager(Student.marks))
        .filter(or_(Mark.lesson_id == None,
                    Mark.lesson_id.in_([i.id for i in lessons])))
        .all()
    )
    labs = discipline.labs
    students_info = {}
    for student in students:
        student_info = {"marks": {}, "tasks": {}, "points": 0, "percents": 0}
        students_info[student.id] = student_info
        # Index this student's marks by lesson id.
        for mark in student.marks:
            student_info["marks"][mark.lesson_id] = mark
        # NOTE(review): "tasks" is never populated here, so its length is
        # always 0 — confirm Student.points expects that, or that a
        # caller fills it in.
        student_info["points"], student_info["percents"] = student.points(
            student_info["marks"],
            lessons,
            sum([len(lab.tasks) for lab in labs if lab.regular and lab.visible]),
            len(student_info["tasks"]),
        )
    return {"students": students, "lessons": lessons, "labs": labs,
            "students_info": students_info}
def piecewise_vec(x, data):
    """Vectorised piecewise-constant lookup.

    ``data`` is a sequence of ``(value, lower_limit)`` pairs ordered by
    limit; every entry of ``x`` in ``[data[i][1], data[i+1][1])`` maps to
    ``data[i][0]``.  Values equal to the largest limit fall into the last
    interval; values outside all intervals stay 0.
    """
    r = np.zeros(len(x))
    # BUG FIX: ``xrange`` is Python-2 only; ``range`` behaves identically
    # here on both versions.
    for i in range(len(data) - 1):
        cond = operator.and_(data[i][1] <= x, x < data[i + 1][1])
        r[cond] = data[i][0]
    # Catch values equal to the largest upper limit.  Previously this was
    # OR-ed into *every* iteration's condition, assigning wrong transient
    # values that only the final iteration overwrote; hoist it out.
    r[x == data[-1][1]] = data[-2][0]
    return r
def piecewise_vec(x, data):
    """Map each element of ``x`` onto a piecewise-constant function.

    ``data`` holds ``(value, lower_limit)`` pairs sorted by limit; an
    element in ``[data[i][1], data[i+1][1])`` becomes ``data[i][0]``,
    the largest limit itself belongs to the last interval, and anything
    outside every interval remains 0.
    """
    r = np.zeros(len(x))
    # BUG FIX: replaced the Python-2-only ``xrange`` with ``range``.
    for i in range(len(data) - 1):
        cond = operator.and_(data[i][1] <= x, x < data[i + 1][1])
        r[cond] = data[i][0]
    # catch values equal to the largest upper limit — hoisted out of the
    # loop, where it used to be OR-ed into every interval's condition and
    # re-assigned on each iteration.
    r[x == data[-1][1]] = data[-2][0]
    return r
def user_by_username_or_email(username_or_email):
    """
    Retrieves user by username or email.

    Returns:
        user.

    Raises:
        NoResultFound if user with this username or email doesn't exist.
    """
    needle = username_or_email.lower()
    query = dbs().query(User).filter(
        or_(User.email_lower == needle, User.username_lower == needle))
    return query.one()
def merge_option(self, colors, styles):
    """merge attributes

    :param curses color_pair object colors: color attribute from config file
    :param dict styles: style attribute from config file
    :return: ``colors`` with every enabled style flag OR-ed in
    """
    # BUG FIX: iterating a dict directly yields only its keys, so
    # ``for k, v in styles`` failed to unpack; iterate the items instead.
    for k, v in styles.items():
        if v == 'True':
            colors = or_(colors, STYLE.get(k))
    return colors
def exec_msg(self, buttonnames=None, **kwds):
    '''Execs a QtGui.QMessageBox from the given widget'''
    # Default the dialog's parent to the application's splash window.
    kwds.setdefault('parent', self.app.splashwindow)
    if buttonnames is not None:
        # Map button names (e.g. 'Ok', 'Cancel') to QMessageBox flags and
        # OR them into a single standardButtons bitmask.
        # NOTE(review): operator.or_ takes exactly two operands, so this
        # only works when buttonnames has exactly two entries — confirm
        # against the callers.
        generator = (getattr(QtGui.QMessageBox, i) for i in buttonnames)
        kwds.update(standardButtons=op.or_(*generator))
    popup = QtGui.QMessageBox(**kwds)
    # Blocks until the dialog is dismissed; returns the clicked button code.
    return popup.exec_()
def get_filter_for_user(self):
    """Build a Q object restricting feedback to the sessions the current
    user may access; staff get an empty list (meaning: no restriction).
    """
    user = self.request.user
    if user.is_staff:
        return []
    qs = []
    # Each access tuple is (university_slug, course_slug, session_slug).
    for course_key in user.get_access_course_tuples():
        qs.append(Q(
            session__course__university__slug=course_key[0],
            session__course__slug=course_key[1],
            session__slug=course_key[2],
        ))
    # BUG FIX: reduce() used to be seeded with a QuerySet
    # (CourseStudentFeedback.objects.none()), which cannot be OR-ed with
    # Q objects.  Q(pk__in=[]) is an always-false Q: the identity for OR,
    # and it correctly matches nothing when there are no access tuples.
    return reduce(lambda x, y: or_(x, y), qs, Q(pk__in=[]))
def products_from_subcategory(page):
    """Recursively collect the products of *page*'s category and of all
    its published sub-categories (OR of their filter querysets).

    Returns ``None`` when *page* has no category at all.
    """
    try:
        products = page.category.filters()
    except Category.DoesNotExist:
        return None
    for child_category in page.category.children.published():
        try:
            child_category.category  # try downcast to category model
        except Category.DoesNotExist:
            continue
        # BUG FIX: the recursive call can return None (a child page
        # without a category); OR-ing None into the queryset raised a
        # TypeError.  Skip empty subtrees instead.
        child_products = products_from_subcategory(child_category)
        if child_products is not None:
            products = operator.or_(products, child_products)
    return products
def _prepare_query(self, only_passed=False, only_planned=False, **kw):
    """Base transaction query for the current user with income/expense
    tag groups eagerly loaded; optionally restricted to planned
    (undated or future) or passed (dated in the past) transactions."""
    current_user = request.identity["user"]
    query = db.query(Transaction).options(
        subqueryload(Transaction.incomeTagGroup).subqueryload(TagGroup.tags),
        subqueryload(Transaction.expenseTagGroup).subqueryload(TagGroup.tags),
    ).with_parent(current_user)
    if only_planned:
        # Planned == not dated yet, or dated in the future.
        planned_clause = or_(Transaction.date == None,
                             Transaction.date > datetime.utcnow())
        query = query.filter(planned_clause)
    if only_passed:
        query = query.filter(Transaction.date <= datetime.utcnow())
    return query
def list_volunteers(session, volunteer_id=None):
    """Return volunteer rows with display labels.

    With *volunteer_id* given, only that volunteer is returned; when it
    is ``None`` the ``volunteer_id == None`` disjunct makes the filter a
    no-op, so all volunteers are listed.
    """
    return session.query(Volunteer.id.label("ID"),
                         Volunteer.first_name.label('firstName'),
                         Volunteer.last_name.label('lastName'),
                         Volunteer.email.label('email'),
                         Volunteer.mobile_number.label('mobileNo'),
                         Volunteer.preferred_hours.label('prefHours'),
                         Volunteer.experience_years.label('expYears'),
                         Volunteer.possibleRoles.label('possibleRoles'),
                         Volunteer.qualifications.label('qualifications'),
                         Volunteer.availabilities.label('availabilities'))\
        .filter(or_(Volunteer.id == volunteer_id, volunteer_id == None))\
        .all()
def accept(self, content_name):
    """Return True when any inner path component of *content_name*
    (segments between index 2 and the last) is flagged 'True' by the
    local filter service on port 4000."""
    try:
        name = content_name.split('/')[2:-1]
    # BUG FIX: narrowed from a bare ``except:`` which also swallowed
    # SystemExit/KeyboardInterrupt.
    except Exception:
        return False
    for component in name:
        f = urllib2.urlopen('http://127.0.0.1:4000/?name={0}'.format(
            urllib.quote_plus(component)))
        res = f.read(100)
        # Early return replaces the old or_-accumulate-and-break dance.
        if res == 'True':
            return True
    return False
def get(self, page=0, pre_page=None):
    """List approved access records; non-admin, non-OP users only see
    records they applied for or were copied on."""
    conditions = [Access.status == ACCESS.APPROVAL]
    if not (session.is_admin() or session.in_group(DEFAULT_GROUP.OP)):
        conditions.append(or_(
            Access.apply_uid == session.get_uid(),
            Access.copy_to_uid.like("%" + session.get_uid() + "%"),
        ))
    records = Access.select(page=page,
                            pre_page=pre_page,
                            _filters=conditions,
                            _orders=Access.apply_time.desc())
    return self.succ(records)
def search_info():
    """Search available houses by area, date range and sort key (POST);
    returns the matching houses as JSON."""
    if request.method == 'POST':
        area_id = request.form.get('aid')
        start_time = request.form.get('sd')
        end_time = request.form.get('ed')
        if not start_time:
            # Default the start date to today.
            start_time = date.today().strftime("%Y-%m-%d")
        sk = request.form.get('sk')
        # p = request.form.get('p')
        if end_time:
            start_date = datetime.strptime(start_time, '%Y-%m-%d')
            end_date = datetime.strptime(end_time, "%Y-%m-%d")
            differ_days = int((end_date - start_date).days)
            # Both dates given: keep houses whose allowed stay length
            # covers the requested number of days (max_days == 0 means
            # "no upper limit").
            houses_first = House.query.filter(
                or_(
                    and_(House.min_days <= differ_days,
                         House.max_days >= differ_days),
                    and_(House.min_days <= differ_days,
                         House.max_days == 0)))
        else:
            # No end date given: do not restrict by stay length.
            houses_first = House.query.filter()
        if area_id:
            # The user picked an area: restrict to it.
            houses_second = houses_first.filter(House.area_id == int(area_id))
        else:
            houses_second = houses_first
        # Exclude houses with an active order: awaiting payment, paid, or
        # awaiting acceptance.
        orders_filter = Order.query.filter(
            Order.status.in_(["WAIT_PAYMENT", "PAID", "WAIT_ACCEPT"])).all()
        # De-duplicate house ids (completed orders also exist for the
        # same houses).
        orders_filter_ids = set(
            [order_filter.house_id for order_filter in orders_filter])
        houses_third = houses_second.filter(~House.id.in_(orders_filter_ids))
        # Sorting
        if sk == 'new':
            # Newest listings first.
            houses_final = houses_third.order_by(desc('update_time'))
        elif sk == 'booking':
            # Most-booked first.
            houses_final = houses_third.order_by(desc('order_count'))
        elif sk == 'price-inc':
            # NOTE(review): label suggests ascending price but the
            # original comment said high-to-low while the code sorts
            # ascending — confirm intended direction.
            houses_final = houses_third.order_by('price')
        else:
            # Price descending.
            houses_final = houses_third.order_by(desc('price'))
        houses = houses_final.all()
        houses_info = [house.to_full_dict() for house in houses]
        return jsonify(houses_info)
def add_rule(self, name, left, right, rate_forward, rate_reverse=None,
             *args, **kwargs):
    """Register a reaction rule built from the *left* and *right* species
    lists.

    ``sum(seq[1:], seq[0])`` folds each side into one combined species
    expression (relying on the species' ``+`` overload); an empty side
    becomes ``None``.  With only a forward rate the rule is built with
    ``>>`` (irreversible); with a reverse rate it uses ``|`` (reversible).

    NOTE(review): when a side is None, operator.rshift/or_ only works if
    the other operand's reflected operator accepts None — confirm the
    species classes handle this.
    """
    if len(left) == 0:
        left = None
    else:
        left = sum(left[1:], left[0])
    if len(right) == 0:
        right = None
    else:
        right = sum(right[1:], right[0])
    if rate_reverse is None:
        # Irreversible rule: left >> right.
        rule = operator.rshift(left, right)
    else:
        # Reversible rule: left | right.
        rule = operator.or_(left, right)
    return self.__add_rule(name, rule, rate_forward, rate_reverse,
                           *args, **kwargs)
def sat_comp(x, t):
    # Split arg1 U_[lo, hi] arg2  =  G_[0, lo] arg1  and  F_[lo, hi] untimed_until
    # NOTE(review): lo, hi, f1, f2 and get_times are not parameters —
    # this reads like a closure inside an STL/until monitor; confirm the
    # enclosing scope supplies them.
    untimed_until = np.array([
        or_(bitarray.all(f1(x, get_times(x, tau, lo, hi))),
            f2(x, get_times(x, tau, hi, hi)))
        for tau in t
    ])
    # F_[lo, hi] untimed_until: eventually within the window.
    f_uu = bitarray(untimed_until[list(get_times(x, tau, lo, hi))].any()
                    for tau in t)
    # G_[0, lo] arg1: arg1 holds throughout the prefix.
    g_arg1 = bitarray(
        bitarray.all(f1(x, get_times(x, tau, lo, hi))) for tau in t)
    return and_(g_arg1, f_uu)
def extract_masked_phases(anmsname, ant2, scan_number):
    """Read corrected visibility phases (in degrees) for baseline
    0-*ant2* within *scan_number* from the CASA measurement set
    *anmsname*, masked by the flag columns.

    Returns ``(phi, times)`` where ``phi`` is a masked array of phases.
    """
    correlation = 0  # only the first correlation product is used
    tb.open(anmsname)
    t = tb.query("DATA_DESC_ID=0 AND ANTENNA1=0 AND ANTENNA2={} AND SCAN_NUMBER={}"
                 "".format(ant2, scan_number))
    data = t.getcol('CORRECTED_DATA')[correlation]
    f = t.getcol("FLAG")[correlation]
    fr = t.getcol("FLAG_ROW")
    times = t.getcol('TIME')
    # Combine per-channel FLAG with per-row FLAG_ROW.
    # NOTE(review): the original's "# ?" suggests the broadcast between f
    # and fr was never verified — confirm their shapes align as intended.
    flags = operator.or_(f, fr)  # ?
    # ma.masked_invalid()
    phi = ma.masked_array(180/np.pi*np.angle(data), mask=flags)
    return phi, times
def accept(self, content_name):
    """True when any inner path component of *content_name* (segments
    between index 2 and the last) is reported as 'True' by the local
    filter service listening on port 4000."""
    try:
        components = content_name.split('/')[2:-1]
    # BUG FIX: narrowed from a bare ``except:``, which also caught
    # SystemExit/KeyboardInterrupt.
    except Exception:
        return False
    for component in components:
        f = urllib2.urlopen('http://127.0.0.1:4000/?name={0}'.format(
            urllib.quote_plus(component)))
        res = f.read(100)
        if res == 'True':
            return True
    return False
def atang(xy):
    """Quadrant-aware arctangent in degrees (atan2-like).

    ``xy[0]`` / ``xy[1]`` are the x and y components (scalars or numpy
    arrays).  Points with a zero component fall through to the final
    ``where`` default of 0.0.
    """
    import operator
    inv_x = xy[0] ** (-1)
    alpha = np.arctan(xy[1] * inv_x) * 180.0 / pi
    # One boolean mask per quadrant (axes excluded).
    quad3 = operator.and_(xy[0] < 0.00, xy[1] < 0.0)   # x<0, y<0
    quad1 = operator.and_(xy[0] > 0.0, xy[1] > 0.0)    # x>0, y>0
    quad4 = operator.and_(xy[0] > 0.0, xy[1] < 0.0)    # x>0, y<0
    quad2 = operator.and_(xy[0] < 0.0, xy[1] > 0.0)    # x<0, y>0
    right_half = operator.or_(quad1, quad4)
    # Shift alpha into the correct quadrant; everything else becomes 0.
    return np.where(
        quad2, alpha + 180.0,
        np.where(right_half, alpha,
                 np.where(quad3, alpha - 180.0, 0.0)))
def atang(xy):
    """atan2-style angle in degrees for (x, y) pairs given as
    ``xy[0]``/``xy[1]`` (scalars or numpy arrays); points lying on an
    axis fall back to 0.0."""
    import operator
    alpha = np.arctan(xy[1] * xy[0] ** (-1)) * 180.0 / pi
    x_neg_y_neg = operator.and_(xy[0] < 0.00, xy[1] < 0.0)
    x_pos_y_pos = operator.and_(xy[0] > 0.0, xy[1] > 0.0)
    x_pos_y_neg = operator.and_(xy[0] > 0.0, xy[1] < 0.0)
    x_neg_y_pos = operator.and_(xy[0] < 0.0, xy[1] > 0.0)
    x_positive = operator.or_(x_pos_y_pos, x_pos_y_neg)
    # Select the quadrant-corrected angle; default 0.0 elsewhere.
    inner = np.where(x_neg_y_neg, alpha - 180.0, 0.0)
    middle = np.where(x_positive, alpha, inner)
    return np.where(x_neg_y_pos, alpha + 180.0, middle)
def atividadeLista():
    """Render the activity list; when the filter form was submitted with
    a non-empty text, restrict by id or description (LIKE match)."""
    filtro = None
    if request.form.get('submitFiltro') == 'submitFiltro':
        filtro = request.form.get('txtFiltro')
    if filtro:
        needle = '%' + filtro + '%'
        atividadeLista = Atividade.query.filter(
            or_(Atividade.id.like(needle),
                Atividade.descricao.like(needle)))
    else:
        atividadeLista = Atividade.query.all()
    return render_template('atividade/atividadeCadastroLista.html',
                           atividadeLista=atividadeLista)
def rate_users_df(dataframe):
    """Attach a binary ``Rating`` column to *dataframe*: 1 for users who
    printed the project file, visit often enough, or show interesting
    behaviour within the configured visit band; thresholds come from
    ``params.json``."""
    with open('./recommend_system/api/params.json') as params_file:
        params = json.load(params_file)
    min_visits = params.get('MIN_VISITS_INTEREST')
    nb_visits = params.get('NB_VISITS_INTEREST')
    dataframe = dataframe.assign(Rating=lambda x: 0)
    visits = dataframe['Visit_count']
    # Visit count within [min_visits, nb_visits).
    in_band = operator.and_(visits > min_visits - 1, visits < nb_visits)
    # Printed the project file, or visits at/above the upper threshold.
    printed_or_frequent = operator.or_(
        dataframe['Print_project_file'] == 1, visits > nb_visits - 1)
    # Interesting behaviour while inside the visit band.
    interested_in_band = operator.and_(
        dataframe['Behavior_interesting'] == 1, in_band)
    dataframe['Rating'].mask(interested_in_band, 1, inplace=True)
    dataframe['Rating'].mask(printed_or_frequent, 1, inplace=True)
    return dataframe
def login():
    """Log a user in by username or email; users who are already
    authenticated are redirected to the index."""
    if current_user.is_authenticated:
        # Message text: "You are already logged in."
        flash('您已经登录')
        return redirect(url_for('main.index'))
    form = LoginForm()
    if form.validate_on_submit():
        username_or_email = form.username_or_email.data
        # A single form field accepts either the username or the email.
        user = User.query.filter(
            or_(User.username == username_or_email,
                User.email == username_or_email)).first()
        if user is not None and user.verify_password(form.password.data):
            login_user(user, form.remember_me.data)
            # Honour the ?next= redirect target when present.
            return redirect(request.args.get('next') or url_for('main.index'))
        # Message text: "Invalid username or password."
        flash('用户名或密码无效.')
    return render_template('auth/login.html', form=form)
def get(self, page=0, pre_page=None):
    """Paginated list of approved access records.  Admins and OP-group
    members see everything; everyone else only sees records they applied
    for or appear on as a copy recipient."""
    filters = [Access.status == ACCESS.APPROVAL]
    privileged = session.is_admin() or session.in_group(DEFAULT_GROUP.OP)
    if not privileged:
        filters.append(or_(
            Access.apply_uid == session.get_uid(),
            Access.copy_to_uid.like("%" + session.get_uid() + "%"),
        ))
    return self.succ(Access.select(
        page=page,
        pre_page=pre_page,
        _filters=filters,
        _orders=Access.apply_time.desc(),
    ))
def search(session, query=None, limit=250, corpus=None, **kwargs):
    """Full-text search over documents and author names, optionally
    scoped to one corpus; an empty query just returns *limit* documents."""
    sql = session.query(Document)
    if corpus is not None and corpus != '--all--':
        sql = sql.join(Document.corpora).filter(Corpus.id == int(corpus))
    if query == '' or query is None:
        sql = sql.limit(limit)
    else:
        match_clause = or_(Document.text.match(query),
                           Author.name.match(query))
        sql = sql.join(Author).filter(match_clause).limit(limit)
    return sql.all()
def user_list():
    """Member list — paginated admin view, optionally filtered by an
    exact username or email match."""
    page = request.args.get('page', 1, type=int)
    keyword = request.args.get('keyword', '', type=str)
    if keyword:
        # Look the keyword up as an exact username OR email.
        filters = or_(User.username == keyword, User.email == keyword)
        page_data = User.query.filter(filters).order_by(
            User.addtime.desc()).paginate(page=page, per_page=5)
    else:
        page_data = User.query.order_by(User.addtime.desc()).paginate(
            page=page, per_page=5)
    return render_template("admin/user_list.html", page_data=page_data)
def get_experiments(cls, id_or_name=None, current_dir=None):
    """Fetch experiments whose id or name contains *id_or_name*
    (substring match; empty/None matches everything), optionally
    restricted to *current_dir*, with simulations eagerly loaded and
    detached from the session."""
    logger.debug("Get experiments")
    # Empty search term -> LIKE '%%' -> matches every row.
    id_or_name = '' if not id_or_name else id_or_name
    with session_scope() as session:
        # '%%%s%%' renders as %<term>%, i.e. a substring LIKE pattern.
        experiments = session.query(Experiment)\
            .filter(or_(Experiment.exp_id.like('%%%s%%' % id_or_name),
                        Experiment.exp_name.like('%%%s%%' % id_or_name))) \
            .options(joinedload('simulations').joinedload('experiment'))
        if current_dir:
            experiments = experiments.filter(
                Experiment.working_directory == current_dir)
        experiments = experiments.all()
        # Detach results so they remain usable after the session closes.
        session.expunge_all()
    return experiments
def search_results(query, page=1):
    """Whoosh full-text search over posts visible to the current user
    (open posts plus the user's own), newest first, paginated.

    Side effect: records the query in ``PostMark``, counting how often
    each search string has been used.
    """
    mark = PostMark.query.filter_by(content=query).first()
    if mark is None:
        # First time this query is searched.
        mark = PostMark(content=query, num=1)
        db.session.add(mark)
        db.session.commit()
    else:
        mark.num = mark.num + 1
        db.session.commit()
    results = Post.query.whoosh_search(query, MAX_SEARCH_RESULTS)\
        .filter(or_(Post.open == OPEN, Post.user_id == g.user.id))\
        .order_by(Post.timestamp.desc())\
        .paginate(page, POSTS_PER_PAGE, False)
    return render_template('pages/post/search_results.html',
                           query=query, results=results)
def pencalc(keys):
    """Accumulate a penalty over the expression at each index in *keys*.

    For each key the two following entries of ``evals`` form the operand
    pair; a penalty is added when one operand is a float literal and the
    other is a primitive-set argument.

    NOTE(review): reconstructed from mangled formatting — the indentation
    of the second ``penalty + 10`` increment relative to the inner ``if``
    is ambiguous in the original; confirm against version history.
    """
    penalty = 0
    for i in keys:
        subject = [evals[i + 1], evals[i + 2]]
        print(subject)
        # True when at least one operand is a float AND at least one is a
        # pset argument.
        cond = (operator.or_(*[isinstance(j, float) for j in subject])
                and (any(i in pset.arguments for i in subject)))
        print(cond)
        if (cond == True):
            # Extra penalty for the (argument, float) ordering.
            if ((subject[0] in pset.arguments)
                    and isinstance(subject[1], float)):
                penalty = penalty + 10
            penalty = penalty + 10
    return penalty
def funcionarioLista():
    """Render the employee list; when the filter form was submitted with
    a non-empty text, restrict by matricula or name (LIKE match)."""
    # BUG FIX: removed the unconditional up-front ``.all()`` — it ran a
    # full query whose result was immediately overwritten in every branch.
    if request.form.get('submitFiltro') == 'submitFiltro':
        filtro = request.form.get('txtFiltro')
        if filtro is None or filtro == "":
            funcionarioLista = Funcionario.query.all()
        else:
            funcionarioLista = Funcionario.query.filter(
                or_(Funcionario.matricula.like('%' + filtro + '%'),
                    Funcionario.nome.like('%' + filtro + '%')))
    else:
        funcionarioLista = Funcionario.query.all()
    return render_template('funcionario/funcionarioCadastroLista.html',
                           funcionarioLista=funcionarioLista)
def clienteLista():
    """Render the client list; when the filter form was submitted with a
    non-empty text, restrict by CPF/CNPJ or name (LIKE match)."""
    # BUG FIX: removed the unconditional up-front ``.all()`` — it ran a
    # full query whose result was immediately overwritten in every branch.
    if request.form.get('submitFiltro') == 'submitFiltro':
        filtro = request.form.get('txtFiltro')
        if filtro is None or filtro == "":
            clienteLista = Cliente.query.all()
        else:
            clienteLista = Cliente.query.filter(
                or_(Cliente.cpf_cnpj.like('%' + filtro + '%'),
                    Cliente.nome.like('%' + filtro + '%')))
    else:
        clienteLista = Cliente.query.all()
    return render_template('cliente/clienteCadastroLista.html',
                           clienteLista=clienteLista)
def get_sort(self, default=None, allowed_fields=None):
    """Parse the ``sort_by`` query parameter (comma-separated names, a
    leading '-' means descending) into a combined ordering expression
    over the allowed fields.

    Returns ``None`` when none of the requested fields is allowed.
    """
    pfields = ((isinstance(request.query_params.sort_by, str)
                and request.query_params.sort_by)
               or default or self.default_sort).split(',')
    allowed_fields = allowed_fields or self._sortable_dict
    rv = []
    for pfield in pfields:
        asc = True
        if pfield.startswith('-'):
            pfield = pfield[1:]
            asc = False
        field = allowed_fields.get(pfield)
        if not field:
            continue  # silently drop unknown sort fields
        rv.append(field if asc else ~field)
    # BUG FIX: reduce() over an empty sequence raised TypeError whenever
    # no requested field was allowed; return None explicitly instead.
    # (All entries in rv are truthy field objects, so the old
    # "or_ if a and b else None" lambda reduces to a plain or_.)
    if not rv:
        return None
    return reduce(operator.or_, rv)
def refresh(self, tables: I2B2Tables, ignore_upload_id: Optional[int]) -> int:
    """ Update the next number to the last used number plus one

    :param tables: database tables link
    :param ignore_upload_id: If present, ignore this id as it will be deleted
    :return: next available number
    """
    session = sessionmaker(bind=tables.crc_engine)()
    try:
        q = func.max(tables.patient_dimension.c.patient_num)
        if ignore_upload_id is not None:
            q = q.filter(or_(tables.patient_dimension.c.upload_id.is_(None),
                             tables.patient_dimension.c.upload_id != ignore_upload_id))
        qr = session.query(q).all()
        # BUG FIX: MAX() over an empty table returns one row containing
        # NULL, so ``qr`` was truthy and ``None + 1`` raised TypeError.
        max_num = qr[0][0] if qr else None
        self._next_number = max_num + 1 if max_num is not None else 1
    finally:
        # Always release the session, even when the query fails.
        session.close()
    return self._next_number
def get_queryset(self):
    """Profiles whose name or institution contains every
    whitespace-separated term of the autocomplete query ``self.q``."""
    profiles = Profile.objects.all()
    # If search terms are present, AND together one (name OR institution)
    # match per term.
    if self.q:
        combined = ~Q(pk=None)  # identity: matches everything
        for term in list(filter(None, self.q.split(' '))):
            term_match = or_(Q(name__icontains=term),
                             Q(institution__icontains=term))
            combined = and_(term_match, combined)
        profiles = profiles.filter(combined)
    return profiles
def searchkey():
    """Search articles whose title or body contains the ``key`` query
    parameter; returns one paginated page as JSON."""
    key = request.args.get("key")
    page = request.args.get("pages", 1)
    per_page = request.args.get("per_page", 5)
    matches = Article.query.filter(
        or_(Article.title.contains(key), Article.text.contains(key)))
    page_items = matches.paginate(int(page), int(per_page), False).items
    data = {
        "arts": [art.to_dict() for art in page_items],
    }
    return jsonify(data)
def locally_register_new_user(user, activated=False):
    """Persist a new local user.

    Raises ``UserAlreadyExists`` — with ``reason`` set to 'email' or
    'username' — when either credential is already taken.
    """
    from syllabus.models.user import User, UserAlreadyExists
    user.activated = activated
    user.right = None
    existing_user = User.query.filter(
        or_(User.email == user.email,
            User.username == user.username)).first()
    if existing_user is not None:
        # BUG FIX: ``user.to_dict`` was passed without calling it, so the
        # message embedded a bound-method repr instead of the user data.
        exception = UserAlreadyExists(
            "tried to create user {} while user {} already exists".format(
                user.to_dict(), existing_user.to_dict()))
        if existing_user.email == user.email:
            exception.reason = "email"
        else:
            exception.reason = "username"
        raise exception
    db_session.add(user)
    db_session.commit()
def get_search_queryset(queryset, search_fields, search_term):
    # Based on the Django admin search (get_search_results in
    # django/contrib/admin/options.py): every whitespace-separated bit of
    # the search term must match at least one field (case-insensitive).
    if not search_term:
        return queryset
    lookups = ['%s__icontains' % field for field in search_fields]
    for bit in search_term.split():
        # OR the per-field matches for this bit; Q() is the OR identity.
        combined = models.Q()
        for lookup in lookups:
            combined = operator.or_(models.Q(**{lookup: bit}), combined)
        queryset = queryset.filter(combined)
    return queryset
def meusprojetos():
    """Hours report restricted to the logged-in user: entries they logged
    themselves plus entries on projects they coordinate."""
    user = User.query.filter_by(email=session["session_name"]).first_or_404()
    funcionario = Funcionario.query.filter(
        Funcionario.matricula == user.id).first()
    # Sub-select of employee/project links where this user is coordinator.
    sqlCordenador = db.session.query(FuncionarioProjeto.id).filter(
        FuncionarioProjeto.funcionario_id == funcionario.id,
        FuncionarioProjeto.cordenador == 1).subquery()
    minhasHoras = LancamentoHoras.query.join(LancamentoHoras.projetos)\
        .join(LancamentoHoras.atividades).join(LancamentoHoras.funcionarios)\
        .filter(or_(LancamentoHoras.id.in_(sqlCordenador),
                    LancamentoHoras.funcionario_id == funcionario.id))
    # BUG FIX: removed an unused duplicate query (``meusProjetos``) that
    # was built but never passed to the template.
    return render_template('relatorio/relatorioMeusProjetos.html',
                           meusProjetos=minhasHoras)
def make_expression( filters: FILTERS_TYPE, restrict_columns: t.List[str] = None ) -> pds.Expression: """ Predicates are expressed in disjunctive normal form (DNF), like [[('x', '=', 0), ...], ...] list of tuples act like and expression list of lists act like or expression todo: dont know what happens with list of lista and tuples _expression = operator.and_(_expression, _exp) """ # ---------------------------------------------------- 01 # validate e.validation.ShouldBeInstanceOf( value=filters, value_types=(list, ), msgs=["Was expecting list type for filters"] ).raise_if_failed() # ---------------------------------------------------- 02 # loop _ret_exp = None for _filter in filters: if isinstance(_filter, list): _exp = make_expression(_filter) if _ret_exp is None: _ret_exp = _exp else: _ret_exp = operator.or_(_ret_exp, _exp) elif isinstance(_filter, Filter): if bool(restrict_columns): e.validation.ShouldBeOneOf( value=_filter.column, values=restrict_columns, msgs=["You should use one of restricted columns ..."] ).raise_if_failed() _exp = _filter.expression if _ret_exp is None: _ret_exp = _exp else: _ret_exp = operator.and_(_ret_exp, _exp) else: raise e.code.ShouldNeverHappen(msgs=[f"Unknown type {type(_filter)}"]) # ---------------------------------------------------- 03 # return return _ret_exp
def get_messages_by_query(self, evenement_id: str, restricted_group_id: str,
                          tag_ids: List[str]) -> MessagesList:
    """Messages filtered by event, group visibility and tags; every falsy
    criterion is simply skipped."""
    query = self.session.query(self.entity_type)
    if evenement_id:
        query = query.filter(self.entity_type.evenement_id == evenement_id)
    if restricted_group_id:
        # Keep messages restricted to the given group, plus messages that
        # are not restricted at all (NULL group via the outer join).
        query = query.outerjoin(GroupEntity, MessageEntity.restricted_to)
        query = query.filter(or_(GroupEntity.uuid == restricted_group_id,
                                 GroupEntity.uuid == None))
    if tag_ids:
        current_app.logger.info(f"tag_ids {tag_ids}")
        query = query.join(TagEntity, self.entity_type.tags)
        query = query.filter(TagEntity.uuid.in_(tag_ids))
    return query.all()
def verify_password(email_or_token, password):
    """HTTP-auth callback: accepts either an auth token (empty password)
    or a username/email + password pair; records the authenticated user
    and the auth mode on ``g``."""
    if email_or_token == '':
        return False
    if password == '':
        # Empty password: treat the first credential as an auth token.
        g.current_user = User.verify_auth_token(email_or_token)
        g.token_used = True
        return g.current_user is not None
    # Log in with either email or username.
    # user = User.query.filter_by(email=email_or_token).first()
    user = User.query.filter(
        or_(User.username == email_or_token,
            User.email == email_or_token)).first()
    if not user:
        return False
    g.current_user = user
    g.token_used = False
    return user.verify_password(password)
def auto_mark_as_used_after_event() -> None:
    """Automatically mark as used bookings that correspond to events that
    have happened (with a delay).
    """
    if not FeatureToggle.UPDATE_BOOKING_USED.is_active():
        raise ValueError("This function is behind a deactivated feature flag.")
    now = datetime.datetime.now()
    threshold = now - constants.AUTO_USE_AFTER_EVENT_TIME_DELAY
    # fmt: off
    # Base set: unused, uncancelled bookings whose event started before
    # the threshold.
    bookings = (
        Booking.query
        .filter_by(isUsed=False, isCancelled=False)
        .filter(Stock.id == Booking.stockId)
        .filter(Stock.beginningDatetime < threshold)
    )
    # Individual bookings: no educational booking attached.
    individual_bookings = (
        bookings
        .filter(Booking.educationalBookingId == None)
    )
    # Educational bookings count unless they were explicitly refused
    # (NULL status is treated as not refused).
    educational_bookings = (
        bookings
        .filter(EducationalBooking.id == Booking.educationalBookingId)
        .filter(or_(EducationalBooking.status != EducationalBookingStatus.REFUSED,
                    EducationalBooking.status.is_(None)))
    )
    # fmt: on
    # Bulk updates; synchronize_session=False skips in-memory sync since
    # each update is committed immediately.
    n_individual_updated = individual_bookings.update(
        {"isUsed": True, "status": BookingStatus.USED, "dateUsed": now},
        synchronize_session=False
    )
    db.session.commit()
    n_educational_updated = educational_bookings.update(
        {"isUsed": True, "status": BookingStatus.USED, "dateUsed": now},
        synchronize_session=False
    )
    db.session.commit()
    logger.info(
        "Automatically marked bookings as used after event",
        extra={
            "individualBookingsUpdatedCount": n_individual_updated,
            "educationalBookingsUpdatedCount": n_educational_updated,
        },
    )
def ecrire_texte(image, texte):
    """Hide *texte* in *image* (steganography): each character is split
    into two 4-bit nibbles written into the low nibble of the red channel
    of consecutive pixels, starting at row y=1.

    The first 8 nibbles (row 0) are reserved for the text length, written
    elsewhere.  Returns the modified image.
    """
    # BUG FIX: the ``texte`` parameter used to be overwritten by a
    # hard-coded debug string, so callers could never hide their own text.
    if not texte:
        # Guard: with an empty text the first loop's "* 2 - 1" bound (-1)
        # would never be reached and the whole image would be cleared.
        return image
    liste_lettres = []
    for lettre in texte:
        code = ord(lettre)
        # High nibble first, then low nibble.
        poids_fort = operator.rshift(operator.and_(code, 0b11110000), 4)
        poids_faible = operator.and_(code, 0b1111)
        liste_lettres.append(poids_fort)
        liste_lettres.append(poids_faible)
    taille_liste_lettres = len(liste_lettres)
    # Pass 1: clear the low nibble of R on the target pixels.
    # NOTE(review): this clears (2n - 1) pixels but pass 2 only writes n
    # of them — preserved as-is; confirm the extra clearing is intended.
    cpt = 0
    for y in range(1, 512):
        for x in range(0, 512):
            if cpt == taille_liste_lettres * 2 - 1:
                break
            r, g, b = image.getpixel((x, y))
            r = operator.and_(r, 0b11110000)
            image.putpixel((x, y), (r, g, b))
            cpt += 1
    # Pass 2: OR each nibble into the (now cleared) low bits of R.
    cpt = 0
    for y in range(1, 512):
        for x in range(0, 512):
            if cpt == taille_liste_lettres:
                break
            r, g, b = image.getpixel((x, y))
            r = operator.or_(liste_lettres[cpt], r)
            image.putpixel((x, y), (r, g, b))
            cpt += 1
    return image
def add_transition(self, nodes, function, FnOperator, value, combineby='and'):
    """Add a material-transition rule to the graph.

    Creates graph nodes for the two materials (if needed) and attaches a
    condition dict to the edge ``nodes[0] -> nodes[1]`` describing when
    the transition fires.

    Parameters
    ----------
    nodes : tuple
        ``(a, b)`` -- represents a possible transition from material
        ``a`` to material ``b``.
    function : underworld function or constant
        Quantity that is compared against ``value``.
    FnOperator : operator.lt or operator.gt
        Comparison applied between ``function`` and ``value``; only
        less-than and greater-than are supported.
    value : float
        Threshold the function is compared to.
    combineby : str, optional
        'and' (default) or 'or'.  When several rules exist on one edge,
        they are applied in the sense of all ('and') or any ('or'); all
        rules on an edge must use the same value.

    Raises
    ------
    AssertionError
        If ``FnOperator`` is unsupported, or if ``combineby`` disagrees
        with an existing condition on the same edge.
    """
    # Only greater-than / less-than comparisons are supported as conditions.
    if FnOperator.__name__ not in ('lt', 'gt'):
        raise AssertionError("FnOperator must be either operator.lt or operator.gt", FnOperator)

    firstEdge = True
    try:
        self[nodes[0]][nodes[1]]  # probe whether the edge already exists
        # Key of an existing condition, used for the consistency check
        # below.  ``next(iter(...))`` works on both py2 and py3, unlike
        # the old ``.keys()[0]`` which fails on py3 dict views.
        prevdname = next(iter(self[nodes[0]][nodes[1]]))
        firstEdge = False
    except KeyError:  # narrowed from a bare except: missing node/edge only
        self.add_node(nodes[0])
        self.add_node(nodes[1])
        self.add_edges_from([nodes])

    # Each condition lives under a unique random key so a single edge can
    # hold several independent conditions.
    dname = uuid.uuid4()
    edge_data = self[nodes[0]][nodes[1]]
    edge_data[dname] = {
        'function': function,
        'operator': FnOperator,
        'value': value,
        'combineby': 'or' if combineby == 'or' else 'and',
    }

    if not firstEdge:
        assert edge_data[dname]['combineby'] == edge_data[prevdname]['combineby'], \
            "if the graph has multiple conditions on an edge, provided 'combineby' string must be identical to avoid ambiguity."
def board_article_list(request, board_name=None, page_num=1):
    """Render a board's article list, optionally filtered by a search query.

    GET parameters:
      search_type -- one of 'title', 'content', 'title.content',
                     'username', 'name' (the last two are ignored on
                     anonymous boards); anything else disables search.
      query       -- the search term.

    Raises PermissionDenied when no board name is given, and Http404 for
    an unknown board or an out-of-range page number.
    """
    if board_name is None:
        raise PermissionDenied()
    try:
        board = Board.objects.get(name=board_name)
    except Board.DoesNotExist:
        # Narrowed from a bare ``except`` so programming errors are no
        # longer silently converted into 404s.
        raise Http404()

    search_type = request.GET.get('search_type', '')
    query = request.GET.get('query', '')
    is_search = True
    # Common filter shared by every branch: live, non-pinned articles.
    base = dict(board=board, deleted=False, is_important=False)
    if search_type == 'title':
        articles = Article.objects.filter(title__contains=query, **base).order_by('-id')
    elif search_type == 'content':
        articles = Article.objects.filter(content__contains=query, **base).order_by('-id')
    elif search_type == 'title.content':
        # Match the term in either the title or the body.
        q = (Q(title__contains=query) | Q(content__contains=query)) & Q(**base)
        articles = Article.objects.filter(q).order_by('-id')
    elif search_type == 'username' and not board.is_anonymous:
        articles = Article.objects.filter(user__username=query, **base).order_by('-id')
    elif search_type == 'name' and not board.is_anonymous:
        articles = Article.objects.filter(writer_name__contains=query, **base).order_by('-id')
    else:
        articles = Article.objects.filter(**base).order_by('-id')
        is_search = False

    count_per_page = board.article_per_page
    # Floor division: plain ``/`` would yield a float under Python 3 and
    # break the page-range check and slicing below.
    total_page_num = max((articles.count() - 1) // count_per_page + 1, 1)
    page_num = int(page_num)
    if page_num < 1 or page_num > total_page_num:
        raise Http404()
    start = (page_num - 1) * count_per_page
    end = page_num * count_per_page
    articles = articles[start:end]
    page_nums = get_page_nums(total_page_num, 5, page_num)

    important_articles = Article.objects.filter(
        board=board, deleted=False, is_important=True).order_by('-id')
    # NOTE(review): ``articles`` is already sliced to a single page here,
    # so this counts page articles + pinned articles -- confirm this is
    # the intended meaning of ``article_count``.
    article_count = articles.count() + important_articles.count()

    return render_to_response('board/article_list.html', RequestContext(request, {
        'board': board,
        'articles': articles,
        'page_num': page_num,
        'total_page_num': total_page_num,
        'page_nums': page_nums,
        'get_parameters': request.GET.urlencode(),
        'get_dict': request.GET,
        'article_count': article_count,
        'is_search': is_search,
        'important_articles': important_articles,
    }))
def standard_env():
    """Build the global environment for the interpreter.

    Seeds the environment with every name from ``math`` then ``cmath``
    (the complex variants override duplicates such as ``sin``), then
    layers the language primitives on top.
    """
    env = Env()
    env.update(vars(math))   # sin, cos, sqrt, pi, ...
    env.update(vars(cmath))  # complex versions shadow the math ones
    # The operator module already provides exact binary callables for the
    # arithmetic/bitwise primitives, so no lambda wrappers are needed.
    env.update({
        "+": op.add,
        "-": op.sub,
        "/": op.truediv,
        "*": op.mul,
        "//": op.floordiv,
        "%": op.mod,
        "pow": op.pow,
        "^": op.xor,
        "|": op.or_,
        "&": op.and_,
        "~": op.invert,
        ">>": op.rshift,
        "<<": op.lshift,
        '>': op.gt,
        '<': op.lt,
        '>=': op.ge,
        '<=': op.le,
        '=': op.eq,
        '!=': op.ne,
        'not': op.not_,
        'eq?': op.is_,
        'equal?': op.eq,
        'ord': ord,
        'chr': chr,
        '#': op.getitem,
        '#=': op.setitem,
        '#~': op.delitem,
        'length': len,
        'list': lambda *items: list(items),
        'list?': lambda v: isinstance(v, list),
        'append': op.add,
        'car': lambda pair: pair[0],
        'cdr': lambda pair: pair[1:],
        'cons': lambda head, tail: [head] + tail,
        'join': lambda sep, seq: sep.join(str(e) for e in seq),
        'time': time.time,
        'round': round,
        'abs': abs,
        'zip': lambda *seqs: list(zip(*seqs)),
        'type': lambda v: type(v).__name__,
        'range': lambda start, stop: list(range(start, stop + 1)),  # inclusive
        'map': lambda *args: list(map(*args)),
        'max': max,
        'min': min,
        'open-input-file': open,
        'open-output-file': lambda f: open(f, 'w'),
        'close-file': lambda f: f.close(),
        'read-file': lambda f: f.read(),
        'write-in-file': lambda f, s: f.write(s),
        'load-file': lambda f: load(f),
        'null': None,
        # NOTE(review): returns the truthiness of v, i.e. True for
        # NON-empty values -- looks inverted vs. Scheme's null?; confirm
        # against the evaluator before changing.
        'null?': lambda v: bool(v),
        'int': lambda v: int(v),
        'float': lambda v: float(v),
        'number?': lambda v: isinstance(v, (int, float)),
        'bool': lambda v: bool(v),
        'bool?': lambda v: isinstance(v, bool),
        'procedure?': callable,
        'symbol?': lambda v: isinstance(v, str),
        'call/cc': callcc,
    })
    return env
def test_enum_flags():
    """Exercise construction and bitwise logic of EnumTest.Flags."""
    # --- constructing flag instances ---
    flags = EnumTest.Flags
    assert EnumTest.Flags is flags          # accessor yields the same class
    assert flags(0) == 0
    a_flag = flags('a')
    assert a_flag == 1
    assert flags(a_flag) is a_flag          # constructing from a flag is a no-op
    assert 'enumflags' in repr(a_flag)
    assert str(a_flag) == 'EnumTestFlags'
    # --- bitwise logic between flag values ---
    assert a_flag & EnumTest.a
    assert flags('b') | a_flag
    assert (flags('b') ^ a_flag) == 3
    with pytest.raises(TypeError):
        a_flag & 2                          # mixing with plain ints is rejected
    assert ~a_flag == -2
def table(expr):
    """Print the truth table of a boolean expression in variables A and B.

    Problem 46

    >>> table("and(A,or(A,B))")
    true true true
    true fail true
    fail true fail
    fail fail fail
    """
    import re
    from operator import and_, or_, xor  # noqa  (used via eval below)

    nand = lambda a, b: not and_(a, b)  # noqa
    nor = lambda a, b: not or_(a, b)    # noqa
    equ = lambda a, b: a is b           # noqa
    # Logical implication: A => B  ==  (not A) or B.
    # (The previous ``a or not b`` computed the converse, B => A.)
    impl = lambda a, b: not a or b      # noqa

    # Rewrite bare 'and'/'or' tokens to the operator functions.  Word
    # boundaries keep 'nand', 'nor' and 'xor' intact -- plain
    # ``str.replace`` corrupted them into undefined names such as
    # 'nand_' and 'xor_'.
    clean_expr = re.sub(r'\bor\b', 'or_', re.sub(r'\band\b', 'and_', expr))
    compiled_expr = compile(clean_expr, '<string>', 'eval')
    for A in [True, False]:
        for B in [True, False]:
            # eval sees the local helpers (and_, or_, nand, ...) plus A/B.
            C = eval(compiled_expr)
            print("{A} {B} {C}".format(**locals())
                  .lower().replace('false', 'fail'))
def buildSWField(self, pSheet, pRow):
    """Build an XLSToSWField from row *pRow* of spreadsheet *pSheet*.

    Column layout assumed by this method (0-based):
      9=used flag, 10=class name, 11=field name, 12=default value,
      13=type, 14=length, 25=feature point, 26=external name,
      28=priority, 31=join target, 32=join type.

    Raises XLSToSWExceptions.FieldNotMapped for rows marked 'no' or
    'no-temporary'.
    """
    used = pSheet.cell_value(pRow, 9)
    if used.lower() in ('no', 'no-temporary'):
        raise XLSToSWExceptions.FieldNotMapped(repr(pSheet) + ':' + repr(pRow))

    # Pull every cell the field needs up front.
    class_name = pSheet.cell_value(pRow, 10)
    field_name = pSheet.cell_value(pRow, 11)
    external_name = pSheet.cell_value(pRow, 26)
    field_type = pSheet.cell_value(pRow, 13)
    default_value = pSheet.cell_value(pRow, 12)
    field_length = pSheet.cell_value(pRow, 14)
    field_priority = pSheet.cell_value(pRow, 28)
    field_text = self.buildSWFieldComment(pSheet, pRow)
    feature_point = pSheet.cell_value(pRow, 25)

    if feature_point != "" and self.s_show_features_p == True:
        print ("----------Feature " + feature_point)

    lField = XLSToSWField.XLSToSWField(class_name, field_name, field_type)
    lField.s_field_external_name = external_name
    # Optional attributes: only set when the sheet supplies a value.
    if field_length != '':
        lField.s_field_length = field_length
    if field_priority != '':
        lField.s_field_priority = field_priority
    if field_text != '':
        lField.s_field_comment = field_text
    if default_value != '':
        lField.s_field_default_value = default_value

    if lField.fieldType().lower() == "join":
        lField.s_field_join_type = pSheet.cell_value(pRow, 32)
        lField.s_field_join_to = pSheet.cell_value(pRow, 31)
        if lField.isValidJoin() == False:
            print ("found an invalid join ")
            lField.showMe()

    return lField
def test_jbool_functions_fexprs(self):
    """Exhaustively check boolean combination of faceted (sensitive) values.

    For every combination of high/low truth values, ``and``/``or``/``not``
    over sensitive values must concretize to the plain-Python operator
    result, under both the high context (the label's condition holds) and
    the low context.  (Python 2 only: uses tuple-unpacking lambdas.)
    """
    jl = JeevesLib
    x = jl.mkLabel("x")
    # x is "high" when the first component of the output channel is 42.
    jl.restrict(x, lambda (a, _): a == 42)
    for lh in (True, False):
        for ll in (True, False):
            for rh in (True, False):
                for rl in (True, False):
                    l = jl.mkSensitive(x, lh, ll)
                    r = jl.mkSensitive(x, rh, rl)
                    # (42, 0) satisfies x -> high facets; (10, 0) -> low facets.
                    self.assertEquals(jl.concretize((42, 0), l and r), operator.and_(lh, rh))
                    self.assertEquals(jl.concretize((10, 0), l and r), operator.and_(ll, rl))
                    self.assertEquals(jl.concretize((42, 0), l or r), operator.or_(lh, rh))
                    self.assertEquals(jl.concretize((10, 0), l or r), operator.or_(ll, rl))
                    self.assertEquals(jl.concretize((42, 0), not l), operator.not_(lh))
                    self.assertEquals(jl.concretize((10, 0), not l), operator.not_(ll))
    # Second label keyed on the second tuple component: mixing two labels
    # must select each operand's facet according to its own label.
    y = jl.mkLabel("y")
    jl.restrict(y, lambda (_, b): b == 42)
    for lh in (True, False):
        for ll in (True, False):
            for rh in (True, False):
                for rl in (True, False):
                    l = jl.mkSensitive(x, lh, ll)
                    r = jl.mkSensitive(y, rh, rl)
                    # (42, 0): x high / y low; (42, 42): both high; etc.
                    self.assertEquals(jl.concretize((42, 0), l and r), operator.and_(lh, rl))
                    self.assertEquals(jl.concretize((10, 0), l and r), operator.and_(ll, rl))
                    self.assertEquals(jl.concretize((42, 42), l and r), operator.and_(lh, rh))
                    self.assertEquals(jl.concretize((10, 42), l and r), operator.and_(ll, rh))
                    self.assertEquals(jl.concretize((42, 0), l or r), operator.or_(lh, rl))
                    self.assertEquals(jl.concretize((10, 0), l or r), operator.or_(ll, rl))
                    self.assertEquals(jl.concretize((42, 42), l or r), operator.or_(lh, rh))
                    self.assertEquals(jl.concretize((10, 42), l or r), operator.or_(ll, rh))
def testOperators(self):
    """Smoke-test every overloaded Python operator on Variable objects.

    Builds all operator results eagerly inside a cached session, runs the
    global variable initializer once, then compares each evaluated result
    with the plain Python/NumPy outcome.
    """
    with self.cached_session():
        var_f = variables.Variable([2.0])
        # forward and reflected arithmetic
        add = var_f + 0.0
        radd = 1.0 + var_f
        sub = var_f - 1.0
        rsub = 1.0 - var_f
        mul = var_f * 10.0
        rmul = 10.0 * var_f
        div = var_f / 10.0
        rdiv = 10.0 / var_f
        # forward and reflected comparisons
        lt = var_f < 3.0
        rlt = 3.0 < var_f
        le = var_f <= 2.0
        rle = 2.0 <= var_f
        gt = var_f > 3.0
        rgt = 3.0 > var_f
        ge = var_f >= 2.0
        rge = 2.0 >= var_f
        neg = -var_f
        abs_v = abs(var_f)
        var_i = variables.Variable([20])
        mod = var_i % 7
        rmod = 103 % var_i
        var_b = variables.Variable([True, False])
        # logical ops exercised through the operator module
        and_v = operator.and_(var_b, [True, True])
        or_v = operator.or_(var_b, [False, True])
        xor_v = operator.xor(var_b, [False, False])
        invert_v = ~var_b
        rnd = np.random.rand(4, 4).astype("f")
        var_t = variables.Variable(rnd)
        slice_v = var_t[2, 0:0]  # empty slice
        var_m = variables.Variable([[2.0, 3.0]])
        # matmul dunders invoked directly
        matmul = var_m.__matmul__([[10.0], [20.0]])
        rmatmul = var_m.__rmatmul__([[10.0], [20.0]])
        self.evaluate(variables.global_variables_initializer())
        self.assertAllClose([2.0], self.evaluate(add))
        self.assertAllClose([3.0], self.evaluate(radd))
        self.assertAllClose([1.0], self.evaluate(sub))
        self.assertAllClose([-1.0], self.evaluate(rsub))
        self.assertAllClose([20.0], self.evaluate(mul))
        self.assertAllClose([20.0], self.evaluate(rmul))
        self.assertAllClose([0.2], self.evaluate(div))
        self.assertAllClose([5.0], self.evaluate(rdiv))
        self.assertAllClose([-2.0], self.evaluate(neg))
        self.assertAllClose([2.0], self.evaluate(abs_v))
        self.assertAllClose([True], self.evaluate(lt))
        self.assertAllClose([False], self.evaluate(rlt))
        self.assertAllClose([True], self.evaluate(le))
        self.assertAllClose([True], self.evaluate(rle))
        self.assertAllClose([False], self.evaluate(gt))
        self.assertAllClose([True], self.evaluate(rgt))
        self.assertAllClose([True], self.evaluate(ge))
        self.assertAllClose([True], self.evaluate(rge))
        self.assertAllClose([6], self.evaluate(mod))
        self.assertAllClose([3], self.evaluate(rmod))
        self.assertAllClose([True, False], self.evaluate(and_v))
        self.assertAllClose([True, True], self.evaluate(or_v))
        self.assertAllClose([True, False], self.evaluate(xor_v))
        self.assertAllClose([False, True], self.evaluate(invert_v))
        self.assertAllClose(rnd[2, 0:0], self.evaluate(slice_v))
        self.assertAllClose([[80.0]], self.evaluate(matmul))
        self.assertAllClose([[20.0, 30.0], [40.0, 60.0]], self.evaluate(rmatmul))
def _create_methods(arith_method, radd_func, comp_method, bool_method,
                    use_numexpr, special=False, default_axis='columns'):
    """Construct the dict of arithmetic/comparison/boolean wrapper methods.

    Parameters
    ----------
    arith_method, comp_method, bool_method : callables or None
        Factories that build each wrapper given ``(op, name, str_rep, ...)``;
        ``comp_method``/``bool_method`` may be None to skip that family.
    radd_func : callable or None
        Implementation for reflected add; falls back to ``operator.add``.
    use_numexpr : bool
        When False, no string representation is passed to the factories
        (disables the numexpr fast path).
    special : bool, default False
        When True the generated names are dunder names (``__add__`` ...).
    default_axis : str, default 'columns'
        Default alignment axis for flex methods (frames only).

    Returns
    -------
    dict mapping (possibly dunder-ified) method name -> method.
    """
    # creates actual methods based upon arithmetic, comp and bool method
    # constructors.
    # NOTE: Only frame cares about default_axis, specifically: special methods
    # have default axis None, whereas flex methods have default axis 'columns'
    # if we're not using numexpr, then don't pass a str_rep
    if use_numexpr:
        op = lambda x: x
    else:
        op = lambda x: None
    if special:

        def names(x):
            # trailing-underscore ops (and_, or_, ...) already supply one
            # underscore of the dunder suffix
            if x[-1] == "_":
                return "__%s_" % x
            else:
                return "__%s__" % x
    else:
        names = lambda x: x
    radd_func = radd_func or operator.add
    # In frame, all special methods have default_axis=None, flex methods have
    # default_axis set to the default (columns)
    new_methods = dict(
        add=arith_method(operator.add, names('add'), op('+'),
                         default_axis=default_axis),
        radd=arith_method(radd_func, names('radd'), op('+'),
                          default_axis=default_axis),
        sub=arith_method(operator.sub, names('sub'), op('-'),
                         default_axis=default_axis),
        mul=arith_method(operator.mul, names('mul'), op('*'),
                         default_axis=default_axis),
        truediv=arith_method(operator.truediv, names('truediv'), op('/'),
                             truediv=True, fill_zeros=np.inf,
                             default_axis=default_axis),
        floordiv=arith_method(operator.floordiv, names('floordiv'), op('//'),
                              default_axis=default_axis, fill_zeros=np.inf),
        # Causes a floating point exception in the tests when numexpr
        # enabled, so for now no speedup
        mod=arith_method(operator.mod, names('mod'), None,
                         default_axis=default_axis, fill_zeros=np.nan),
        pow=arith_method(operator.pow, names('pow'), op('**'),
                         default_axis=default_axis),
        # not entirely sure why this is necessary, but previously was included
        # so it's here to maintain compatibility
        rmul=arith_method(operator.mul, names('rmul'), op('*'),
                          default_axis=default_axis, reversed=True),
        rsub=arith_method(lambda x, y: y - x, names('rsub'), op('-'),
                          default_axis=default_axis, reversed=True),
        rtruediv=arith_method(lambda x, y: operator.truediv(y, x),
                              names('rtruediv'), op('/'), truediv=True,
                              fill_zeros=np.inf, default_axis=default_axis,
                              reversed=True),
        rfloordiv=arith_method(lambda x, y: operator.floordiv(y, x),
                               names('rfloordiv'), op('//'),
                               default_axis=default_axis, fill_zeros=np.inf,
                               reversed=True),
        rpow=arith_method(lambda x, y: y ** x, names('rpow'), op('**'),
                          default_axis=default_axis, reversed=True),
        rmod=arith_method(lambda x, y: y % x, names('rmod'), op('%'),
                          default_axis=default_axis, fill_zeros=np.nan,
                          reversed=True),
    )
    # div/rdiv are aliases of the true-division variants
    new_methods['div'] = new_methods['truediv']
    new_methods['rdiv'] = new_methods['rtruediv']
    # Comp methods never had a default axis set
    if comp_method:
        new_methods.update(dict(
            eq=comp_method(operator.eq, names('eq'), op('==')),
            ne=comp_method(operator.ne, names('ne'), op('!='), masker=True),
            lt=comp_method(operator.lt, names('lt'), op('<')),
            gt=comp_method(operator.gt, names('gt'), op('>')),
            le=comp_method(operator.le, names('le'), op('<=')),
            ge=comp_method(operator.ge, names('ge'), op('>=')),
        ))
    if bool_method:
        new_methods.update(dict(
            and_=bool_method(operator.and_, names('and_'), op('&')),
            or_=bool_method(operator.or_, names('or_'), op('|')),
            # For some reason ``^`` wasn't used in original.
            xor=bool_method(operator.xor, names('xor'), op('^')),
            rand_=bool_method(lambda x, y: operator.and_(y, x),
                              names('rand_'), op('&')),
            ror_=bool_method(lambda x, y: operator.or_(y, x),
                             names('ror_'), op('|')),
            rxor=bool_method(lambda x, y: operator.xor(y, x),
                             names('rxor'), op('^'))
        ))
    # Re-key everything through ``names`` so special classes expose dunders.
    new_methods = dict((names(k), v) for k, v in new_methods.items())
    return new_methods
def bitwise_or_usecase(x, y):
    # Minimal fixture returning ``operator.or_(x, y)`` (bitwise OR).
    # NOTE(review): presumably compiled by a JIT/typing test harness to
    # check lowering of ``operator.or_`` itself -- keep the explicit
    # operator call; do not rewrite as ``x | y``.
    return operator.or_(x, y)
def search(cls, base_query, term):
    """Narrow *base_query* to rows whose name or email contains *term*.

    The term is wrapped in SQL wildcards, so it may appear anywhere in
    the value.
    """
    pattern = u'%{}%'.format(term)
    # NOTE(review): name uses case-insensitive ilike while email uses
    # case-sensitive like -- confirm the asymmetry is intentional.
    name_match = cls.name.ilike(pattern)
    email_match = cls.email.like(pattern)
    return base_query.filter(or_(name_match, email_match))
def search_criterion(self):
    """Build a SQLAlchemy criterion matching ``__q`` against all columns.

    The query string is split on '-', '_', '/' and spaces; every part
    must match (AND) at least one column or related-model column (OR).
    Matches are prefix-style: the part may start the value or follow a
    space, '-', '/' or '_' inside it.
    """
    # Free-text query from the request, if present.
    q = request.GET['__q'] if '__q' in request.GET else None
    # Always-true seed criterion.
    # NOTE(review): this dereferences self.__model__ BEFORE the None
    # check on the next line -- would raise if __model__ is None; confirm.
    criterion = self.__model__._real_id > 0
    if self.__model__ is not None and q is not None:
        q_parts = q.replace('-', ' ').replace('_', ' ').replace('/', ' ').split(' ')
        model_obj = self.__model__()
        column_names = model_obj._column_list
        # NOTE: the loop variable deliberately shadows the outer ``q``.
        for q in q_parts:
            # Always-false seed for this part's OR chain.
            tmp_criterion = self.__model__._real_id < 0  # False
            for column_name in column_names:
                prop = getattr(self.__model__, column_name)
                if column_name in model_obj._get_relation_names():
                    # Related model: search its actual columns through the
                    # relationship (any() for collections, has() for scalars).
                    ref_metadata = model_obj._get_relation_metadata(column_name)
                    ref_class = model_obj._get_relation_class(column_name)
                    ref_obj = ref_class()
                    sub_column_names = ref_obj._column_list
                    for sub_column_name in sub_column_names:
                        # only applied to actual column
                        if sub_column_name in ref_obj._get_actual_column_names():
                            sub_column = getattr(ref_class, sub_column_name)
                            if ref_metadata.uselist:
                                # one to many
                                tmp_criterion = or_(tmp_criterion, prop.any(sub_column.ilike(q + '%')))
                                tmp_criterion = or_(tmp_criterion, prop.any(sub_column.ilike('% ' + q + '%')))
                                tmp_criterion = or_(tmp_criterion, prop.any(sub_column.ilike('%-' + q + '%')))
                                tmp_criterion = or_(tmp_criterion, prop.any(sub_column.ilike('%/' + q + '%')))
                                # '|' escapes the literal '_' wildcard
                                tmp_criterion = or_(tmp_criterion, prop.any(sub_column.ilike('%|_' + q + '%', escape='|')))
                            else:
                                # many to one
                                tmp_criterion = or_(tmp_criterion, prop.has(sub_column.ilike(q + '%')))
                                tmp_criterion = or_(tmp_criterion, prop.has(sub_column.ilike('% ' + q + '%')))
                                tmp_criterion = or_(tmp_criterion, prop.has(sub_column.ilike('%-' + q + '%')))
                                tmp_criterion = or_(tmp_criterion, prop.has(sub_column.ilike('%/' + q + '%')))
                                tmp_criterion = or_(tmp_criterion, prop.has(sub_column.ilike('%|_' + q + '%', escape='|')))
                elif column_name in model_obj._get_actual_column_names():
                    # Direct column on the model itself.
                    tmp_criterion = or_(tmp_criterion, prop.ilike(q + '%'))
                    tmp_criterion = or_(tmp_criterion, prop.ilike('% ' + q + '%'))
                    tmp_criterion = or_(tmp_criterion, prop.ilike('%-' + q + '%'))
                    tmp_criterion = or_(tmp_criterion, prop.ilike('%/' + q + '%'))
                    tmp_criterion = or_(tmp_criterion, prop.ilike('%|_' + q + '%', escape='|'))
            # Every query part must match somewhere.
            criterion = and_(criterion, tmp_criterion)
    return criterion