def polymorphic_union(table_map, typecolname, aliasname='p_union'): """create a UNION statement used by a polymorphic mapper. See the SQLAlchemy advanced mapping docs for an example of how this is used.""" colnames = util.Set() colnamemaps = {} types = {} for key in table_map.keys(): table = table_map[key] # mysql doesnt like selecting from a select; make it an alias of the select if isinstance(table, sql.Select): table = table.alias() table_map[key] = table m = {} for c in table.c: colnames.add(c.name) m[c.name] = c types[c.name] = c.type colnamemaps[table] = m def col(name, table): try: return colnamemaps[table][name] except KeyError: return sql.cast(sql.null(), types[name]).label(name) result = [] for type, table in table_map.iteritems(): if typecolname is not None: result.append(sql.select([col(name, table) for name in colnames] + [sql.literal_column("'%s'" % type).label(typecolname)], from_obj=[table])) else: result.append(sql.select([col(name, table) for name in colnames], from_obj=[table])) return sql.union_all(*result).alias(aliasname)
def polymorphic_union(table_map, typecolname, aliasname="p_union", cast_nulls=True):
    """Create a ``UNION`` statement used by a polymorphic mapper.

    See :ref:`concrete_inheritance` for an example of how this is used.

    :param table_map: mapping of polymorphic identities to
     :class:`.Table` objects.
    :param typecolname: string name of a "discriminator" column, which will be
     derived from the query, producing the polymorphic identity for
     each row.  If ``None``, no polymorphic discriminator is generated.
    :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()`
     construct generated.
    :param cast_nulls: if True, non-existent columns, which are represented
     as labeled NULLs, will be passed into CAST.  This is a legacy behavior
     that is problematic on some backends such as Oracle - in which case it
     can be set to False.

    """

    colnames = util.OrderedSet()
    colnamemaps = {}
    types = {}
    for key in table_map.keys():
        table = table_map[key]

        # mysql doesn't like selecting from a select;
        # make it an alias of the select
        if isinstance(table, sql.Select):
            table = table.alias()
            table_map[key] = table

        m = {}
        for c in table.c:
            colnames.add(c.key)
            m[c.key] = c
            types[c.key] = c.type
        colnamemaps[table] = m

    def col(name, table):
        try:
            return colnamemaps[table][name]
        except KeyError:
            if cast_nulls:
                return sql.cast(sql.null(), types[name]).label(name)
            else:
                return sql.type_coerce(sql.null(), types[name]).label(name)

    result = []
    for type, table in table_map.iteritems():
        if typecolname is not None:
            result.append(
                sql.select(
                    [col(name, table) for name in colnames] +
                    [sql.literal_column(
                        sql_util._quote_ddl_expr(type)).label(typecolname)],
                    from_obj=[table],
                )
            )
        else:
            result.append(
                sql.select([col(name, table) for name in colnames],
                           from_obj=[table]))
    return sql.union_all(*result).alias(aliasname)
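A minimal usage sketch (not from the source above; the `employees` and `managers` tables are hypothetical) showing how the `table_map`, `typecolname` and `aliasname` arguments are typically supplied for concrete-table inheritance, using the public entry point `sqlalchemy.orm.polymorphic_union`. The result is a named alias of a UNION ALL in which columns missing from a given table are padded with labeled NULLs and each sub-select gains a literal discriminator column.

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.orm import polymorphic_union

metadata = MetaData()

# two unrelated "concrete" tables that share only some columns (hypothetical)
employees = Table(
    "employees", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)
managers = Table(
    "managers", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
    Column("golf_swing", String(50)),
)

# polymorphic identity -> Table, plus the name of the generated
# discriminator column and of the resulting alias
pjoin = polymorphic_union(
    {"employee": employees, "manager": managers},
    "type",
    "pjoin",
)
# `pjoin` can then be used as the mapped selectable spanning both tables;
# rows from `employees` carry NULL for `golf_swing` and 'employee' for `type`.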
def polymorphic_union(table_map, typecolname, aliasname='p_union'):
    colnames = util.Set()
    colnamemaps = {}
    types = {}
    for key in table_map.keys():
        table = table_map[key]

        # mysql doesn't like selecting from a select; make it an alias of the select
        if isinstance(table, sql.Select):
            table = table.alias()
            table_map[key] = table

        m = {}
        for c in table.c:
            colnames.add(c.name)
            m[c.name] = c
            types[c.name] = c.type
        colnamemaps[table] = m

    def col(name, table):
        try:
            return colnamemaps[table][name]
        except KeyError:
            return sql.cast(sql.null(), types[name]).label(name)

    result = []
    for type, table in table_map.iteritems():
        if typecolname is not None:
            result.append(
                sql.select(
                    [col(name, table) for name in colnames] +
                    [sql.column("'%s'" % type).label(typecolname)],
                    from_obj=[table]))
        else:
            result.append(
                sql.select([col(name, table) for name in colnames],
                           from_obj=[table]))
    return sql.union_all(*result).alias(aliasname)
def get_activity_query(user_id=None, session_id=None, test_id=None):
    # pylint: disable=no-member
    from .models import Activity, Comment, User

    _filter = functools.partial(_apply_filters, user_id=user_id,
                                session_id=session_id, test_id=test_id)

    comments = select([
        literal_column("('comment:' || comment.id)").label('id'),
        literal_column(str(ACTION_COMMENTED)).label('action'),
        Comment.user_id.label('user_id'),
        Comment.session_id.label('session_id'),
        Comment.test_id.label('test_id'),
        Comment.timestamp.label('timestamp'),
        Comment.comment.label('text'),
        User.email.label('user_email'),
    ]).select_from(Comment.__table__.join(User, User.id == Comment.user_id))
    comments = _filter(Comment, comments)

    activity = select([
        literal_column("('activity:' || activity.id)").label('id'),
        Activity.action.label('action'),
        Activity.user_id.label('user_id'),
        Activity.session_id.label('session_id'),
        Activity.test_id.label('test_id'),
        Activity.timestamp.label('timestamp'),
        literal_column("NULL").label('text'),
        User.email.label('user_email'),
    ]).select_from(Activity.__table__.join(User, User.id == Activity.user_id))
    activity = _filter(Activity, activity)

    u = union_all(comments, activity).alias('u')
    return select([u]).order_by(u.c.timestamp)
def _generate_monthly_uniques(self, table, tables):
    idx = tables.index(table)

    # Join them all
    at = union_all(*[
        select([tbl]).where(tbl.c.message == "Ack")
        for tbl in tables[idx - 29:idx + 1]
    ])

    # Get uniques
    s = select([
        func.count(func.distinct(at.c.uaid_hash)).label("unique_count"),
        at.c.browser_os,
        at.c.browser_version,
    ]).\
        group_by(at.c.browser_os, at.c.browser_version)

    results = self._conn.execute(s).fetchall()
    if not results:
        return

    # Determine the date for this entry
    tname = table.name
    date_parts = [tname[-8:-4], tname[-4:-2], tname[-2:]]
    insert_date = "-".join(date_parts)

    self._conn.execute(monthly_rollup.insert(), [
        dict(date=insert_date, count=x.unique_count, browser_os=x.browser_os,
             browser_version=x.browser_version)
        for x in results
    ])
def AvailableWriteReferences(self, request, context):
    # can't write anything for ourselves, but let's return empty so this
    # can be used generically on profile page
    if request.to_user_id == context.user_id:
        return references_pb2.AvailableWriteReferencesRes()

    with session_scope() as session:
        if not session.execute(
            select(User).where_users_visible(context).where(User.id == request.to_user_id)
        ).scalar_one_or_none():
            context.abort(grpc.StatusCode.NOT_FOUND, errors.USER_NOT_FOUND)

        can_write_friend_reference = (
            session.execute(
                select(Reference)
                .where(Reference.from_user_id == context.user_id)
                .where(Reference.to_user_id == request.to_user_id)
                .where(Reference.reference_type == ReferenceType.friend)
            ).scalar_one_or_none()
        ) is None

        q1 = (
            select(literal(True), HostRequest)
            .outerjoin(
                Reference,
                and_(
                    Reference.host_request_id == HostRequest.conversation_id,
                    Reference.from_user_id == context.user_id,
                ),
            )
            .where(Reference.id == None)
            .where(HostRequest.can_write_reference)
            .where(HostRequest.surfer_user_id == context.user_id)
            .where(HostRequest.host_user_id == request.to_user_id)
        )

        q2 = (
            select(literal(False), HostRequest)
            .outerjoin(
                Reference,
                and_(
                    Reference.host_request_id == HostRequest.conversation_id,
                    Reference.from_user_id == context.user_id,
                ),
            )
            .where(Reference.id == None)
            .where(HostRequest.can_write_reference)
            .where(HostRequest.surfer_user_id == request.to_user_id)
            .where(HostRequest.host_user_id == context.user_id)
        )

        union = union_all(q1, q2).order_by(HostRequest.end_time_to_write_reference.asc()).subquery()
        union = select(union.c[0].label("surfed"), aliased(HostRequest, union))
        host_request_references = session.execute(union).all()

        return references_pb2.AvailableWriteReferencesRes(
            can_write_friend_reference=can_write_friend_reference,
            available_write_references=[
                references_pb2.AvailableWriteReferenceType(
                    host_request_id=host_request.conversation_id,
                    reference_type=reftype2api[ReferenceType.surfed if surfed else ReferenceType.hosted],
                    time_expires=Timestamp_from_datetime(host_request.end_time_to_write_reference),
                )
                for surfed, host_request in host_request_references
            ],
        )
def polymorphic_union(table_map, typecolname, aliasname='p_union', cast_nulls=True):
    """Create a ``UNION`` statement used by a polymorphic mapper.

    See :ref:`concrete_inheritance` for an example of how this is used.

    :param table_map: mapping of polymorphic identities to
     :class:`.Table` objects.
    :param typecolname: string name of a "discriminator" column, which will be
     derived from the query, producing the polymorphic identity for
     each row.  If ``None``, no polymorphic discriminator is generated.
    :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()`
     construct generated.
    :param cast_nulls: if True, non-existent columns, which are represented
     as labeled NULLs, will be passed into CAST.  This is a legacy behavior
     that is problematic on some backends such as Oracle - in which case it
     can be set to False.

    """

    colnames = util.OrderedSet()
    colnamemaps = {}
    types = {}
    for key in list(table_map.keys()):
        table = table_map[key]

        # mysql doesn't like selecting from a select;
        # make it an alias of the select
        if isinstance(table, sql.Select):
            table = table.alias()
            table_map[key] = table

        m = {}
        for c in table.c:
            colnames.add(c.key)
            m[c.key] = c
            types[c.key] = c.type
        colnamemaps[table] = m

    def col(name, table):
        try:
            return colnamemaps[table][name]
        except KeyError:
            if cast_nulls:
                return sql.cast(sql.null(), types[name]).label(name)
            else:
                return sql.type_coerce(sql.null(), types[name]).label(name)

    result = []
    for type, table in table_map.items():
        if typecolname is not None:
            result.append(
                sql.select([col(name, table) for name in colnames] +
                           [sql.literal_column(sql_util._quote_ddl_expr(type)).
                            label(typecolname)],
                           from_obj=[table]))
        else:
            result.append(
                sql.select([col(name, table) for name in colnames],
                           from_obj=[table]))
    return sql.union_all(*result).alias(aliasname)
def auto_describes(docid):
    rty = db.relationship_types.alias()
    ide1 = db.identifiers.alias()
    ide2 = db.identifiers.alias()
    doc = db.documents.alias()
    pac = db.packages.alias()
    pfi = db.packages_files.alias()
    one = (
        select([
            doc.c.document_id,
            ide1.c.identifier_id.label('left_identifier_id'),
            rty.c.relationship_type_id,
            ide2.c.identifier_id.label('right_identifier_id'),
        ])
        .select_from(
            doc
            .join(pac, doc.c.package_id == pac.c.package_id)
            .join(ide1,
                  (doc.c.document_id == ide1.c.document_id) &
                  (doc.c.document_namespace_id == ide1.c.document_namespace_id))
            .join(ide2,
                  (pac.c.package_id == ide2.c.package_id) &
                  (doc.c.document_namespace_id == ide2.c.document_namespace_id))
            .join(rty, rty.c.name == 'DESCRIBES')
        )
        .where(doc.c.document_id == docid)
    )
    two = (
        select([
            doc.c.document_id,
            ide1.c.identifier_id.label('left_identifier_id'),
            rty.c.relationship_type_id,
            ide2.c.identifier_id.label('right_identifier_id'),
        ])
        .select_from(
            doc
            .join(pac, doc.c.package_id == pac.c.package_id)
            .join(pfi, pac.c.package_id == pfi.c.package_id, isouter=True)
            .join(ide1,
                  (doc.c.document_id == ide1.c.document_id) &
                  (doc.c.document_namespace_id == ide1.c.document_namespace_id))
            .join(ide2,
                  (pfi.c.package_file_id == ide2.c.package_file_id) &
                  (doc.c.document_namespace_id == ide2.c.document_namespace_id))
            .join(rty, rty.c.name == 'DESCRIBES')
        )
        .where(doc.c.document_id == docid)
    )
    return union_all(one, two)
def getElementTable(self, *, dataId: Optional[DataId] = None) -> FromClause:
    # Docstring inherited from DimensionRecordStorage.getElementTable.
    tables = [
        link.getElementTable(dataId)
        for link in self._chain
        if link.matches(dataId)
    ]
    if len(tables) == 0:
        raise RuntimeError(
            f"No matching table for {self.element.name}, {dataId}.")
    elif len(tables) == 1:
        return tables[0]
    else:
        return union_all(*tables)
def polymorphic_union(table_map, typecolname, aliasname='p_union'):
    """Create a ``UNION`` statement used by a polymorphic mapper.

    See :ref:`concrete_inheritance` for an example of how this is used.
    """
    colnames = set()
    colnamemaps = {}
    types = {}
    for key in table_map.keys():
        table = table_map[key]

        # mysql doesn't like selecting from a select;
        # make it an alias of the select
        if isinstance(table, sql.Select):
            table = table.alias()
            table_map[key] = table

        m = {}
        for c in table.c:
            colnames.add(c.key)
            m[c.key] = c
            types[c.key] = c.type
        colnamemaps[table] = m

    def col(name, table):
        try:
            return colnamemaps[table][name]
        except KeyError:
            return sql.cast(sql.null(), types[name]).label(name)

    result = []
    for type, table in table_map.iteritems():
        if typecolname is not None:
            result.append(
                sql.select([col(name, table) for name in colnames] +
                           [sql.literal_column(
                               sql_util._quote_ddl_expr(type)).label(typecolname)],
                           from_obj=[table]))
        else:
            result.append(
                sql.select([col(name, table) for name in colnames],
                           from_obj=[table]))
    return sql.union_all(*result).alias(aliasname)
def ListPendingReferencesToWrite(self, request, context):
    with session_scope() as session:
        q1 = (
            select(literal(True), HostRequest)
            .outerjoin(
                Reference,
                and_(
                    Reference.host_request_id == HostRequest.conversation_id,
                    Reference.from_user_id == context.user_id,
                ),
            )
            .where_users_column_visible(context, HostRequest.host_user_id)
            .where(Reference.id == None)
            .where(HostRequest.can_write_reference)
            .where(HostRequest.surfer_user_id == context.user_id)
        )

        q2 = (
            select(literal(False), HostRequest)
            .outerjoin(
                Reference,
                and_(
                    Reference.host_request_id == HostRequest.conversation_id,
                    Reference.from_user_id == context.user_id,
                ),
            )
            .where_users_column_visible(context, HostRequest.surfer_user_id)
            .where(Reference.id == None)
            .where(HostRequest.can_write_reference)
            .where(HostRequest.host_user_id == context.user_id)
        )

        union = union_all(q1, q2).order_by(HostRequest.end_time_to_write_reference.asc()).subquery()
        union = select(union.c[0].label("surfed"), aliased(HostRequest, union))
        host_request_references = session.execute(union).all()

        return references_pb2.ListPendingReferencesToWriteRes(
            pending_references=[
                references_pb2.AvailableWriteReferenceType(
                    host_request_id=host_request.conversation_id,
                    reference_type=reftype2api[ReferenceType.surfed if surfed else ReferenceType.hosted],
                    time_expires=Timestamp_from_datetime(host_request.end_time_to_write_reference),
                )
                for surfed, host_request in host_request_references
            ],
        )
def _query_weekly_average(self, table, tables):
    # First see if we can find 6 days prior for a full week
    idx = tables.index(table)

    # With zero-based indexing, index 6 has 6 entries before it,
    # so an index of 6 or larger is needed for a full week
    if idx < 6:
        return None

    # Get our weekly set together
    # Note that we add one to idx since list slicing needs one higher than
    # the index to be right-side inclusive
    week_tables = union_all(*[
        select([tbl]).where(tbl.c.message == "Ack")
        for tbl in tables[idx - 6:idx + 1]
    ])

    # Calculate channels per user for the past week
    chans_per_user = select([
        week_tables.c.uaid_hash,
        func.count(func.distinct(week_tables.c.channel_id)).label("count")
    ]).\
        group_by(week_tables.c.uaid_hash)

    # Rank them into ntiles
    ranked = select([
        chans_per_user.c.uaid_hash,
        chans_per_user.c.count,
        func.ntile(100).over(order_by=text("count ASC")).label("rank"),
    ])

    # Remove the bottom/upper 5%, get sum/count for avg
    weekly_channels_stats = select([
        func.sum(ranked.c.count),
        func.count(ranked.c.uaid_hash),
    ]).\
        where(ranked.c.rank > 5).\
        where(ranked.c.rank <= 95)

    sums, count = self._conn.execute(weekly_channels_stats).fetchone()
    weekly_avg = Decimal(sums) / Decimal(count)
    return weekly_avg
def get_activity_query(user_id=None, session_id=None, test_id=None):
    # pylint: disable=no-member
    from .models import Activity, Comment, User

    _filter = functools.partial(_apply_filters, user_id=user_id,
                                session_id=session_id, test_id=test_id)

    comments = select([
        literal_column("('comment:' || comment.id)").label('id'),
        literal_column(str(ACTION_COMMENTED)).label('action'),
        Comment.user_id.label('user_id'),
        Comment.session_id.label('session_id'),
        Comment.test_id.label('test_id'),
        Comment.timestamp.label('timestamp'),
        Comment.comment.label('text'),
        (User.first_name + ' ' + User.last_name).label('user_name'),
        User.email.label('user_email'),
    ]).select_from(Comment.__table__.join(User, User.id == Comment.user_id))
    comments = _filter(Comment, comments)

    activity = select([
        literal_column("('activity:' || activity.id)").label('id'),
        Activity.action.label('action'),
        Activity.user_id.label('user_id'),
        Activity.session_id.label('session_id'),
        Activity.test_id.label('test_id'),
        Activity.timestamp.label('timestamp'),
        literal_column("NULL").label('text'),
        (User.first_name + ' ' + User.last_name).label('user_name'),
        User.email.label('user_email'),
    ]).select_from(Activity.__table__.join(User, User.id == Activity.user_id))
    activity = _filter(Activity, activity)

    u = union_all(comments, activity).alias('u')
    return select([u]).order_by(u.c.timestamp)
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             use_first_unread_anchor = REQ(default=False, converter=ujson.loads),
                             apply_markdown=REQ(default=True, converter=ujson.loads)):
    # type: (HttpRequest, UserProfile, int, int, int, Optional[List[Dict[str, Any]]], bool, bool) -> HttpResponse
    include_history = ok_to_include_history(narrow, user_profile.realm)

    if include_history and not use_first_unread_anchor:
        query = select([column("id").label("message_id")], None, table("zerver_message"))
        inner_msg_id_col = literal_column("zerver_message.id")
    elif narrow is None:
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       table("zerver_usermessage"))
        inner_msg_id_col = column("message_id")
    else:
        # TODO: Don't do this join if we're not doing a search
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       join(table("zerver_usermessage"), table("zerver_message"),
                            literal_column("zerver_usermessage.message_id") ==
                            literal_column("zerver_message.id")))
        inner_msg_id_col = column("message_id")

    num_extra_messages = 1
    is_search = False

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term['operator'] == "is":
                verbose_operators.append("is:" + term['operand'])
            else:
                verbose_operators.append(term['operator'])
        request._log_data['extra'] = "[%s]" % (",".join(verbose_operators),)

        # Build the query for the narrow
        num_extra_messages = 0
        builder = NarrowBuilder(user_profile, inner_msg_id_col)
        search_term = None  # type: Optional[Dict[str, Any]]
        for term in narrow:
            if term['operator'] == 'search':
                if not is_search:
                    search_term = term
                    query = query.column(column("subject")).column(column("rendered_content"))
                    is_search = True
                else:
                    # Join the search operators if there are multiple of them
                    search_term['operand'] += ' ' + term['operand']
            else:
                query = builder.add_term(query, term)
        if is_search:
            query = builder.add_term(query, search_term)

    # We add 1 to the number of messages requested if no narrow was
    # specified to ensure that the resulting list always contains the
    # anchor message.  If a narrow was specified, the anchor message
    # might not match the narrow anyway.
    if num_after != 0:
        num_after += num_extra_messages
    else:
        num_before += num_extra_messages

    sa_conn = get_sqlalchemy_connection()
    if use_first_unread_anchor:
        condition = column("flags").op("&")(UserMessage.flags.read.mask) == 0

        # We exclude messages on muted topics when finding the first unread
        # message in this narrow
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        if muting_conditions:
            condition = and_(condition, *muting_conditions)

        first_unread_query = query.where(condition)
        first_unread_query = first_unread_query.order_by(inner_msg_id_col.asc()).limit(1)
        first_unread_result = list(sa_conn.execute(first_unread_query).fetchall())
        if len(first_unread_result) > 0:
            anchor = first_unread_result[0][0]
        else:
            anchor = LARGER_THAN_MAX_MESSAGE_ID

    before_query = None
    after_query = None
    if num_before != 0:
        before_anchor = anchor
        if num_after != 0:
            # Don't include the anchor in both the before query and the after query
            before_anchor = anchor - 1
        before_query = query.where(inner_msg_id_col <= before_anchor) \
                            .order_by(inner_msg_id_col.desc()).limit(num_before)

    if num_after != 0:
        after_query = query.where(inner_msg_id_col >= anchor) \
                           .order_by(inner_msg_id_col.asc()).limit(num_after)

    if anchor == LARGER_THAN_MAX_MESSAGE_ID:
        # There's no need for an after_query if we're targeting just the target message.
        after_query = None

    if before_query is not None:
        if after_query is not None:
            query = union_all(before_query.self_group(), after_query.self_group())
        else:
            query = before_query
    elif after_query is not None:
        query = after_query
    else:
        # This can happen when a narrow is specified.
        query = query.where(inner_msg_id_col == anchor)

    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id").asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_old_messages */")
    query_result = list(sa_conn.execute(query).fetchall())

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    search_fields = dict()  # type: Dict[int, Dict[str, Text]]
    message_ids = []  # type: List[int]
    user_message_flags = {}  # type: Dict[int, List[str]]
    if include_history:
        message_ids = [row[0] for row in query_result]

        # TODO: This could be done with an outer join instead of two queries
        user_message_flags = dict(
            (user_message.message_id, user_message.flags_list())
            for user_message in UserMessage.objects.filter(
                user_profile=user_profile, message__id__in=message_ids))
        for row in query_result:
            message_id = row[0]
            if user_message_flags.get(message_id) is None:
                user_message_flags[message_id] = ["read", "historical"]
            if is_search:
                (_, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)
    else:
        for row in query_result:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = parse_usermessage_flags(flags)

            message_ids.append(message_id)

            if is_search:
                (_, _, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)

    cache_transformer = lambda row: MessageDict.build_dict_from_raw_db_row(row, apply_markdown)
    id_fetcher = lambda row: row['id']

    message_dicts = generic_bulk_cached_fetch(
        lambda message_id: to_dict_cache_key_id(message_id, apply_markdown),
        Message.get_raw_db_rows,
        message_ids,
        id_fetcher=id_fetcher,
        cache_transformer=cache_transformer,
        extractor=extract_message_dict,
        setter=stringify_message_dict)

    message_list = []
    for message_id in message_ids:
        msg_dict = message_dicts[message_id]
        msg_dict.update({"flags": user_message_flags[message_id]})
        msg_dict.update(search_fields.get(message_id, {}))
        message_list.append(msg_dict)

    statsd.incr('loaded_old_messages', len(message_list))
    ret = {'messages': message_list,
           "result": "success",
           "msg": ""}
    return json_success(ret)
def process_send_reference_reminders(payload):
    """
    Sends out reminders to write references after hosting/staying
    """
    logger.info(f"Sending out reference reminder emails")

    # Keep this in chronological order!
    reference_reminder_schedule = [
        # (number, timedelta before we stop being able to write a ref,
        #  text for how long they have left to write the ref)
        #
        # the end time to write a reference is supposed to be midnight in the host's timezone
        #
        # 8 pm ish on the last day of the stay
        (1, timedelta(days=15) - timedelta(hours=20), "14 days"),
        # 2 pm ish a week after stay
        (2, timedelta(days=8) - timedelta(hours=14), "7 days"),
        # 10 am ish 3 days before end of time to write ref
        (3, timedelta(days=4) - timedelta(hours=10), "3 days"),
    ]

    with session_scope() as session:
        # iterate the reminders in backwards order, so if we missed out on one we don't send duplicates
        for reminder_no, reminder_time, reminder_text in reversed(reference_reminder_schedule):
            user = aliased(User)
            other_user = aliased(User)
            # surfers needing to write a ref
            q1 = (
                select(literal(True), HostRequest, user, other_user)
                .join(user, user.id == HostRequest.surfer_user_id)
                .join(other_user, other_user.id == HostRequest.host_user_id)
                .outerjoin(
                    Reference,
                    and_(
                        Reference.host_request_id == HostRequest.conversation_id,
                        # if no reference is found in this join, then the surfer has not written a ref
                        Reference.from_user_id == HostRequest.surfer_user_id,
                    ),
                )
                .where(user.is_visible)
                .where(other_user.is_visible)
                .where(Reference.id == None)
                .where(HostRequest.can_write_reference)
                .where(HostRequest.surfer_sent_reference_reminders < reminder_no)
                .where(HostRequest.end_time_to_write_reference - reminder_time < now())
            )

            # hosts needing to write a ref
            q2 = (
                select(literal(False), HostRequest, user, other_user)
                .join(user, user.id == HostRequest.host_user_id)
                .join(other_user, other_user.id == HostRequest.surfer_user_id)
                .outerjoin(
                    Reference,
                    and_(
                        Reference.host_request_id == HostRequest.conversation_id,
                        # if no reference is found in this join, then the host has not written a ref
                        Reference.from_user_id == HostRequest.host_user_id,
                    ),
                )
                .where(user.is_visible)
                .where(other_user.is_visible)
                .where(Reference.id == None)
                .where(HostRequest.can_write_reference)
                .where(HostRequest.host_sent_reference_reminders < reminder_no)
                .where(HostRequest.end_time_to_write_reference - reminder_time < now())
            )

            union = union_all(q1, q2).subquery()
            union = select(
                union.c[0].label("surfed"),
                aliased(HostRequest, union),
                aliased(user, union),
                aliased(other_user, union),
            )

            reference_reminders = session.execute(union).all()

            for surfed, host_request, user, other_user in reference_reminders:
                # checked in sql
                assert user.is_visible
                if not are_blocked(session, user.id, other_user.id):
                    send_reference_reminder_email(user, other_user, host_request, surfed, reminder_text)
                    if surfed:
                        host_request.surfer_sent_reference_reminders = reminder_no
                    else:
                        host_request.host_sent_reference_reminders = reminder_no
                    session.commit()
def get(self):
    parser = RequestParser(trim=True)
    parser.add_argument('page', type=int, default=DEFAULT_PAGE)
    parser.add_argument('pageSize', type=int, default=DEFAULT_PAGE_SIZE)
    parser.add_argument('id', type=str)
    parser.add_argument('accountChangeType', type=str)
    parser.add_argument('isAcdemen', type=int)
    parser.add_argument('timeLower', type=int)
    parser.add_argument('timeUpper', type=int)
    parser.add_argument('amountLower', type=float)
    parser.add_argument('amountUpper', type=float)
    parser.add_argument('rechargeid', type=str)
    parser.add_argument('orderId', type=str)
    parser.add_argument('memberUsername', type=str)
    parser.add_argument('memberLevelConfig', type=str)
    parser.add_argument('memberParentUsername', type=str)
    args = parser.parse_args(strict=True)

    args['accountChangeType'] = args['accountChangeType'].split(',')

    criterion = set()
    criterion.add(Member.isTsetPLay != 1)
    if args['id']:
        criterion.add(
            MemberAccountChangeRecord.id.in_(args['id'].split(',')))
    if args['isAcdemen'] is not None:
        if args['isAcdemen'] == 1:
            criterion.add(
                MemberAccountChangeRecord.isAcdemen == args['isAcdemen'])
            if '6' in args['accountChangeType']:
                args['accountChangeType'].remove('6')
        else:
            criterion.add(
                or_(MemberAccountChangeRecord.isAcdemen.is_(None),
                    MemberAccountChangeRecord.isAcdemen == 0))
    if args['rechargeid']:
        criterion.add(
            MemberAccountChangeRecord.rechargeid == args['rechargeid'])
    if args['accountChangeType']:
        criterion.add(
            MemberAccountChangeRecord.accountChangeType.in_(
                args['accountChangeType']))
    if args['timeLower']:
        criterion.add(MemberAccountChangeRecord.time >= args['timeLower'])
    if args['timeUpper']:
        criterion.add(MemberAccountChangeRecord.time <=
                      args['timeUpper'] + SECONDS_PER_DAY)
    # if args['amountUpper']:
    #     criterion.add(MemberAccountChangeRecord.amount <= args['amountUpper'])
    # if args['amountLower']:
    #     criterion.add(MemberAccountChangeRecord.amount <= args['amountLower'])
    if args['amountUpper'] is not None and args['amountLower'] is not None:
        if args['amountUpper'] >= args['amountLower']:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountUpper'])
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountLower'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountUpper'])
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountLower'])
    elif args['amountUpper'] is not None:
        if args['amountUpper'] >= 0:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountUpper'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountUpper'])
    elif args['amountLower'] is not None:
        if args['amountLower'] >= 0:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountLower'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountLower'])
    if args['orderId']:
        criterion.add(MemberAccountChangeRecord.orderId == args['orderId'])
    if args['memberUsername']:
        criterion.add(
            Member.username.in_(args['memberUsername'].split(',')))
    if args['memberLevelConfig']:
        criterion.add(
            Member.levelConfig.in_(args['memberLevelConfig'].split(',')))
    # if args['memberLevelConfig'] is None:
    #     criterion.add(Member.levelConfig == '')
    if args['memberParentUsername']:
        parent = Member.query.filter(
            Member.username == args['memberParentUsername']).first()
        if parent:
            criterion.add(Member.parent == parent.id)

    query = db.session.query(
        func.sum(MemberAccountChangeRecord.amount).label('sumAmount'))
    query = query.outerjoin(
        Member, MemberAccountChangeRecord.memberId == Member.id)
    query = query.outerjoin(MemberLevel,
                            Member.levelConfig == MemberLevel.id)
    query = query.outerjoin(User,
                            User.id == MemberAccountChangeRecord.actionUID)

    result_query = []
    if args['accountChangeType']:
        if '6' in args['accountChangeType']:
            query_yule = db.session.query(
                func.sum(
                    EntertainmentCityBetsDetail.Profit).label('sumAmount'))
            query_yule = query_yule.outerjoin(
                Member,
                EntertainmentCityBetsDetail.PlayerName == Member.username)
            query_yule = query_yule.outerjoin(
                MemberLevel, Member.levelConfig == MemberLevel.id)

            criterion_query = set()
            if args['id']:
                criterion_query.add(
                    EntertainmentCityBetsDetail.id.in_(
                        args['id'].split(',')))
            if args['rechargeid']:
                criterion_query.add(EntertainmentCityBetsDetail.BillNo ==
                                    args['rechargeid'])
            if args['timeLower']:
                criterion_query.add(EntertainmentCityBetsDetail.BetTime >=
                                    args['timeLower'])
            if args['timeUpper']:
                criterion_query.add(EntertainmentCityBetsDetail.BetTime <=
                                    args['timeUpper'] + SECONDS_PER_DAY)
            # if args['amountLower']:
            #     criterion_query.add(EntertainmentCityBetsDetail.ValidBetAmount >= args['amountLower'])
            # if args['amountUpper']:
            #     criterion_query.add(EntertainmentCityBetsDetail.ValidBetAmount <= args['amountUpper'])
            if args['amountUpper'] is not None and args['amountLower'] is not None:
                if args['amountUpper'] >= args['amountLower']:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountUpper'])
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountLower'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountUpper'])
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountLower'])
            elif args['amountUpper'] is not None:
                if args['amountUpper'] >= 0:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountUpper'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountUpper'])
            elif args['amountLower'] is not None:
                if args['amountLower'] >= 0:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountLower'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountLower'])
            if args['orderId']:
                criterion_query.add(
                    EntertainmentCityBetsDetail.BillNo == args['orderId'])
            if args['memberUsername']:
                criterion_query.add(
                    Member.username.in_(args['memberUsername'].split(',')))
            if args['memberLevelConfig']:
                criterion_query.add(
                    Member.levelConfig.in_(
                        args['memberLevelConfig'].split(',')))
            # if args['memberLevelConfig'] is None:
            #     criterion_query.add(Member.levelConfig == '')
            if args['memberParentUsername']:
                parent = Member.query.filter(
                    Member.username == args['memberParentUsername']).first()
                if parent:
                    criterion_query.add(Member.parent == parent.id)

            query = query.filter(*criterion)
            query_yule = query_yule.filter(*criterion_query)
            # combine the two result sets with union_all
            result = union_all(query, query_yule)
            # alias the combined result so it can be queried as a new table
            user_alias = aliased(result, name='user_alias')
            user_alias = db.session.query(func.sum(
                user_alias.c.sumAmount)).all()[0][0]
            return {'success': True, 'data': user_alias}

    user_alias = query.filter(*criterion).all()[0][0]
    return {'success': True, 'data': user_alias}
def get(self):
    parser = RequestParser(trim=True)
    parser.add_argument('page', type=int, default=DEFAULT_PAGE)
    parser.add_argument('pageSize', type=int, default=DEFAULT_PAGE_SIZE)
    parser.add_argument('id', type=str)
    parser.add_argument('accountChangeType', type=str)
    parser.add_argument('isAcdemen', type=int)
    parser.add_argument('timeLower', type=int)
    parser.add_argument('timeUpper', type=int)
    parser.add_argument('amountLower', type=float)
    parser.add_argument('amountUpper', type=float)
    parser.add_argument('rechargeid', type=str)
    parser.add_argument('orderId', type=str)
    parser.add_argument('memberUsername', type=str)
    parser.add_argument('memberLevelConfig', type=str)
    parser.add_argument('memberParentUsername', type=str)
    args = parser.parse_args(strict=True)

    args['accountChangeType'] = args['accountChangeType'].split(',')

    criterion = set()
    criterion.add(Member.isTsetPLay != 1)
    if args['id']:
        criterion.add(
            MemberAccountChangeRecord.id.in_(args['id'].split(',')))
    if args['isAcdemen'] is not None:
        if args['isAcdemen'] == 1:
            criterion.add(
                MemberAccountChangeRecord.isAcdemen == args['isAcdemen'])
            if '6' in args['accountChangeType']:
                args['accountChangeType'].remove('6')
        else:
            criterion.add(
                or_(MemberAccountChangeRecord.isAcdemen.is_(None),
                    MemberAccountChangeRecord.isAcdemen == 0))
    if args['rechargeid']:
        criterion.add(
            MemberAccountChangeRecord.rechargeid == args['rechargeid'])
    if args['accountChangeType']:
        criterion.add(
            MemberAccountChangeRecord.accountChangeType.in_(
                args['accountChangeType']))
    if args['timeLower']:
        criterion.add(MemberAccountChangeRecord.time >= args['timeLower'])
    if args['timeUpper']:
        criterion.add(MemberAccountChangeRecord.time <=
                      args['timeUpper'] + SECONDS_PER_DAY)
    # if args['amountUpper']:
    #     criterion.add(MemberAccountChangeRecord.amount <= args['amountUpper'])
    # if args['amountLower']:
    #     criterion.add(MemberAccountChangeRecord.amount <= args['amountLower'])
    if args['amountUpper'] is not None and args['amountLower'] is not None:
        if args['amountUpper'] >= args['amountLower']:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountUpper'])
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountLower'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountUpper'])
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountLower'])
    elif args['amountUpper'] is not None:
        if args['amountUpper'] >= 0:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountUpper'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountUpper'])
    elif args['amountLower'] is not None:
        if args['amountLower'] >= 0:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountLower'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountLower'])
    if args['orderId']:
        criterion.add(MemberAccountChangeRecord.orderId == args['orderId'])
    if args['memberUsername']:
        criterion.add(
            Member.username.in_(args['memberUsername'].split(',')))
    if args['memberLevelConfig']:
        criterion.add(
            Member.levelConfig.in_(args['memberLevelConfig'].split(',')))
    # if args['memberLevelConfig'] is None:
    #     criterion.add(Member.levelConfig == '')
    if args['memberParentUsername']:
        parent = Member.query.filter(
            Member.username == args['memberParentUsername']).first()
        if parent:
            criterion.add(Member.parent == parent.id)

    query = db.session.query(
        MemberAccountChangeRecord.id.label('id'),
        MemberAccountChangeRecord.orderId.label('orderId'),
        MemberAccountChangeRecord.time.label('time'),
        MemberAccountChangeRecord.accountChangeType.label('accountChangeType'),
        MemberAccountChangeRecord.info.label('accountChangeTypesName'),
        MemberAccountChangeRecord.amount.label('amount'),
        MemberAccountChangeRecord.rechargeid.label('rechargeid'),
        MemberAccountChangeRecord.memberBalance.label('balanceAfter'),
        literal(0).label('balanceBefore'),
        MemberAccountChangeRecord.memberFrozenBalance.label('frozenBalanceBefore'),
        literal('KK').label('eccode'),
        Member.id.label('memberId'),
        Member.username.label('username'),
        Member.type.label('memberType'),
        MemberLevel.levelName.label('levelName'),
        MemberLevel.id.label('levelId'),
        User.username.label('OperatorName')).order_by(
            MemberAccountChangeRecord.time.desc())
    query = query.outerjoin(
        Member, MemberAccountChangeRecord.memberId == Member.id)
    query = query.outerjoin(MemberLevel,
                            Member.levelConfig == MemberLevel.id)
    query = query.outerjoin(User,
                            User.id == MemberAccountChangeRecord.actionUID)

    result_query = []
    if args['accountChangeType']:
        if '6' in args['accountChangeType']:
            query_yule = db.session.query(
                EntertainmentCityBetsDetail.id.label('id'),
                EntertainmentCityBetsDetail.BillNo.label('orderId'),
                EntertainmentCityBetsDetail.BetTime.label('time'),
                literal(6).label('accountChangeType'),
                EntertainmentCityBetsDetail.Remark.label('accountChangeTypesName'),
                EntertainmentCityBetsDetail.Profit.label('amount'),
                EntertainmentCityBetsDetail.BillNo.label('rechargeid'),
                EntertainmentCityBetsDetail.Balance.label('balanceAfter'),
                literal(0).label('balanceBefore'),
                EntertainmentCityBetsDetail.CusAccount.label('frozenBalanceBefore'),
                EntertainmentCityBetsDetail.ECCode.label('eccode'),
                Member.id.label('memberId'),
                Member.username.label('username'),
                Member.type.label('memberType'),
                MemberLevel.levelName.label('levelName'),
                MemberLevel.id.label('levelId'),
                literal('').label('OperatorName')).order_by(
                    EntertainmentCityBetsDetail.BetTime.desc())
            query_yule = query_yule.outerjoin(
                Member,
                EntertainmentCityBetsDetail.PlayerName == Member.username)
            query_yule = query_yule.outerjoin(
                MemberLevel, Member.levelConfig == MemberLevel.id)

            criterion_query = set()
            if args['id']:
                criterion_query.add(
                    EntertainmentCityBetsDetail.id.in_(
                        args['id'].split(',')))
            if args['rechargeid']:
                criterion_query.add(EntertainmentCityBetsDetail.BillNo ==
                                    args['rechargeid'])
            if args['timeLower']:
                criterion_query.add(EntertainmentCityBetsDetail.BetTime >=
                                    args['timeLower'])
            if args['timeUpper']:
                criterion_query.add(EntertainmentCityBetsDetail.BetTime <=
                                    args['timeUpper'] + SECONDS_PER_DAY)
            # if args['amountLower']:
            #     criterion_query.add(EntertainmentCityBetsDetail.ValidBetAmount >= args['amountLower'])
            # if args['amountUpper']:
            #     criterion_query.add(EntertainmentCityBetsDetail.ValidBetAmount <= args['amountUpper'])
            if args['amountUpper'] is not None and args['amountLower'] is not None:
                if args['amountUpper'] >= args['amountLower']:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountUpper'])
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountLower'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountUpper'])
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountLower'])
            elif args['amountUpper'] is not None:
                if args['amountUpper'] >= 0:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountUpper'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountUpper'])
            elif args['amountLower'] is not None:
                if args['amountLower'] >= 0:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountLower'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountLower'])
            if args['orderId']:
                criterion_query.add(
                    EntertainmentCityBetsDetail.BillNo == args['orderId'])
            if args['memberUsername']:
                criterion_query.add(
                    Member.username.in_(args['memberUsername'].split(',')))
            if args['memberLevelConfig']:
                criterion_query.add(
                    Member.levelConfig.in_(
                        args['memberLevelConfig'].split(',')))
            # if args['memberLevelConfig'] is None:
            #     criterion_query.add(Member.levelConfig == '')
            if args['memberParentUsername']:
                parent = Member.query.filter(
                    Member.username == args['memberParentUsername']).first()
                if parent:
                    criterion_query.add(Member.parent == parent.id)

            query = query.filter(*criterion)
            query_yule = query_yule.filter(*criterion_query)
            # combine the two result sets with union_all
            result = union_all(query, query_yule)
            # alias the combined result so it can be queried as a new table
            user_alias = aliased(result, name='user_alias')
            user_alias = db.session.query(user_alias).order_by(
                user_alias.c.time.desc())
            pagination = paginate_one(user_alias, args['page'],
                                      args['pageSize'])
            pagination = convert_pagination(pagination)
            total_amount = 0
            for item in pagination.items:
                total_amount += item['amount']
                item['accountChangeTypeName'] = MemberAccountChangeTypes.d.get(
                    item['accountChangeType'])
                # if item['accountChangeType'] in [100001, 100002]:
                #     item['balanceAfter'] = item['balanceBefore'] + item['amount']
                #     item['frozenBalanceAfter'] = item['frozenBalanceBefore']
                #
                # if item['accountChangeType'] in [200001]:
                #     item['balanceAfter'] = item['balanceBefore'] - item['amount']
                #     item['frozenBalanceAfter'] = item['frozenBalanceBefore'] + item['amount']
                #
                # if item['accountChangeType'] in [200002]:
                #     item['balanceAfter'] = item['balanceBefore']
                #     item['frozenBalanceAfter'] = item['frozenBalanceBefore'] - item['amount']
                #
                # if item['accountChangeType'] in [200003]:
                #     item['balanceAfter'] = item['balanceBefore'] + item['amount']
                #     item['frozenBalanceAfter'] = item['frozenBalanceBefore'] - item['amount']
                #
                # if item['accountChangeType'] in [900001]:
                #     item['balanceAfter'] = item['balanceBefore'] + item['amount']
                #     item['frozenBalanceAfter'] = item['frozenBalanceBefore']
                #
                # if item['accountChangeType'] in [900002]:
                #     item['balanceAfter'] = item['balanceBefore'] - item['amount']
                #     item['frozenBalanceAfter'] = item['frozenBalanceBefore']
            return make_response_from_pagination(pagination,
                                                 totalAmount=total_amount)

    pagination = paginate(query, criterion, args['page'], args['pageSize'])
    pagination = convert_pagination(pagination)
    total_amount = 0
    for item in pagination.items:
        total_amount += item['amount']
        # item['accountChangeTypeName'] = MemberAccountChangeTypes.d.get(item['accountChangeType'])
        #
        # if item['accountChangeType'] in [100001, 100002]:
        #     item['balanceAfter'] = item['balanceBefore'] + item['amount']
        #     item['frozenBalanceAfter'] = item['frozenBalanceBefore']
        #
        # if item['accountChangeType'] in [200001]:
        #     item['balanceAfter'] = item['balanceBefore'] - item['amount']
        #     item['frozenBalanceAfter'] = item['frozenBalanceBefore'] + item['amount']
        #
        # if item['accountChangeType'] in [200002]:
        #     item['balanceAfter'] = item['balanceBefore']
        #     item['frozenBalanceAfter'] = item['frozenBalanceBefore'] - item['amount']
        #
        # if item['accountChangeType'] in [200003]:
        #     item['balanceAfter'] = item['balanceBefore'] + item['amount']
        #     item['frozenBalanceAfter'] = item['frozenBalanceBefore'] - item['amount']
        #
        # if item['accountChangeType'] in [900001]:
        #     item['balanceAfter'] = item['balanceBefore'] + item['amount']
        #     item['frozenBalanceAfter'] = item['frozenBalanceBefore']
        #
        # if item['accountChangeType'] in [900002]:
        #     item['balanceAfter'] = item['balanceBefore'] - item['amount']
        #     item['frozenBalanceAfter'] = item['frozenBalanceBefore']
    return make_response_from_pagination(pagination, totalAmount=total_amount)
def get(self):
    parser = RequestParser(trim=True)
    parser.add_argument('page', type=int, default=DEFAULT_PAGE)
    parser.add_argument('pageSize', type=int, default=DEFAULT_PAGE_SIZE)
    parser.add_argument('id', type=str)
    parser.add_argument('accountChangeType', type=str)
    parser.add_argument('isAcdemen', type=int)
    parser.add_argument('timeLower', type=int)
    parser.add_argument('timeUpper', type=int)
    parser.add_argument('amountLower', type=float)
    parser.add_argument('amountUpper', type=float)
    parser.add_argument('rechargeid', type=str)
    parser.add_argument('orderId', type=str)
    parser.add_argument('memberUsername', type=str)
    parser.add_argument('memberLevelConfig', type=str)
    parser.add_argument('memberParentUsername', type=str)
    args = parser.parse_args(strict=True)

    args['accountChangeType'] = args['accountChangeType'].split(',')

    criterion = set()
    criterion.add(Member.isTsetPLay != 1)
    if args['id']:
        criterion.add(
            MemberAccountChangeRecord.id.in_(args['id'].split(',')))
    if args['isAcdemen'] is not None:
        if args['isAcdemen'] == 1:
            criterion.add(
                MemberAccountChangeRecord.isAcdemen == args['isAcdemen'])
            if '6' in args['accountChangeType']:
                args['accountChangeType'].remove('6')
        else:
            criterion.add(
                or_(MemberAccountChangeRecord.isAcdemen.is_(None),
                    MemberAccountChangeRecord.isAcdemen == 0))
    if args['rechargeid']:
        criterion.add(
            MemberAccountChangeRecord.rechargeid == args['rechargeid'])
    if args['accountChangeType']:
        criterion.add(
            MemberAccountChangeRecord.accountChangeType.in_(
                args['accountChangeType']))
    if args['timeLower']:
        criterion.add(MemberAccountChangeRecord.time >= args['timeLower'])
    if args['timeUpper']:
        criterion.add(MemberAccountChangeRecord.time <=
                      args['timeUpper'] + SECONDS_PER_DAY)
    # if args['amountUpper']:
    #     criterion.add(MemberAccountChangeRecord.amount <= args['amountUpper'])
    # if args['amountLower']:
    #     criterion.add(MemberAccountChangeRecord.amount <= args['amountLower'])
    if args['amountUpper'] is not None and args['amountLower'] is not None:
        if args['amountUpper'] >= args['amountLower']:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountUpper'])
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountLower'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountUpper'])
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountLower'])
    elif args['amountUpper'] is not None:
        if args['amountUpper'] >= 0:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountUpper'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountUpper'])
    elif args['amountLower'] is not None:
        if args['amountLower'] >= 0:
            criterion.add(
                MemberAccountChangeRecord.amount >= args['amountLower'])
        else:
            criterion.add(
                MemberAccountChangeRecord.amount <= args['amountLower'])
    if args['orderId']:
        criterion.add(MemberAccountChangeRecord.orderId == args['orderId'])
    if args['memberUsername']:
        criterion.add(
            Member.username.in_(args['memberUsername'].split(',')))
    if args['memberLevelConfig']:
        criterion.add(
            Member.levelConfig.in_(args['memberLevelConfig'].split(',')))
    # if args['memberLevelConfig'] is None:
    #     criterion.add(Member.levelConfig == '')
    if args['memberParentUsername']:
        parent = Member.query.filter(
            Member.username == args['memberParentUsername']).first()
        if parent:
            criterion.add(Member.parent == parent.id)

    query = db.session.query(
        MemberAccountChangeRecord.orderId.label('orderId'),
        Member.username.label('username'),
        MemberLevel.levelName.label('levelName'),
        MemberAccountChangeRecord.time.label('time'),
        MemberAccountChangeRecord.info.label('accountChangeTypesName'),
        MemberAccountChangeRecord.amount.label('amount'),
        MemberAccountChangeRecord.memberBalance.label('memberBalance'),
        User.username.label('OperatorName'),
        User.username.label('dealName'),
    ).order_by(MemberAccountChangeRecord.time.desc())
    query = query.outerjoin(
        Member, MemberAccountChangeRecord.memberId == Member.id)
    query = query.outerjoin(MemberLevel,
                            Member.levelConfig == MemberLevel.id)
    query = query.outerjoin(User,
                            User.id == MemberAccountChangeRecord.actionUID)

    result_query = []
    if args['accountChangeType']:
        if '6' in args['accountChangeType']:
            query_yule = db.session.query(
                EntertainmentCityBetsDetail.BillNo.label('orderId'),
                Member.username.label('username'),
                MemberLevel.levelName.label('levelName'),
                EntertainmentCityBetsDetail.BetTime.label('time'),
                func.concat(EntertainmentCityBetsDetail.ECCode,
                            '-派彩').label('accountChangeTypesName'),
                EntertainmentCityBetsDetail.Profit.label('amount'),
                EntertainmentCityBetsDetail.Balance.label('memberBalance'),
                literal('').label('OperatorName'),
                literal('').label('dealName'),
            ).order_by(EntertainmentCityBetsDetail.BetTime.desc())
            query_yule = query_yule.outerjoin(
                Member,
                EntertainmentCityBetsDetail.PlayerName == Member.username)
            query_yule = query_yule.outerjoin(
                MemberLevel, Member.levelConfig == MemberLevel.id)

            criterion_query = set()
            if args['id']:
                criterion_query.add(
                    EntertainmentCityBetsDetail.id.in_(
                        args['id'].split(',')))
            if args['rechargeid']:
                criterion_query.add(EntertainmentCityBetsDetail.BillNo ==
                                    args['rechargeid'])
            if args['timeLower']:
                criterion_query.add(EntertainmentCityBetsDetail.BetTime >=
                                    args['timeLower'])
            if args['timeUpper']:
                criterion_query.add(EntertainmentCityBetsDetail.BetTime <=
                                    args['timeUpper'] + SECONDS_PER_DAY)
            # if args['amountLower']:
            #     criterion_query.add(EntertainmentCityBetsDetail.ValidBetAmount >= args['amountLower'])
            # if args['amountUpper']:
            #     criterion_query.add(EntertainmentCityBetsDetail.ValidBetAmount <= args['amountUpper'])
            if args['amountUpper'] is not None and args['amountLower'] is not None:
                if args['amountUpper'] >= args['amountLower']:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountUpper'])
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountLower'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountUpper'])
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountLower'])
            elif args['amountUpper'] is not None:
                if args['amountUpper'] >= 0:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountUpper'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountUpper'])
            elif args['amountLower'] is not None:
                if args['amountLower'] >= 0:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount >=
                        args['amountLower'])
                else:
                    criterion_query.add(
                        EntertainmentCityBetsDetail.ValidBetAmount <=
                        args['amountLower'])
            if args['orderId']:
                criterion_query.add(
                    EntertainmentCityBetsDetail.BillNo == args['orderId'])
            if args['memberUsername']:
                criterion_query.add(
                    Member.username.in_(args['memberUsername'].split(',')))
            if args['memberLevelConfig']:
                criterion_query.add(
                    Member.levelConfig.in_(
                        args['memberLevelConfig'].split(',')))
            # if args['memberLevelConfig'] is None:
            #     criterion_query.add(Member.levelConfig == '')
            if args['memberParentUsername']:
                parent = Member.query.filter(
                    Member.username == args['memberParentUsername']).first()
                if parent:
                    criterion_query.add(Member.parent == parent.id)

            query = query.filter(*criterion)
            query_yule = query_yule.filter(*criterion_query)
            # combine the two result sets with union_all
            result = union_all(query, query_yule)
            # alias the combined result so it can be queried as a new table
            user_alias = aliased(result, name='user_alias')
            user_alias = db.session.query(user_alias).order_by(
                user_alias.c.time.desc()).all()

            results = []
            for item in user_alias:
                results.append(
                    (item[0], item[1], item[2], changeData_str(item[3]),
                     item[4], item[5], item[6], item[7], item[8]))

            title = [
                '单号', '帐号', '会员等级', '操作时间', '信息', '金额', '小计',
                '操作人员', '处理人员', '其他'
            ]
            workbook = Workbook()
            worksheet = workbook.active
            worksheet.append(title)
            worksheet.column_dimensions['A'].width = 20
            worksheet.column_dimensions['B'].width = 10
            worksheet.column_dimensions['C'].width = 18
            worksheet.column_dimensions['D'].width = 20
            worksheet.column_dimensions['E'].width = 15
            worksheet.column_dimensions['F'].width = 8
            worksheet.column_dimensions['G'].width = 8
            worksheet.column_dimensions['H'].width = 8
            worksheet.column_dimensions['I'].width = 8
            worksheet.column_dimensions['J'].width = 8
            for result in results:
                worksheet.append(result)
            filename = '交易记录-' + str(int(time.time())) + '.xlsx'
            workbook.save(os.path.join(current_app.static_folder, filename))
            return make_response([{
                'success': True,
                'resultFilename': filename,
            }])

    query = query.filter(*criterion).all()
    results = []
    for item in query:
        results.append((item[0], item[1], item[2], changeData_str(item[3]),
                        item[4], item[5], item[6], item[7], item[8]))

    title = [
        '单号', '帐号', '会员等级', '操作时间', '信息', '金额', '小计',
        '操作人员', '处理人员', '其他'
    ]
    workbook = Workbook()
    worksheet = workbook.active
    worksheet.append(title)
    worksheet.column_dimensions['A'].width = 20
    worksheet.column_dimensions['B'].width = 10
    worksheet.column_dimensions['C'].width = 18
    worksheet.column_dimensions['D'].width = 20
    worksheet.column_dimensions['E'].width = 15
    worksheet.column_dimensions['F'].width = 8
    worksheet.column_dimensions['G'].width = 8
    worksheet.column_dimensions['H'].width = 8
    worksheet.column_dimensions['I'].width = 8
    worksheet.column_dimensions['J'].width = 8
    for result in results:
        worksheet.append(result)
    filename = '交易记录-' + str(int(time.time())) + '.xlsx'
    workbook.save(os.path.join(current_app.static_folder, filename))
    return make_response([{
        'success': True,
        'resultFilename': filename,
    }])
def limit_query_to_range(query: Query,
                         num_before: int,
                         num_after: int,
                         anchor: int,
                         anchored_to_left: bool,
                         anchored_to_right: bool,
                         id_col: ColumnElement,
                         first_visible_message_id: int) -> Query:
    '''
    This code is actually generic enough that we could move it to a
    library, but our only caller for now is message search.
    '''
    need_before_query = (not anchored_to_left) and (num_before > 0)
    need_after_query = (not anchored_to_right) and (num_after > 0)

    need_both_sides = need_before_query and need_after_query

    # The semantics of our flags are as follows:
    #
    # num_before = number of rows < anchor
    # num_after = number of rows > anchor
    #
    # But we also want the row where id == anchor (if it exists),
    # and we don't want to union up to 3 queries.  So in some cases
    # we do things like `after_limit = num_after + 1` to grab the
    # anchor row in the "after" query.
    #
    # Note that in some cases, if the anchor row isn't found, we
    # actually may fetch an extra row at one of the extremes.
    if need_both_sides:
        before_anchor = anchor - 1
        after_anchor = max(anchor, first_visible_message_id)
        before_limit = num_before
        after_limit = num_after + 1
    elif need_before_query:
        before_anchor = anchor
        before_limit = num_before
        if not anchored_to_right:
            before_limit += 1
    elif need_after_query:
        after_anchor = max(anchor, first_visible_message_id)
        after_limit = num_after + 1

    if need_before_query:
        before_query = query

        if not anchored_to_right:
            before_query = before_query.where(id_col <= before_anchor)

        before_query = before_query.order_by(id_col.desc())
        before_query = before_query.limit(before_limit)

    if need_after_query:
        after_query = query

        if not anchored_to_left:
            after_query = after_query.where(id_col >= after_anchor)

        after_query = after_query.order_by(id_col.asc())
        after_query = after_query.limit(after_limit)

    if need_both_sides:
        query = union_all(before_query.self_group(), after_query.self_group())
    elif need_before_query:
        query = before_query
    elif need_after_query:
        query = after_query
    else:
        # If we don't have either a before_query or after_query, it's because
        # some combination of num_before/num_after/anchor are zero or
        # use_first_unread_anchor logic found no unread messages.
        #
        # The most likely reason is somebody is doing an id search, so searching
        # for something like `message_id = 42` is exactly what we want.  In other
        # cases, which could possibly be buggy API clients, at least we will
        # return at most one row here.
        query = query.where(id_col == anchor)

    return query
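The comment block above describes the windowing semantics in terms of num_before, num_after and the anchor row. As a standalone illustration (toy table and values, not the original codebase), the sketch below builds the same shape of query with plain SQLAlchemy Core: a descending-limited "before" SELECT and an ascending-limited "after" SELECT around an anchor id, combined with union_all and re-sorted by id. Each limited SELECT is wrapped in a subquery so the example also runs on SQLite; the production code instead relies on .self_group() on PostgreSQL.

# Illustrative only: a hypothetical in-memory table demonstrating the
# before/after anchor pattern that limit_query_to_range() assembles.
from sqlalchemy import (Column, Integer, MetaData, Table, create_engine,
                        insert, select, union_all)

engine = create_engine("sqlite://")
metadata = MetaData()
messages = Table("messages", metadata, Column("id", Integer, primary_key=True))
metadata.create_all(engine)
with engine.begin() as conn:
    conn.execute(insert(messages), [{"id": i} for i in range(1, 11)])

anchor, num_before, num_after = 5, 2, 2

# rows strictly before the anchor, newest first
before_q = (select(messages.c.id)
            .where(messages.c.id < anchor)
            .order_by(messages.c.id.desc())
            .limit(num_before)).subquery()
# the anchor row plus rows after it (num_after + 1, mirroring after_limit above)
after_q = (select(messages.c.id)
           .where(messages.c.id >= anchor)
           .order_by(messages.c.id.asc())
           .limit(num_after + 1)).subquery()

window = union_all(select(before_q), select(after_q)).subquery()
with engine.connect() as conn:
    print(conn.execute(select(window).order_by(window.c.id)).scalars().all())
    # -> [3, 4, 5, 6, 7]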
# count() returns the number of rows in the selected column
conn.execute(select([func.count(students.c.id)])).fetchall()

# max() returns the largest value in the selected column
conn.execute(select([func.max(students.c.id)])).fetchall()

# min() returns the smallest value
conn.execute(select([func.min(students.c.id)])).fetchall()

# avg() returns the average
conn.execute(select([func.avg(students.c.id)])).fetchall()

# ========================= SQLAlchemy Core - Using Set Operations =========================
from sqlalchemy.sql import union, union_all, except_, intersect

# union(): combines the two SELECTs and removes duplicate rows
u1 = union(addresses.select().where(addresses.c.email_add.like('*****@*****.**')),
           addresses.select().where(addresses.c.email_add.like('*****@*****.**')))
conn.execute(u1).fetchone()

# union_all(): same, but duplicate rows are kept
ua = union_all(addresses.select().where(addresses.c.email_add.like('*****@*****.**')),
               addresses.select().where(addresses.c.email_add.like('*****@*****.**')))
conn.execute(ua).fetchall()

# except_(): rows in the first SELECT that are not in the second
ue = except_(addresses.select().where(addresses.c.email_add.like('*****@*****.**')),
             addresses.select().where(addresses.c.postal_add.like('%Pune')))
result = conn.execute(ue)

# intersect(): rows common to both SELECTs
ints = intersect(addresses.select().where(addresses.c.email_add.like('*****@*****.**')),
                 addresses.select().where(addresses.c.postal_add.like('%Pune')))
result = conn.execute(ints)

# sum of Fibonacci numbers (snippet truncated in the original)
a, b = 0, 1
max_value = 4000000
fib_value = []
sum_value = 0
for i in range(100000):
    if b != 1 and sum(fib_value) < max_value: