def load_admin():
    """Return one JSON page of admin accounts for the admin data grid.

    Query-string parameters: ``page_size`` (default 10), ``page_number``
    (default 1) and ``sort_order`` ('asc'/'desc', applied to ``Admin.id``).
    ``search_text`` and ``sort_name`` were read by the original code but
    never affected the query: both branches of the ``search_text`` check
    ran the identical statement, so the duplication is collapsed here.

    Returns a JSON string ``{"total": <int>, "rows": [...]}``.
    """
    page_size = int(request.args.get('page_size', '10'))
    page_number = int(request.args.get('page_number', '1'))
    sort_order = request.args.get('sort_order', 'asc')
    order = db.asc(Admin.id) if sort_order == 'asc' else db.desc(Admin.id)
    pagination = Admin.query.order_by(order).paginate(
        page_number, page_size, False)
    admins = [{
        'id': item.id,
        'user_name': item.user_name,
        'create_time': item.create_time.strftime("%Y-%m-%d")
        if item.create_time else None
    } for item in pagination.items]
    return json.dumps({"total": pagination.total, "rows": admins})
def load_student():
    """Return one JSON page of students for the admin data grid.

    Query-string parameters: ``page_size`` (default 10), ``page_number``
    (default 1) and ``sort_order`` ('asc'/'desc', applied to
    ``Student.number``). ``search_text`` and ``sort_name`` were read by the
    original code but never used: both branches of the ``search_text``
    check ran the identical statement, so the duplication is collapsed.

    Returns a JSON string ``{"total": <int>, "rows": [...]}``.
    """
    page_size = int(request.args.get('page_size', '10'))
    page_number = int(request.args.get('page_number', '1'))
    sort_order = request.args.get('sort_order', 'asc')
    order = (db.asc(Student.number) if sort_order == 'asc'
             else db.desc(Student.number))
    pagination = Student.query.order_by(order).paginate(
        page_number, page_size, False)
    students = [{
        'number': item.number,
        'name': item.name,
        'signature': item.signature,
        'age': item.age,
        'sex': item.sex,
        'entrance_year': item.entrance_year,
        'classes_id': item.classes_id,
        'class': item.classes.name,
        'college': item.college.name,
        'college_id': item.college_id,
        'birth_day': item.birth_day.strftime("%Y-%m-%d")
        if item.birth_day else None,
        'head_url': item.head_url,
        'telephone': item.telephone,
        'mail': item.mail,
    } for item in pagination.items]
    return json.dumps({"total": pagination.total, "rows": students})
def get_active_posts(mode=None):
    """Active posts annotated with like/dislike/comment counts.

    *mode* selects the ordering: 'popular' (most comments), 'best'
    (most likes), 'early' (oldest first); 'recent' and any unknown
    value fall back to newest-first.
    """
    def vote_count(value, label):
        # Correlated subquery: votes of the given value for each post.
        return db.select([db.func.count(Vote.id)]).where(
            db.and_(Vote.value == value,
                    Vote.post_id == Post.id)).as_scalar().label(label)

    like_col = vote_count(1, 'likeCount')
    dislike_col = vote_count(-1, 'dislikeCount')
    comment_col = db.select([db.func.count(Comment.id)]).where(
        Comment.post_id == Post.id).as_scalar().label('commentCount')

    if mode == 'popular':
        ordering = (db.desc('commentCount'), db.desc(Post.time))
    elif mode == 'best':
        ordering = (db.desc('likeCount'), db.desc(Post.time))
    elif mode == 'early':
        ordering = (db.asc(Post.time), )
    else:  # 'recent' and any unrecognized mode
        ordering = (db.desc(Post.time), )

    annotated = Post.query.with_entities(
        Post, like_col, dislike_col, comment_col)
    return filter_by_active_posts(annotated).order_by(*ordering)
def treeList():
    """Return the node tree of a project as nested JSON.

    Reads ``id`` (project id) from the request, loads all ``Tree`` rows of
    that project ordered by ``index_id``, and nests them recursively via
    the parent-id (``pid``) links, with root nodes at ``pid == 0``.
    """
    project_id = request.values.get("id")
    nodes = Tree.query.filter(db.and_(Tree.project_id == project_id)).order_by(
        db.asc(Tree.index_id)).all()

    def build_children(pid):
        # Children of *pid*; recursion preserves the index_id ordering.
        return [{
            "id": node.id,
            "name": node.name,
            "index_id": node.index_id,
            "noteType": node.type,
            "children": build_children(node.id),
        } for node in nodes if node.pid == pid]

    content = build_children(0)
    # Bug fix: response message typo 'sucess' corrected to 'success'.
    return make_response(
        jsonify({
            'code': 0,
            'msg': 'success',
            'content': content
        }))
def getCustomKeywords(taskId):
    """Collect the user-defined (``Tree.type == 4``) keywords of the
    project that the task's first selected case belongs to.

    Returns a tuple ``(keywordDatas, keywordRootlibs)``:
      * keywordDatas   -- one dict per keyword: name, Arguments, returns
                          and its ordered step value lists.
      * keywordRootlibs -- names of the libraries configured on the
                          keyword root node, empty if none configured.
    """
    rowData = Task.query.filter_by(id=taskId).first()
    caseIds = json.loads(rowData.case_id)
    # The first selected case anchors the project / keyword-root lookup.
    # NOTE(review): assumes caseIds is non-empty — confirm callers guarantee it.
    projectData = Tree.query.filter_by(id=caseIds[0]).first()
    projectId = projectData.project_id
    keywordRootId = projectData.pid
    keywordRows = Tree.query.filter(
        db.and_(Tree.project_id == projectId, Tree.type == 4)).all()
    keywordRootConfig = CaseProjectSetting.query.filter_by(
        pid=keywordRootId).first()
    keywordRootlibs = []
    if keywordRootConfig:
        keywordRootLibDatas = CaseLibs.query.filter(
            db.and_(CaseLibs.id.in_(json.loads(
                keywordRootConfig.libs)))).all()
        for lib in keywordRootLibDatas:
            keywordRootlibs.append(lib.name)
    keywordDatas = []
    for keyword in keywordRows:
        keywordId = keyword.id
        keyInfo = CaseInfo.query.filter_by(pid=keywordId).first()
        caseSteps = []
        # Steps are serialized in indexId order.
        caseStepDatas = CaseStep.query.filter_by(case_id=keywordId).order_by(
            db.asc(CaseStep.indexId)).all()
        for caseStep in caseStepDatas:
            caseSteps.append({'values': json.loads(caseStep.values)})
        keywordDatas.append({
            'name': keyInfo.name,
            'Arguments': json.loads(keyInfo.params),
            'returns': json.loads(keyInfo.return_values),
            'caseSteps': caseSteps,
        })
    return keywordDatas, keywordRootlibs
def refresh_account_with_longest_time_since_refresh():
    """Refresh the eligible account that was refreshed least recently.

    Eligible means: has tokens, is not dormant, and is past any backoff.
    """
    candidate = (
        Account.query
        .join(Account.tokens)
        .group_by(Account)
        .filter(Account.backoff_until < db.func.now())
        .filter(~Account.dormant)
        .order_by(db.asc(Account.last_refresh))
        .first())
    if candidate is not None:
        refresh_account(candidate.id)
def refresh_account(account_id):
    """Refresh the stalest posts of one account, with backoff on failure.

    Mastodon accounts are limited to 3 posts per run, others to 100.
    TemporaryError backs the account off; PermanentError makes it dormant
    (reporting to Sentry when configured); any other exception backs off,
    commits that state, and re-raises. The ``finally`` commit persists
    whatever state the handlers left on the session.
    """
    account = Account.query.get(account_id)
    print("Refreshing account {}".format(account))
    try:
        limit = 100
        if account.service == 'mastodon':
            # Mastodon rate limits are much tighter — refresh fewer posts.
            limit = 3
        posts = (Post.query.with_parent(account, 'posts').order_by(
            db.asc(Post.updated_at)).limit(limit).all())
        posts = refresh_posts(posts)
        account.touch_refresh()
        account.reset_backoff()
    except TemporaryError:
        db.session.rollback()
        account.backoff()
    except PermanentError:
        db.session.rollback()
        make_dormant(account)
        if sentry:
            sentry.captureException()
    except Exception as e:
        db.session.rollback()
        account.backoff()
        # Commit the backoff before propagating the unexpected error;
        # the finally clause will issue a second (no-op) commit.
        db.session.commit()
        raise e
    finally:
        db.session.commit()
def get(self, assembly_id):
    """Return a filtered, sorted, paginated slice of an assembly's contigs.

    Request args control column loading (``fields``, ``coverages``,
    ``pca``), sorting (``sort``, '-' prefix for descending), filtering
    (``length``, ``gc``, ``bins``) and pagination (``index``, ``_items``).
    Returns a dict with the serialized contigs, page count, current page,
    total matching count and the page size.
    """
    args = self.reqparse.parse_args()
    contigs = user_assembly_or_404(assembly_id).contigs
    # Column loading: restrict the SELECT to the requested columns.
    fields = args.fields.split(',')
    if args.coverages:
        fields.append('coverage')
    if args.pca:
        fields.extend(['pc_1', 'pc_2', 'pc_3'])
    contigs = contigs.options(db.load_only(*fields))
    # Sort: a leading '-' selects descending order on the named column.
    if args.sort:
        order = db.desc(args.sort[1:]) if args.sort[0] == '-' else db.asc(args.sort)
        contigs = contigs.order_by(order)
    # Filters: each length/gc value becomes one filter expression.
    if args.length:
        for value in args.length:
            filter = filter_contigs(Contig.length, value)
            contigs = contigs.filter(filter)
    if args.gc:
        for value in args.gc:
            filter = filter_contigs(Contig.gc, value)
            contigs = contigs.filter(filter)
    if args.bins:
        contigs = contigs.options(db.joinedload('bins'))
        bin_ids = args.bins.split(',')
        contigs = contigs.join((Bin, Contig.bins)).filter(Bin.id.in_(bin_ids))
    # Load in pagination.
    contig_pagination = contigs.paginate(args.index, args._items, False)
    result = []
    for i, contig in enumerate(contig_pagination.items):
        r = {}
        if args.fields:
            for field in fields:
                # 'coverage' is expanded separately below.
                if field != 'coverage':
                    r[field] = getattr(contig, field)
        if args.coverages:
            r.update(contig.coverages)
        if args.pca:
            r['pc_1'], r['pc_2'], r['pc_3'] = contig.pc_1, contig.pc_2, contig.pc_3
        if args.colors:
            for bin in contig.bins:
                r['color_{}'.format(bin.bin_set_id)] = bin.color
        if args.bins:
            # First bin of the contig that was part of the requested set.
            r['bin'] = [b.id for b in contig.bins if str(b.id) in bin_ids][0]
        result.append(r)
    return {
        'contigs': result if args.contigs else [],
        'indices': contig_pagination.pages,
        'index': args.index,
        'count': contigs.count(),
        'items': args._items
    }
def active_tags(self):
    """Tags used by active posts, with usage counts.

    Ordered by descending count, then tag name.
    JOINS: https://habr.com/ru/post/230643/
    """
    counted = Tag.query.with_entities(
        Tag, db.func.count('*').label('cnt')).join(Post.tags)
    active = filter_by_active_posts(counted)
    return active.distinct().group_by(Tag.id).order_by(
        db.desc('cnt'), db.asc(Tag.name))
def updateStepIndex(caseId):
    """Renumber the steps of a case so ``indexId`` runs 1..N.

    Steps are walked in their current ``indexId`` order, so gaps left by
    deleted steps collapse while the relative order is preserved. All
    updates are persisted with a single commit (the original committed
    once per row inside the loop).
    """
    steps = CaseStep.query.filter_by(case_id=caseId).order_by(
        db.asc(CaseStep.indexId)).all()
    if not steps:
        return
    for new_index, step in enumerate(steps, start=1):
        CaseStep.query.filter_by(id=step.id).update({'indexId': new_index})
    db.session.commit()
def queue_fetch_for_most_stale_accounts(
        min_staleness=timedelta(minutes=2), limit=20):
    """Queue fetch tasks for up to *limit* of the stalest eligible accounts.

    Eligible: has tokens, not dormant, past backoff, and last fetched more
    than *min_staleness* ago. Oldest fetches are queued first.
    """
    stale_accounts = (
        Account.query
        .join(Account.tokens)
        .group_by(Account)
        .filter(Account.last_fetch < db.func.now() - min_staleness)
        .filter(Account.backoff_until < db.func.now())
        .filter(~Account.dormant)
        .order_by(db.asc(Account.last_fetch))
        .limit(limit))
    for account in stale_accounts:
        fetch_acc.s(account.id).delay()
    db.session.commit()
def refresh_account_with_oldest_post():
    """Refresh the eligible account owning the least recently updated post."""
    started = time()
    stalest_post = (
        Post.query
        .outerjoin(Post.author)
        .join(Account.tokens)
        .filter(Account.backoff_until < db.func.now())
        .filter(~Account.dormant)
        .group_by(Post)
        .order_by(db.asc(Post.updated_at))
        .first())
    if stalest_post:
        account_id = stalest_post.author_id
        refresh_account(account_id)
        logging.info(
            'Refreshed posts for {} for having oldest post in {}s'.format(
                account_id, time() - started))
def export_scores_csv(start_date, end_date):
    """Return an inline CSV response of scores within the date range."""
    rows = (Scores.query
            .filter(Scores.date.between(start_date, end_date))
            .order_by(db.asc(Scores.date))
            .all())
    body = ''.join(score_to_csv(row) for row in rows)
    response = Response('{}\n{}'.format(csv_head(), body))
    response.headers['Content-Type'] = 'text/csv'
    response.headers['Content-Disposition'] = (
        'inline; filename=scores-{start}-to-{end}.csv'.format(
            start=start_date, end=end_date))
    return response
def user_activities_grouped_by_date(self, nsfw=True):
    """Group this user's activities by activity type, then by calendar day.

    Returns ``{ActivityType: {date: count}}``. NSFW activity types are
    skipped unless *nsfw* is true. The per-type date maps default missing
    days to 0 (defaultdict), matching the original behavior.

    The original rescanned the full activity list once per type
    (O(types x activities)); this version counts in a single pass.
    """
    activities = self.activities.order_by(db.asc(Activity.timestamp)).all()
    # One pass: per-type-id map of day -> count.
    per_type = defaultdict(lambda: defaultdict(int))
    for activity in activities:
        per_type[activity.activitytype.id][activity.timestamp.date()] += 1
    grouped = defaultdict(dict)
    for activity_type in ActivityType.query.all():
        if nsfw or not activity_type.nsfw:
            # Accessing a missing key yields a fresh empty defaultdict,
            # so types with no activities still appear with empty maps.
            grouped[activity_type] = per_type[activity_type.id]
    return grouped
def projectGlobalValues():
    """List a project's global key/value parameters as JSON, oldest first."""
    project_id = request.values.get("id")
    rows = GlobalValues.query.filter_by(project_id=project_id).order_by(
        db.asc(GlobalValues.add_time)).all()
    content = [{
        "id": row.id,
        "key": row.key_name,
        "value": row.key_value,
        "valueType": row.value_type,
    } for row in rows] if rows else []
    return make_response(jsonify({'code': 0, 'msg': '', 'content': content}))
class Log(db.Model):
    """ Model used for logging to database. """
    __tablename__ = 'logs'
    id = db.Column(db.Integer, primary_key=True)  # auto incrementing
    logger = db.Column(
        db.String(64))  # the name of the logger. (e.g. myapp.views)
    level = db.Column(db.String(16), index=True)  # info, debug, or error?
    trace = db.Column(db.Text)  # the full traceback printout
    msg = db.Column(db.Text)  # any custom log you may have included
    extra = db.Column(db.Text)  # Any extra data given
    created_at = db.Column(db.DateTime,
                           default=datetime.utcnow)  # the current timestamp
    # Who acknowledged/resolved this log entry, if anyone.
    resolved_by_user_id = db.Column(db.Integer, db.ForeignKey("users.id"),
                                    nullable=True, index=True)
    resolved_at = db.Column(db.DateTime, nullable=True)

    # Set default order by.
    # NOTE(review): mapper-level order_by was removed in SQLAlchemy 1.1+ —
    # verify the pinned SQLAlchemy version still supports it.
    __mapper_args__ = {"order_by": [db.asc(created_at)]}

    def __init__(self, logger=None, level=None, trace=None, msg=None,
                 extra=None):
        self.logger = logger
        self.level = level
        self.trace = trace
        self.msg = msg
        self.extra = extra

    def __unicode__(self):
        return self.__repr__()

    def __repr__(self):
        # Truncate the message so reprs stay one line.
        return "<Log: %s - %s>" % (
            self.created_at.strftime('%m/%d/%Y-%H:%M:%S'), self.msg[:50])

    def resolve(self, user_id):
        """ Mark this log as resolved / at least acknowledge it's been seen. """
        if not self.resolved:
            self.resolved_by_user_id = user_id
            self.resolved_at = datetime.utcnow()

    @property
    def resolved(self):
        """ Returns whether this log has been resolved or not. """
        return self.resolved_by_user_id is not None
def load_system_notification():
    """Return one JSON page of system notifications for the admin grid.

    Query-string parameters: ``page_size`` (default 10), ``page_number``
    (default 1) and ``sort_order`` ('asc'/'desc', applied to
    ``Notification.create_time``). ``search_text`` and ``sort_name`` were
    read by the original code but never used: both branches of the
    ``search_text`` check ran the identical statement, so the duplicated
    branches are collapsed here.

    Returns a JSON string ``{"total": <int>, "rows": [...]}``.
    """
    page_size = int(request.args.get('page_size', '10'))
    page_number = int(request.args.get('page_number', '1'))
    sort_order = request.args.get('sort_order', 'asc')
    order = (db.asc(Notification.create_time) if sort_order == 'asc'
             else db.desc(Notification.create_time))
    pagination = Notification.query.order_by(order).paginate(
        page_number, page_size, False)
    notifications = [{
        'id': item.id,
        'title': item.title,
        'content': item.content,
        'create_time': item.create_time.strftime("%Y-%m-%d")
        if item.create_time else None
    } for item in pagination.items]
    return json.dumps({"total": pagination.total, "rows": notifications})
def userList():
    """Active users (``status == 1``) as JSON, oldest registrations first."""
    active_users = users.query.filter_by(status=1).order_by(
        db.asc(users.add_time)).all()
    content = []
    for user in active_users:
        content.append({
            "id": user.id,
            "name": user.username,
            "email": user.email,
            "account_type": user.account_type,
            "status": user.status,
            "add_time": user.add_time.strftime('%Y-%m-%d %H:%M:%S'),
        })
    return make_response(jsonify({'code': 0, 'msg': '', 'content': content}))
def index(page=None):
    """Movie list page with optional tag/star filters and sort toggles.

    Query-string parameters: ``tid`` (tag id), ``star``, and three sort
    flags ``time``/``pn``/``cn`` where 1 means descending, any other
    non-zero value ascending, and 0 leaves that ordering off. The three
    duplicated if/else ladders of the original are collapsed into one
    table-driven loop; the applied order is unchanged (time, then play
    count, then comment count).
    """
    tags = Tag.query.all()
    tid = int(request.args.get('tid', 0))
    star = int(request.args.get('star', 0))
    time = int(request.args.get('time', 0))
    play_num = int(request.args.get('pn', 0))
    comment_num = int(request.args.get('cn', 0))
    page_data = Movie.query
    if tid != 0:
        page_data = page_data.filter_by(tag_id=tid)
    if star != 0:
        page_data = page_data.filter_by(star=star)
    # Apply each requested ordering: 1 -> descending, otherwise ascending.
    for flag, column in ((time, Movie.release_time),
                         (play_num, Movie.playnum),
                         (comment_num, Movie.commentnum)):
        if flag != 0:
            order = db.desc(column) if flag == 1 else db.asc(column)
            page_data = page_data.order_by(order)
    if page is None:
        page = 1
    page_data = page_data.paginate(page=page, per_page=10)
    # Echo the active filters back to the template for link building.
    p = dict(tid=tid, star=star, time=time, pn=play_num, cn=comment_num)
    return render_template("home/index.html", tags=tags, p=p,
                           page_data=page_data)
class TF2Item(db.Model):
    """One Team Fortress 2 schema item, keyed by its Steam ``defindex``."""
    __tablename__ = "tf2_schema"
    defindex = db.Column(db.Integer, primary_key=True, autoincrement=False)
    item_name = db.Column(db.String(256, collation="utf8_swedish_ci"))
    proper_name = db.Column(db.Boolean)
    item_slot = db.Column(db.String(64))
    image_url = db.Column(db.String(256))
    image_url_large = db.Column(db.String(256))
    image_inventory = db.Column(db.String(256))
    inactive = db.Column(db.Boolean, default=False)

    # Relationships
    equip_regions = db.relationship('TF2EquipRegion',
                                    secondary=schema_equipregion,
                                    backref=db.backref('tf2_item',
                                                       lazy="dynamic"),
                                    lazy="subquery")
    bodygroups = db.relationship('TF2BodyGroup',
                                 secondary=schema_bodygroup,
                                 backref=db.backref('tf2_item',
                                                    lazy="dynamic"),
                                 lazy="subquery")
    # Per-class model paths, keyed by class name via the mapped collection.
    class_model = db.relationship(
        'TF2ClassModel',
        backref=db.backref('tf2_item'),
        collection_class=attribute_mapped_collection('class_name'),
        lazy="subquery",
        cascade='all')

    # NOTE(review): mapper-level order_by was removed in SQLAlchemy 1.1+ —
    # verify the pinned SQLAlchemy version still supports it.
    __mapper_args__ = {
        "order_by": [db.asc(defindex)]
    }

    def __repr__(self):
        return u"{} (defindex: {})".format(self.item_name, self.defindex)

    def __init__(self, defindex=None, item_name=None, proper_name=None,
                 item_slot=None, image_url=None, image_url_large=None,
                 image_inventory=None, class_model=None,
                 _equip_regions=None, _bodygroups=None):
        self.defindex = defindex
        self.item_name = item_name
        self.proper_name = proper_name
        self.item_slot = item_slot
        self.image_url = image_url
        self.image_url_large = image_url_large
        self.image_inventory = image_inventory
        # Resolve/attach related rows, creating them if missing.
        if class_model:
            for class_name, model in class_model.items():
                self.class_model[class_name] = get_or_create(
                    db.session, TF2ClassModel, defindex=defindex,
                    class_name=class_name, model_path=model)
        if _equip_regions:
            for equip_region in _equip_regions:
                self.equip_regions.append(get_or_create(
                    db.session, TF2EquipRegion, equip_region=equip_region))
        if _bodygroups:
            for bodygroup in _bodygroups:
                self.bodygroups.append(get_or_create(
                    db.session, TF2BodyGroup, bodygroup=bodygroup))
class ModAuthor(db.Model):
    """Association between a mod and one of its authors, with ordering."""
    __tablename__ = "mod_author"
    mod_id = db.Column(db.Integer, db.ForeignKey('mods.id'),
                       primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.account_id'),
                        primary_key=True)
    # Position of this author in the mod's author list (0 first).
    order = db.Column(db.Integer, default=0, nullable=False)

    user = db.relationship("User", backref="author")
    mod = db.relationship("Mod", backref="author")

    # NOTE(review): mapper-level order_by was removed in SQLAlchemy 1.1+ —
    # verify the pinned SQLAlchemy version still supports it.
    __mapper_args__ = {"order_by": [db.asc(order)]}

    def __repr__(self):
        return u"{user.name} is an author of mod {mod.pretty_name}".format(
            user=self.user, mod=self.mod)
def getLibKeywords():
    """Keywords of one keyword library (request ``id``) as JSON, oldest first."""
    lib_id = request.values.get("id")
    keywords = CaseKeywords.query.filter(
        db.and_(CaseKeywords.lib_id == lib_id)).order_by(
            db.asc(CaseKeywords.add_time)).all()
    content = [{
        'id': keyword.id,
        'name_en': keyword.name_en,
        'name_zh': keyword.name_zh,
        'shortdoc': keyword.shortdoc,
        'doc': keyword.doc,
        'args': keyword.args,
        'add_time': keyword.add_time.strftime('%Y-%m-%d %H:%M:%S'),
    } for keyword in keywords]
    return make_response(jsonify({'code': 0, 'content': content, 'msg': u''}))
def to_object(self):
    """ Get it as an object: the gallery serialized to a plain dict,
    photos ordered by creation time then reverse name. """
    photos = (db.session.query(Photo)
              .filter(Photo.gallery_id == self.id)
              .order_by(db.asc(Photo.created), db.desc(Photo.name)))
    return {
        "id": self.id,
        "name": self.name,
        "folder": self.folder,
        "share_code": self.share_code,
        "modified": self.modified.strftime('%Y-%m-%d %H:%M:%S'),
        "created": self.created.strftime('%Y-%m-%d %H:%M:%S'),
        "photos": [photo.to_object() for photo in photos],
    }
def proxyConfigList():
    """All proxy configurations as JSON, oldest first.

    The submitting user's name is attached to each row. Usernames are
    resolved with a single batched ``IN`` query instead of the original
    per-row lookup (an N+1 query pattern); rows whose user is missing get
    an empty ``add_user`` exactly as before.
    """
    configs = ProxyConfig.query.order_by(db.asc(ProxyConfig.add_time)).all()
    content = []
    if configs:
        user_ids = {item.user_id for item in configs}
        usernames = {
            user.id: user.username
            for user in users.query.filter(users.id.in_(user_ids)).all()
        }
        for item in configs:
            content.append({
                "id": item.id,
                "name": item.name,
                "path": item.path,
                "add_user": usernames.get(item.user_id, ""),
                "add_time": item.add_time.strftime('%Y-%m-%d %H:%M:%S'),
                "Description": "",
                "browserType": item.browser_type,
            })
    return make_response(jsonify({'code': 0, 'msg': '', 'content': content}))
def projectCaseList():
    """Cases of a project (``Tree.type == 2``) that have sample data.

    Reads the project ``id`` from the JSON body and returns
    ``{key, name}`` entries ordered by ``index_id``.
    """
    project_id = request.json.get("id")
    cases = Tree.query.filter(
        db.and_(Tree.project_id == project_id, Tree.type == 2)).order_by(
            db.asc(Tree.index_id)).all()
    content = []
    for case in cases:
        # Only cases that already have a Sample attached are listed.
        if Sample.query.filter_by(pid=case.id).first():
            content.append({
                "key": case.id,
                "name": case.name,
            })
    # Bug fix: response message typo 'sucess' corrected to 'success'.
    return make_response(
        jsonify({
            'code': 0,
            'msg': 'success',
            'content': content
        }))
def get(self, contigset_id):
    """Return a filtered, sorted, paginated slice of a contig set's contigs.

    Request args: ``fields`` (columns to load/serialize), ``sort``
    ('-' prefix for descending), ``length`` (number with optional '-' /
    '+' suffix for less-than / greater-than), ``bins`` (comma-separated
    bin ids), ``coverages`` flag, and pagination ``index`` / ``_items``.
    """
    args = self.reqparse.parse_args()
    contigs = user_contigset_or_404(contigset_id).contigs
    if args.fields:
        fields = args.fields.split(',')
        contigs = contigs.options(db.load_only(*fields))
    if args.sort:
        order = db.desc(args.sort[1:]) if args.sort[0] == '-' else db.asc(args.sort)
        contigs = contigs.order_by(order)
    if args.length:
        # Strip a trailing comparison suffix before validating the number.
        length = args.length.rstrip('-').rstrip('+')
        if not length.isnumeric():
            # NOTE(review): bare return makes Flask serialize None for a
            # malformed length — confirm a 400 response wouldn't be better.
            return
        length = int(length)
        if args.length.endswith('-'):
            filter = Contig.length < length
        elif args.length.endswith('+'):
            filter = Contig.length > length
        else:
            filter = Contig.length == length
        contigs = contigs.filter(filter)
    if args.bins:
        bin_ids = args.bins.split(',')
        contigs = contigs.join((Bin, Contig.bins)).filter(Bin.id.in_(bin_ids))
    if args.coverages:
        # Eager-load coverages to avoid per-contig queries while serializing.
        contigs = contigs.options(db.joinedload('coverages'))
    contig_pagination = contigs.paginate(args.index, args._items, False)
    result = []
    for contig in contig_pagination.items:
        r = {}
        if args.fields:
            for field in fields:
                r[field] = getattr(contig, field)
        if args.coverages:
            for cov in contig.coverages:
                r[cov.name] = cov.value
        result.append(r)
    return {'contigs': result,
            'indices': contig_pagination.pages,
            'index': args.index,
            'count': contigs.count(),
            'items': args._items}
class User(db.Model):
    """A Steam-backed site user with replay-related relationships."""
    __tablename__ = "users"
    id = db.Column(db.Integer, primary_key=True)  # Steam account id
    name = db.Column(db.String(50), nullable=False)
    email = db.Column(db.String(64), unique=False, nullable=True)
    enabled = db.Column(db.Boolean, default=True, nullable=False)
    first_seen = db.Column(db.DateTime, default=datetime.datetime.utcnow,
                           index=True)
    last_seen = db.Column(db.DateTime, default=datetime.datetime.utcnow)
    admin = db.Column(db.Boolean, default=False)
    show_ads = db.Column(db.Boolean, default=True)

    replay_ratings = db.relationship('ReplayRating', backref='user',
                                     lazy='dynamic',
                                     cascade="all, delete-orphan")
    favourites = db.relationship('ReplayFavourite', backref='user',
                                 lazy='dynamic',
                                 cascade="all, delete-orphan")
    downloads = db.relationship('ReplayDownload', backref="user",
                                lazy="dynamic", cascade="all")
    subscriptions = db.relationship('Subscription', backref="user",
                                    lazy="dynamic", cascade="all")
    searches = db.relationship('Search', backref="user", lazy="dynamic",
                               cascade="all")
    logs_resolved = db.relationship('Log', backref='resolved_by_user',
                                    lazy='dynamic', cascade='all')
    replay_aliases = db.relationship('ReplayAlias', backref='user',
                                     lazy='dynamic', cascade="all")
    replay_players = db.relationship('ReplayPlayer', backref='user',
                                     lazy='dynamic',
                                     order_by='ReplayPlayer.replay_id')

    # Set default order by.
    # NOTE(review): mapper-level order_by was removed in SQLAlchemy 1.1+ —
    # verify the pinned SQLAlchemy version still supports it.
    __mapper_args__ = {"order_by": [db.asc(first_seen)]}

    # Offset converting a 32-bit account id to a 64-bit Steam id.
    ACCOUNT_ID_TO_STEAM_ID_CORRECTION = 76561197960265728

    def __init__(self, _id=None, name=None, enabled=True):
        self.id = _id
        self.name = name
        self.enabled = enabled

    def __repr__(self):
        return self.name

    def get_id(self):
        # Flask-Login identity (Python 2 `unicode`).
        return unicode(self.id)

    def is_active(self):
        return self.enabled

    def is_anonymous(self):
        return False

    def is_authenticated(self):
        return True

    def is_admin(self):
        return self.admin

    def update_last_seen(self):
        # Called every page load for current_user
        self.last_seen = datetime.datetime.utcnow()
        db.session.add(self)
        db.session.commit()

    def update_steam_name(self):
        """ Update user's name from their name on Steam."""
        try:
            steam_account_info = steam.user.profile(self.steam_id)
            if steam_account_info is not None:
                if self.name != steam_account_info.persona:
                    self.name = steam_account_info.persona
                    db.session.add(self)
                    db.session.commit()
        except steam.api.SteamError:
            # Best effort: keep the cached name when the Steam API fails.
            pass

    def allows_ads(self):
        return self.show_ads

    def get_language(self):
        # TODO: When we do localization
        return "english"

    @property
    def is_premium(self):
        # Premium == at least one unexpired subscription.
        subscription = self.subscriptions.filter(
            Subscription.expires_at >= datetime.datetime.utcnow()).first()
        return bool(subscription)

    @property
    def steam_id(self):
        return self.id + User.ACCOUNT_ID_TO_STEAM_ID_CORRECTION
class User(db.Model):
    """A Steam-backed site user with challenge-related relationships."""
    __tablename__ = "users"
    id = db.Column(db.Integer, primary_key=True)  # Steam account id
    name = db.Column(db.String(50), nullable=False)
    email = db.Column(db.String(64), unique=False, nullable=True)
    enabled = db.Column(db.Boolean, default=True, nullable=False)
    first_seen = db.Column(db.DateTime, default=datetime.utcnow)
    last_seen = db.Column(db.DateTime, default=datetime.utcnow)
    admin = db.Column(db.Boolean, default=False)
    show_ads = db.Column(db.Boolean, default=True)

    logs_resolved = db.relationship('Log', backref='resolved_by_user',
                                    lazy='dynamic', cascade='all')
    challenges = db.relationship('Challenge', backref='user',
                                 lazy='dynamic', cascade="all")

    # Set default order by
    __mapper_args__ = {"order_by": [db.asc(first_seen)]}

    # Offset converting a 32-bit account id to a 64-bit Steam id.
    ACCOUNT_ID_TO_STEAM_ID_CORRECTION = 76561197960265728

    def __init__(self, _id=None, name=None, enabled=True):
        self.id = _id
        self.name = name
        self.enabled = enabled

    def __repr__(self):
        return self.name

    def get_id(self):
        # Flask-Login identity (Python 2 `unicode`).
        return unicode(self.id)

    def is_active(self):
        return self.enabled

    def is_anonymous(self):
        return False

    def is_authenticated(self):
        return True

    def is_admin(self):
        return self.admin

    def update_last_seen(self):
        # Called every page load for current_user
        self.last_seen = datetime.utcnow()
        db.session.add(self)
        db.session.commit()

    def update_steam_name(self):
        # Called every page load for current_user (API is cached)
        try:
            # Bug fixes: the API call is now inside the try so HTTPError is
            # actually caught, and the name comparison uses `!=` instead of
            # `is not` (identity on strings is never a reliable equality test).
            steam_account_info = steam.user.profile(self.steam_id)
            if steam_account_info is not None:
                if self.name != steam_account_info.persona:
                    self.name = steam_account_info.persona
                    db.session.add(self)
                    db.session.commit()
        except steam.api.HTTPError:
            # Best effort: keep the cached name when the Steam API fails.
            pass

    def allows_ads(self):
        return self.show_ads

    @property
    def steam_id(self):
        return self.id + User.ACCOUNT_ID_TO_STEAM_ID_CORRECTION

    def get_active_challenge(self):
        """Return the challenge currently in progress, or None."""
        return self.challenges.filter(
            Challenge.start_at <= datetime.utcnow(),
            Challenge.end_at >= datetime.utcnow()).first()
class User(db.Model):
    """A Steam-backed site user whose profile data is mirrored from Steam."""
    __tablename__ = "users"
    account_id = db.Column(db.Integer, primary_key=True, autoincrement=False)
    name = db.Column(db.String(256, collation="utf8_swedish_ci"),
                     default=account_id)
    profile_url = db.Column(db.String(128))
    avatar_small = db.Column(db.String(128))
    avatar_medium = db.Column(db.String(128))
    avatar_large = db.Column(db.String(128))
    joined = db.Column(db.DateTime, default=datetime.datetime.utcnow,
                       nullable=False)
    last_seen = db.Column(db.DateTime, default=datetime.datetime.utcnow,
                          nullable=False)
    # NOTE(review): this default calls utcnow() at import time, so every row
    # gets the same fixed timestamp until the process restarts — likely the
    # intent was a callable; confirm before changing.
    next_steam_check = db.Column(db.DateTime,
                                 default=datetime.datetime.utcnow() +
                                 datetime.timedelta(hours=4))
    user_class = db.Column(db.Integer, default=0)  # >1 means admin
    upload_credits = db.Column(db.Integer, default=0)  # -1 means unlimited
    signed_in = db.Column(db.Boolean, default=True)
    enabled = db.Column(db.Boolean, default=True)

    # NOTE(review): mapper-level order_by was removed in SQLAlchemy 1.1+ —
    # verify the pinned SQLAlchemy version still supports it.
    __mapper_args__ = {
        "order_by": [db.asc(joined)]
    }

    def __init__(self, account_id=None, signed_in=None, last_seen=None):
        self.account_id = account_id
        self.signed_in = signed_in
        self.last_seen = last_seen
        # Populate profile fields from Steam immediately on creation.
        self.fetch_steam_info()

    def __repr__(self):
        return self.name

    def get_id(self):
        # Flask-Login identity (Python 2 `unicode`).
        return unicode(self.account_id)

    def is_active(self):
        return self.enabled

    @staticmethod
    def is_anonymous():
        return False

    @staticmethod
    def is_authenticated():
        return True

    def is_admin(self):
        return True if self.user_class > 1 else False

    def is_uploader(self):
        # Admins always may upload; -1 credits means unlimited uploads.
        return True if self.upload_credits > 0 or \
            self.upload_credits == -1 or self.is_admin() else False

    def update_last_seen(self):
        """Touch last_seen and re-sync from Steam when the check is due."""
        now = datetime.datetime.utcnow()
        if not self.next_steam_check:
            self.next_steam_check = datetime.datetime.utcnow()
        self.last_seen = now
        if self.next_steam_check < now:
            self.fetch_steam_info()
        db.session.add(self)
        db.session.commit()

    def fetch_steam_info(self):
        """Fetch this user's Steam profile and copy it onto the row."""
        steam_info = steam.user.profile(self.steam_id)
        self.update_steam_info(steam_info)

    def update_steam_info(self, steam_info):
        """Copy profile fields; on API failure retry sooner (30 min)."""
        try:
            self.name = steam_info.persona
            self.profile_url = steam_info.profile_url
            self.avatar_small = steam_info.avatar_small
            self.avatar_medium = steam_info.avatar_medium
            self.avatar_large = steam_info.avatar_large
            self.next_steam_check = datetime.datetime.utcnow() + \
                datetime.timedelta(hours=4)
        except (HTTPError, HTTPTimeoutError):
            self.next_steam_check = datetime.datetime.utcnow() + \
                datetime.timedelta(minutes=30)

    @property
    def steam_id(self):
        # 32-bit account id + Steam's 64-bit id offset.
        return self.account_id + 76561197960265728

    @property
    def perma_profile_url(self):
        return "http://steamcommunity.com/profiles/{}".format(self.steam_id)
def activities_ordered_by_first(self):
    """All of this user's activities, earliest timestamp first."""
    ordered = self.activities.order_by(db.asc(Activity.timestamp))
    return ordered.all()
def getTaskInfo(taskId):
    """Assemble the full execution payload for a task.

    Resolves the task's selected cases to their project, loads project and
    suite library configurations, per-case steps (optionally filtered by
    the task's version), and the project's global values and file
    parameters. Returns a dict with ``project_name``, ``libs``,
    ``test_suites`` and ``globalValues``.
    """
    rowData = Task.query.filter_by(id=taskId).first()
    caseIds = json.loads(rowData.case_id)
    valueType = rowData.value_type
    versionId = rowData.version_id
    # The first selected case anchors the project lookup.
    projectId = Tree.query.filter_by(id=caseIds[0]).first().project_id
    projectRootData = Tree.query.filter(
        db.and_(Tree.project_id == projectId, Tree.pid == 0)).first()
    projectConfig = CaseProjectSetting.query.filter_by(
        pid=projectRootData.id).first()
    libs = []
    if projectConfig and projectConfig.libs:
        libDatas = CaseLibs.query.filter(
            db.and_(CaseLibs.id.in_(json.loads(projectConfig.libs)))).all()
        for lib in libDatas:
            libs.append(lib.name)
    # Load the data for every selected case.
    caseDatas = []
    for caseId in caseIds:
        caseInfo = Tree.query.filter_by(id=caseId).first()
        caseDetailData = CaseInfo.query.filter_by(pid=caseId).first()
        caseSteps = []
        if versionId:
            # Version given: include unversioned steps plus this version's,
            # excluding steps flagged deleted.
            caseStepDatas = CaseStep.query.filter(
                db.and_(
                    CaseStep.case_id == caseId,
                    CaseStep.delete_flag == 0,
                    db.or_(CaseStep.version_id == versionId,
                           CaseStep.version_id == None)),
            ).order_by(db.asc(CaseStep.indexId)).all()
        else:
            # No version: only the unversioned (baseline) steps.
            caseStepDatas = CaseStep.query.filter(
                db.and_(CaseStep.case_id == caseId,
                        CaseStep.version_id == None)).order_by(
                            db.asc(CaseStep.indexId)).all()
        for caseStep in caseStepDatas:
            caseSteps.append({
                'values': json.loads(caseStep.values),
                'id': caseStep.id,
            })
        # Drop baseline steps superseded by version-level deletes/overrides.
        # NOTE(review): items are removed from caseSteps while iterating it —
        # works here because iteration restarts per outer element, but fragile.
        deleteStepIds = CaseStep.query.filter(
            db.and_(CaseStep.delete_flag == 1,
                    CaseStep.version_id == versionId)).all()
        if deleteStepIds:
            for deleteStep in deleteStepIds:
                for caseStep in caseSteps:
                    if deleteStep.pid == caseStep['id']:
                        caseSteps.remove(caseStep)
        releaseIsoStepIds = CaseStep.query.filter(
            db.and_(CaseStep.delete_flag == 0,
                    CaseStep.version_id == versionId)).all()
        if releaseIsoStepIds:
            for releaseIsoStep in releaseIsoStepIds:
                for caseStep in caseSteps:
                    if releaseIsoStep.pid == caseStep['id']:
                        caseSteps.remove(caseStep)
        # Missing setup/teardown default to empty JSON lists.
        setUpData = caseDetailData.set_up if caseDetailData.set_up else '[]'
        tearDownData = caseDetailData.tear_down if caseDetailData.tear_down else '[]'
        caseDatas.append({
            'case_name': caseInfo.name,
            'caseId': caseId,
            'suiteId': caseInfo.pid,
            'case_steps': caseSteps,
            'setUp': json.loads(setUpData),
            'tearDown': json.loads(tearDownData),
        })
    test_suites = []
    # NOTE(review): `.all()[0]` keeps only the FIRST row of the distinct
    # parent ids, so only one suite is ever built — looks like a bug
    # (iterating all rows seems intended); confirm before changing.
    testSuiteIds = Tree.query.filter(db.and_(
        Tree.id.in_(caseIds))).with_entities(Tree.pid).distinct().all()[0]
    for suiteId in testSuiteIds:
        suiteData = Tree.query.filter_by(id=suiteId).first()
        test_cases = []
        for caseData in caseDatas:
            if caseData['suiteId'] == suiteId:
                test_cases.append(caseData)
        suiteConfig = CaseProjectSetting.query.filter_by(pid=suiteId).first()
        suitelibs = []
        # Default (lib_type == 1) libraries are always included.
        defaultLibs = CaseLibs.query.filter_by(lib_type=1).all()
        for lib in defaultLibs:
            suitelibs.append(lib.name)
        if suiteConfig:
            suiteLibDatas = CaseLibs.query.filter(
                db.and_(CaseLibs.id.in_(json.loads(suiteConfig.libs)))).all()
            for lib in suiteLibDatas:
                suitelibs.append(lib.name)
        test_suites.append({
            'name': suiteData.name,
            'suiteId': suiteId,
            'libs': suitelibs,
            'test_cases': test_cases,
        })
    globalValuesData = GlobalValues.query.filter(
        db.and_(GlobalValues.project_id == projectId,
                GlobalValues.value_type == valueType)).all()
    globalValues = []
    if globalValuesData:
        for valueData in globalValuesData:
            globalValues.append({
                'name': valueData.key_name,
                'value': valueData.key_value,
            })
    # Global file parameters: resolved to absolute paths under the app root,
    # with backslashes doubled on Windows.
    globalFilesData = ProjectFile.query.filter(
        db.and_(ProjectFile.pid == projectId)).all()
    if globalFilesData:
        for valueData in globalFilesData:
            if 'win32' in sys.platform:
                appRootPath = app.root_path.replace('\\', '\\\\')
                fileData = appRootPath + '\\\\' + valueData.key_value
            else:
                fileData = app.root_path + '/' + valueData.key_value
            globalValues.append({
                'name': valueData.key_name,
                'value': fileData,
            })
    taskInfo = {
        'project_name': projectRootData.name,
        'libs': libs,
        'test_suites': test_suites,
        'globalValues': globalValues,
    }
    return taskInfo
def fetch_acc(id_):
    """Fetch one batch of posts for an account from its remote service.

    Walks the account's history backwards (``max_id``) until complete,
    then fetches forward from the last known batch end (``since_id``).
    Temporary failures back the account off; permanent ones make it
    dormant. ``touch_fetch`` is recorded in ``finally`` so the account is
    not immediately re-picked after a failure.
    """
    account = Account.query.get(id_)
    print("Fetching {}".format(account))
    try:
        if not account.fetch_history_complete:
            oldest = (db.session.query(Post).with_parent(
                account, 'posts').order_by(db.asc(Post.created_at)).first())
            # ^ None if this is our first fetch ever, otherwise the oldest post
            if oldest:
                max_id = oldest.remote_id
            else:
                max_id = None
            since_id = None
        elif account.fetch_current_batch_end:
            oldest = (db.session.query(Post).with_parent(
                account, 'posts').filter(
                    Post.created_at >
                    account.fetch_current_batch_end.created_at).order_by(
                        db.asc(Post.created_at)).first())
            # ^ None if this is our first fetch of this batch, otherwise
            # oldest of this batch
            if oldest:
                max_id = oldest.remote_id
            else:
                max_id = None
            since_id = account.fetch_current_batch_end.remote_id
        else:
            # we shouldn't get here unless the user had no posts on the
            # service last time we fetched
            max_id = None
            latest = (db.session.query(Post).with_parent(
                account,
                'posts').order_by(db.desc(Post.created_at)).limit(1).scalar())
            # ^ should be None considering the user has no posts
            # will be the latest post in the off chance that something
            # goes weird
            if latest:
                since_id = latest.remote_id
            else:
                since_id = None
        # Dispatch to the service-specific fetcher; noop for unknown services.
        fetch_posts = noop
        if (account.service == 'twitter'):
            fetch_posts = libforget.twitter.fetch_posts
        elif (account.service == 'mastodon'):
            fetch_posts = libforget.mastodon.fetch_posts
        posts = fetch_posts(account, max_id, since_id)
        if posts is None:
            # ???
            raise TemporaryError("Fetching posts went horribly wrong")
        if (len([
                post for post in posts
                if post.remote_id not in (max_id, since_id)
        ]) == 0):
            # if there are no posts other than the edges
            # we either finished the historic fetch
            # or we finished the current batch
            account.fetch_history_complete = True
            batch_end = (Post.query.with_parent(account, 'posts').order_by(
                db.desc(Post.created_at)).first())
            if batch_end:
                account.fetch_current_batch_end_id = batch_end.id
            else:
                account.fetch_current_batch_end_id = None
            db.session.commit()
        else:
            for post in posts:
                db.session.merge(post)
            db.session.commit()
            if not account.fetch_history_complete:
                # reschedule immediately if we're still doing the
                # historic fetch
                print("{} is not done fetching history, resheduling.".format(
                    account))
                fetch_acc.apply_async((id_, ), countdown=1)
    except TemporaryError:
        db.session.rollback()
        account.backoff()
    except PermanentError:
        db.session.rollback()
        make_dormant(account)
        if sentry:
            sentry.captureException()
    finally:
        # Roll back anything uncommitted, then persist only the fetch
        # timestamp (and any backoff/dormant state set above).
        db.session.rollback()
        account.touch_fetch()
        db.session.commit()
def caseData():
    """Return one case's detail (info, ordered steps, author) as JSON.

    Reads ``caseId`` and optional ``versionId`` from the request. With a
    version, unversioned steps plus that version's non-deleted steps are
    loaded; baseline steps superseded by version-level deletes/overrides
    are then filtered out. Steps are renumbered 1..N for display.
    """
    id = request.values.get("caseId")
    versionId = request.values.get("versionId")
    caseData = CaseInfo.query.filter_by(pid=id).first()
    if versionId:
        caseStepDatas = CaseStep.query.filter(
            db.and_(
                CaseStep.case_id == id,
                CaseStep.delete_flag == 0,
                db.or_(CaseStep.version_id == versionId,
                       CaseStep.version_id == None),
            )).order_by(db.asc(CaseStep.indexId)).all()
    else:
        # No version: only the unversioned (baseline) steps.
        caseStepDatas = CaseStep.query.filter(
            db.and_(CaseStep.case_id == id,
                    CaseStep.version_id == None)).order_by(
                        db.asc(CaseStep.indexId)).all()
    caseSteps = []
    for index, item in enumerate(caseStepDatas):
        caseSteps.append({
            'id': item.id,
            'indexId': index + 1,  # display position, 1-based
            'versionId': item.version_id,
            'values': json.loads(item.values),
            'add_time': item.add_time.strftime('%Y-%m-%d %H:%M:%S'),
        })
    # Drop baseline steps superseded by version-level deletes/overrides.
    # NOTE(review): items are removed from caseSteps while iterating it —
    # fragile pattern, though each inner loop restarts per outer element.
    deleteStepIds = CaseStep.query.filter(
        db.and_(CaseStep.delete_flag == 1,
                CaseStep.version_id == versionId)).all()
    if deleteStepIds:
        for deleteStep in deleteStepIds:
            for caseStep in caseSteps:
                if deleteStep.pid == caseStep['id']:
                    caseSteps.remove(caseStep)
    releaseIsoStepIds = CaseStep.query.filter(
        db.and_(CaseStep.delete_flag == 0,
                CaseStep.version_id == versionId)).all()
    if releaseIsoStepIds:
        for releaseIsoStep in releaseIsoStepIds:
            for caseStep in caseSteps:
                if releaseIsoStep.pid == caseStep['id']:
                    caseSteps.remove(caseStep)
    user_data = users.query.filter(db.and_(users.id == caseData.user_id)).first()
    userName = ""
    if user_data:
        userName = user_data.username
    # Missing setup/teardown default to empty JSON lists.
    setUpData = caseData.set_up if caseData.set_up else '[]'
    tearDownData = caseData.tear_down if caseData.tear_down else '[]'
    content = {
        'name': caseData.name,
        'doc': caseData.doc,
        'setUp': json.loads(setUpData),
        'tearDown': json.loads(tearDownData),
        'caseId': caseData.pid,
        'caseStep': caseSteps,
        'userName': userName,
        'add_time': caseData.add_time.strftime('%Y-%m-%d %H:%M:%S'),
        # NOTE(review): update_time is sourced from add_time — confirm
        # whether an actual update timestamp column exists and was intended.
        'update_time': caseData.add_time.strftime('%Y-%m-%d %H:%M:%S'),
    }
    return make_response(
        jsonify({'code': 0, 'content': content, 'msg': ''}))