def view():
    """ WIP: View admins. """
    if not current_user.is_admin():
        abort(404)

    # Metadata rows flagging which users hold the admin bit.
    admin_meta = UserMetadata.select().where(UserMetadata.key == "admin")

    # Per-user post totals, exposed as a joinable subquery.
    posts_by_user = (
        SubPost.select(SubPost.uid, fn.Count(SubPost.pid).alias("post_count"))
        .group_by(SubPost.uid)
        .alias("post_count")
    )
    # Per-user comment totals.
    comments_by_user = (
        SubPostComment.select(
            SubPostComment.uid,
            fn.Count(SubPostComment.cid).alias("comment_count"),
        )
        .group_by(SubPostComment.uid)
        .alias("j2")
    )

    # LEFT OUTER joins keep admins with zero posts/comments in the listing.
    query = (
        User.select(
            User.name,
            User.status,
            User.uid,
            User.joindate,
            posts_by_user.c.post_count.alias("post_count"),
            comments_by_user.c.comment_count,
        )
        .join(posts_by_user, JOIN.LEFT_OUTER, on=User.uid == posts_by_user.c.uid)
        .join(comments_by_user, JOIN.LEFT_OUTER, on=User.uid == comments_by_user.c.uid)
        .where(User.uid << [meta.uid for meta in admin_meta])
        .order_by(User.joindate.asc())
        .dicts()
    )
    return render_template("admin/users.html", users=query, admin_route="admin.view")
def _base_next_unchecked(cls):
    """Base query for solutions awaiting a check.

    Selects CREATED solutions ordered so that the least-discussed,
    least-failing, oldest submissions come first.

    NOTE(review): presumably called with a Solution (sub)class as `cls` —
    confirm at call sites.
    """
    # Aliased aggregates so they can be reused in the ORDER BY clause below.
    comments_count = fn.Count(Comment.id).alias('comments_count')
    fails = fn.Count(SolutionExerciseTestExecution.id).alias('failures')
    return cls.select(
        cls.id,
        cls.state,
        cls.exercise,
        comments_count,
        fails,
    ).join(
        SolutionFile,
        join_type=JOIN.LEFT_OUTER,
        on=(SolutionFile.solution == cls.id),
    ).join(
        Comment,
        join_type=JOIN.LEFT_OUTER,
        on=(Comment.file == SolutionFile.id),
    ).join(
        SolutionExerciseTestExecution,
        join_type=JOIN.LEFT_OUTER,
        on=(SolutionExerciseTestExecution.solution == cls.id),
    ).where(
        cls.state == Solution.STATES.CREATED.name,
    ).group_by(
        cls.id,
    ).order_by(
        comments_count,
        fails,
        cls.submission_timestamp.asc(),
    )
def view():
    """ WIP: View admins. """
    # Guard clause instead of wrapping the whole body in if/else; matches
    # the guard style used by the other admin views in this module.
    if not current_user.is_admin():
        abort(404)
    admins = UserMetadata.select().where(UserMetadata.key == 'admin')
    # Per-user post count subquery.
    postcount = SubPost.select(
        SubPost.uid,
        fn.Count(SubPost.pid).alias('post_count')).group_by(
            SubPost.uid).alias('post_count')
    # Per-user comment count subquery.
    commcount = SubPostComment.select(
        SubPostComment.uid,
        fn.Count(SubPostComment.cid).alias('comment_count')).group_by(
            SubPostComment.uid).alias('j2')
    users = User.select(User.name, User.status, User.uid, User.joindate,
                        postcount.c.post_count.alias('post_count'),
                        commcount.c.comment_count)
    # LEFT OUTER so admins with no posts/comments still appear.
    users = users.join(postcount, JOIN.LEFT_OUTER,
                       on=User.uid == postcount.c.uid)
    users = users.join(commcount, JOIN.LEFT_OUTER,
                       on=User.uid == commcount.c.uid)
    users = users.where(User.uid << [x.uid for x in admins]).order_by(
        User.joindate.asc()).dicts()
    return render_template('admin/users.html', users=users,
                           admin_route='admin.view')
def users_search(term):
    """ WIP: Search users. """
    # Guard clause instead of if/else nesting (consistent with other views).
    if not current_user.is_admin():
        abort(404)
    # Strip anything outside the characters allowed in user names.
    term = re.sub(r'[^A-Za-z0-9.\-_]+', '', term)
    # Per-user post count subquery.
    postcount = SubPost.select(
        SubPost.uid,
        fn.Count(SubPost.pid).alias('post_count')).group_by(
            SubPost.uid).alias('post_count')
    # Per-user comment count subquery.
    commcount = SubPostComment.select(
        SubPostComment.uid,
        fn.Count(SubPostComment.cid).alias('comment_count')).group_by(
            SubPostComment.uid).alias('j2')
    users = User.select(User.name, User.status, User.uid, User.joindate,
                        postcount.c.post_count, commcount.c.comment_count)
    users = users.join(postcount, JOIN.LEFT_OUTER,
                       on=User.uid == postcount.c.uid)
    users = users.join(commcount, JOIN.LEFT_OUTER,
                       on=User.uid == commcount.c.uid)
    users = users.where(User.name.contains(term)).order_by(
        User.joindate.desc()).dicts()
    return render_template('admin/users.html', users=users, term=term,
                           admin_route='admin.users_search')
def users(page):
    """ WIP: View users. """
    if not current_user.is_admin():
        abort(404)

    # Subquery: how many posts each user has made.
    post_totals = (
        SubPost.select(SubPost.uid, fn.Count(SubPost.pid).alias("post_count"))
        .group_by(SubPost.uid)
        .alias("post_count")
    )
    # Subquery: how many comments each user has made.
    comment_totals = (
        SubPostComment.select(
            SubPostComment.uid,
            fn.Count(SubPostComment.cid).alias("comment_count"),
        )
        .group_by(SubPostComment.uid)
        .alias("j2")
    )

    # Newest accounts first, 50 per page; LEFT OUTER keeps zero-activity users.
    listing = (
        User.select(
            User.name,
            User.status,
            User.uid,
            User.joindate,
            post_totals.c.post_count.alias("post_count"),
            comment_totals.c.comment_count,
        )
        .join(post_totals, JOIN.LEFT_OUTER, on=User.uid == post_totals.c.uid)
        .join(comment_totals, JOIN.LEFT_OUTER, on=User.uid == comment_totals.c.uid)
        .order_by(User.joindate.desc())
        .paginate(page, 50)
        .dicts()
    )
    return render_template(
        "admin/users.html", users=listing, page=page, admin_route="admin.users"
    )
def adminhome():
    """Render the admin dashboard with usage counts and top-10 report lists.

    On a database error the page is still rendered with the defaults below,
    after flashing the error.
    """
    room_count = 0
    personnel_count = 0
    views = 0
    # BUG FIX: previously only assigned inside the try-block, so an early
    # failure made the except-branch render raise NameError; default it.
    latest_views = []
    # Ten most frequently reported rooms / services.
    freq_rooms = RoomReport.select(
        fn.Count(RoomReport.id).alias('count'),
        Room.room_name).join(Room).group_by(RoomReport.room_report).order_by(
            fn.Count(RoomReport.id).desc()).limit(10)
    freq_services = ServiceReport.select(
        fn.Count(ServiceReport.id).alias('count'),
        Service.service_name).join(Service).group_by(
            ServiceReport.service_report).order_by(
                fn.Count(ServiceReport.id).desc()).limit(10)
    try:
        room_count = int(Room.select().count())
        personnel_count = int(Personnel.select().count())
        views = int(RoomReport.select().count())
        latest_views = RoomReport.select().order_by(
            RoomReport.report_date.desc()).limit(10)
    except Exception as e:
        flash(e)
    # Single render path: whatever was gathered before a failure is shown.
    return render_template('admindashboard.html',
                           room_count=room_count,
                           personnel_count=personnel_count,
                           views=views,
                           latest_views=latest_views,
                           freq_rooms=freq_rooms,
                           freq_services=freq_services)
def users_search(term):
    """ WIP: Search users. """
    if not current_user.is_admin():
        abort(404)

    # Keep only characters that can legally appear in a user name.
    term = re.sub(r"[^A-Za-z0-9.\-_]+", "", term)

    post_totals = (
        SubPost.select(SubPost.uid, fn.Count(SubPost.pid).alias("post_count"))
        .group_by(SubPost.uid)
        .alias("post_count")
    )
    comment_totals = (
        SubPostComment.select(
            SubPostComment.uid,
            fn.Count(SubPostComment.cid).alias("comment_count"),
        )
        .group_by(SubPostComment.uid)
        .alias("j2")
    )

    # Substring match on the sanitized term, newest accounts first.
    matches = (
        User.select(
            User.name,
            User.status,
            User.uid,
            User.joindate,
            post_totals.c.post_count,
            comment_totals.c.comment_count,
        )
        .join(post_totals, JOIN.LEFT_OUTER, on=User.uid == post_totals.c.uid)
        .join(comment_totals, JOIN.LEFT_OUTER, on=User.uid == comment_totals.c.uid)
        .where(User.name.contains(term))
        .order_by(User.joindate.desc())
        .dicts()
    )
    return render_template(
        "admin/users.html", users=matches, term=term, admin_route="admin.users_search"
    )
def graphdata(operation):
    """Return chart data as JSON for the admin dashboard.

    operation == "weekly": per-day room/service report counts for the last
    seven days. operation == "monthly": per-month room report counts up to
    the current month. Any other value is rejected with a 400 (previously
    this fell through and returned None, a 500 in Flask).
    """
    current_date = datetime.now()
    if operation == "weekly":
        start_of_week = current_date - timedelta(days=6)
        labels = []
        days = []
        services = []
        for x in range(0, 7):
            date = start_of_week + timedelta(days=x)
            if date <= current_date:
                # NOTE(review): matching on day-of-month alone presumably
                # relies on the 7-day window never containing equal day
                # numbers from different months/years -- confirm.
                count = RoomReport.select(
                    fn.Count(RoomReport.report_date.day).alias('count'),
                    RoomReport.report_date).where(
                        RoomReport.report_date.day == date.day).group_by(
                            RoomReport.report_date.day)
                if count.exists():
                    for c in count:
                        days.append(c.count)
                else:
                    days.append(0)
                count = ServiceReport.select(
                    fn.Count(ServiceReport.report_date.day).alias('count'),
                    ServiceReport.report_date).where(
                        ServiceReport.report_date.day == date.day).group_by(
                            ServiceReport.report_date.day)
                if count.exists():
                    for c in count:
                        services.append(c.count)
                else:
                    services.append(0)
                labels.append(date.strftime('%b-%d-%Y'))
        return jsonify({
            "labels": labels,
            "days": days,
            "services": services,
        })
    elif operation == "monthly":
        months = []
        labels = []
        count = RoomReport.select(
            fn.Count(RoomReport.report_date.month).alias('count'),
            RoomReport.report_date).where(
                RoomReport.report_date.month <= current_date.month,
                RoomReport.report_date.year >= current_date.year).group_by(
                    RoomReport.report_date.month)
        if count.exists():
            for c in count:
                months.append(c.count)
                labels.append(c.report_date.strftime("%B-%Y"))
        return jsonify({
            "labels": labels,
            "months": months,
        })
    # Explicit rejection of unknown operations instead of an implicit None.
    return jsonify({"error": "unknown operation"}), 400
def context_list():
    """Return per-context entry and distinct-line counts as a JSON payload."""
    contexts = list(
        Entry.select(
            Entry.context,
            fn.Count(Entry.id).alias("entries"),
            # Distinct because several entries may reference the same line.
            fn.Count(fn.Distinct(Entry.line)).alias("lines"),
        ).group_by(Entry.context).dicts())
    # Removed a leftover debug print() of the result set.
    return jsonify(dict(ok=True, data=contexts))
def admin_api(function):
    """Dispatch admin API calls by `function` name; 404 on unknown names."""
    if function == 'participants':
        parts = list(Participant.select().dicts())
        return make_response(jsonify({'participants': parts}), 200)
    elif function == 'degrees':
        degrees = list(Degree.select().dicts())
        return make_response(jsonify({'degrees': degrees}), 200)
    elif function == 'chairs':
        chairs = list(Chair.select().dicts())
        return make_response(jsonify({'chairs': chairs}), 200)
    elif function == 'courses':
        courses = list(Course.select().dicts())
        return make_response(jsonify({'courses': courses}), 200)
    elif function == 'toggle_registration':
        # Registration is "open" while the marker file is absent.
        if registration_active():
            open('./reg_inactive', 'a').close()
        else:
            os.remove('./reg_inactive')
        return make_response(
            jsonify({
                'status': 'success',
                'registration': registration_active()
            }),
            200
        )
    elif function == 'stats':
        # Removed an unused `degrees = Degree.select()` that was never read.
        degree_counts = {
            d.name: d.count
            for d in Degree.select(
                Degree,
                fn.Count(Participant.id).alias('count')
            ).join(Participant).group_by(Degree)
        }
        # NOTE(review): Count() of guests counts participant rows, not the
        # summed guest total -- confirm this is intended.
        degree_guests = {
            d.name: d.guests
            for d in Degree.select(
                Degree,
                fn.Count(Participant.guests).alias('guests')
            ).join(Participant).group_by(Degree)
        }
        stats = {
            'degree_counts': degree_counts,
            'degree_guests': degree_guests,
            'participant_count': Participant.select().count(),
            'guest_count': (Participant
                            .select(fn.SUM(Participant.guests))
                            .scalar())
        }
        return make_response(
            jsonify(stats)
        )
    abort(404)
def get_teams_within_org(organization, has_external_auth=False):
    """
    Returns a AttrDict of team info (id, name, description), its role under the org,
    the number of repositories on which it has permission, and the number of members.
    """
    query = Team.select().where(
        Team.organization == organization).join(TeamRole)

    def _team_view(team):
        # Baseline view; repo/member counts and the sync flag are filled in
        # by the follow-up queries below.
        return {
            "id": team.id,
            "name": team.name,
            "description": team.description,
            "role_name": Team.role.get_name(team.role_id),
            "repo_count": 0,
            "member_count": 0,
            "is_synced": False,
        }

    teams = {team.id: _team_view(team) for team in query}
    if not teams:
        # Just in case. Should ideally never happen.
        return []

    # Add repository permissions count.
    permission_tuples = (RepositoryPermission.select(
        RepositoryPermission.team,
        fn.Count(RepositoryPermission.id)).where(
            RepositoryPermission.team << list(teams.keys())).group_by(
                RepositoryPermission.team).tuples())
    for perm_tuple in permission_tuples:
        teams[perm_tuple[0]]["repo_count"] = perm_tuple[1]

    # Add the member count.
    members_tuples = (TeamMember.select(TeamMember.team, fn.Count(
        TeamMember.id)).where(TeamMember.team << list(teams.keys())).group_by(
            TeamMember.team).tuples())
    for member_tuple in members_tuples:
        teams[member_tuple[0]]["member_count"] = member_tuple[1]

    # Add syncing information.
    if has_external_auth:
        sync_query = TeamSync.select(
            TeamSync.team).where(TeamSync.team << list(teams.keys()))
        for team_sync in sync_query:
            teams[team_sync.team_id]["is_synced"] = True

    return [AttrDict(team_info) for team_info in list(teams.values())]
def cache_namespace_repository_sizes(namespace_name):
    """Insert RepositorySize cache rows for repositories in a namespace that
    do not have one yet.

    Only repositories with at least one visible, still-alive tag are
    considered; existing RepositorySize rows are left untouched.
    """
    namespace = user.get_user_or_org(namespace_name)
    now_ms = get_epoch_timestamp_ms()

    # Repositories having at least one non-hidden tag that has not expired.
    subquery = (Tag.select(Tag.repository_id).where(
        Tag.hidden == False).where((Tag.lifetime_end_ms >> None) |
                                   (Tag.lifetime_end_ms > now_ms)).group_by(
                                       Tag.repository_id).having(
                                           fn.Count(Tag.name) > 0))

    # Total compressed layer size per repository in this namespace.
    namespace_repo_sizes = (Manifest.select(
        (Repository.id).alias("repository_id"),
        (Repository.name).alias("repository_name"),
        fn.sum(Manifest.layers_compressed_size).alias("repository_size"),
    ).join(Repository).join(
        subquery, on=(subquery.c.repository_id == Repository.id)).where(
            Repository.namespace_user == namespace.id).group_by(Repository.id))

    # Restrict to repositories that have no cached size row yet.
    insert_query = (namespace_repo_sizes.select(
        Repository.id, fn.sum(Manifest.layers_compressed_size)).join_from(
            Repository, RepositorySize,
            JOIN.LEFT_OUTER).where(RepositorySize.repository_id.is_null()))

    RepositorySize.insert_from(
        insert_query,
        fields=[RepositorySize.repository_id, RepositorySize.size_bytes],
    ).execute()
def fetch_aggregated_pages(target_page):
    """Aggregate repository counts per page, broken down by repository status.

    Returns a list of page dicts sorted by page name; each carries a
    "status" mapping of repository status -> repository count. When
    `target_page` is truthy, results are limited to that page.
    """
    query = (
        Repository.select(
            Page, Repository.status, fn.Count(Repository.name).alias("count")
        )
        .join(PageTargetRepository)
        .group_by(PageTargetRepository.page, Repository.status)
        .join(Page)
    )
    if target_page:
        query = query.where(PageTargetRepository.page == target_page)

    pages_stats = {}
    # One row per (page, status) pair; fold them into one dict per page.
    for page_stat in query.dicts():
        page_id = page_stat["id"]
        page_status = page_stat["status"]
        repo_count = page_stat["count"]
        if page_id not in pages_stats:
            # First row for this page: repurpose the row dict as the page
            # record, replacing the scalar status with a per-status mapping.
            pages_stats[page_id] = page_stat
            pages_stats[page_id]["status"] = {}
            del pages_stats[page_id]["count"]
        pages_stats[page_id]["status"][page_status] = repo_count
    return sorted(pages_stats.values(), key=lambda x: x["name"])
def get_transaction_list_details(transaction_list):
    """Return complete data set on a specified transaction."""
    # One aggregate row per transaction: date range of its files, uploader,
    # total size and file count, plus proposal/instrument identifiers.
    query = (Files().select(
        Files.transaction.alias('upload_id'),
        fn.Max(Transactions.updated).alias('upload_date'),
        fn.Min(Files.mtime).alias('file_date_start'),
        fn.Max(Files.mtime).alias('file_date_end'),
        fn.Min(Transactions.submitter).alias('uploaded_by_id'),
        fn.Sum(Files.size).alias('bundle_size'),
        fn.Count(Files.id).alias('file_count'),
        fn.Min(Transactions.updated).alias('upload_datetime'),
        fn.Min(Transactions.proposal).alias('proposal_id'),
        fn.Min(Transactions.instrument).alias('instrument_id')).join(
            Transactions).where(
                Files.transaction << transaction_list).group_by(
                    Files.transaction))
    # Keyed by the stringified upload id; datetimes rendered as strings.
    return {
        str(r['upload_id']): {
            'upload_id': str(r['upload_id']),
            'upload_date': r['upload_date'].date().strftime('%Y-%m-%d'),
            'file_date_start':
                r['file_date_start'].date().strftime('%Y-%m-%d'),
            'file_date_end': r['file_date_end'].date().strftime('%Y-%m-%d'),
            'uploaded_by_id': int(r['uploaded_by_id']),
            'bundle_size': int(r['bundle_size']),
            'file_count': int(r['file_count']),
            'upload_datetime': r['upload_date'].strftime('%Y-%m-%d %H:%M:%S'),
            'proposal_id': r['proposal_id'],
            'instrument_id': r['instrument_id']
        }
        for r in query.dicts()
    }
def _common_comments(exercise_id=None, user_id=None):
    """
    Most common comments throughout all exercises.
    Filter by exercise id when specified.
    """
    # Human-written comments only: linter-generated ones carry a flake8_key.
    base = CommentText.filter(**{
        CommentText.flake8_key.name: None,
    })
    query = base.select(CommentText.id, CommentText.text).join(Comment)

    if exercise_id is not None:
        query = query.join(SolutionFile).join(Solution).join(Exercise)
        query = query.where(Exercise.id == exercise_id)

    if user_id is not None:
        query = query.filter(Comment.commenter == user_id)

    # Five most frequently used comment texts.
    ranked = query.group_by(CommentText.id).order_by(
        fn.Count(CommentText.id).desc(),
    ).limit(5)
    return tuple(ranked.dicts())
def get_fail_counts():
    """Yield (failed_at, count) pairs aggregated over all score entries."""
    per_level = (ScoreEntry
                 .select(ScoreEntry.failed_at,
                         fn.Count(ScoreEntry.failed_at).alias('count'))
                 .group_by(ScoreEntry.failed_at))
    for row in per_level:
        yield row.failed_at, row.count
def _get_transaction_info_blocks(transaction_list, option='details'):
    """Collect per-transaction info blocks (file size/count, status,
    metadata, key/value pairs) for every id in `transaction_list`.

    Returns {'transactions': {id: block}, 'times': {updated_ts: id}}.
    """
    transactions = (Transactions
                    .select(
                        Transactions,
                        fn.Sum(Files.size).alias('file_size_bytes'),
                        fn.Count(Files.id).alias('file_count')
                    )
                    .join(Files, JOIN.LEFT_OUTER)
                    .group_by(Transactions)
                    .where(Transactions.id << transaction_list))
    transaction_results = {'transactions': {}, 'times': {}}
    for trans in transactions:
        kv_list = {}
        entry = trans.to_hash()
        metadata = QueryBase._get_base_transaction_metadata(entry, option)
        transaction = {}
        kvs = QueryBase._get_transaction_key_values(trans.id)
        for key_value in kvs:
            kv_list.update({key_value['key']: key_value['value']})
        # The LEFT OUTER join leaves the aggregates NULL for file-less
        # transactions; normalize those to 0.
        transaction['file_size_bytes'] = int(
            trans.file_size_bytes) if trans.file_size_bytes is not None else 0
        transaction['file_count'] = int(
            trans.file_count) if trans.file_count is not None else 0
        transaction['status'] = {
            'trans_id': trans.id,
            'person_id': trans.submitter_id,
            'step': 6,
            'message': 'verified',
            'status': 'success'
        }
        transaction['metadata'] = metadata
        transaction['kv_pairs'] = kv_list
        transaction_results['transactions'][trans.id] = transaction
        transaction_results['times'][entry.get('updated')] = trans.id
    return transaction_results
def get_aggregated_logs(
    start_time,
    end_time,
    performer=None,
    repository=None,
    namespace=None,
    ignore=None,
    model=LogEntry3,
):
    """
    Returns the count of logs, by kind and day, for the logs matching the given filters.
    """
    # Truncate each log's timestamp to its calendar day for bucketing.
    date = db.extract_date("day", model.datetime)
    selections = [
        model.kind,
        date.alias("day"),
        fn.Count(model.id).alias("count")
    ]
    query = _logs_query(selections, start_time, end_time, performer,
                        repository, namespace, ignore, model=model)
    return query.group_by(date, model.kind)
def compute_moved_stock_overview(*, organisation_id, after, before):
    """Construct filter for date range, if at least one of `after` or
    `before` is given. Compute number of boxes, and contained items, moved
    by `organisation_id` that were served in that date range (default to
    all time). Group by ProductCategory.
    """
    date_filter = _build_range_filter(Box.last_modified_on, low=after, high=before)
    per_category = (
        Box.select(
            ProductCategory.name,
            fn.sum(Box.items).alias("number_of_items"),
            fn.Count(Box.id).alias("number_of_boxes"),
        )
        .join(Location)
        .join(Base)
        .switch(Box)
        .join(Product)
        .join(ProductCategory)
        .where(
            (Base.organisation == organisation_id)
            & (date_filter)
            & (Location.visible == 1)
            & (Location.is_lost != 1)
            & (Location.is_scrap != 1)
        )
        .group_by(ProductCategory.name)
    )
    # One summary dict per product category.
    return [
        {
            "product_category_name": box.product.category.name,
            "number_of_boxes": box.number_of_boxes,
            "number_of_items": box.number_of_items,
        }
        for box in per_category
    ]
def report():
    """Render per-donor donation statistics (count, total, average)."""
    per_donor = (Donation
                 .select(Donation.donor,
                         fn.Count(Donation.value).alias('count'),
                         fn.Sum(Donation.value).alias('total'),
                         fn.Avg(Donation.value).alias('average'))
                 .group_by(Donation.donor))
    return render_template('report.jinja2', donations=per_donor)
def get_post_list_response(self, topic_id, page, size):
    """Build a paginated PostListResponse for one topic.

    Only posts with at least one picture appear (INNER JOIN on Pic).
    """
    query = Post.select(Post, fn.Count(Pic.id).alias('pic_count')) \
        .where(Post.topic_id == topic_id) \
        .join(Pic, join_type=JOIN_INNER, on=(Post.id == Pic.post_id)) \
        .group_by(Post.id).order_by(Post.create_time.desc())
    query = query.paginate(page, size)
    logger.info(str(query))
    post_view_list = []
    for post in query:
        post_view = PostView()
        post_view.name = post.name
        post_view.id = post.id
        post_view.pic_count = post.pic_count
        post_view.create_time = post.create_time
        post_view_list.append(post_view)
    # BUG FIX: .scalar() returned the first column of the first row (a post
    # id), not the number of posts; use count() for the real total.
    post_total = Post.select().where(Post.topic_id == topic_id).count()
    response = PostListResponse()
    response.current_page = page
    response.page_size = size
    response.total = post_total
    response.post_view_list = post_view_list
    return response
def _update_divergent_status_count(in_cve_list, rh_account_id):
    """Update cached count how many systems-CVE pairs have different status than CVE-level status is"""
    cve_ids = CveMetadata.select(CveMetadata.id).where(CveMetadata.cve << in_cve_list)
    # pylint: disable=singleton-comparison
    # Per CVE, count unmitigated systems whose status differs from the
    # account's CVE-level status (status id 0 when no CveAccountData row).
    div_counts = (SystemVulnerabilities.select(SystemVulnerabilities.cve_id,
                                               fn.Count(SystemVulnerabilities.id).alias('systems_status_divergent'))
                  .join(SystemPlatform, on=(SystemVulnerabilities.system_id == SystemPlatform.id))
                  .where(SystemPlatform.rh_account_id == rh_account_id)
                  .where(SystemPlatform.opt_out == False)  # noqa: E712
                  .where(SystemVulnerabilities.cve_id << cve_ids)
                  .where(SystemVulnerabilities.when_mitigated.is_null(True))
                  .where(SystemVulnerabilities.status_id != fn.COALESCE(
                      (CveAccountData.select(CveAccountData.status_id)
                       .where(CveAccountData.rh_account_id == rh_account_id)
                       .where(CveAccountData.cve_id == SystemVulnerabilities.cve_id)), 0))
                  .group_by(SystemVulnerabilities.cve_id))
    # Default every requested CVE to zero so counts reset when no
    # divergence remains.
    divergent_systems = {}
    for cve_id in cve_ids:
        divergent_systems[cve_id] = 0
    for div_count in div_counts:
        divergent_systems[div_count.cve_id] = div_count.systems_status_divergent
    if divergent_systems:
        # Bulk UPDATE ... FROM (VALUES ...) across all CVEs at once.
        values_to_update = ValuesList([(cve_id, systems_status_divergent)
                                       for cve_id, systems_status_divergent in divergent_systems.items()],
                                      columns=('cve_id', 'systems_status_divergent'))
        query = (CveAccountData.update(systems_status_divergent=values_to_update.c.systems_status_divergent)
                 .from_(values_to_update)
                 .where(CveAccountData.cve_id == values_to_update.c.cve_id)
                 .where(CveAccountData.rh_account_id == rh_account_id))
        query.execute()
def get_topic_list_response(self, page, size):
    """Build a paginated TopicListResponse.

    Only topics with at least one post appear (INNER JOIN on Post).
    """
    query = Topic.select(Topic, fn.Count(Post.id).alias('post_count')) \
        .join(Post, join_type=JOIN_INNER, on=(Topic.id == Post.topic_id)) \
        .group_by(Topic.id).order_by(Topic.create_time.desc())
    query = query.paginate(page, size)
    logger.info(str(query))
    topic_view_list = []
    for topic in query:
        topic_view = TopicView()
        topic_view.name = topic.name
        topic_view.id = topic.id
        topic_view.post_count = topic.post_count
        topic_view.create_time = topic.create_time
        topic_view_list.append(topic_view)
    # BUG FIX: .scalar() returned the first column of the first topic row,
    # not the number of topics; use count() for the real total.
    topic_total = Topic.select().count()
    response = TopicListResponse()
    response.current_page = page
    response.page_size = size
    response.total = topic_total
    response.topic_view_list = topic_view_list
    return response
def _count_subquery(rh_account_id):
    """Per-CVE aggregates for one account: how many live systems are
    affected, and how many of those carry a status different from the
    account's CVE-level status."""
    # pylint: disable=singleton-comparison
    return (SystemVulnerabilities.select(
        SystemVulnerabilities.cve_id.alias("cve_id_"),
        fn.Count(SystemVulnerabilities.id).alias("systems_affected_"),
        # Adds 1 per system whose status diverges from the account status.
        fn.Sum(
            Case(None, ((SystemVulnerabilities.status_id !=
                         CveAccountData.status_id, 1), ),
                 0)).alias("systems_status_divergent_")
    ).join(
        SystemPlatform,
        on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
            (SystemPlatform.rh_account_id == rh_account_id) &
            (SystemPlatform.opt_out == False) &  # noqa: E712
            (SystemPlatform.stale == False) &  # noqa: E712
            (SystemPlatform.when_deleted.is_null(True)))).join(
                CveAccountData, JOIN.LEFT_OUTER,
                on=((SystemVulnerabilities.cve_id == CveAccountData.cve_id) &
                    (CveAccountData.rh_account_id == rh_account_id))).
            where(SystemVulnerabilities.rh_account_id == rh_account_id).
            where((SystemVulnerabilities.mitigation_reason.is_null(True)) |
                  (SystemVulnerabilities.rule_id << InsightsRule.select(
                      InsightsRule.id).where((InsightsRule.active == False) &
                                             (~InsightsRule.rule_only))))
            .where((SystemVulnerabilities.when_mitigated.is_null(True)) |
                   (SystemVulnerabilities.rule_id << InsightsRule.select(
                       InsightsRule.id).where(
                           (InsightsRule.active == True) &
                           (~InsightsRule.rule_only)))).group_by(
                               SystemVulnerabilities.cve_id))
def user():
    """Profile/settings page: show and update the current user's details,
    and report how many active users are mail-confirmed in both directions.
    """
    if not g.user:
        return redirect(url_for('c.front'))
    user = get_user()
    form = UserForm(obj=user)
    if form.is_submitted():
        if not form.validate():
            flash(_('There are some errors, please fix them.'))
        else:
            form.populate_obj(user)
            # Normalize blank optional fields to NULL instead of ''.
            for k in ('country', 'email', 'description', 'address'):
                v = getattr(form, k).data
                if v is None or not v.strip():
                    setattr(user, k, None)
            user.save()
            flash(_('Profile has been updated.'), 'info')
            return redirect(url_for('c.user'))
    # "Confirmed" = sent at least one received mail code AND received at
    # least one; the alias allows joining MailCode twice in one query.
    MailCodeAlias = MailCode.alias()
    count_confirmed = (User.select(fn.Count(fn.Distinct(User.id))).join_from(
        User, MailCode,
        on=((MailCode.sent_by == User.id) &
            (MailCode.received_on.is_null(False)))).join_from(
                User, MailCodeAlias,
                on=((MailCodeAlias.sent_to == User.id) &
                    (MailCodeAlias.received_on.is_null(False)))).where(
                        User.is_active == True).scalar())
    return render_template('settings.html', form=form,
                           count_confirmed=count_confirmed)
def get_top_members_per_message_count(self, top_n=9):
    """Return the top_n members ranked by number of messages sent."""
    ranked = (Member
              .select(Member, fn.Count(Message.id).alias('count'))
              .join(Message, JOIN.RIGHT_OUTER)
              .group_by(Member)
              .order_by(SQL('count').desc())
              .limit(top_n))
    return list(ranked)
def _common_comments(exercise_id=None, user_id=None):
    """
    Most common comments throughout all exercises.
    Filter by exercise id when specified.
    """
    # Comments made by staff/admin users only. NOTE(review): this is a
    # one-element tuple (trailing comma); it is unpacked as a single WHERE
    # condition below -- confirm that is intentional.
    is_moderator_comments = (
        (Comment.commenter.role == Role.get_staff_role().id)
        | (Comment.commenter.role == Role.get_admin_role().id),
    )
    # flake8_key IS NULL filters out linter-generated comments.
    query = (CommentText.select(
        CommentText.id,
        CommentText.text).join(Comment).join(User).join(Role).where(
            CommentText.flake8_key.is_null(True),
            is_moderator_comments,
        ).switch(Comment))

    if exercise_id is not None:
        query = (query.join(SolutionFile).join(Solution).join(Exercise).where(
            Exercise.id == exercise_id))

    if user_id is not None:
        query = (query.filter(Comment.commenter == user_id))

    # Top five comment texts by usage frequency.
    query = (query.group_by(CommentText.id).order_by(
        fn.Count(CommentText.id).desc()).limit(5))
    return tuple(query.dicts())
def count_dialogs_between_time_advance(start_time_s, end_time_s, filter_items):
    """Count dialogs created between start_time_s and end_time_s (inclusive
    whole days, "%Y-%m-%d" strings).

    filter_items may carry 'is_manual_rated': 1 restricts the count to
    manually rated dialogs, -1 to unrated ones; anything else counts both.
    """
    start_time = datetime.strptime(start_time_s + " 00:00:00", "%Y-%m-%d %H:%M:%S")
    # Upper bound is midnight *after* the end day, making it inclusive.
    end_time = datetime.strptime(end_time_s + " 00:00:00", "%Y-%m-%d %H:%M:%S") + timedelta(days=1)
    # Two-slot trick: a dialog matches if is_manual_rated equals either
    # slot; [True, False] therefore matches everything.
    filter_condi = [True, False]
    if 'is_manual_rated' in filter_items:
        if filter_items['is_manual_rated'] == 1:
            # Print all filtered human rated results
            filter_condi[1] = True
        if filter_items['is_manual_rated'] == -1:
            filter_condi[0] = False
    try:
        count = Dialogs.select(fn.Count(Dialogs.id)).where(
            (Dialogs.created_at >= start_time) &
            (Dialogs.created_at <= end_time) &
            ((Dialogs.is_manual_rated == filter_condi[0]) |
             (Dialogs.is_manual_rated == filter_condi[1]))).scalar()
        return count
    # BUG FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit; narrowed to Exception. The re-raised type is kept as
    # BaseException because callers catch it -- flagged for a later rename.
    except Exception:
        error_logger.error("从数据库读取对话时发生其他错误, %s",
                           traceback.format_exc(),
                           extra={"host": 'localhost'})
        raise BaseException("从数据库读取对话时发生其他错误")
def _cve_details(cls, synopsis):
    """Return the detail dict for one CVE (looked up by its synopsis),
    enriched with the calling account's business-risk/status overrides and
    a per-status breakdown of affected systems."""
    retval = cls._get_cve_details(synopsis)
    # Account-level overrides for this CVE, if a CveAccountData row exists.
    cve_details = (CveAccountData.select(
        BusinessRisk.name.alias('risk'),
        CveAccountData.business_risk_id.alias('risk_id'),
        CveAccountData.business_risk_text.alias('risk_text'),
        Status.name.alias("status"),
        CveAccountData.status_id.alias("status_id"),
        CveAccountData.status_text.alias("status_text"),
        CveAccountData.systems_status_divergent).join(
            BusinessRisk,
            on=(CveAccountData.business_risk_id == BusinessRisk.id)).join(
                Status, on=(CveAccountData.status_id == Status.id)).join(
                    CveMetadata,
                    on=(CveAccountData.cve_id == CveMetadata.id)).where(
                        CveAccountData.rh_account_id == (
                            RHAccount.select(RHAccount.id).where(
                                RHAccount.name ==
                                connexion.context['user']))).where(
                                    CveMetadata.cve == synopsis)).dicts()
    if cve_details.count():
        retval['business_risk'] = cve_details[0]['risk']
        retval['business_risk_id'] = cve_details[0]['risk_id']
        retval['business_risk_text'] = cve_details[0]['risk_text']
        retval['status'] = cve_details[0]['status']
        retval['status_id'] = cve_details[0]['status_id']
        retval['status_text'] = cve_details[0]['status_text']
        retval['systems_status_divergent'] = cve_details[0][
            'systems_status_divergent']
    else:
        # No account-specific row: fall back to the global defaults.
        retval['business_risk'] = DEFAULT_BUSINESS_RISK
        retval['business_risk_id'] = 0
        retval['business_risk_text'] = None
        retval['status'] = DEFAULT_STATUS
        retval['status_id'] = 0
        retval['status_text'] = None
        retval['systems_status_divergent'] = 0
    # add counts of systems with all statuses
    retval['systems_status_detail'] = {}
    # pylint: disable=singleton-comparison
    status_detail = (
        SystemVulnerabilities.select(
            SystemVulnerabilities.status,
            fn.Count(SystemVulnerabilities.status).alias('systems')).join(
                SystemPlatform,
                on=(SystemVulnerabilities.system_id == SystemPlatform.id)).
        join(CveMetadata,
             on=(SystemVulnerabilities.cve_id == CveMetadata.id)).join(
                 RHAccount,
                 on=(SystemPlatform.rh_account_id == RHAccount.id)).
        where(CveMetadata.cve == synopsis).where(
            RHAccount.name == connexion.context['user']).where(
                SystemVulnerabilities.when_mitigated.is_null(True)).where(
                    SystemPlatform.opt_out == False)  # noqa: E712
        .group_by(SystemVulnerabilities.status).dicts())
    for row in status_detail:
        retval['systems_status_detail'][row['status']] = row['systems']
    return retval
def handle_patch(cls, **kwargs):
    """Set status for a CVE"""
    data = kwargs['data']
    cve_list = data['cve']
    # Accept a single CVE string as well as a list of them.
    if isinstance(cve_list, str):
        cve_list = [cve_list]
    values = {}
    updated = []
    if 'status_id' in data:
        values['status_id'] = data['status_id']
    if 'status_text' in data:
        # Blank/whitespace-only text stores NULL; non-strings fall back to
        # NULL via the AttributeError on .strip().
        try:
            values['status_text'] = data['status_text'].strip() \
                if data['status_text'].strip() else None
        except AttributeError:
            values['status_text'] = None
    if not values:
        return cls.format_exception(
            'At least one of the "status_id" or "status_text" parameters is required.',
            400)
    try:
        to_insert = []
        cves = CveMetadata.select(CveMetadata.id, CveMetadata.cve).where(
            CveMetadata.cve << cve_list)
        rh_account = get_or_create_account()
        for cve in cves:
            updated.append(cve.cve)
            if 'status_id' in data:
                # refresh number of divergent statuses in CVE-system pairs
                # pylint: disable=singleton-comparison
                query = (SystemVulnerabilities.select(
                    fn.Count(SystemVulnerabilities.id).alias('systems'))
                         .join(SystemPlatform,
                               on=(SystemVulnerabilities.system_id == SystemPlatform.id))
                         .join(InsightsRule, JOIN.LEFT_OUTER,
                               on=(InsightsRule.id == SystemVulnerabilities.rule_id))
                         .where(SystemPlatform.rh_account_id == rh_account[0].id)
                         .where((SystemPlatform.opt_out == False)  # noqa: E712
                                & (SystemPlatform.stale == False)  # noqa: E712
                                & (SystemPlatform.when_deleted.is_null(True)))
                         .where(SystemVulnerabilities.cve_id == cve.id)
                         .where((SystemVulnerabilities.mitigation_reason.is_null(True))
                                | (InsightsRule.active == False))
                         .where((SystemVulnerabilities.when_mitigated.is_null(True))
                                | (InsightsRule.active == True))
                         .where(SystemVulnerabilities.status_id != values.get('status_id', 0))
                         .dicts())
                values['systems_status_divergent'] = query[0]['systems']
            to_insert.append((cve.id, rh_account[0].id,
                              values.get('status_id', 0),
                              values.get('status_text', None),
                              values.get('systems_status_divergent', 0)))
        if not to_insert:
            return cls.format_exception('At least one given CVE must exist', 404)
        # Upsert: existing account/CVE rows are updated with `values`.
        (CveAccountData.insert_many(to_insert,
                                    fields=cls._fields)
         .on_conflict(conflict_target=cls._conflict_target,
                      preserve=[],
                      update=values).execute())
    except (IntegrityError, psycopg2IntegrityError, DataError):
        # usually means bad status
        LOGGER.exception('Error during setting status (IntegrityError):')
        DB.rollback()
        return cls.format_exception(f"status_id={data['status_id']} is invalid", 400)
    except ValueError as value_error:
        LOGGER.exception('Error during setting status (ValueError):')
        DB.rollback()
        return cls.format_exception(
            f'status_text or other key value is invalid ({value_error})', 400)
    return {'updated': updated}