class Meta:
    # Table-level uniqueness enforced in the database via raw SQL constraints:
    # a property value is unique per coding property, and a source row maps to
    # at most one (source_id, coding_property_id) pair.
    constraints = [
        SQL('CONSTRAINT pv_value_cp_id_unique UNIQUE (coding_property_id, pv_value)'),
        SQL('CONSTRAINT source_id_cp_id_unique UNIQUE (source_id, coding_property_id)')
    ]
class Meta:
    # Composite uniqueness for global property values: one value per global
    # property, and one row per (source_id, global_property_id) pair.
    constraints = [
        SQL('CONSTRAINT gv_value_gp_id_unique UNIQUE(gv_value, global_property_id)'),
        SQL('CONSTRAINT gv_source_id_gp_id_unique UNIQUE(source_id, global_property_id)')
    ]
class Meta:
    # Utterance numbering is unique within an interview, and a source row maps
    # to at most one (source_id, interview_id) pair.
    constraints = [
        SQL('CONSTRAINT utt_enum_iv_id_unique UNIQUE (utt_enum, interview_id)'),
        SQL('CONSTRAINT source_id_iv_id_unique UNIQUE (source_id, interview_id)')
    ]
class Meta:
    # Coding property names are unique within a coding system; likewise one row
    # per (source_id, coding_system_id) pair.
    constraints = [
        SQL('CONSTRAINT cp_name_cs_id_unique UNIQUE(cp_name, coding_system_id)'),
        SQL('CONSTRAINT cp_source_id_cs_id_unique UNIQUE(source_id, coding_system_id)')
    ]
class ProductGender(db.Model):
    """Gender/age-category dimension for products, backed by the legacy
    "genders" table. The per-category flags default to 0 at the DB level."""

    adult = IntegerField(constraints=[SQL("DEFAULT 0")])
    baby = IntegerField(constraints=[SQL("DEFAULT 0")])
    child = IntegerField(constraints=[SQL("DEFAULT 0")])
    color = CharField()
    created = DateTimeField(null=True)
    # Audit FKs: keep the row but null the reference when the user is deleted.
    created_by = UIntForeignKeyField(
        column_name="created_by",
        field="id",
        model=User,
        null=True,
        on_delete="SET NULL",
        on_update="CASCADE",
    )
    female = IntegerField(constraints=[SQL("DEFAULT 0")])
    label = CharField()
    male = IntegerField(constraints=[SQL("DEFAULT 0")])
    modified = DateTimeField(null=True)
    modified_by = UIntForeignKeyField(
        column_name="modified_by",
        field="id",
        model=User,
        null=True,
        on_delete="SET NULL",
        on_update="CASCADE",
    )
    seq = IntegerField(null=True)
    # Legacy column name retained for the existing schema.
    short_label = CharField(null=True, column_name="shortlabel")

    class Meta:
        table_name = "genders"
def on_trivia_leaderboard(self, event, args):
    # Reply with the top-5 trivia scorers: globally when args.g is set,
    # otherwise scoped to the guild the command was invoked in.
    try:
        if args.g:
            users = list(
                Trivia.select(Trivia.user_id, Trivia.correct_answers,
                              Trivia.incorrect_answers,
                              Trivia.points).order_by(
                                  SQL('points').desc()).limit(5).tuples())
        else:
            users = list(
                Trivia.select(
                    Trivia.user_id, Trivia.correct_answers,
                    Trivia.incorrect_answers, Trivia.points).where(
                        event.guild.id == Trivia.guild_id).order_by(
                            SQL('points').desc()).limit(5).tuples())
    except Exception as e:
        # Best-effort handler: log the traceback and report the failure back
        # to the channel instead of letting the command crash.
        print(traceback.format_exc())
        return event.msg.reply(
            'Failed to grab leaderboard stats: ```{}```'.format(e))
    event.msg.reply('**TOP TRIVIA EXPERTS**\n' + (
        len(users) > 0 and '\n'.join(
            '{}. **{}** {}\n <:green_tick:435164337167138826> {} <:red_tick:435164344125489155> {}\n**{}** points'
            .format(  # noqa
                i + 1,
                # Fall back to 'Invalid User' when the user is no longer in
                # the state cache.
                (self.state.users.get(row[0]) and self.state.users.
                 get(row[0]).username or 'Invalid User'),
                # Crown emoji for the #1 spot only.
                ':crown:' if i == 0 else '',
                row[1], row[2], row[3]) for i, row in enumerate(users)) or
        '***No trivia stats found for this server***'))
class TaskTable(db.Model):
    """Scan task row: links an audit/scan to its execution state and results."""

    class Meta:
        db_table = "task"

    uuid = UUIDField(unique=True, default=uuid.uuid4)
    # Parent references survive deletion of the audit/scan (SET NULL).
    audit_id = ForeignKeyField(AuditTable, null=True, on_delete="SET NULL", on_update="CASCADE")
    scan_id = ForeignKeyField(ScanTable, null=True, on_delete="SET NULL", on_update="CASCADE")
    target = CharField(default="")
    # start_at/end_at vs started_at/ended_at — presumably the scheduled window
    # vs the actual execution times; confirm against the scheduler code.
    start_at = DateTimeField(default=Utils.get_default_datetime)
    end_at = DateTimeField(default=Utils.get_default_datetime)
    started_at = DateTimeField(default=Utils.get_default_datetime)
    ended_at = DateTimeField(default=Utils.get_default_datetime)
    error_reason = CharField(default="")
    session = TextField(default="")
    progress = CharField(default="")
    slack_webhook_url = CharField(default="")
    results = TextField(default="")
    # Timestamps maintained by the database (MySQL ON UPDATE syntax).
    created_at = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    updated_at = DateTimeField(constraints=[
        SQL("DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP")
    ])
def get_query(model, info, filters=None, order_by=None, page=None, paginate_by=None, total_query=None):
    """Build a peewee query for a GraphQL selection set.

    `model` may be a Model class, a ModelBase, or an already-built Query;
    joins, filters, ordering and pagination are derived from the GraphQL
    `info` selections. Returns the assembled query, or `model` unchanged when
    it is not a peewee model/query.

    Fix: `filters`/`order_by` previously used mutable default arguments
    (`{}` / `[]`), which are shared across calls; they now default to None
    and are normalised inside the function (behaviour-compatible).
    """
    filters = {} if filters is None else filters
    order_by = [] if order_by is None else order_by
    query = None
    # Keep the original object around so 'total' can be counted on the
    # unmodified query.
    orig_query = copy(model)
    if isinstance(model, Query):
        query = model
        model = query.objects().model
    if isinstance(model, (Model, ModelBase)):
        alias_map = {}
        selections = next(field for field in info.field_asts
                          if field.name.value == info.field_name).selection_set.selections
        requested_model, requested_joins, requested_fields = get_requested_models(model, selections, alias_map)
        if query is None:
            query = requested_model.select(*requested_fields)
        if not requested_fields:
            query._returning = ()
        query = join(query, requested_joins)
        query = filter(query, filters, alias_map)
        query = order(requested_model, query, order_by, alias_map)
        query = paginate(query, page, paginate_by)
        if page and paginate_by or get_field_from_selections(selections, 'total'):  # TODO: refactor 'total'
            if total_query:
                total = NodeList([total_query]).alias(TOTAL_FIELD)
            else:
                # Inline the pre-computed count as a constant column.
                count = orig_query.select().count()
                count_node = [SQL(f'{count}')]
                total = NodeList(count_node).alias(TOTAL_FIELD)
            query._returning = tuple(query._returning) + (total,)
        if not query._returning:
            query = query.select(SQL('1'))  # bottleneck
        return query
    return model
class ScanTable(db.Model):
    """Scan row belonging to an audit; deleted together with its audit."""

    class Meta:
        db_table = "scan"

    uuid = UUIDField(unique=True)
    audit_id = ForeignKeyField(AuditTable, backref="scans", on_delete="CASCADE", on_update="CASCADE")
    target = CharField()
    # start_at/end_at vs started_at/ended_at — presumably scheduled window vs
    # actual execution times; confirm against the scheduler code.
    start_at = DateTimeField(default=Utils.get_default_datetime)
    end_at = DateTimeField(default=Utils.get_default_datetime)
    started_at = DateTimeField(default=Utils.get_default_datetime)
    ended_at = DateTimeField(default=Utils.get_default_datetime)
    error_reason = CharField(default="")
    scheduled = BooleanField(default=False)
    # UUID of the currently-associated task, if any.
    task_uuid = UUIDField(unique=True, null=True, default=None)
    processed = BooleanField(default=False)
    comment = TextField(default="")
    source_ip = CharField(default="")
    slack_webhook_url = CharField(default="")
    # Timestamps maintained by the database (MySQL ON UPDATE syntax).
    created_at = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    updated_at = DateTimeField(constraints=[
        SQL("DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP")
    ])
def create_airport_data(airport, with_flights=False):
    """
    Creates dict with needed information of given Airport instance
    """
    data = {'city': airport.city, 'name': airport.name, 'code': airport.code}
    if with_flights:
        data.update({'departures': [], 'arrivals': []})
        for r in airport.departures:
            # NOTE(review): SQL('route_id = %s', r.id) passes a bare scalar as
            # the params argument; peewee 3 expects an iterable of params here
            # — confirm this matches the installed peewee version.
            for ra in RouteAssignment.select().where(SQL(
                    'route_id = %s', r.id)):
                data['departures'].append(
                    create_route_assignment_data(ra, from_location=False))
        for r in airport.arrivals:
            for ra in RouteAssignment.select().where(SQL(
                    'route_id = %s', r.id)):
                data['arrivals'].append(
                    create_route_assignment_data(ra, to_location=False))
        # Route counts are derived from the lists built above.
        data.update({
            'inbound_routes': len(data['arrivals']),
            'outbound_routes': len(data['departures'])
        })
    return data
def __init__(self, rh_account_id, list_args, query_args, filter_args, parsed_args, uri,
             ids_only=False, cve_advisories=None):
    """Assemble the per-account system-CVE list query (or an id-only variant),
    apply the supported filters, and hand off to the base list view with the
    sort/filter column maps below."""
    query = self._full_query(rh_account_id, query_args) if not ids_only else self._id_query(rh_account_id, query_args)
    query = cyndi_join(query)
    filters = [filter_types.CVE_BUSINESS_RISK, filter_types.CVE_CVSS, filter_types.CVE_IMPACT,
               filter_types.CVE_PUBLIC_DATE, filter_types.SYSTEM_CVE_STATUS,
               filter_types.SYSTEM_CVE_RULE_PRESENCE, filter_types.SYSTEM_CVE_RULE,
               filter_types.CVE_KNOWN_EXPLOITS, filter_types.CVE_BY_ADVISORY,
               filter_types.SYSTEM_CVE_FIRST_REPORTED, filter_types.SYSTEM_CVE_ADVISORY_AVAILABLE,
               filter_types.SYSTEM_CVE_REMEDIATION,
               ]
    query = apply_filters(query, filter_args, filters, {'cve_advisories': cve_advisories})
    query = query.dicts()
    sortable_columns = {
        'id': CveMetadata.id,
        'cve': CVE_SYNOPSIS_SORT,
        'synopsis': CVE_SYNOPSIS_SORT,
        'public_date': CveMetadata.public_date,
        # This assumes we only show one score, and that cvss3 wins over cvss2
        'cvss_score': Case(None, ((CveMetadata.cvss3_score.is_null(True), CveMetadata.cvss2_score),),
                           CveMetadata.cvss3_score),
        "cvss3_score": CveMetadata.cvss3_score,
        "cvss2_score": CveMetadata.cvss2_score,
        'impact_id': CveMetadata.impact_id,
        'impact': CveMetadata.impact_id,
        'status_id': Status.id,
        'status': Status.id,
        # Raw-SQL sort keys reference computed/aliased columns of the query.
        'business_risk_id': SQL('business_risk_id'),
        'business_risk': SQL('business_risk_id'),
        'first_reported': SystemVulnerabilities.first_reported,
        'advisory_available': SQL('advisory_available'),
        'remediation': SQL('remediation_type_id'),
    }
    # Secondary sort column applied per primary sort key ('default' = none given).
    default_sort_columns = {
        'default': 'id',
        'cvss_score': 'public_date',
        'cvss2_score': 'public_date',
        'cvss3_score': 'public_date',
        'public_date': 'synopsis',
        'impact': 'public_date',
        'business_risk': 'public_date',
        'status': 'public_date',
    }
    filterable_columns = {
        'cve': CveMetadata.cve,
        'description': CveMetadata.description,
        'status': Status.name,
        "rule_description": InsightsRule.description_text
    }
    filter_expressions = {}
    super().__init__(query,
                     sortable_columns, default_sort_columns, filterable_columns,
                     filter_expressions, list_args, parsed_args, uri)
def _orphaned_storage_query(candidate_ids): """ Returns the subset of the candidate ImageStorage IDs representing storages that are no longer referenced by images. """ # Issue a union query to find all storages that are still referenced by a candidate storage. This # is much faster than the group_by and having call we used to use here. nonorphaned_queries = [] for counter, candidate_id in enumerate(candidate_ids): query_alias = 'q{0}'.format(counter) # TODO: remove the join with Image once fully on the OCI data model. storage_subq = (ImageStorage.select(ImageStorage.id).join(Image).where( ImageStorage.id == candidate_id).limit(1).alias(query_alias)) nonorphaned_queries.append( ImageStorage.select(SQL('*')).from_(storage_subq)) manifest_storage_subq = (ImageStorage.select( ImageStorage.id).join(ManifestBlob).where( ImageStorage.id == candidate_id).limit(1).alias(query_alias)) nonorphaned_queries.append( ImageStorage.select(SQL('*')).from_(manifest_storage_subq)) # Build the set of storages that are missing. These storages are orphaned. nonorphaned_storage_ids = { storage.id for storage in _basequery.reduce_as_tree(nonorphaned_queries) } return list(candidate_ids - nonorphaned_storage_ids)
class Base(Model):
    """Shared base model: a name plus database-defaulted audit timestamps.

    Note: `modified` only gets CURRENT_TIMESTAMP on insert; it is not
    refreshed by the database on update.
    """
    name = TextField()
    created = DateTimeField(constraints=[SQL('DEFAULT CURRENT_TIMESTAMP')])
    modified = DateTimeField(constraints=[SQL('DEFAULT CURRENT_TIMESTAMP')])

    class Meta:
        database = db
def check_if_amendement_are_in_db(start_date, end_date, size, output_file):
    # (Python 2) Cross-check every amendement returned by the remote search
    # service against the local DB, matching on a composite hash of
    # (legislature, texteloi_id, numero); URLs missing locally are written,
    # one per line, to output_file.
    service = AmendementSearchService()
    print u'Nombre total d\'amendement à checker : %s' % service.total_count(start_date=start_date, end_date=end_date)
    amendements_summary_iterator = service.iter(start_date=start_date, end_date=end_date, size=size)
    all_missing_urls = []
    for amendements_summary in amendements_summary_iterator:
        print "Page %s / %s" % (amendements_summary.start / size, amendements_summary.total_count / size)
        amendement_hashes = [amendement_hash(a.url_amend) for a in amendements_summary.results]
        # Same composite key computed in SQL so it can be compared against the
        # hashes derived from the remote results.
        sql_amendement_hash = SQL('CONCAT(legislature, texteloi_id, numero)')
        db_amendement_hashes = [unicode(a.hash) for a in Amendement.select(sql_amendement_hash.alias('hash')).where(sql_amendement_hash << amendement_hashes)]
        missing_amendement_hashes = set(amendement_hashes) - set(db_amendement_hashes)
        missing_urls = [a.url for a in amendements_summary.results if amendement_hash(a.url) in missing_amendement_hashes]
        for missing_url in missing_urls:
            print u'Amendement manquant : %s' % missing_url
        all_missing_urls += list(missing_urls)
    print u'Nombre total d\'amendements manquants : %s' % len(all_missing_urls)
    with open(output_file, 'w') as f:
        f.write('\n'.join(all_missing_urls))
def __init__(self, list_args, query_args, uri, args, ids_only=False):
    """Build the CVE list query (systems affected per CVE), apply filters,
    and delegate sorting/pagination to the base list view."""
    join_type = JOIN.INNER
    cve_count = CveAccountData.systems_affected
    if 'show_all' in args and args['show_all']:
        # show_all: include CVEs with no affected systems; the outer join
        # produces NULL counts, coalesced to 0.
        join_type = JOIN.RIGHT_OUTER
        cve_count = fn.COALESCE(cve_count, 0)
    query = self._full_query(cve_count, join_type, query_args) if not ids_only else self._id_query(join_type, query_args)
    query = apply_filters(query, args, [filter_types.CVE_BUSINESS_RISK, filter_types.CVE_CVSS,
                                        filter_types.CVE_IMPACT, filter_types.CVE_PUBLIC_DATE,
                                        filter_types.CVE_RULE_PRESENCE, filter_types.CVE_SHOW_ALL,
                                        filter_types.CVE_STATUS, filter_types.CVE_RULE_PRESENCE_OLD])
    query = query.dicts()
    sortable_columns = {
        # Raw-SQL keys reference computed/aliased columns of the query.
        "systems_affected": SQL('systems_affected'),
        "id": CveMetadata.id,
        "synopsis": CVE_SYNOPSIS_SORT,
        "public_date": CveMetadata.public_date,
        # This assumes we only show one score, and that cvss3 wins over cvss2
        "cvss_score": Case(None, ((CveMetadata.cvss3_score.is_null(True), CveMetadata.cvss2_score),),
                           CveMetadata.cvss3_score),
        "cvss3_score": CveMetadata.cvss3_score,
        "cvss2_score": CveMetadata.cvss2_score,
        "impact_id": CveMetadata.impact_id,
        "impact": CveMetadata.impact_id,
        "business_risk_id": SQL('business_risk_id'),
        "business_risk": SQL('business_risk_id'),
        "status_id": SQL('status_id'),
        "status": SQL('status_id'),
    }
    default_sort_columns = ['id']
    filterable_columns = {
        "synopsis": CveMetadata.cve,
        "description": CveMetadata.description
    }
    super(CvesListView, self).__init__(query, sortable_columns, default_sort_columns,
                                       filterable_columns, list_args, args, uri)
class PatrolMeta(ModelBase):
    """
    Inspection (patrol) metadata.

    patrol_id: special work-order ID, yyyymm + 3-digit index
    pid: ID of the current handler
    total: total number of devices in the plan
    status: 0 in progress, 1 finished, 2 cancelled
    unfinished: count of unfinished items; updated on every check
    start_time: planned start time
    end_time: planned end time
    """
    patrol_id = CharField(max_length=12)
    pid = IntegerField()
    total = IntegerField()
    unfinished = IntegerField()
    status = TinyInt(constraints=[SQL('DEFAULT 0')])
    start_time = CharField()
    end_time = CharField()
    # Soft-delete flag; rows are never physically removed.
    del_flag = BooleanField(constraints=[SQL('DEFAULT 0')])
    # Maintained by the database (MySQL ON UPDATE syntax).
    gmt_modified = DateTimeField(
        formats='%Y-%m-%d %H:%M:%S',
        constraints=[
            SQL('DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP')
        ])

    class Meta:
        table_name = 'patrol_meta'
        # Unique index on patrol_id.
        indexes = ((('patrol_id', ), True), )
def _top_counts(field, limit=10):
    """Return the `limit` most frequent values of `field` with a
    COUNT(*) aliased as `num_count`, ordered by frequency descending."""
    return (field.model.select(
        field, fn.COUNT(field).alias('num_count')).group_by(
            field).order_by(SQL('num_count').desc()).limit(limit))


def stats():
    """Render aggregate stats: the 10 most common cookie and header
    names/values (the four identical aggregations are factored into
    _top_counts; the old inline comment claimed 25 but the code always
    limited to 10)."""
    data = {
        'cookie_names': _top_counts(Cookie.name),
        'cookie_values': _top_counts(Cookie.value),
        'header_names': _top_counts(Header.name),
        'header_values': _top_counts(Header.value),
    }
    return render_template('stats.html', data=data)
class ComputerDetail(ModelBase):
    """
    Computer hardware details.

    eid: ID of the corresponding device
    ip_address: IP address
    cpu: CPU
    gpu: graphics card
    disk: hard disk
    memory: RAM
    main_board: motherboard
    monitor: display  # todo
    remark: notes
    del_flag: soft-delete flag
    """
    eid = IntegerField()
    ip_address = CharField(max_length=64, null=True)
    cpu = CharField(max_length=32, null=True)
    gpu = CharField(max_length=32, null=True)
    disk = CharField(max_length=32, null=True)
    memory = CharField(max_length=32, null=True)
    main_board = CharField(max_length=32, null=True)
    monitor = CharField(max_length=32, null=True)
    remark = CharField(max_length=128, null=True)
    del_flag = BooleanField(constraints=[SQL('DEFAULT 0')])
    gmt_modified = DateTimeField(
        formats='%Y-%m-%d %H:%M:%S',
        constraints=[SQL('DEFAULT CURRENT_TIMESTAMP')])

    class Meta:
        # One detail row per device.
        indexes = ((('eid', ), True), )
        table_name = 'computer_detail'
def get_weekly_comments_count(since=None):
    """Per-week counts of approved vs rejected comments since `since`
    (or all time), merged onto a continuous week axis by the helper."""
    approved = Comment.select(fn.date_trunc('week', Comment.created),
                              fn.count(Comment.id)).group_by(
                                  fn.date_trunc('week', Comment.created)).order_by(
                                      SQL('date_trunc').asc())
    rejected = RejectedComment.select(
        fn.date_trunc('week', RejectedComment.created),
        fn.count(RejectedComment.id)).group_by(
            fn.date_trunc('week', RejectedComment.created)).order_by(
                SQL('date_trunc').asc())
    if since:
        approved = approved.where(Comment.created >= since)
        rejected = rejected.where(RejectedComment.created >= since)
    approved = approved.tuples()
    rejected = rejected.tuples()
    # NOTE(review): the [0]/[-1] indexing assumes both result sets are
    # non-empty — an empty table would raise IndexError here.
    first_week = min(approved[0][0], rejected[0][0])
    last_week = max(approved[-1][0], rejected[-1][0])
    weeks = get_week_or_month_counter(metric='week',
                                      first_metric_value=first_week,
                                      last_metric_value=last_week)
    ret = merge_approved_rejected_list(metric_counter=weeks,
                                       approved=approved,
                                       rejected=rejected)
    return ret
def get_monthly_top_commented_articles(top=10, since=None):
    """For each month (optionally since `since`), list the `top` most
    commented articles as (asset URL, comment count) pairs, with empty
    months filled in with a default value."""
    months = Comment.select(fn.date_trunc('month', Comment.created))
    if since:
        months = months.where(Comment.created >= since)
    months = months.group_by(fn.date_trunc('month', Comment.created)).order_by(
        SQL('date_trunc').asc()).tuples()
    # Map month -> list of (url, count); seeded so every observed month exists.
    month_top_commented_articles_map = {m[0]: [] for m in months}
    # Row shape: (month, asset_id, count) -> (asset URL, count).
    output_formatter = lambda mtc: (Asset.get_by_id(mtc[1]).url, mtc[2])
    for month in months:
        month_top_commented_articles_map[month[0]].extend(
            list(
                map(
                    output_formatter,
                    Comment.select(
                        fn.date_trunc('month', Comment.created),
                        Comment.asset_id, fn.count(Comment.id)).group_by(
                            fn.date_trunc('month', Comment.created),
                            Comment.asset_id).where(
                                fn.date_trunc('month', Comment.created) ==
                                month).order_by((SQL('date_trunc')),
                                                (SQL('count')).desc()).limit(
                                                    int(top)).tuples())))
    first_month = min(month_top_commented_articles_map.keys())
    last_month = max(month_top_commented_articles_map.keys())
    # Continuous month axis covering the observed range.
    months = get_week_or_month_counter(metric='month',
                                       first_metric_value=first_month,
                                       last_metric_value=last_month)
    monthly_top_commented_articles = list(
        month_top_commented_articles_map.items())
    ret = fill_output_with_default_values(
        metric_counter=months,
        output=monthly_top_commented_articles,
        default_value=[])
    return ret
class ScanTable(db.Model):
    """Scheduled/recurring scan definition belonging to an audit."""

    class Meta:
        db_table = "scan"

    uuid = UUIDField(unique=True)
    audit_id = ForeignKeyField(AuditTable, backref="scans", on_delete="CASCADE", on_update="CASCADE")
    name = CharField(default="")
    description = CharField(default="")
    target = CharField()
    scheduled_at = DateTimeField(null=True, default=None)
    # Maximum run time; 0 presumably means unlimited — confirm with the runner.
    max_duration = IntegerField(default=0)
    started_at = DateTimeField(null=True, default=None)
    ended_at = DateTimeField(null=True, default=None)
    error_reason = CharField(default="")
    task_uuid = UUIDField(unique=True, null=True, default=None)
    # iCalendar RRULE string for recurring scans (empty = one-shot).
    rrule = CharField(default="")
    detection_module = CharField(default="")
    detection_mode = CharField(default="")
    # Timestamps maintained by the database (MySQL ON UPDATE syntax).
    created_at = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    updated_at = DateTimeField(constraints=[
        SQL("DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP")
    ])
    created_by = CharField(default="")
    updated_by = CharField(default="")
def __init__(self, list_args, query_args, filter_args, parsed_args, uri, ids_only=False):
    """Assemble the system-CVE query (or id-only variant), apply filters,
    and hand off to the base list view."""
    query = self._full_query(query_args) if not ids_only else self._id_query(query_args)
    query = cyndi_query(query)
    query = apply_filters(query, filter_args, [filter_types.CVE_BUSINESS_RISK, filter_types.CVE_CVSS,
                                               filter_types.CVE_IMPACT, filter_types.CVE_PUBLIC_DATE,
                                               filter_types.SYSTEM_CVE_STATUS,
                                               filter_types.SYSTEM_CVE_RULE_PRESENCE,
                                               filter_types.SYSTEM_CVE_RULE_OLD])
    query = query.dicts()
    sortable_columns = {
        'id': CveMetadata.id,
        'cve': CVE_SYNOPSIS_SORT,
        'synopsis': CVE_SYNOPSIS_SORT,
        'public_date': CveMetadata.public_date,
        # This assumes we only show one score, and that cvss3 wins over cvss2
        'cvss_score': Case(None, ((CveMetadata.cvss3_score.is_null(True), CveMetadata.cvss2_score),),
                           CveMetadata.cvss3_score),
        "cvss3_score": CveMetadata.cvss3_score,
        "cvss2_score": CveMetadata.cvss2_score,
        'impact_id': CveMetadata.impact_id,
        'impact': CveMetadata.impact_id,
        'status_id': Status.id,
        'status': Status.id,
        # Raw-SQL keys reference computed/aliased columns of the query.
        'business_risk_id': SQL('business_risk_id'),
        'business_risk': SQL('business_risk_id'),
    }
    default_sort_columns = ['id']
    filterable_columns = {
        'cve': CveMetadata.cve,
        'description': CveMetadata.description,
        'status': Status.name
    }
    super(SystemCvesView, self).__init__(query, sortable_columns, default_sort_columns,
                                         filterable_columns, list_args, parsed_args, uri)
class TaskTable(db.Model):
    """Execution record of a scan: runtime state, progress and results."""

    class Meta:
        db_table = "task"

    uuid = UUIDField(unique=True, default=uuid.uuid4)
    # Parent references survive deletion of the audit/scan (SET NULL).
    audit_id = ForeignKeyField(AuditTable, null=True, on_delete="SET NULL", on_update="CASCADE")
    scan_id = ForeignKeyField(ScanTable, null=True, on_delete="SET NULL", on_update="CASCADE")
    scan_uuid = UUIDField(null=True, default=None)
    target = CharField(default="")
    # NOTE(review): these DateTimeFields default to None without null=True —
    # confirm the columns are nullable in the actual schema.
    scheduled_at = DateTimeField(default=None)
    max_duration = IntegerField(default=0)
    started_at = DateTimeField(default=None)
    ended_at = DateTimeField(default=None)
    error_reason = CharField(default="")
    detection_module = CharField(default="")
    detection_mode = CharField(default="")
    session = TextField(default="")
    progress = CharField(default="")
    results = TextField(default="")
    # Timestamps maintained by the database (MySQL ON UPDATE syntax).
    created_at = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    updated_at = DateTimeField(constraints=[
        SQL("DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP")
    ])
class BaseDAO(Model):
    """Base DAO with audit timestamps kept both as Python-side defaults and
    database-level defaults, plus dict conversion helpers."""

    _create_time = DateTimeField(
        default=datetime.datetime.now,
        null=False,
        constraints=[SQL('DEFAULT CURRENT_TIMESTAMP')])
    _update_time = DateTimeField(
        default=datetime.datetime.now,
        null=False,
        constraints=[
            SQL('DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP')
        ])

    class Meta:
        database = db

    def save(self, *args, **kwargs):
        """Touch whichever update-time attribute the concrete model defines,
        then delegate to peewee's save."""
        now = datetime.datetime.now()
        if hasattr(self, 'update_time'):
            self.update_time = now
        if hasattr(self, '_update_time'):
            self._update_time = now
        return super().save(*args, **kwargs)

    def update_from_dict(self, data) -> Model:
        """Copy `data` onto this instance, persist, and return self."""
        update_model_from_dict(self, data)
        self.save()
        return self

    def to_dict(self, **kwargs) -> dict:
        """Serialize this model row to a plain dict."""
        return model_to_dict(self, **kwargs)
def client():
    """Paginated client listing endpoint (bootstrap-table style params:
    limit/offset/order/sort/search). Returns a dict with total counts and
    the serialized rows for the requested page."""
    limit = int(request.values.get('limit', cfg.ITEMS_PER_PAGE))
    limit = limit if limit > 0 else cfg.ITEMS_PER_PAGE
    # Fix: use floor division — '/' yields a float under Python 3, and
    # paginate() would then receive a non-integer page number.
    offset = int(request.values.get('offset', 0)) // limit
    order = 'asc' if request.args.get('order', 'asc') == 'asc' else 'desc'
    query = Client.select()
    search = request.args.get('search')
    if search is not None and '' != search:
        query = query.where(Client.name % ('*' + search + '*'))
    sort = request.args.get('sort')
    if sort is not None:
        # SECURITY: `sort` comes straight from the request and is interpolated
        # into the ORDER BY clause via SQL() — it should be validated against
        # a whitelist of sortable column names before use.
        sql_sort = SQL(sort).asc() if order == 'asc' else SQL(sort).desc()
        query = query.order_by(sql_sort)
    # Renamed from `all`, which shadowed the builtin.
    page_rows = query.paginate(offset + 1, limit)
    total = Client.select().count()
    result = {
        'total': total,
        'totalNotFiltered': total,
        'rows': [p.to_dict() for p in page_rows]
    }
    return result
def _create_table(self, model_class, safe=False):
    """Build the CREATE TABLE clause tree for `model_class` (peewee-2 style
    Clause/SQL nodes); `safe` adds IF NOT EXISTS."""
    statement = 'CREATE TABLE IF NOT EXISTS' if safe else 'CREATE TABLE'
    meta = model_class._meta
    columns, constraints = [], []
    if meta.composite_key:
        # Composite PK is emitted as a table-level constraint.
        pk_cols = [
            meta.fields[f].as_entity() for f in meta.primary_key.field_names
        ]
        constraints.append(
            Clause(SQL('PRIMARY KEY'), EnclosedClause(*pk_cols)))
    for field in meta.sorted_fields:
        columns.append(self.field_definition(field))
        ## No ForeignKeyField support
        #if isinstance(field, ForeignKeyField) and not field.deferred:
        #    constraints.append(self.foreign_key_constraint(field))
    if model_class._meta.constraints:
        for constraint in model_class._meta.constraints:
            # Raw strings from Meta.constraints are wrapped as SQL nodes.
            if not isinstance(constraint, Node):
                constraint = SQL(constraint)
            constraints.append(constraint)
    return Clause(SQL(statement), model_class.as_entity(),
                  EnclosedClause(*(columns + constraints)))
def number_order() -> 'ModelSelect':
    """Order by column position.

    SELECT prod_name, prod_price FROM product ORDER BY 2, 1;
    """
    query = Product.select(Product.prod_name, Product.prod_price)
    # Positional ORDER BY: 2 = prod_price, 1 = prod_name.
    return query.order_by(SQL('2'), SQL('1'))
def __init__(self, rh_account_id, synopsis, list_args, filter_args, parsed_args, uri,
             ids_only=False, system_advisories=None):
    """Assemble the list of systems affected by a given CVE (`synopsis`) for
    an account, apply the supported filters, and hand off to the base list
    view."""
    query = self._full_query(rh_account_id, synopsis) if not ids_only else self._id_query(
        rh_account_id, synopsis)
    query = cyndi_join(query)
    filters = [
        filter_types.SYSTEM_CVE_RULE, filter_types.SYSTEM_CVE_RULE_KEY,
        filter_types.SYSTEM_CVE_STATUS, filter_types.SYSTEM_UUID,
        filter_types.SYSTEM_CVE_RULE_PRESENCE, filter_types.SYSTEM_BY_ADVISORY,
        filter_types.SYSTEM_CVE_FIRST_REPORTED, filter_types.SYSTEM_TAGS,
        filter_types.SYSTEM_SAP, filter_types.SYSTEM_SAP_SIDS,
        filter_types.SYSTEM_RHEL_VERSION, filter_types.SYSTEM_CVE_ADVISORY_AVAILABLE,
        filter_types.SYSTEM_CVE_REMEDIATION,
    ]
    query = apply_filters(query, filter_args, filters, {'system_advisories': system_advisories})
    query = query.dicts()
    sortable_columns = {
        'display_name': SystemPlatform.display_name,
        'id': SystemPlatform.id,
        'inventory_id': SystemPlatform.inventory_id,
        'last_evaluation': SystemPlatform.last_evaluation,
        'last_upload': SystemPlatform.last_upload,
        'rules_evaluation': SystemPlatform.advisor_evaluated,
        'status_id': Status.id,
        'status': Status.id,
        'updated': InventoryHosts.updated,
        'first_reported': SystemVulnerabilities.first_reported,
        'os': OS_INFO_SORT,
        # Raw-SQL keys reference computed/aliased columns of the query.
        'advisory_available': SQL('advisory_available'),
        'remediation': SQL('remediation_type_id'),
    }
    # Most recently updated first, id as tie-breaker.
    default_sort_columns = {
        'default': ['-updated', 'id'],
    }
    filterable_columns = {'display_name': SystemPlatform.display_name}
    filter_expressions = {}
    super().__init__(query, sortable_columns, default_sort_columns, filterable_columns,
                     filter_expressions, list_args, parsed_args, uri)
class Product(db.Model):
    """Product catalog entry, backed by the legacy "products" table (several
    fields map to legacy column names via column_name)."""

    # Legacy column is "camp_id"; exposed as `base` with a `base_id` raw id.
    base = UIntForeignKeyField(
        column_name="camp_id",
        field="id",
        model=Base,
        null=True,
        on_update="CASCADE",
        object_id_name="base_id",
    )
    category = UIntForeignKeyField(
        column_name="category_id",
        field="id",
        model=ProductCategory,
        null=True,
    )
    comments = CharField(null=True)
    created_on = DateTimeField(column_name="created", null=True)
    # Audit FKs: keep the row but null the reference when the user is deleted.
    created_by = UIntForeignKeyField(
        model=User,
        column_name="created_by",
        field="id",
        null=True,
        on_delete="SET NULL",
        on_update="CASCADE",
    )
    # Soft-delete timestamp; None means the product is active.
    deleted = DateTimeField(null=True, default=None)
    gender = UIntForeignKeyField(
        column_name="gender_id",
        field="id",
        model=ProductGender,
        on_update="CASCADE",
    )
    last_modified_on = DateTimeField(column_name="modified", null=True)
    last_modified_by = UIntForeignKeyField(
        model=User,
        column_name="modified_by",
        field="id",
        null=True,
        on_delete="SET NULL",
        on_update="CASCADE",
    )
    name = CharField()
    size_range = UIntForeignKeyField(
        column_name="sizegroup_id",
        field="id",
        model=SizeRange,
        null=True,
        on_update="CASCADE",
    )
    in_shop = IntegerField(
        column_name="stockincontainer", constraints=[SQL("DEFAULT 0")]
    )
    price = IntegerField(column_name="value", constraints=[SQL("DEFAULT 0")])

    class Meta:
        table_name = "products"
def count(self, database, clear_limit=False):
    """Return the number of rows this query yields by wrapping it and running
    SELECT COUNT(1) over the wrapped subquery; `clear_limit` strips any
    LIMIT/OFFSET first."""
    # Ordering is irrelevant to the count, so drop it before wrapping.
    clone = self.order_by().alias('_wrapped')
    if clear_limit:
        clone._limit = clone._offset = None
    try:
        # For a plain query (no HAVING/window/DISTINCT) the selected columns
        # cannot change the row count, so select a constant to cheapen it.
        if clone._having is None and clone._windows is None and \
           clone._distinct is None and clone._simple_distinct is not True:
            clone = clone.select(SQL('1'))
    except AttributeError:
        # Not a plain Select (e.g. a compound query) — leave the clone as-is.
        pass
    return Select([clone], [fn.COUNT(SQL('1'))]).scalar(database)
class _BaseRecordModel(peewee.Model):
    """Shared base: surrogate PK plus audit timestamps. created_at is left to
    the DB default (NOW()); updated_at is refreshed in save()."""

    id_ = AutoField(column_name='id', primary_key=True)
    created_at = DateTimeField(constraints=[SQL('DEFAULT NOW()')])
    updated_at = DateTimeField(constraints=[SQL('DEFAULT NOW()')])

    class Meta:
        database = database

    def save(self, *args, **kwargs):
        # Touch updated_at with a timezone-aware local timestamp on every save.
        self.updated_at = datetime.now(tzlocal())
        return super().save(*args, **kwargs)