def query_entity_string_cnt_limited_by_entity_type(entity_type):
    """Print the ten most frequent canonical entity names for one entity type."""
    print("#### Entity count by entity type: {}".format(entity_type))
    top_entities = (EntityT1
                    .select(EntityT1.canonical_name,
                            fn.COUNT(EntityT1.canonical_name).alias("count"))
                    .where(EntityT1.entity_type == entity_type)
                    .group_by(EntityT1.canonical_name)
                    .order_by(SQL("count").desc())
                    .limit(10))
    for entry in top_entities:
        print("{}\t{}".format(entry.canonical_name, entry.count))
    print("#### End Entity count by entity type: {}".format(entity_type))
def generate_select(self, query, alias_map=None):
    # Compile a SELECT query to SQL for a T-SQL-style dialect: the row limit
    # is emitted as `TOP n` directly after SELECT instead of a trailing
    # LIMIT clause, and OFFSET is deliberately not supported (see below).
    model = query.model_class
    db = model._meta.database
    alias_map = self.calculate_alias_map(query, alias_map)
    if isinstance(query, CompoundSelect):
        # Compound queries (UNION etc.) are emitted whole, minus the outer
        # parentheses — no SELECT-list handling needed here.
        clauses = [_StripParens(query)]
    else:
        if not query._distinct:
            clauses = [SQL('SELECT')]
        else:
            clauses = [SQL('SELECT DISTINCT')]
            if query._distinct not in (True, False):
                # DISTINCT ON (col, ...) — _distinct holds the column list.
                clauses += [SQL('ON'), EnclosedClause(*query._distinct)]
        # basic support for query limit: translate LIMIT (or an OFFSET with a
        # database-imposed max) into TOP, which must follow SELECT [DISTINCT]
        if query._limit is not None or (query._offset and db.limit_max):
            limit = query._limit if query._limit is not None else db.limit_max
            clauses.append(SQL('TOP %s' % limit))
        select_clause = Clause(*query._select)
        select_clause.glue = ', '
        clauses.extend((select_clause, SQL('FROM')))
        if query._from is None:
            clauses.append(model.as_entity().alias(alias_map[model]))
        else:
            clauses.append(CommaClause(*query._from))
    if query._windows is not None:
        clauses.append(SQL('WINDOW'))
        clauses.append(
            CommaClause(*[
                Clause(SQL(window._alias), SQL('AS'), window.__sql__())
                for window in query._windows
            ]))
    join_clauses = self.generate_joins(query._joins, model, alias_map)
    if join_clauses:
        clauses.extend(join_clauses)
    if query._where is not None:
        clauses.extend([SQL('WHERE'), query._where])
    if query._group_by:
        clauses.extend([SQL('GROUP BY'), CommaClause(*query._group_by)])
    if query._having:
        clauses.extend([SQL('HAVING'), query._having])
    if query._order_by:
        clauses.extend([SQL('ORDER BY'), CommaClause(*query._order_by)])
    # NO OFFSET SUPPORT
    if query._for_update:
        for_update, no_wait = query._for_update
        if for_update:
            stmt = 'FOR UPDATE NOWAIT' if no_wait else 'FOR UPDATE'
            clauses.append(SQL(stmt))
    return self.build_query(clauses, alias_map)
class TradeRecord(BaseModel):
    """Peewee model for one trade order/execution row (table `trade_record`)."""
    # Account and monetary fields; DB-level defaults supplied via SQL constraints.
    account = CharField(constraints=[SQL("DEFAULT ''")])
    amount = DecimalField(10, 2, constraints=[SQL("DEFAULT 0.00")])
    created_at = DateTimeField(null=True)
    updated_at = DateTimeField(null=True)
    # Execution (deal) vs. order placement (entrust) figures.
    deal_num = IntegerField(constraints=[SQL("DEFAULT 0")])
    deal_price = DecimalField(10, 3, constraints=[SQL("DEFAULT 0.000")])
    entrust_no = CharField(constraints=[SQL("DEFAULT ''")])
    entrust_num = IntegerField(constraints=[SQL("DEFAULT 0")])
    entrust_price = DecimalField(10, 3, constraints=[SQL("DEFAULT 0.000")])
    entrust_type = IntegerField(constraints=[SQL("DEFAULT 0")])
    fee = DecimalField(10, 2, constraints=[SQL("DEFAULT 0.00")])
    # Security identification.
    security_code = CharField(constraints=[SQL("DEFAULT ''")])
    security_exchange = CharField(constraints=[SQL("DEFAULT ''")])
    security_name = CharField(constraints=[SQL("DEFAULT ''")])
    security_type = IntegerField(constraints=[SQL("DEFAULT 0")])
    status = IntegerField(constraints=[SQL("DEFAULT 0")])
    triggered_by = CharField(constraints=[SQL("DEFAULT ''")])

    class Meta:
        table_name = 'trade_record'
class Meta:
    # One row per (utterance, property value) pair, enforced at the DB level.
    constraints = [
        SQL('CONSTRAINT utt_id_pv_iq_unique UNIQUE(utterance_id, property_value_id)'
            )
    ]
# Exploratory scratch code: parse one article's raw HTML and probe the
# `article` table for duplicate URLs.  NOTE(review): several expressions
# below compute a result that is immediately discarded — this reads like a
# saved notebook session rather than production code.
mytree = html.parse(StringIO(a.content_raw))
mytree_parsed = mytree.xpath("//*[not(script)]")  ##text only
# remove multiple line breaks #### DONE
# NOTE(review): the return value of this replace chain is discarded, so the
# statement has no effect as written — confirm whether it was meant to be
# reassigned.
a.replace("\r", "\n").replace("\n\n", "").replace(" ", " ").replace(" ", "")
## parse articles
# get number of images per article
# get number of iframes
# get number of twitter / youtube embeds
# Self-join on url: pairs up rows sharing the same URL (duplicate probe).
sql = "SELECT * FROM article a1 INNER JOIN article a2 WHERE a1.url=a2.url"
w = Article.select(SQL(sql))
w.count()
sql_2 = "SELECT url, count(*) FROM article GROUP BY 1 HAVING count(*) > 0"
# w2 = Article.select(Article.url).count().group_
# w2
sql_3 = "SELECT url FROM article WHERE url IN (SELECT url FROM article GROUP BY url HAVING count(*) > 1)"
# NOTE(review): sql_3 is overwritten here, so the duplicate-URL query above
# is never executed.
sql_3 = "SELECT url FROM article WHERE website_id = 2"
rq = RawQuery(sql_3)
aq = Article.select(SQL(sql_3))
i = 0
c = db_connection.db_connection.execute_sql(sql_3)
class Meta:
    # One row per (user, device) pair, enforced at the DB level.
    constraints = [SQL("UNIQUE(user_id,device_id)")]
def get_spawnpoints(cls, southBoundary, westBoundary, northBoundary, eastBoundary):
    """Return one record per spawnpoint inside the given bounding box.

    Each record keeps the disappear time observed with the highest count;
    spawnpoints seen with more than one distinct time are flagged 'special'.
    """
    seconds_into_hour = ((Pokemon.disappear_time.minute * 60) +
                         Pokemon.disappear_time.second).alias('time')
    query = Pokemon.select(Pokemon.latitude,
                           Pokemon.longitude,
                           Pokemon.spawnpoint_id,
                           seconds_into_hour,
                           fn.Count(Pokemon.spawnpoint_id).alias('count'))

    bounds = (northBoundary, southBoundary, westBoundary, eastBoundary)
    if None not in bounds:
        in_box = ((Pokemon.latitude <= northBoundary) &
                  (Pokemon.latitude >= southBoundary) &
                  (Pokemon.longitude >= westBoundary) &
                  (Pokemon.longitude <= eastBoundary))
        query = query.where(in_box)

    query = query.group_by(Pokemon.latitude, Pokemon.longitude,
                           Pokemon.spawnpoint_id, SQL('time'))

    deduped = {}
    for row in query.dicts():
        sp_key = row['spawnpoint_id']
        disappear_time = cls.get_spawn_time(row.pop('time'))
        seen_count = int(row['count'])
        if sp_key not in deduped:
            deduped[sp_key] = row
        else:
            # Same spawnpoint observed with multiple distinct times.
            deduped[sp_key]['special'] = True
        # Keep the time backed by the largest observation count.
        if 'time' not in deduped[sp_key] or seen_count >= deduped[sp_key]['count']:
            deduped[sp_key]['time'] = disappear_time
            deduped[sp_key]['count'] = seen_count

    for record in deduped.values():
        del record['count']
    return list(deduped.values())
class Meta:
    # One row per (server, mxc server, mxc path) triple, enforced at the DB level.
    constraints = [SQL("UNIQUE(server_id, mxc_server, mxc_path)")]
class Meta:
    # One row per (server, content URI) pair, enforced at the DB level.
    constraints = [SQL("UNIQUE(server_id, content_uri)")]
def given_event_str_and_location_output_count_of_actors(event_str, location):
    """Print the top-20 actor strings for events whose mention and location
    contain the given substrings (empty actors excluded)."""
    print("#### Actor counts given event string: {} and location: {}".format(event_str, location))
    matches_event = EventT1.event_mention_str.contains(event_str)
    matches_location = EventT1.location_str.contains(location)
    has_actor = EventT1.actor_str != ""
    top_actors = (EventT1
                  .select(EventT1.actor_str,
                          fn.COUNT(EventT1.actor_str).alias("count"))
                  .where(matches_event & matches_location & has_actor)
                  .group_by(EventT1.actor_str)
                  .order_by(SQL("count").desc())
                  .limit(20))
    for entry in top_actors:
        print("{}\t{}".format(entry.actor_str, entry.count))
    print("#### End Actor counts given event string: {} and location: {}".format(event_str, location))
class Meta:
    # One row per (user, room, token) triple, enforced at the DB level.
    constraints = [SQL("UNIQUE(user_id, room_id, token)")]
def query_entity_string_cnt():
    """Print the ten most common canonical entity names with their counts."""
    print("#### Entity string counts:")
    top_names = (EntityT1
                 .select(EntityT1.canonical_name,
                         fn.COUNT(EntityT1.canonical_name).alias("count"))
                 .group_by(EntityT1.canonical_name)
                 .order_by(SQL("count").desc())
                 .limit(10))
    for entry in top_names:
        print("{}\t{}".format(entry.canonical_name, entry.count))
    print("#### End Entity string counts:")
def query_location_cnt_in_event():
    """Print the 20 most frequent non-empty event location strings."""
    print("#### Location cnt in event")
    top_locations = (EventT1
                     .select(EventT1.location_str,
                             fn.COUNT(EventT1.location_str).alias("count"))
                     .where(EventT1.location_str != "")
                     .group_by(EventT1.location_str)
                     .order_by(SQL("count").desc())
                     .limit(20))
    for entry in top_locations:
        print("{}\t{}".format(entry.location_str, entry.count))
    print("#### End Location cnt in event")
def query_anchor_string_cnt():
    """Print the 20 most frequent event-anchor strings among events that have
    neither an actor nor a location string."""
    print("#### Event anchor string count")
    no_actor = EventT1.actor_str == ""
    no_location = EventT1.location_str == ""
    top_anchors = (EventT1
                   .select(EventT1.event_mention_str,
                           fn.COUNT(EventT1.event_mention_str).alias("count"))
                   .where(no_actor & no_location)
                   .group_by(EventT1.event_mention_str)
                   .order_by(SQL("count").desc())
                   .limit(20))
    for entry in top_anchors:
        print("{}\t{}".format(entry.event_mention_str, entry.count))
    print("#### End Event anchor and string count")
class Meta:
    # One row per (request, account) pair, enforced at the DB level.
    constraints = [SQL("UNIQUE(request_id,account_id)")]
class Meta:
    # Names must be unique, enforced at the DB level.
    constraints = [SQL("UNIQUE(name)")]
class Meta:
    # One row per (account, user) pair, enforced at the DB level.
    constraints = [SQL("UNIQUE(account_id,user_id)")]
class Meta:
    # One row per (user, server) pair, enforced at the DB level.
    constraints = [SQL("UNIQUE(user_id,server_id)")]
async def skills(self, context, recheck=True, send_message=True, since: str = "2019-06-06", limit: int = None, channel_name: str = None):
    """Compute and post (or return) the clan kill/skill rankings table.

    :param context: command invocation context (provides the channel).
    :param recheck: re-scan raid logs before ranking.
    :param send_message: send the table to the channel; if False, return it.
    :param since: ISO date string; only raids starting on/after it count.
    :param limit: legacy knob kept for interface compatibility (recomputed
        below; not used by the queries).
    :param channel_name: optional channel to post to instead of the current one.
    """
    channel = await self.get_channel(context, channel_name) if channel_name else context.channel
    if recheck:
        await channel.send("Checking raidlogs, back in a bit!")
        await self.monitor_clans(self.clans)

    since = datetime.strptime(since, "%Y-%m-%d") if since is not None else datetime.now() - timedelta(days=365)

    if limit is None:
        limit = len(self.clans) * 3
    elif limit == 0:
        limit = None

    clan_names = [x[0] for x in self.clans]

    Skills = Log.alias()
    # Skills used per user across monitored clans, seeded with prior activity.
    skills_query = (Skills.select(Skills.user_id,
                                  (fn.COUNT(Skills.id) + (fn.IFNULL(PriorActivity.skills, 0))).alias("skills"))
                    .join_from(Skills, PriorActivity, JOIN.LEFT_OUTER, on=(Skills.user_id == PriorActivity.id))
                    .join_from(Skills, Raid)
                    .where(Skills.action == RaidAction.DreadMachineUse,
                           Raid.start >= since,
                           Raid.clan_name << clan_names)
                    .group_by(Skills.user_id)
                    .alias("sq"))

    right_joined_skills_query = (PriorActivity.select((PriorActivity.id).alias("user_id"),
                                                      (fn.IFNULL(PriorActivity.skills, skills_query.c.skills)).alias("skills"))
                                 .join_from(PriorActivity, skills_query, JOIN.LEFT_OUTER,
                                            on=(skills_query.c.user_id == PriorActivity.id)))

    skills_query = skills_query | right_joined_skills_query  # DIY FULL OUTER JOIN

    # Kills per user, seeded with prior activity.
    kills_query = (Log.select(Log.user_id, Log.username.alias("Username"),
                              (fn.SUM(Log.turns) + (fn.IFNULL(PriorActivity.kills, 0))).alias("kills"))
                   .join_from(Log, PriorActivity, JOIN.LEFT_OUTER, on=(Log.user_id == PriorActivity.id))
                   .join_from(Log, Raid)
                   .where(Log.action == RaidAction.Victory,
                          Raid.name == "dreadsylvania",
                          Raid.start >= since,
                          # BUGFIX: was `Raid.clan_name in [...]`, which Python
                          # evaluates to a plain bool instead of building a SQL
                          # IN clause; use `<<` like the skills query above.
                          Raid.clan_name << clan_names)
                   .group_by(Log.user_id))

    rankings_query = (Log.select(kills_query.c.username.alias("Username"),
                                 kills_query.c.kills.alias("Kills"),
                                 fn.IFNULL(skills_query.c.skills, 0).alias("Skills"),
                                 # +0.5 avoids division by zero for skill-less users.
                                 (kills_query.c.kills / (fn.IFNULL(skills_query.c.skills, 0) + 0.5)).alias("KillsPerSkill"))
                      .join_from(Log, skills_query, JOIN.LEFT_OUTER, on=(Log.user_id == skills_query.c.user_id))
                      .join_from(Log, kills_query, JOIN.LEFT_OUTER, on=(Log.user_id == kills_query.c.user_id))
                      .group_by(kills_query.c.user_id)
                      .order_by(SQL("KillsPerSkill").desc()))

    rankings = [row for row in rankings_query.dicts()
                if row["Username"] and row["Username"].lower() not in excluded_list]

    table = tabulate(rankings, headers="keys")
    table = table[:1900]  # keep the final message under Discord's length cap

    message = "__SKILL RANKINGS__ \n```\n{}\n```".format(table)

    if channel_name:
        await context.send("Sending skills to {}".format(channel.name))
    if send_message:
        await channel.send(message)
    else:
        return message
def handle_get(cls, **kwargs):
    # pylint: disable=singleton-comparison, too-many-branches, too-many-statements
    """Build the account-wide vulnerability dashboard payload.

    Returns a dict with the system count, CVE totals bucketed by CVSS
    severity (with known-exploit counts and percentages), recent-CVE counts,
    rule counts by severity, and the top 3 CVEs and rules.  The empty
    skeleton is returned early for unknown accounts or accounts with no
    systems.
    """
    retval = {
        'system_count': 0,
        'cves_total': 0,
        'cves_by_severity': {
            '0to3.9': {'percentage': 0, 'count': 0, 'known_exploit_count': 0},
            '4to7.9': {'percentage': 0, 'count': 0, 'known_exploit_count': 0},
            '8to10': {'percentage': 0, 'count': 0, 'known_exploit_count': 0},
            'na': {'percentage': 0, 'count': 0, 'known_exploit_count': 0}
        },
        'recent_cves': {'last7days': 0, 'last30days': 0, 'last90days': 0},
        'rules_total': 0,
        'rules_by_severity': {
            1: {'rule_count': 0, 'systems_affected': 0},
            2: {'rule_count': 0, 'systems_affected': 0},
            3: {'rule_count': 0, 'systems_affected': 0},
            4: {'rule_count': 0, 'systems_affected': 0},
        },
        'top_cves': [],
        'top_rules': [],
    }
    rh_account, cve_cache_from, cve_cache_keepalive = get_account_data(connexion.context['user'])
    if rh_account is None:
        return retval
    retval['system_count'] = get_system_count(rh_account)
    if retval['system_count'] == 0:
        return retval
    # API using cache, set keepalive for account to enable maintaining cache
    update_cve_cache_keepalive(rh_account, cve_cache_keepalive)
    # Use cache if not disabled + cache exists
    if not DISABLE_ACCOUNT_CACHE and cve_cache_from:
        count_query = (CveAccountCache
                       .select(CveAccountCache.cve_id.alias("cve_id_"),
                               CveAccountCache.systems_affected.alias("systems_affected_"))
                       .where(CveAccountCache.rh_account_id == rh_account))
    else:
        # Live count of affected systems per CVE, restricted to this
        # account's active (not opted-out/stale/deleted/edge) systems.
        count_query = (SystemVulnerabilities
                       .select(SystemVulnerabilities.cve_id.alias("cve_id_"),
                               fn.Count(SystemVulnerabilities.id).alias("systems_affected_"))
                       .join(SystemPlatform, on=(SystemVulnerabilities.system_id == SystemPlatform.id) &
                                                (SystemPlatform.rh_account_id == rh_account) &
                                                (SystemPlatform.opt_out == False) &
                                                (SystemPlatform.stale == False) &
                                                (SystemPlatform.when_deleted.is_null(True)) &
                                                (fn.COALESCE(SystemPlatform.host_type, 'null') != HostType.EDGE))
                       .where(SystemVulnerabilities.rh_account_id == rh_account)
                       # Keep vulnerabilities that are unmitigated, or whose
                       # mitigating rule is no longer active.
                       .where((SystemVulnerabilities.mitigation_reason.is_null(True)) |
                              (SystemVulnerabilities.rule_id << InsightsRule.select(InsightsRule.id).where(
                                  (InsightsRule.active == False) & (~InsightsRule.rule_only))))
                       .where((SystemVulnerabilities.when_mitigated.is_null(True)) |
                              (SystemVulnerabilities.rule_id << InsightsRule.select(InsightsRule.id).where(
                                  (InsightsRule.active == True) & (~InsightsRule.rule_only))))
                       .group_by(SystemVulnerabilities.cve_id))
        count_query = cyndi_join(count_query)
    cve_query = (CveMetadata
                 .select(CveMetadata.id.alias("cve_id"),
                         fn.COALESCE(CveMetadata.cvss3_score, CveMetadata.cvss2_score).alias('cvss_score'),
                         CveMetadata.public_date,
                         CveMetadata.exploits)
                 .join(count_query, JOIN.INNER, on=(CveMetadata.id == count_query.c.cve_id_))
                 .dicts())
    cve_data = [(cve["cvss_score"], cve["public_date"], cve["exploits"]) for cve in cve_query]
    cves_total = len(cve_data)
    if cves_total == 0:
        return retval
    retval['cves_total'] = cves_total
    today = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)  # offset-aware
    last7 = today - timedelta(days=7)
    last30 = today - timedelta(days=30)
    last90 = today - timedelta(days=90)
    # Bucket each CVE by CVSS severity (cvss3 preferred over cvss2 above) and
    # tally recency windows.
    for cvss_score, public_date, exploits in cve_data:
        if cvss_score is None:
            retval["cves_by_severity"]["na"]["count"] += 1
            if exploits:
                retval["cves_by_severity"]["na"]["known_exploit_count"] += 1
        elif cvss_score < 4:
            retval["cves_by_severity"]["0to3.9"]["count"] += 1
            if exploits:
                retval["cves_by_severity"]["0to3.9"]["known_exploit_count"] += 1
        elif 4 <= cvss_score < 8:
            retval["cves_by_severity"]["4to7.9"]["count"] += 1
            if exploits:
                retval["cves_by_severity"]["4to7.9"]["known_exploit_count"] += 1
        elif cvss_score >= 8:
            retval["cves_by_severity"]["8to10"]["count"] += 1
            if exploits:
                retval["cves_by_severity"]["8to10"]["known_exploit_count"] += 1
        if public_date is not None:
            if public_date >= last7:
                retval["recent_cves"]["last7days"] += 1
            if public_date >= last30:
                retval["recent_cves"]["last30days"] += 1
            if public_date >= last90:
                retval["recent_cves"]["last90days"] += 1
    cve_count_by_severity = [v['count'] for v in retval['cves_by_severity'].values()]
    rounded_percentage = round_to_100_percent(cve_count_by_severity)
    for indx, keys in enumerate(retval['cves_by_severity']):
        retval['cves_by_severity'][keys]['percentage'] = rounded_percentage[indx]
    # The algorithm searches for CVEs with cvss score between 8 and 10, and then sort by a number of affected
    # systems if there are not 3 CVE in the 8 to 10 range, then it looks for CVEs in 4 to 8 range, sorted by a
    # number of systems affected. The high-end range check is exclusive that is why 11 here.
    cves_limit = 3
    top_cves = cls._get_top_cves_by_cvss(8.0, 11, count_query, limit=cves_limit)
    cls._build_top_cves(top_cves, retval)
    cves_count = top_cves.count()
    if cves_count < cves_limit:
        next_tier_top = cls._get_top_cves_by_cvss(4.0, 8.0, count_query, limit=cves_limit - cves_count)
        cls._build_top_cves(next_tier_top, retval)
        next_cves_count = next_tier_top.count()
        if next_cves_count < (cves_limit - cves_count):
            last_tier_top = cls._get_top_cves_by_cvss(0.0, 4.0, count_query,
                                                      limit=cves_limit - (cves_count + next_cves_count))
            cls._build_top_cves(last_tier_top, retval)
    # Rule counts and affected-system counts grouped by rule impact (severity).
    rules_breakdown = (SystemVulnerabilities
                       .select(fn.COUNT(fn.Distinct(InsightsRule.id)).alias('rule_count'),
                               InsightsRule.rule_impact.alias('severity'),
                               fn.COUNT(fn.Distinct(SystemVulnerabilities.system_id)).alias('systems_affected'))
                       .join(InsightsRule, on=(SystemVulnerabilities.rule_id == InsightsRule.id))
                       .join(SystemPlatform, on=(SystemVulnerabilities.system_id == SystemPlatform.id) &
                                                (SystemPlatform.rh_account_id == rh_account) &
                                                (SystemPlatform.opt_out == False) &
                                                (SystemPlatform.stale == False) &
                                                (SystemPlatform.when_deleted.is_null(True)) &
                                                (SystemPlatform.last_evaluation.is_null(False) |
                                                 SystemPlatform.advisor_evaluated.is_null(False)) &
                                                (fn.COALESCE(SystemPlatform.host_type, 'null') != HostType.EDGE))
                       .where(SystemVulnerabilities.rh_account_id == rh_account)
                       .where((SystemVulnerabilities.mitigation_reason.is_null(True)) &
                              (SystemVulnerabilities.rule_id << InsightsRule.select(InsightsRule.id).where(
                                  (InsightsRule.active == True) & (~InsightsRule.rule_only))))
                       .group_by(InsightsRule.rule_impact)
                       .dicts())
    rules_breakdown = cyndi_join(rules_breakdown)
    for section in rules_breakdown:
        retval['rules_by_severity'][section['severity']]['rule_count'] = section['rule_count']
        retval['rules_by_severity'][section['severity']]['systems_affected'] = section['systems_affected']
    retval['rules_total'] = sum([item['rule_count'] for item in rules_breakdown])
    # Top 3 rules: from the per-account cache when available, otherwise a
    # live aggregation over system vulnerabilities.
    if not DISABLE_ACCOUNT_CACHE and cve_cache_from:
        top_rules = (RuleAccountCache
                     .select(InsightsRule.name.alias('rule_id'),
                             InsightsRule.description_text.alias('name'),
                             InsightsRule.rule_impact.alias('severity'),
                             InsightsRule.summary_text.alias('description'),
                             RuleAccountCache.systems_affected,
                             fn.ARRAY_AGG(fn.Distinct(CveMetadata.cve)).alias('associated_cves'))
                     .join(InsightsRule, on=(RuleAccountCache.rule_id == InsightsRule.id))
                     .join(CveRuleMapping, on=(InsightsRule.id == CveRuleMapping.rule_id))
                     .join(CveMetadata, on=(CveRuleMapping.cve_id == CveMetadata.id))
                     .where(RuleAccountCache.rh_account_id == rh_account)
                     .group_by(InsightsRule.name, InsightsRule.description_text, InsightsRule.rule_impact,
                               InsightsRule.summary_text, RuleAccountCache.systems_affected)
                     .order_by(InsightsRule.rule_impact.desc(), RuleAccountCache.systems_affected.desc(),
                               InsightsRule.description_text, InsightsRule.name)
                     .limit(3)
                     .dicts())
    else:
        top_rules = (SystemVulnerabilities
                     .select(InsightsRule.name.alias('rule_id'),
                             InsightsRule.description_text.alias('name'),
                             InsightsRule.rule_impact.alias('severity'),
                             InsightsRule.summary_text.alias('description'),
                             fn.COUNT(fn.Distinct(SystemVulnerabilities.system_id)).alias('systems_affected'),
                             fn.ARRAY_AGG(fn.Distinct(CveMetadata.cve)).alias('associated_cves'))
                     .join(InsightsRule, on=(SystemVulnerabilities.rule_id == InsightsRule.id))
                     .join(CveRuleMapping, on=(InsightsRule.id == CveRuleMapping.rule_id))
                     .join(CveMetadata, on=(CveRuleMapping.cve_id == CveMetadata.id))
                     .join(SystemPlatform, on=(SystemVulnerabilities.system_id == SystemPlatform.id) &
                                              (SystemPlatform.rh_account_id == rh_account) &
                                              (SystemPlatform.opt_out == False) &
                                              (SystemPlatform.stale == False) &
                                              (SystemPlatform.when_deleted.is_null(True)) &
                                              (SystemPlatform.last_evaluation.is_null(False) |
                                               SystemPlatform.advisor_evaluated.is_null(False)) &
                                              (fn.COALESCE(SystemPlatform.host_type, 'null') != HostType.EDGE))
                     .where(SystemVulnerabilities.rh_account_id == rh_account)
                     .where((SystemVulnerabilities.mitigation_reason.is_null(True)) &
                            (SystemVulnerabilities.rule_id << InsightsRule.select(InsightsRule.id).where(
                                (InsightsRule.active == True) & (~InsightsRule.rule_only))))
                     .group_by(InsightsRule.name, InsightsRule.description_text, InsightsRule.rule_impact,
                               InsightsRule.summary_text)
                     .order_by(InsightsRule.rule_impact.desc(), SQL('systems_affected desc'),
                               InsightsRule.description_text, InsightsRule.name)
                     .limit(3)
                     .dicts())
        top_rules = cyndi_join(top_rules)
    for top_rule in top_rules:
        retval['top_rules'].append(top_rule)
    return retval
class Meta:
    # One row per (source, coding system) pair, enforced at the DB level.
    constraints = [
        SQL('CONSTRAINT gp_source_id_cs_id_unique UNIQUE(source_id, coding_system_id)'
            )
    ]
def __init__(self, rh_account_id, cve_cache_from, cve_cache_keepalive, list_args, uri, args):
    """Assemble the per-account CVE list query plus its sorting/filtering
    metadata and hand everything to the base list-view class."""
    join_type = JOIN.INNER
    if args['affecting'] is None or False in args[
            'affecting'] or True not in args['affecting']:
        # Caller did not restrict to "affecting" CVEs only, so keep CVEs
        # with zero affected systems via a LEFT OUTER join.
        join_type = JOIN.LEFT_OUTER
    # API using cache, set keepalive for account to enable maintaining cache
    update_cve_cache_keepalive(rh_account_id, cve_cache_keepalive)
    # Use cache if not disabled + systems are not filtered + cache exists
    if not DISABLE_ACCOUNT_CACHE and not is_cyndi_request(args) and cve_cache_from:
        count_subquery = self._cached_count_subquery(rh_account_id)
    else:
        count_subquery = self._count_subquery(rh_account_id)
        count_subquery = cyndi_join(count_subquery)
        if is_cyndi_request(args):
            # Inventory-based system filters only apply to cyndi requests.
            count_subquery = apply_filters(count_subquery, args, [
                filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
                filter_types.SYSTEM_SAP_SIDS, filter_types.SYSTEM_RHEL_VERSION
            ], {})
    query = self._full_query(rh_account_id, join_type, count_subquery)
    query = apply_filters(query, args, [
        filter_types.CVE_BUSINESS_RISK, filter_types.CVE_CVSS,
        filter_types.CVE_IMPACT, filter_types.CVE_PUBLIC_DATE,
        filter_types.CVE_RULE_PRESENCE, filter_types.CVE_STATUS,
        filter_types.CVE_AFFECTING, filter_types.CVE_KNOWN_EXPLOITS
    ], {"count_subquery": count_subquery})
    query = query.dicts()
    # Maps API sort keys to the expressions the base class may ORDER BY.
    sortable_columns = {
        "systems_affected": SQL('systems_affected'),
        "id": CveMetadata.id,
        "synopsis": CVE_SYNOPSIS_SORT,
        "public_date": CveMetadata.public_date,
        # This assumes we only show one score, and that cvss3 wins over cvss2
        "cvss_score": Case(None, ((CveMetadata.cvss3_score.is_null(True), CveMetadata.cvss2_score),),
                           CveMetadata.cvss3_score),
        "cvss3_score": CveMetadata.cvss3_score,
        "cvss2_score": CveMetadata.cvss2_score,
        "impact_id": CveMetadata.impact_id,
        "impact": CveMetadata.impact_id,
        "business_risk_id": SQL('business_risk_id'),
        "business_risk": SQL('business_risk_id'),
        "status_id": SQL('status_id'),
        "status": SQL('status_id'),
    }
    # Secondary sort key applied after each primary sort choice.
    default_sort_columns = {
        'default': 'id',
        'cvss_score': 'public_date',
        'cvss2_score': 'public_date',
        'cvss3_score': 'public_date',
        'public_date': 'synopsis',
        'impact': 'public_date',
        'systems_affected': 'public_date',
        'business_risk': 'public_date',
        'status': 'public_date',
    }
    filterable_columns = {
        "synopsis": CveMetadata.cve,
        "description": CveMetadata.description,
    }
    if list_args["filter"]:
        # Free-text filter additionally matches CVEs whose associated
        # Insights rule description contains the filter string.
        filter_expressions = {
            "rule_description":
                fn.Exists(
                    CveRuleMapping.select(CveRuleMapping.cve_id).join(
                        InsightsRule,
                        on=(CveRuleMapping.rule_id == InsightsRule.id)).where(
                            CveRuleMapping.cve_id == CveMetadata.id).where(
                                InsightsRule.description_text.contains(
                                    list_args["filter"])))
        }
    else:
        filter_expressions = {}
    super().__init__(query, sortable_columns, default_sort_columns,
                     filterable_columns, filter_expressions, list_args, args, uri)
class Meta:
    # One row per (global value, interview) pair, enforced at the DB level.
    constraints = [
        SQL('CONSTRAINT gv_id_iv_id_unique UNIQUE (global_value_id, interview_id)'
            )
    ]
class Meta:
    # One row per (account, user, device) triple, enforced at the DB level.
    constraints = [SQL("UNIQUE(account_id,user_id,device_id)")]
    table_name = "devicekeys"
def handle_get(cls, **kwargs):
    # pylint: disable=singleton-comparison, too-many-branches, too-many-statements
    """Build the dashboard payload for the account: CVE totals bucketed by
    CVSS severity, exploited-CVE count, recent CVEs and recently published
    rules — honouring inventory tag/SAP system filters when present."""
    retval = {
        'cves_total': 0,
        'cves_by_severity': {
            '0to3.9': {'percentage': 0, 'count': 0, 'known_exploits': 0},
            '4to7.9': {'percentage': 0, 'count': 0, 'known_exploits': 0},
            '8to10': {'percentage': 0, 'count': 0, 'known_exploits': 0},
            'na': {'percentage': 0, 'count': 0, 'known_exploits': 0}
        },
        'exploited_cves_count': 0,
        'system_count': 0,
        'recent_cves': {'last7days': 0, 'last30days': 0, 'last90days': 0},
        'recent_rules': [],
        'rules_cves_total': 0,
    }
    args_desc = [
        {'arg_name': 'tags', 'convert_func': parse_tags},
        {'arg_name': 'sap_system', 'convert_func': None},
        {'arg_name': 'sap_sids', 'convert_func': None}
    ]
    args = cls._parse_arguments(kwargs, args_desc)
    cyndi_request = is_cyndi_request(args)
    rh_account, cve_cache_from, cve_cache_keepalive = get_account_data(
        connexion.context['user'])
    retval['system_count'] = get_system_count(rh_account, True, [
        filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
        filter_types.SYSTEM_SAP_SIDS
    ], args)
    # API using cache, set keepalive for account to enable maintaining cache
    update_cve_cache_keepalive(rh_account, cve_cache_keepalive)
    # Use cache if not disabled + systems are not filtered + cache exists
    if not DISABLE_ACCOUNT_CACHE and not cyndi_request and cve_cache_from:
        active_cves_subquery = (CveAccountCache
                                .select(CveAccountCache.cve_id.alias("cve_id_"))
                                .where(CveAccountCache.rh_account_id == rh_account))
    else:
        # Live set of distinct CVEs present on this account's active
        # (not opted-out/stale/deleted) systems.
        active_cves_subquery = (SystemVulnerabilities
                                .select(fn.Distinct(SystemVulnerabilities.cve_id).alias("cve_id_"))
                                .join(SystemPlatform,
                                      on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
                                          (SystemPlatform.rh_account_id == rh_account) &
                                          (SystemPlatform.opt_out == False) &  # noqa: E712
                                          (SystemPlatform.stale == False) &  # noqa: E712
                                          (SystemPlatform.when_deleted.is_null(True))))
                                .where(SystemVulnerabilities.rh_account_id == rh_account)
                                # Keep vulnerabilities that are unmitigated, or whose
                                # mitigating rule is no longer active.
                                .where((SystemVulnerabilities.mitigation_reason.is_null(True)) |
                                       (SystemVulnerabilities.rule_id << InsightsRule.select(
                                           InsightsRule.id).where((InsightsRule.active == False) &
                                                                  (~InsightsRule.rule_only))))
                                .where((SystemVulnerabilities.when_mitigated.is_null(True)) |
                                       (SystemVulnerabilities.rule_id << InsightsRule.select(
                                           InsightsRule.id).where((InsightsRule.active == True) &
                                                                  (~InsightsRule.rule_only)))))
        if cyndi_request:
            active_cves_subquery = cyndi_join(active_cves_subquery)
            active_cves_subquery = apply_filters(
                active_cves_subquery, args, [
                    filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
                    filter_types.SYSTEM_SAP_SIDS
                ], {})
    query = (CveMetadata.select(
        CveMetadata.cve,
        fn.COALESCE(CveMetadata.cvss3_score, CveMetadata.cvss2_score).alias('cvss_score'),
        CveMetadata.public_date, CveMetadata.id, CveMetadata.exploits).join(
            active_cves_subquery,
            on=(CveMetadata.id == active_cves_subquery.c.cve_id_)).dicts())
    cve_data = [(cve["cvss_score"], cve["public_date"], cve["exploits"])
                for cve in query]
    retval["cves_total"] = len(cve_data)
    retval["exploited_cves_count"] = len(
        [row[2] for row in cve_data if row[2] is True])
    today = datetime.now(timezone.utc).replace(
        hour=0, minute=0, second=0, microsecond=0)  # offset-aware
    last7 = today - timedelta(days=7)
    last30 = today - timedelta(days=30)
    last90 = today - timedelta(days=90)
    rules_date = today - timedelta(days=CFG.dashboard_rules_age)
    # Bucket each CVE by CVSS severity (cvss3 preferred over cvss2 above)
    # and tally recency windows.
    for cvss_score, public_date, exploit in cve_data:
        if cvss_score is None:
            retval["cves_by_severity"]["na"]["count"] += 1
            if exploit:
                retval["cves_by_severity"]["na"]["known_exploits"] += 1
        elif cvss_score < 4:
            retval["cves_by_severity"]["0to3.9"]["count"] += 1
            if exploit:
                retval["cves_by_severity"]["0to3.9"]["known_exploits"] += 1
        elif 4 <= cvss_score < 8:
            retval["cves_by_severity"]["4to7.9"]["count"] += 1
            if exploit:
                retval["cves_by_severity"]["4to7.9"]["known_exploits"] += 1
        elif cvss_score >= 8:
            retval["cves_by_severity"]["8to10"]["count"] += 1
            if exploit:
                retval["cves_by_severity"]["8to10"]["known_exploits"] += 1
        if public_date is not None:
            if public_date >= last7:
                retval["recent_cves"]["last7days"] += 1
            if public_date >= last30:
                retval["recent_cves"]["last30days"] += 1
            if public_date >= last90:
                retval["recent_cves"]["last90days"] += 1
    rounded_percentage = round_to_100_percent(
        [v['count'] for v in retval['cves_by_severity'].values()])
    for indx, keys in enumerate(retval['cves_by_severity']):
        retval['cves_by_severity'][keys][
            'percentage'] = rounded_percentage[indx]
    # Count of distinct CVEs covered by an active (non-rule-only) rule on
    # this account's active systems.
    rules_breakdown = (
        SystemVulnerabilities.select(
            fn.COUNT(fn.Distinct(SystemVulnerabilities.cve_id)).alias('rules_cves_count'))
        .join(CveRuleMapping,
              on=(SystemVulnerabilities.cve_id == CveRuleMapping.cve_id))
        .join(InsightsRule,
              on=((CveRuleMapping.rule_id == InsightsRule.id) &
                  (InsightsRule.active == True) &
                  (~InsightsRule.rule_only)))
        .join(SystemPlatform,
              on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
                  (SystemPlatform.rh_account_id == rh_account) &
                  (SystemPlatform.when_deleted.is_null(True)) &
                  (SystemPlatform.stale == False) &
                  (SystemPlatform.opt_out == False) &  # noqa: E712
                  (SystemPlatform.last_evaluation.is_null(False) |
                   SystemPlatform.advisor_evaluated.is_null(False))))
        .where(SystemVulnerabilities.rh_account_id == rh_account)
        .where(SystemVulnerabilities.mitigation_reason.is_null(True)))
    if cyndi_request:
        rules_breakdown = cyndi_join(rules_breakdown)
        rules_breakdown = apply_filters(rules_breakdown, args, [
            filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
            filter_types.SYSTEM_SAP_SIDS
        ], {})
    rules_breakdown = rules_breakdown.first()
    retval['rules_cves_total'] = rules_breakdown.rules_cves_count
    # Affected-system counts per rule, feeding the recent-rules query below.
    counts_query = (SystemVulnerabilities.select(
        SystemVulnerabilities.rule_id.alias("rule_id_"),
        fn.Count(fn.Distinct(SystemVulnerabilities.system_id)).alias("systems_affected_"))
        .join(SystemPlatform,
              on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
                  (SystemPlatform.rh_account_id == rh_account) &
                  (SystemPlatform.opt_out == False) &
                  (SystemPlatform.stale == False) &
                  (SystemPlatform.when_deleted.is_null(True))))
        .where(SystemVulnerabilities.rh_account_id == rh_account)
        .where((SystemVulnerabilities.rule_id << InsightsRule.select(
            InsightsRule.id).where((InsightsRule.active == True) &
                                   (~InsightsRule.rule_only))) &
               (SystemVulnerabilities.mitigation_reason.is_null(True)))
        .group_by(SystemVulnerabilities.rule_id))
    if cyndi_request:
        counts_query = cyndi_join(counts_query)
        counts_query = apply_filters(counts_query, args, [
            filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
            filter_types.SYSTEM_SAP_SIDS
        ], {})
    # Active rules published within the dashboard window, newest first.
    recent_rules = (
        InsightsRule.select(
            InsightsRule.description_text.alias('name'),
            InsightsRule.summary_text.alias('description'),
            counts_query.c.systems_affected_.alias('systems_affected'),
            InsightsRule.rule_impact.alias('severity'),
            InsightsRule.kbase_node_id.alias('node_id'),
            fn.ARRAY_AGG(fn.Distinct(CveMetadata.cve)).alias('associated_cves'),
            InsightsRule.name.alias('id'),
            InsightsRule.publish_date.alias('public_date'))
        .join(CveRuleMapping, on=(InsightsRule.id == CveRuleMapping.rule_id))
        .join(counts_query, on=(InsightsRule.id == counts_query.c.rule_id_))
        .join(CveMetadata, on=(CveRuleMapping.cve_id == CveMetadata.id))
        .where((InsightsRule.publish_date >= rules_date) &
               (InsightsRule.active == True) &
               (~InsightsRule.rule_only))  # noqa: E712
        .group_by(InsightsRule.description_text, InsightsRule.publish_date,
                  InsightsRule.rule_impact, InsightsRule.kbase_node_id,
                  SQL('systems_affected'), InsightsRule.name,
                  InsightsRule.publish_date, InsightsRule.summary_text)
        .order_by(InsightsRule.publish_date.desc(),
                  InsightsRule.rule_impact,
                  InsightsRule.description_text).dicts())
    recent_rules = apply_filters(recent_rules, args, [],
                                 {"count_subquery": counts_query})
    for rule in recent_rules:
        retval['recent_rules'].append(rule)
    return retval
class Meta:
    # One row per (device, key type) pair, enforced at the DB level.
    constraints = [SQL("UNIQUE(device_id,key_type)")]
class Meta:
    # Table `system.dict`; one enumerator per (layer, column, name) triple.
    table_name = "dict"
    schema = "system"
    constraints = [
        SQL("CONSTRAINT dict_unique UNIQUE (layer_id, column_name, enumerator_name)")
    ]
class Meta:
    # One row per (sender key, session) pair, enforced at the DB level.
    constraints = [SQL("UNIQUE(sender_key,session_id)")]
class TradePosition(BaseModel):
    """Peewee model for one held position row (table `trade_position`)."""
    # Account and valuation fields; DB-level defaults supplied via SQL constraints.
    account = CharField(constraints=[SQL("DEFAULT ''")])
    cost_price = DecimalField(constraints=[SQL("DEFAULT 0.000")])
    cost_value = DecimalField(constraints=[SQL("DEFAULT 0.000")])
    created_at = DateTimeField(null=True)
    current_price = DecimalField(constraints=[SQL("DEFAULT 0.000")])
    current_value = DecimalField(constraints=[SQL("DEFAULT 0.000")])
    num = IntegerField(constraints=[SQL("DEFAULT 0")])
    # Security identification.
    security_code = CharField(constraints=[SQL("DEFAULT ''")])
    security_exchange = CharField(constraints=[SQL("DEFAULT ''")])
    security_name = CharField(constraints=[SQL("DEFAULT ''")])
    security_type = IntegerField(constraints=[SQL("DEFAULT 0")])
    strategy_code = CharField(constraints=[SQL("DEFAULT ''")])

    class Meta:
        table_name = 'trade_position'
def trending_topic(region_id, unit: str, search: str = None, start: datetime = None, end: datetime = None, sum: bool = False, topic_limit=100, lw: float = 1, vw: float = 1, cw: float = 1, rw: float = 1, dw: float = 1):
    """Rank trending tags for a region over [start, end] by a weighted score.

    :param region_id: region to query.
    :param unit: key into ``unit_value`` used to derive the default window.
    :param search: optional substring/tsquery filter on tags.
    :param start, end: date window; defaults to the last ``unit_value[unit] + 2`` days.
    :param sum: unused; kept for interface compatibility (also shadows the
        builtin — left as-is so callers using keywords keep working).
    :param topic_limit: max number of topics returned.
    :param lw, vw, cw, rw, dw: like/view/comment/rank/dislike weights.
    :returns: dict with region info and a 'topic' list sorted by weight desc.
    """
    today = datetime.now()
    today = datetime(year=today.year, month=today.month, day=today.day)
    if end is None:
        end = today
    if start is None:
        start = end - relativedelta(days=unit_value[unit] + 2)
    print(start, end)

    region = Region.get(Region.region_id == region_id)
    result = {
        'id': region.region_id,
        'name': region.name,
        'topic': [],
        'geo': {
            'lat': region.lat,
            'lon': region.lon
        }
    }

    daily_trends = DailyTrend.select().where((DailyTrend.time >= start)
                                             & (DailyTrend.time <= end)
                                             & (DailyTrend.region == region))
    if search is not None and len(search) > 0:
        # Full-text match over the stored metrics JSON.
        exp = NodeList([
            SQL("jsonb_message_to_tsvector("), DailyTrend.metrics,
            SQL(") @@ '{}'".format(search))
        ], glue='')
        daily_trends = daily_trends.where(exp)
    print('size', len(daily_trends))

    def _stats_rows(metrics, date):
        """Flatten one day's metric entries into per-tag stat dicts."""
        rows = []
        for metric in metrics:
            row = metric['stats']
            row['tag'] = metric['tag'].replace('#', '')
            row['date'] = date
            row['category'] = metric['category'] if 'category' in metric else [-1]
            rows.append(row)
        return rows

    daily_metrics = []
    for trend in daily_trends:
        df = pd.DataFrame(_stats_rows(trend.metrics, trend.time))
        if len(df) > 0:
            daily_metrics.append(df)

    if end >= today:
        # Today's data lives in the latest-trend cache, not in DailyTrend.
        from cache import LatestTrend
        try:
            trend = LatestTrend.get(LatestTrend.region_id == region_id)
            today_stats = trend.metrics
        except Exception:  # BUGFIX: was a bare except; best-effort lookup
            today_stats = []
        stats = _stats_rows(today_stats, today)
        if len(stats):
            df = pd.DataFrame(stats)
            if len(df) > 0:
                daily_metrics.append(df)
    print('m size', len(daily_metrics))

    if len(daily_metrics) > 0:
        df = pd.concat(daily_metrics, axis=0)
        if search is not None and len(search) > 0:
            df = df.loc[df['tag'].str.contains(search, regex=False)]
        # NOTE(review): the result of set_index is discarded here (kept for
        # parity with the original behavior); likely intended `df = ...`.
        df.set_index('tag')
        df = df.drop(columns=["date"])
        if 'category' in df.columns:
            # Aggregate per tag: mean of numeric metrics, concatenation of
            # category lists.
            flatten = lambda x: [z for y in x for z in y]
            join_text = lambda x: ', '.join(x.dropna())
            agg_map = dict.fromkeys(
                df[['tag', 'category']].columns.difference(['tag', 'category']),
                join_text)
            agg_map['category'] = flatten
            df1 = df.groupby('tag', as_index=False).agg(agg_map)
            df2 = df[['tag', 'rank', 'view', 'comment', 'like',
                      'dislike']].groupby(['tag'], as_index=False).mean()
            df = pd.concat([df1.set_index('tag'), df2.set_index('tag')],
                           axis=1, join='inner').reset_index()
        else:
            df = df.groupby(['tag'], as_index=False).mean()

        # Weighted score: better rank and engagement raise the weight,
        # dislikes lower it; engagement terms are normalized by views.
        df['weight'] = (101 - df['rank']) * rw + (
            (df['view']) * vw + (df['comment']) * cw + (df['like']) * lw -
            (df['dislike'] * dw)) / df['view']

        topics = df.to_dict(orient='records')
        topics.sort(key=lambda x: x['weight'], reverse=True)
        result['topic'] = []
        for t in topics[:topic_limit]:
            e = {
                'tag': t['tag'],
                'weight': t['weight'],
                'rank': t['rank'],
                'view': t['view'],
                'like': t['like'],
                'dislike': t['dislike'],  # BUGFIX: previously copied t['like']
                'comment': t['comment']
            }
            if 'category' in t:
                e['category'] = list(set(t['category']))
            result['topic'].append(e)
    return result