def query(self):
    """Resolve domains and reconcile the resulting IPs in DNSResolver.

    Marks IPs that vanished since the last run as purged and removes
    rows added this run that duplicate still-active older entries.
    """
    logger.info('Resolver run')
    self.query_v4()
    if self.cfg.IPv6():
        self.query_v6()
    # IPs resolved during the current run (add == current code_id).
    fresh_ips = DNSResolver.select(fn.Distinct(DNSResolver.ip)).where(
        DNSResolver.purge >> None,
        DNSResolver.add == self.code_id)
    # Active IPs resolved on previous runs.
    older_ips = DNSResolver.select(fn.Distinct(DNSResolver.ip)).where(
        DNSResolver.purge >> None,
        DNSResolver.add != self.code_id)
    # Previously-resolved IPs that did not show up this run.
    vanished_ips = DNSResolver.select(fn.Distinct(DNSResolver.ip)).where(
        DNSResolver.purge >> None,
        DNSResolver.add != self.code_id,
        ~(DNSResolver.ip << fresh_ips))
    purged = DNSResolver.update(purge=self.code_id).where(
        DNSResolver.purge >> None,
        DNSResolver.ip << vanished_ips).execute()
    logger.info('Resolver mark ip as old in table DNSResolver: %d', purged)
    # Rows added this run duplicating already-present IPs are dropped.
    duplicates = DNSResolver.delete().where(
        DNSResolver.purge >> None,
        DNSResolver.add == self.code_id,
        DNSResolver.ip << older_ips).execute()
    logger.info('Resolver delete dup ip in table DNSResolver: %d', duplicates)
def wait():
    """Render the waiting page with chosen nominees.

    Admins (or everyone once the stage is 'results') additionally get
    per-nominee vote counts and the current winner per category.
    """
    uid = session.get('osm_uid', 0)
    isadmin = uid in config.ADMINS
    nominees = Nominee.select(Nominee, Vote.user.alias('voteuser')).where(
        Nominee.status == Nominee.Status.CHOSEN).join(
            Vote, JOIN.LEFT_OUTER,
            on=((Vote.nominee == Nominee.id) & (Vote.user == uid) &
                (~Vote.preliminary))).naive()
    # For admin, populate the dict of votes
    winners = {cat: [0, 0] for cat in config.NOMINATIONS}
    if isadmin or config.STAGE == 'results':
        counted = Nominee.select(
            Nominee.id, Nominee.category,
            fn.COUNT(Vote.id).alias('num_votes')).where(
                Nominee.status == Nominee.Status.CHOSEN).join(
                    Vote, JOIN.LEFT_OUTER,
                    on=((Vote.nominee == Nominee.id) &
                        (~Vote.preliminary))).group_by(Nominee.id)
        votes = {}
        for row in counted:
            votes[row.id] = row.num_votes
            if row.num_votes > winners[row.category][1]:
                winners[row.category] = (row.id, row.num_votes)
    else:
        votes = None
    # Count total number of distinct voters.
    total = Vote.select(fn.Distinct(Vote.user)).where(
        ~Vote.preliminary).group_by(Vote.user).count()
    # Turn the {...} marker in the stage description into a download link.
    desc = g.lang['stages'][config.STAGE]['description']
    desc = desc.replace('{', '<a href="{}">'.format(
        url_for('static', filename='osmawards2020.txt'))).replace('}', '</a>')
    return render_template(
        'wait.html', nominees=nominees, description=desc, isadmin=isadmin,
        votes=votes, stage=config.STAGE, total=total, winners=winners,
        isresults=config.STAGE == 'results',
        nominations=config.NOMINATIONS, lang=g.lang)
def user():
    """Show and process the profile settings form for the logged-in user."""
    if not g.user:
        return redirect(url_for('c.front'))
    user = get_user()
    form = UserForm(obj=user)
    if form.is_submitted():
        if not form.validate():
            flash(_('There are some errors, please fix them.'))
        else:
            form.populate_obj(user)
            # Normalize blank optional fields to NULL before saving.
            for field in ('country', 'email', 'description', 'address'):
                value = getattr(form, field).data
                if value is None or not value.strip():
                    setattr(user, field, None)
            user.save()
            flash(_('Profile has been updated.'), 'info')
            return redirect(url_for('c.user'))
    # Active users that both sent and received a confirmed mail code.
    MailCodeAlias = MailCode.alias()
    count_confirmed = (
        User.select(fn.Count(fn.Distinct(User.id)))
        .join_from(User, MailCode,
                   on=((MailCode.sent_by == User.id) &
                       (MailCode.received_on.is_null(False))))
        .join_from(User, MailCodeAlias,
                   on=((MailCodeAlias.sent_to == User.id) &
                       (MailCodeAlias.received_on.is_null(False))))
        .where(User.is_active == True).scalar())
    return render_template('settings.html', form=form,
                           count_confirmed=count_confirmed)
def get_results(party, electiondate):
    """Return presidential race results for *party* on *electiondate*.

    Collects distinct state-level race ids (minus any blacklisted for
    the date), fetches each race's results, and returns them sorted by
    state name.
    """
    ap_party = PARTY_MAPPING[party]['AP']
    race_ids = models.Result.select(
        fn.Distinct(models.Result.raceid),
        models.Result.statename).where(
            models.Result.electiondate == electiondate,
            models.Result.party == ap_party,
            models.Result.level == 'state',
            models.Result.officename == 'President',
        )
    blacklist = app_config.RACE_BLACKLIST.get(electiondate)
    if blacklist:
        race_ids = race_ids.where(~(models.Result.raceid << blacklist))
    # BUG FIX: order_by() returns a new query object; the original
    # discarded its result, so the requested ordering was never applied.
    race_ids = race_ids.order_by(models.Result.statename,
                                 models.Result.raceid)
    # Get copy once and reuse it for every race.
    copy_obj = copytext.Copy(app_config.COPY_PATH)
    copy = copy_obj['meta']._serialize()
    output = [
        get_race_results(race.raceid, ap_party, copy, race.statename)
        for race in race_ids
    ]
    return sorted(output, key=lambda k: k['statename'])
def setup_keyword(self, board_type):
    """Load configured keywords into self.KEYWORD and the match tree,
    then register a fixed set of built-in epidemic keywords."""
    for row in webapp_keyword.select(
            fn.Distinct(webapp_keyword.keyword).alias('keyword')):
        self.KEYWORD.append(row.keyword)
        self.tree.add(row.keyword.encode('utf-8'))
    # NOTE(review): built-ins use tree.append() with str while DB keywords
    # use tree.add() with encoded bytes — confirm this asymmetry is intended.
    for kw in ['疫情', '流感', 'A肝', 'A型肝炎', '登革熱', '腸病毒']:
        self.tree.append(kw)
def ip_show():
    """Print every distinct blocked IP, with a /mask suffix for subnets."""
    for row in IP.select(fn.Distinct(IP.ip)):
        if row.mask < 32:
            print(row.ip + '/' + str(row.mask))
        else:
            print(row.ip)
    return True
def set_data_for(self, value=None):
    """Populate self.data with names of products that have reports.

    When *value* is given, keep only reported products whose name
    contains it, ordered by name descending.
    """
    reported = [Product.get(id=rpt.product_id).name
                for rpt in Report.select(fn.Distinct(Report.product))]
    if value:
        reported = [prod.name
                    for prod in Product.select()
                    .where(Product.name.contains(value))
                    .where(Product.name << reported)
                    .order_by(Product.name.desc())]
    self.data = [(name, "") for name in reported]
def get_yearly_unique_commenters_count():
    """Return [(year_iso_date, unique_commenter_count), ...] ascending by year."""
    per_year = Comment.select(
        fn.date_trunc('year', Comment.created),
        fn.count(fn.Distinct(Comment.commenter_id))).group_by(
            fn.date_trunc('year', Comment.created)).order_by(
                SQL('date_trunc').asc())
    return [(moment.date().isoformat(), count)
            for moment, count in per_year.tuples()]
def dependents(self):
    """Find the `Collections` that are consumed by `Activities` in this
    `Collection`, excluding this collection itself."""
    qs = (Exchange.select(fn.Distinct(Flow.collection_id)).join(
        Flow, on=(Exchange.flow_id == Flow.id)).join(
            Activity, on=(Exchange.activity_id == Activity.id)).where(
                Activity.collection_id == self.id).tuples())
    # BUG FIX: .tuples() yields 1-tuples, so the original compared a tuple
    # against self.id — always unequal — and never excluded this collection;
    # the id list also contained tuples instead of scalars.
    ids = [row[0] for row in qs if row[0] != self.id]
    return Collection.select().where(Collection.id << ids)
def context_list():
    """Return JSON listing entry and distinct-line counts per context."""
    rows = list(
        Entry.select(
            Entry.context,
            fn.Count(Entry.id).alias("entries"),
            fn.Count(fn.Distinct(Entry.line)).alias("lines"))
        .group_by(Entry.context).dicts())
    print(rows)  # debug output kept from original
    return jsonify(dict(ok=True, data=rows))
def play_fake_results(update_interval=60):
    """
    Play back faked results, poll closing time by poll closing time
    """
    # NOTE: Python 2 module (print statements).
    import models
    from peewee import fn
    from app_utils import eastern_now
    print "Playing back results, ctrl-c to stop"
    # Each closing time appears twice: first pass fakes precinct counts,
    # second pass marks the races as called.
    ct_query = models.Race.select(fn.Distinct(
        models.Race.poll_closing_time)).order_by(models.Race.poll_closing_time)
    closing_times = [ct.poll_closing_time for ct in ct_query]
    closing_times = closing_times * 2
    closing_times.sort()
    try:
        for i, ct in enumerate(closing_times):
            races = models.Race.select().where(
                models.Race.poll_closing_time == ct)
            if i % 2 == 0:
                # First visit of this closing time: fake precinct counts.
                print "Polls close at %s, precincts reporting" % ct
                for race in races:
                    race.precincts_total = random.randint(2000, 4000)
                    race.precincts_reporting = random.randint(
                        200, race.precincts_total - 200)
                    _fake_results(race)
                    race.last_updated = eastern_now()
                    race.save()
            else:
                # Second visit: call the races and push reporting near total.
                print "Races are called!"
                for race in races:
                    race.ap_called = True
                    race.ap_called_time = datetime.utcnow()
                    race.precincts_reporting = random.randint(
                        race.precincts_total - 500, race.precincts_total)
                    _fake_results(race)
                    race.last_updated = eastern_now()
                    race.save()
            #if app_config.DEPLOYMENT_TARGET:
                #execute('liveblog.update')
                #execute('deploy_liveblog')
                #execute('deploy_bop')
                #execute('deploy_big_boards')
                #execute('deploy_results')
            sleep(float(update_interval))
        # All closing times consumed: reset and loop forever.
        print "All done, resetting results"
        reset_results()
        play_fake_results()
    except KeyboardInterrupt:
        print "ctrl-c pressed, resetting results"
        reset_results()
def url_show():
    """Print every distinct blocked URL, then domain-type items as URLs."""
    for row in URL.select(fn.Distinct(URL.url)):
        print(row.url)
    for item in Item.select():
        if item.blockType == 'domain':
            print('http://' +
                  Domain.get(Domain.item == item.content_id).domain)
    return True
def check_diff(self):
    """Check whether the latest registry snapshot differs from previous ones.

    Returns True (and flags the newest History row for dumping) when any
    IP, domain, or URL was added or purged in the most recent run.
    """
    recent_ids = [row.id for row in History.select(History.id).order_by(
        History.id.desc()).limit(self.cfg.DiffCount())]
    newest = recent_ids[0]
    ip_added = IP.select(fn.Count(fn.Distinct(IP.ip))) \
        .join(Item).where(IP.add == newest).scalar()
    ip_purged = IP.select(fn.Count(fn.Distinct(IP.ip))) \
        .join(Item).where(IP.purge == newest).scalar()
    domain_added = Domain.select(fn.Count(fn.Distinct(Domain.domain))) \
        .join(Item).where(Domain.add == newest).scalar()
    domain_purged = Domain.select(fn.Count(fn.Distinct(Domain.domain))) \
        .join(Item).where(Domain.purge == newest).scalar()
    url_added = URL.select(fn.Count(fn.Distinct(URL.url))) \
        .join(Item).where(URL.add == newest).scalar()
    url_purged = URL.select(fn.Count(fn.Distinct(URL.url))) \
        .join(Item).where(URL.purge == newest).scalar()
    if any((ip_added, ip_purged, domain_added, domain_purged,
            url_added, url_purged)):
        History.update(dump=True).where(History.id == newest).execute()
        return True
    # History.update(dump=False).where(History.id == idx_list[0]).execute()
    return False
def _jobs(self):
    # Yield (RexJob, priority) pairs for exploitable crashes, one
    # challenge set at a time.
    for cs in self.single_cb_challenge_sets():
        # does this fetch blobs? can we do the filter with the query?
        crashes = self._filter_non_exploitable(cs.crashes)
        # Crash PCs we have already explored or exploited.
        encountered_subquery = crashes.select(fn.Distinct(Crash.crash_pc)) \
            .where((Crash.explored) | (Crash.exploited))
        categories = dict()
        for vulnerability in PRIORITY_MAP.keys():
            # Unseen crash PCs first, then already-encountered ones,
            # both ordered by ascending basic-block count.
            high_priority = crashes.select(Crash.id).where((Crash.kind == vulnerability) \
                & ~(Crash.crash_pc << encountered_subquery)).order_by(Crash.bb_count.asc())
            low_priority = crashes.select(Crash.id).where((Crash.kind == vulnerability) \
                & (Crash.crash_pc << encountered_subquery)).order_by(Crash.bb_count.asc())
            if high_priority or low_priority:
                # Cap the combined feed at FEED_LIMIT entries per kind.
                sliced = itertools.islice(
                    itertools.chain(high_priority, low_priority), FEED_LIMIT)
                categories[vulnerability] = sliced
        type1_exists = cs.has_type1
        type2_exists = cs.has_type2
        # normalize by ids
        for kind in categories:
            for priority, crash in self._normalize_sort(
                    BASE_PRIORITY, categories[kind]):
                job = RexJob(cs=cs, payload={'crash_id': crash.id},
                             request_cpu=1, request_memory=4096,
                             limit_memory=25600, limit_time=30 * 60)
                # Lower priority when further PoVs are unlikely to help.
                if type1_exists and type2_exists:
                    priority = BASE_PRIORITY
                # we have type1s? lower the priority of ip_overwrites
                if type1_exists and crash.kind == 'ip_overwrite':
                    priority = BASE_PRIORITY
                # we have types2? lower the priority
                if type2_exists and crash.kind == 'arbitrary_read':
                    priority = BASE_PRIORITY
                LOG.debug(
                    "Yielding RexJob for %s with crash %s priority %d",
                    cs.name, crash.id, priority)
                yield (job, priority)
def get_accounts_types(bank=None, db_user=None):
    """Return distinct account labels (account id stripped), optionally
    filtered by user and/or bank name.

    Raises BankNotFoundError when *bank* does not exist.
    """
    query = Account.select(
        fn.Distinct(Account.label.strip(Account.account_id)))
    if db_user is not None:
        query = query.where(Account.user == db_user)
    if bank is not None:
        db_bank = Bank.get_or_none(name=bank)
        if db_bank is None:
            raise BankNotFoundError(bank)
        query = query.where(Account.bank == db_bank)
    return [label.strip("()") for label in query]
def get_bank_users(bank_name=None):
    """Return the number of distinct users per bank, optionally for one bank.

    Raises BankNotFoundError when *bank_name* does not exist.
    """
    query = (User.select(fn.count(fn.Distinct(User.id)), Bank.name)
             .join(Account, on=(User.id == Account.user_id))
             .join(Bank).group_by(Bank))
    if bank_name is not None:
        db_bank = Bank.get_or_none(name=bank_name)
        if db_bank is None:
            raise BankNotFoundError(bank_name)
        query = query.where(Bank.name == bank_name)
    print(query)  # debug output kept from original
    return [{
        "name": row.account.bank.name,
        "number of users": row.count
    } for row in query]
def voting():
    """Called from login(), a convenience method."""
    if 'osm_token' not in session:
        return redirect(url_for('login'))
    if config.STAGE != 'voting':
        return redirect(url_for('login'))
    uid = session['osm_uid']
    isadmin = uid in config.ADMINS
    chosen = Nominee.select(Nominee, Vote.user.alias('voteuser')).where(
        Nominee.chosen).join(
            Vote, JOIN.LEFT_OUTER,
            on=((Vote.nominee == Nominee.id) & (Vote.user == uid) &
                (~Vote.preliminary))).naive()
    # Shuffle deterministically per user so each voter sees a stable order.
    nominees = list(chosen)
    rnd = Random()
    rnd.seed(uid)
    rnd.shuffle(nominees)
    # For admin, populate the dict of votes
    if isadmin:
        counted = Nominee.select(
            Nominee.id, fn.COUNT(Vote.id).alias('num_votes')).where(
                Nominee.chosen).join(
                    Vote, JOIN.LEFT_OUTER,
                    on=((Vote.nominee == Nominee.id) &
                        (~Vote.preliminary))).group_by(Nominee.id)
        votes = {row.id: row.num_votes for row in counted}
    else:
        votes = None
    # Count total number of distinct voters.
    total = Vote.select(fn.Distinct(Vote.user)).where(
        ~Vote.preliminary).group_by(Vote.user).count()
    return render_template('voting.html', nominees=nominees,
                           year=date.today().year, isadmin=isadmin,
                           votes=votes, stage=config.STAGE, total=total,
                           nominations=config.NOMINATIONS, lang=g.lang)
def get_bank_transactions(bank_name=None):
    """Return per-bank transaction counts and amount totals, optionally
    restricted to a single bank.

    Raises BankNotFoundError when *bank_name* does not exist.
    """
    query = (Transaction.select(
        fn.COUNT(fn.Distinct(Transaction.id)),
        fn.sum(Transaction.amount), Bank.name)
        .join(Account, on=(Transaction.account_id == Account.id))
        .join(Bank).group_by(Bank.name))
    if bank_name is not None:
        db_bank = Bank.get_or_none(name=bank_name)
        if db_bank is None:
            raise BankNotFoundError(bank_name)
        query = query.where(Bank.name == bank_name)
    return [{
        "name": row.account.bank.name,
        "number of transactions": row.count,
        "total amount": row.sum
    } for row in query]
def revert():
    """Show a confirmation page before reverting the requested changes."""
    if 'osm_token' not in session:
        # Remember the request and send the user through OAuth first.
        session['objects'] = request.args.get('objects')
        return openstreetmap.authorize(callback=url_for('oauth'))
    objects = request.args.get('objects').split(',')
    q = Change.select(fn.Distinct(Change.user)).where(
        Change.id << objects).tuples()
    names = [ch[0].encode('utf-8') for ch in q]
    return '''
You are to revert {count} edit{s} by {names}.<br>
<a href="javascript:window.close();">Close this window</a>
if you pressed the button by mistake.<br><br>
<form action="{action}" method="get">
<input type="hidden" name="objects" value="{objects}">
<input type="submit" value="Continue with the revert">
</form>'''.format(
        count=len(objects),
        objects=','.join(objects),
        s=('' if len(objects) == 1 else 's'),
        names=', '.join(names),
        action=url_for('actual_revert'))
def get_monthly_unique_commenters_count(since=None):
    """Return [(month_iso_date, unique_commenter_count), ...] with gaps
    between months filled, optionally limited to comments since *since*."""
    per_month = Comment.select(
        fn.date_trunc('month', Comment.created),
        fn.count(fn.Distinct(Comment.commenter_id))).group_by(
            fn.date_trunc('month', Comment.created)).order_by(
                SQL('date_trunc').asc())
    if since:
        per_month = per_month.where(Comment.created >= since)
    per_month = per_month.tuples()
    # Build a continuous month counter so empty months appear in output.
    first_month = per_month[0][0]
    last_month = per_month[-1][0]
    months = get_week_or_month_counter(metric='month',
                                       first_metric_value=first_month,
                                       last_metric_value=last_month)
    return fill_output_with_default_values(metric_counter=months,
                                           output=per_month)
def delegates_json():
    """Serialize national and per-state delegate counts for both parties."""
    whitelist = DELEGATE_WHITELIST['gop'] + DELEGATE_WHITELIST['dem']
    data = OrderedDict()
    data['nation'] = OrderedDict((('dem', []), ('gop', [])))
    for party in ['dem', 'gop']:
        national = models.CandidateDelegates.select().where(
            models.CandidateDelegates.party ==
            utils.PARTY_MAPPING[party]['AP'],
            models.CandidateDelegates.level == 'nation',
            models.CandidateDelegates.last << whitelist).order_by(
                -models.CandidateDelegates.delegates_count,
                models.CandidateDelegates.last)
        data['nation'][party] = [model_to_dict(row) for row in national]
    # One section per state, each with both parties' candidates.
    states = models.CandidateDelegates \
        .select(fn.Distinct(models.CandidateDelegates.state)) \
        .order_by(models.CandidateDelegates.state)
    for state_obj in states:
        data[state_obj.state] = OrderedDict()
        for party in ['dem', 'gop']:
            state_rows = models.CandidateDelegates.select().where(
                models.CandidateDelegates.party ==
                utils.PARTY_MAPPING[party]['AP'],
                models.CandidateDelegates.state == state_obj.state,
                models.CandidateDelegates.level == 'state',
                models.CandidateDelegates.last << whitelist).order_by(
                    -models.CandidateDelegates.delegates_count,
                    models.CandidateDelegates.last)
            data[state_obj.state][party] = [model_to_dict(row)
                                            for row in state_rows]
    data['last_updated'] = utils.get_delegates_updated_time()
    return json.dumps(data, use_decimal=True, cls=utils.APDatetimeEncoder)
def voting():
    """Called from login(), a convenience method."""
    if 'osm_token' not in session:
        return redirect(url_for('list_chosen'))
    if config.STAGE != 'voting':
        return redirect(url_for('login'))
    uid = session['osm_uid']
    isadmin = uid in config.ADMINS
    chosen = Nominee.select(Nominee, Vote.user.alias('voteuser')).where(
        Nominee.status == Nominee.Status.CHOSEN).join(
            Vote, JOIN.LEFT_OUTER,
            on=((Vote.nominee == Nominee.id) & (Vote.user == uid) &
                (~Vote.preliminary))).naive()
    # Shuffle deterministically per user so each voter sees a stable order.
    nominees = list(chosen)
    rnd = Random()
    rnd.seed(uid)
    rnd.shuffle(nominees)
    # Make a dict of categories user voted in
    cats = set(x.category for x in Nominee.select(Nominee.category).join(
        Vote, JOIN.INNER,
        on=((Vote.nominee == Nominee.id) & (~Vote.preliminary) &
            (Vote.user == uid))).distinct())
    # For admin, populate the dict of votes
    if isadmin:
        counted = Nominee.select(
            Nominee.id, fn.COUNT(Vote.id).alias('num_votes')).where(
                Nominee.status == Nominee.Status.CHOSEN).join(
                    Vote, JOIN.LEFT_OUTER,
                    on=((Vote.nominee == Nominee.id) &
                        (~Vote.preliminary))).group_by(Nominee.id)
        votes = {row.id: row.num_votes for row in counted}
    else:
        votes = None
    # Count total number of distinct voters.
    total = Vote.select(fn.Distinct(Vote.user)).where(
        ~Vote.preliminary).group_by(Vote.user).count()
    # Turn the {...} marker in the read-more text into a link.
    readmore = (g.lang['stages']['voting']['readmore']
                .replace('{', '<a href="{}">'.format(
                    g.lang['stages']['voting']['readmore_link']))
                .replace('}', '</a>'))
    return render_template('voting.html', nominees=nominees,
                           isadmin=isadmin, votes=votes, stage=config.STAGE,
                           total=total, voted_cats=cats, readmore=readmore,
                           nominations=config.NOMINATIONS, lang=g.lang)
# Demo queries against the Tweet/User models.
print('')
print('date filter')
arthur_tweets = Tweet.select().join(User).where(
    User.username == 'Arthur',
    Tweet.created_date > datetime(2015, 10, 10)).order_by(Tweet.created_date)
for tweet in arthur_tweets:
    print(tweet)

print('')
print('count tweets')
print(Tweet.select().count())

print('')
print('distinct')
print(Tweet.select(fn.Distinct(Tweet.user)).scalar())

print('')
print('group by')
grouped = Tweet.select(Tweet.user, fn.Count(Tweet.id).alias('ct')) \
    .group_by(Tweet.user) \
    .tuples()
for row in grouped:
    print(row)

print('')
print('extract')
for tweet in Tweet.select(Tweet).where(Tweet.created_date.year == '2015'):
    print(tweet)
def handle_get(cls, **kwargs):  # pylint: disable=singleton-comparison, too-many-branches, too-many-statements
    # Build the vulnerability dashboard payload for the requesting account:
    # CVE totals by severity band, recency buckets, and recent rules.
    retval = {
        'cves_total': 0,
        'cves_by_severity': {
            '0to3.9': {'percentage': 0, 'count': 0, 'known_exploits': 0},
            '4to7.9': {'percentage': 0, 'count': 0, 'known_exploits': 0},
            '8to10': {'percentage': 0, 'count': 0, 'known_exploits': 0},
            'na': {'percentage': 0, 'count': 0, 'known_exploits': 0}
        },
        'exploited_cves_count': 0,
        'system_count': 0,
        'recent_cves': {'last7days': 0, 'last30days': 0, 'last90days': 0},
        'recent_rules': [],
        'rules_cves_total': 0,
    }
    args_desc = [
        {'arg_name': 'tags', 'convert_func': parse_tags},
        {'arg_name': 'sap_system', 'convert_func': None},
        {'arg_name': 'sap_sids', 'convert_func': None}
    ]
    args = cls._parse_arguments(kwargs, args_desc)
    cyndi_request = is_cyndi_request(args)
    rh_account, cve_cache_from, cve_cache_keepalive = get_account_data(connexion.context['user'])
    retval['system_count'] = get_system_count(rh_account, True,
                                              [filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
                                               filter_types.SYSTEM_SAP_SIDS], args)
    # API using cache, set keepalive for account to enable maintaining cache
    update_cve_cache_keepalive(rh_account, cve_cache_keepalive)
    # Use cache if not disabled + systems are not filtered + cache exists
    if not DISABLE_ACCOUNT_CACHE and not cyndi_request and cve_cache_from:
        active_cves_subquery = (CveAccountCache
                                .select(CveAccountCache.cve_id.alias("cve_id_"))
                                .where(CveAccountCache.rh_account_id == rh_account))
    else:
        # Uncached path: distinct CVEs on live, non-edge, non-stale systems,
        # excluding mitigated entries unless the mitigating rule is inactive.
        active_cves_subquery = (SystemVulnerabilities
                                .select(fn.Distinct(SystemVulnerabilities.cve_id).alias("cve_id_"))
                                .join(SystemPlatform, on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
                                                          (SystemPlatform.rh_account_id == rh_account) &
                                                          (SystemPlatform.opt_out == False) &  # noqa: E712
                                                          (SystemPlatform.stale == False) &  # noqa: E712
                                                          (SystemPlatform.when_deleted.is_null(True)) &
                                                          (fn.COALESCE(SystemPlatform.host_type, 'null') != HostType.EDGE)))
                                .where(SystemVulnerabilities.rh_account_id == rh_account)
                                .where((SystemVulnerabilities.mitigation_reason.is_null(True)) |
                                       (SystemVulnerabilities.rule_id << InsightsRule.select(InsightsRule.id)
                                        .where((InsightsRule.active == False) & (~InsightsRule.rule_only))))
                                .where((SystemVulnerabilities.when_mitigated.is_null(True)) |
                                       (SystemVulnerabilities.rule_id << InsightsRule.select(InsightsRule.id)
                                        .where((InsightsRule.active == True) & (~InsightsRule.rule_only)))))
    if cyndi_request:
        active_cves_subquery = cyndi_join(active_cves_subquery)
    active_cves_subquery = apply_filters(active_cves_subquery, args,
                                         [filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
                                          filter_types.SYSTEM_SAP_SIDS], {})
    query = (CveMetadata
             .select(CveMetadata.cve,
                     fn.COALESCE(CveMetadata.cvss3_score, CveMetadata.cvss2_score).alias('cvss_score'),
                     CveMetadata.public_date, CveMetadata.id, CveMetadata.exploits)
             .join(active_cves_subquery, on=(CveMetadata.id == active_cves_subquery.c.cve_id_))
             .dicts())
    cve_data = [(cve["cvss_score"], cve["public_date"], cve["exploits"]) for cve in query]
    retval["cves_total"] = len(cve_data)
    retval["exploited_cves_count"] = len([row[2] for row in cve_data if row[2] is True])
    today = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)  # offset-aware
    last7 = today - timedelta(days=7)
    last30 = today - timedelta(days=30)
    last90 = today - timedelta(days=90)
    rules_date = today - timedelta(days=CFG.dashboard_rules_age)
    # Bucket every CVE by CVSS severity band and by publication recency.
    for cvss_score, public_date, exploit in cve_data:
        if cvss_score is None:
            retval["cves_by_severity"]["na"]["count"] += 1
            if exploit:
                retval["cves_by_severity"]["na"]["known_exploits"] += 1
        elif cvss_score < 4:
            retval["cves_by_severity"]["0to3.9"]["count"] += 1
            if exploit:
                retval["cves_by_severity"]["0to3.9"]["known_exploits"] += 1
        elif 4 <= cvss_score < 8:
            retval["cves_by_severity"]["4to7.9"]["count"] += 1
            if exploit:
                retval["cves_by_severity"]["4to7.9"]["known_exploits"] += 1
        elif cvss_score >= 8:
            retval["cves_by_severity"]["8to10"]["count"] += 1
            if exploit:
                retval["cves_by_severity"]["8to10"]["known_exploits"] += 1
        if public_date is not None:
            if public_date >= last7:
                retval["recent_cves"]["last7days"] += 1
            if public_date >= last30:
                retval["recent_cves"]["last30days"] += 1
            if public_date >= last90:
                retval["recent_cves"]["last90days"] += 1
    rounded_percentage = round_to_100_percent([v['count'] for v in retval['cves_by_severity'].values()])
    for indx, keys in enumerate(retval['cves_by_severity']):
        retval['cves_by_severity'][keys]['percentage'] = rounded_percentage[indx]
    # Count CVEs that have an associated active, non-rule-only Insights rule.
    if not DISABLE_ACCOUNT_CACHE and not cyndi_request and cve_cache_from:
        rules_breakdown = (CveAccountCache.select(fn.COUNT(fn.Distinct(CveAccountCache.cve_id)).alias("rules_cves_count"))
                           .join(CveRuleMapping, on=(CveAccountCache.cve_id == CveRuleMapping.cve_id))
                           .join(InsightsRule, on=((CveRuleMapping.rule_id == InsightsRule.id) &
                                                   (InsightsRule.active == True) & (~InsightsRule.rule_only)))
                           .where(CveAccountCache.rh_account_id == rh_account))
    else:
        rules_breakdown = (SystemVulnerabilities.select(fn.COUNT(fn.Distinct(SystemVulnerabilities.cve_id)).alias('rules_cves_count'))
                           .join(CveRuleMapping, on=(SystemVulnerabilities.cve_id == CveRuleMapping.cve_id))
                           .join(InsightsRule, on=((CveRuleMapping.rule_id == InsightsRule.id) &
                                                   (InsightsRule.active == True) & (~InsightsRule.rule_only)))
                           .join(SystemPlatform, on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
                                                     (SystemPlatform.rh_account_id == rh_account) &
                                                     (SystemPlatform.when_deleted.is_null(True)) &
                                                     (SystemPlatform.stale == False) &
                                                     (SystemPlatform.opt_out == False) &  # noqa: E712
                                                     (SystemPlatform.last_evaluation.is_null(False) |
                                                      SystemPlatform.advisor_evaluated.is_null(False)) &
                                                     (fn.COALESCE(SystemPlatform.host_type, 'null') != HostType.EDGE)))
                           .where(SystemVulnerabilities.rh_account_id == rh_account)
                           .where(SystemVulnerabilities.mitigation_reason.is_null(True)))
    if cyndi_request:
        rules_breakdown = cyndi_join(rules_breakdown)
    rules_breakdown = apply_filters(rules_breakdown, args,
                                    [filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
                                     filter_types.SYSTEM_SAP_SIDS], {})
    rules_breakdown = rules_breakdown.first()
    retval['rules_cves_total'] = rules_breakdown.rules_cves_count
    # Per-rule affected-system counts, cached or computed.
    if not DISABLE_ACCOUNT_CACHE and not cyndi_request and cve_cache_from:
        counts_query = (RuleAccountCache.select(RuleAccountCache.rule_id.alias("rule_id_"),
                                                RuleAccountCache.systems_affected.alias("systems_affected_"))
                        .where(RuleAccountCache.rh_account_id == rh_account))
    else:
        counts_query = (SystemVulnerabilities
                        .select(SystemVulnerabilities.rule_id.alias("rule_id_"),
                                fn.Count(fn.Distinct(SystemVulnerabilities.system_id)).alias("systems_affected_"))
                        .join(SystemPlatform, on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
                                                  (SystemPlatform.rh_account_id == rh_account) &
                                                  (SystemPlatform.opt_out == False) &
                                                  (SystemPlatform.stale == False) &
                                                  (SystemPlatform.when_deleted.is_null(True)) &
                                                  (fn.COALESCE(SystemPlatform.host_type, 'null') != HostType.EDGE)))
                        .where(SystemVulnerabilities.rh_account_id == rh_account)
                        .where((SystemVulnerabilities.rule_id << InsightsRule.select(InsightsRule.id).where((InsightsRule.active == True) & (~InsightsRule.rule_only))) &
                               (SystemVulnerabilities.mitigation_reason.is_null(True)))
                        .group_by(SystemVulnerabilities.rule_id))
    if cyndi_request:
        counts_query = cyndi_join(counts_query)
    counts_query = apply_filters(counts_query, args,
                                 [filter_types.SYSTEM_TAGS, filter_types.SYSTEM_SAP,
                                  filter_types.SYSTEM_SAP_SIDS], {})
    # Recently-published active rules with their affected-system counts
    # and the CVEs they map to.
    recent_rules = (InsightsRule.select(InsightsRule.description_text.alias('name'),
                                        InsightsRule.summary_text.alias('description'),
                                        counts_query.c.systems_affected_.alias('systems_affected'),
                                        InsightsRule.rule_impact.alias('severity'),
                                        InsightsRule.kbase_node_id.alias('node_id'),
                                        fn.ARRAY_AGG(fn.Distinct(CveMetadata.cve)).alias('associated_cves'),
                                        InsightsRule.name.alias('id'),
                                        InsightsRule.publish_date.alias('public_date'))
                    .join(CveRuleMapping, on=(InsightsRule.id == CveRuleMapping.rule_id))
                    .join(counts_query, on=(InsightsRule.id == counts_query.c.rule_id_))
                    .join(CveMetadata, on=(CveRuleMapping.cve_id == CveMetadata.id))
                    .where((InsightsRule.publish_date >= rules_date) &
                           (InsightsRule.active == True) & (~InsightsRule.rule_only))  # noqa: E712
                    .group_by(InsightsRule.description_text, InsightsRule.publish_date,
                              InsightsRule.rule_impact, InsightsRule.kbase_node_id,
                              SQL('systems_affected'), InsightsRule.name,
                              InsightsRule.publish_date, InsightsRule.summary_text)
                    .order_by(InsightsRule.publish_date.desc(), InsightsRule.rule_impact,
                              InsightsRule.description_text)
                    .dicts())
    recent_rules = apply_filters(recent_rules, args, [], {"count_subquery": counts_query})
    for rule in recent_rules:
        retval['recent_rules'].append(rule)
    return retval
def handle_get(cls, **kwargs):  # pylint: disable=singleton-comparison, too-many-branches, too-many-statements
    # Build the executive-report payload: CVE counts by severity and
    # recency, plus top CVEs and rule breakdowns for the account.
    retval = {
        'system_count': 0,
        'cves_total': 0,
        'cves_by_severity': {
            '0to3.9': {'percentage': 0, 'count': 0, 'known_exploit_count': 0},
            '4to7.9': {'percentage': 0, 'count': 0, 'known_exploit_count': 0},
            '8to10': {'percentage': 0, 'count': 0, 'known_exploit_count': 0},
            'na': {'percentage': 0, 'count': 0, 'known_exploit_count': 0}
        },
        'recent_cves': {'last7days': 0, 'last30days': 0, 'last90days': 0},
        'rules_total': 0,
        'rules_by_severity': {
            1: {'rule_count': 0, 'systems_affected': 0},
            2: {'rule_count': 0, 'systems_affected': 0},
            3: {'rule_count': 0, 'systems_affected': 0},
            4: {'rule_count': 0, 'systems_affected': 0},
        },
        'top_cves': [],
        'top_rules': [],
    }
    rh_account, cve_cache_from, cve_cache_keepalive = get_account_data(
        connexion.context['user'])
    if rh_account is None:
        return retval
    retval['system_count'] = get_system_count(rh_account)
    if retval['system_count'] == 0:
        return retval
    # API using cache, set keepalive for account to enable maintaining cache
    update_cve_cache_keepalive(rh_account, cve_cache_keepalive)
    # Use cache if not disabled + cache exists
    if not DISABLE_ACCOUNT_CACHE and cve_cache_from:
        count_query = (CveAccountCache.select(
            CveAccountCache.cve_id.alias("cve_id_"),
            CveAccountCache.systems_affected.alias("systems_affected_")).
            where(CveAccountCache.rh_account_id == rh_account))
    else:
        # Uncached path: per-CVE affected-system counts on live systems,
        # excluding mitigated entries unless the rule state allows them.
        count_query = (
            SystemVulnerabilities.select(
                SystemVulnerabilities.cve_id.alias("cve_id_"),
                fn.Count(
                    SystemVulnerabilities.id).alias("systems_affected_")).
            join(SystemPlatform,
                 on=(SystemVulnerabilities.system_id == SystemPlatform.id) &
                 (SystemPlatform.rh_account_id == rh_account) &
                 (SystemPlatform.opt_out == False) &
                 (SystemPlatform.stale == False) &
                 (SystemPlatform.when_deleted.is_null(True))).where(
                     SystemVulnerabilities.rh_account_id == rh_account).
            where((SystemVulnerabilities.mitigation_reason.is_null(True)) |
                  (SystemVulnerabilities.rule_id << InsightsRule.select(
                      InsightsRule.id).where((InsightsRule.active == False) &
                                             (~InsightsRule.rule_only))))
            .where((SystemVulnerabilities.when_mitigated.is_null(True)) |
                   (SystemVulnerabilities.rule_id << InsightsRule.select(
                       InsightsRule.id).where(
                           (InsightsRule.active == True) &
                           (~InsightsRule.rule_only)))).group_by(
                               SystemVulnerabilities.cve_id))
    count_query = cyndi_join(count_query)
    cve_query = (CveMetadata.select(
        CveMetadata.id.alias("cve_id"),
        fn.COALESCE(CveMetadata.cvss3_score,
                    CveMetadata.cvss2_score).alias('cvss_score'),
        CveMetadata.public_date, CveMetadata.exploits).join(
            count_query, JOIN.INNER,
            on=(CveMetadata.id == count_query.c.cve_id_)).dicts())
    cve_data = [(cve["cvss_score"], cve["public_date"], cve["exploits"])
                for cve in cve_query]
    cves_total = len(cve_data)
    if cves_total == 0:
        return retval
    retval['cves_total'] = cves_total
    today = datetime.now(timezone.utc).replace(
        hour=0, minute=0, second=0, microsecond=0)  # offset-aware
    last7 = today - timedelta(days=7)
    last30 = today - timedelta(days=30)
    last90 = today - timedelta(days=90)
    # Bucket every CVE by CVSS severity band and by publication recency.
    for cvss_score, public_date, exploits in cve_data:
        if cvss_score is None:
            retval["cves_by_severity"]["na"]["count"] += 1
            if exploits:
                retval["cves_by_severity"]["na"][
                    "known_exploit_count"] += 1
        elif cvss_score < 4:
            retval["cves_by_severity"]["0to3.9"]["count"] += 1
            if exploits:
                retval["cves_by_severity"]["0to3.9"][
                    "known_exploit_count"] += 1
        elif 4 <= cvss_score < 8:
            retval["cves_by_severity"]["4to7.9"]["count"] += 1
            if exploits:
                retval["cves_by_severity"]["4to7.9"][
                    "known_exploit_count"] += 1
        elif cvss_score >= 8:
            retval["cves_by_severity"]["8to10"]["count"] += 1
            if exploits:
                retval["cves_by_severity"]["8to10"][
                    "known_exploit_count"] += 1
        if public_date is not None:
            if public_date >= last7:
                retval["recent_cves"]["last7days"] += 1
            if public_date >= last30:
                retval["recent_cves"]["last30days"] += 1
            if public_date >= last90:
                retval["recent_cves"]["last90days"] += 1
    cve_count_by_severity = [
        v['count'] for v in retval['cves_by_severity'].values()
    ]
    rounded_percentage = round_to_100_percent(cve_count_by_severity)
    for indx, keys in enumerate(retval['cves_by_severity']):
        retval['cves_by_severity'][keys][
            'percentage'] = rounded_percentage[indx]
    # The algorithm searches for CVEs with cvss score between 8 and 10, and then sort by a number of affected
    # systems if there are not 3 CVE in the 8 to 10 range, then it looks for CVEs in 4 to 8 range, sorted by a
    # number of systems affected. The high-end range check is exclusive that is why 11 here.
    cves_limit = 3
    top_cves = cls._get_top_cves_by_cvss(8.0, 11, count_query,
                                         limit=cves_limit)
    cls._build_top_cves(top_cves, retval)
    cves_count = top_cves.count()
    if cves_count < cves_limit:
        next_tier_top = cls._get_top_cves_by_cvss(
            4.0, 8.0, count_query, limit=cves_limit - cves_count)
        cls._build_top_cves(next_tier_top, retval)
        next_cves_count = next_tier_top.count()
        if next_cves_count < (cves_limit - cves_count):
            last_tier_top = cls._get_top_cves_by_cvss(
                0.0, 4.0, count_query,
                limit=cves_limit - (cves_count + next_cves_count))
            cls._build_top_cves(last_tier_top, retval)
    # Rule counts and affected systems per rule impact (severity).
    rules_breakdown = (SystemVulnerabilities.select(
        fn.COUNT(fn.Distinct(InsightsRule.id)).alias('rule_count'),
        InsightsRule.rule_impact.alias('severity'),
        fn.COUNT(fn.Distinct(
            SystemVulnerabilities.system_id)).alias('systems_affected')
    ).join(
        InsightsRule,
        on=(SystemVulnerabilities.rule_id == InsightsRule.id)
    ).join(
        SystemPlatform,
        on=(SystemVulnerabilities.system_id == SystemPlatform.id) &
        (SystemPlatform.rh_account_id == rh_account) &
        (SystemPlatform.opt_out == False) &
        (SystemPlatform.stale == False) &
        (SystemPlatform.when_deleted.is_null(True)) &
        (SystemPlatform.last_evaluation.is_null(False) |
         SystemPlatform.advisor_evaluated.is_null(False))).where(
             SystemVulnerabilities.rh_account_id == rh_account).where(
                 (SystemVulnerabilities.mitigation_reason.is_null(True)) &
                 (SystemVulnerabilities.rule_id << InsightsRule.select(
                     InsightsRule.id).where(
                         (InsightsRule.active == True) &
                         (~InsightsRule.rule_only)))).group_by(
                             InsightsRule.rule_impact).dicts())
    rules_breakdown = cyndi_join(rules_breakdown)
    for section in rules_breakdown:
        retval['rules_by_severity'][
            section['severity']]['rule_count'] = section['rule_count']
        retval['rules_by_severity'][section['severity']][
            'systems_affected'] = section['systems_affected']
    retval['rules_total'] = sum(
        [item['rule_count'] for item in rules_breakdown])
    # Three highest-impact active rules with systems affected and CVEs.
    top_rules = (SystemVulnerabilities.select(
        InsightsRule.name.alias('rule_id'),
        InsightsRule.description_text.alias('name'),
        InsightsRule.rule_impact.alias('severity'),
        InsightsRule.summary_text.alias('description'),
        fn.COUNT(fn.Distinct(
            SystemVulnerabilities.system_id)).alias('systems_affected'),
        fn.ARRAY_AGG(fn.Distinct(CveMetadata.cve)).alias('associated_cves')
    ).join(
        InsightsRule,
        on=(SystemVulnerabilities.rule_id == InsightsRule.id)).join(
            CveRuleMapping,
            on=(InsightsRule.id == CveRuleMapping.rule_id)).join(
                CveMetadata, on=(CveRuleMapping.cve_id == CveMetadata.id)
    ).join(
        SystemPlatform,
        on=(SystemVulnerabilities.system_id == SystemPlatform.id) &
        (SystemPlatform.rh_account_id == rh_account) &
        (SystemPlatform.opt_out == False) &
        (SystemPlatform.stale == False) &
        (SystemPlatform.when_deleted.is_null(True)) &
        (SystemPlatform.last_evaluation.is_null(False) |
         SystemPlatform.advisor_evaluated.is_null(False))).where(
             SystemVulnerabilities.rh_account_id == rh_account).where(
                 (SystemVulnerabilities.mitigation_reason.is_null(True)) &
                 (SystemVulnerabilities.rule_id <<
                  InsightsRule.select(InsightsRule.id).where(
                      (InsightsRule.active == True) &
                      (~InsightsRule.rule_only)))).group_by(
                          InsightsRule.name, InsightsRule.description_text,
                          InsightsRule.rule_impact,
                          InsightsRule.summary_text).order_by(
                              InsightsRule.rule_impact.desc(),
                              SQL('systems_affected desc'),
                              InsightsRule.description_text,
                              InsightsRule.name).limit(3).dicts())
    top_rules = cyndi_join(top_rules)
    for top_rule in top_rules:
        retval['top_rules'].append(top_rule)
    return retval
def update_features(project, features, audit):
    """Synchronise the stored Feature rows of *project* with a fresh
    GeoJSON feature list and an audit dict keyed by feature ref.

    New refs are inserted; existing refs whose canonical-JSON MD5 changed
    are updated; refs absent from *features* are deleted. Audit verdicts
    are merged into both updated and untouched features. Finally the
    project's bbox, feature_count, cached JS and ``regional`` flag are
    recalculated and saved.
    """
    curfeats = Feature.select(Feature).where(Feature.project == project)
    ref2feat = {f.ref: f for f in curfeats}
    # Candidates for deletion: every known ref not seen again below.
    # An empty *features* list means "no new data", not "delete everything".
    deleted = set(ref2feat.keys()) if features else set()
    updated = set()
    minlat = minlon = 180.0
    maxlat = maxlon = -180.0
    for f in features:
        # Canonical JSON (sorted keys) so the MD5 is stable across runs.
        data = json.dumps(f, ensure_ascii=False, sort_keys=True)
        md5 = hashlib.md5()
        md5.update(data.encode('utf-8'))
        md5_hex = md5.hexdigest()
        coord = f['geometry']['coordinates']
        minlon = min(minlon, coord[0])
        maxlon = max(maxlon, coord[0])
        minlat = min(minlat, coord[1])
        maxlat = max(maxlat, coord[1])
        if 'ref_id' in f['properties']:
            ref = f['properties']['ref_id']
        else:
            ref = '{}{}'.format(f['properties']['osm_type'],
                                f['properties']['osm_id'])
        update = False
        if ref in ref2feat:
            deleted.remove(ref)
            feat = ref2feat[ref]
            # BUG FIX: the original assigned update = True twice here; the
            # stray second assignment either was dead code or forced a
            # re-save of every unchanged feature, defeating the MD5 check.
            if feat.feature_md5 != md5_hex:
                update = True
        else:
            feat = Feature(project=project, ref=ref)
            feat.validates_count = 0
            update = True
        f_audit = audit.get(ref)
        if f_audit:
            f_audit = json.dumps(f_audit, ensure_ascii=False, sort_keys=True)
            if f_audit != feat.audit:
                feat.audit = f_audit
                update = True
        if update:
            feat.feature = data
            feat.feature_md5 = md5_hex
            # Coordinates are stored as fixed-point integers (1e7 scale).
            feat.lon = round(coord[0] * 1e7)
            feat.lat = round(coord[1] * 1e7)
            feat.action = f['properties']['action'][0]
            feat.region = f['properties'].get('region')
            if feat.validates_count > 0:
                # The feature changed: reset validation (keep 1 when an
                # audit verdict exists) and drop queued validation tasks.
                feat.validates_count = 0 if not feat.audit else 1
                Task.delete().where(Task.feature == feat).execute()
            feat.save()
            updated.add(ref)
    if deleted:
        # BUG FIX: scope the delete to this project — refs (e.g. 'n123')
        # are not globally unique, and the unscoped original query could
        # remove features belonging to other projects.
        Feature.delete().where((Feature.project == project) &
                               (Feature.ref << list(deleted))).execute()
    # Merge audit verdicts into features that were not touched above.
    for ref, f_audit in audit.items():
        if ref in ref2feat and ref not in updated:
            if not f_audit:
                f_audit = None
            else:
                f_audit = json.dumps(f_audit, ensure_ascii=False,
                                     sort_keys=True)
            feat = ref2feat[ref]
            if f_audit != feat.audit:
                feat.audit = f_audit
                if feat.validates_count == 0 and f_audit:
                    feat.validates_count = 1
                feat.save()
    project.bbox = ','.join(str(x) for x in (minlon, minlat, maxlon, maxlat))
    project.feature_count = Feature.select().where(
        Feature.project == project).count()
    project.features_js = None  # invalidate the cached JS blob
    # A project covering at most one region is not "regional".
    if Feature.select(fn.Count(fn.Distinct(Feature.region))).where(
            Feature.project == project).scalar() <= 1:
        project.regional = False
    project.save()
def handle_get(cls, **kwargs):  # pylint: disable=singleton-comparison
    """Return per-account CVE breakdown counts.

    Counts distinct CVEs in four buckets — exploitable, critical
    (impact 7), important (impact 5), and CVEs with an active Insights
    rule. Uses the pre-computed CveAccountCache when account caching is
    enabled and the request carries no inventory (cyndi) filters;
    otherwise aggregates live over SystemVulnerabilities.
    """
    arg_specs = [
        {'arg_name': 'tags', 'convert_func': parse_tags},
        {'arg_name': 'sap_system', 'convert_func': None},
        {'arg_name': 'sap_sids', 'convert_func': None},
    ]
    args = cls._parse_arguments(kwargs, arg_specs)
    cyndi_request = is_cyndi_request(args)
    filters = [
        filter_types.SYSTEM_TAGS,
        filter_types.SYSTEM_SAP,
        filter_types.SYSTEM_SAP_SIDS
    ]
    rh_account, cve_cache_from, cve_cache_keepalive = get_account_data(
        connexion.context["user"])
    update_cve_cache_keepalive(rh_account, cve_cache_keepalive)

    # The four aggregate columns are identical in both query shapes, so
    # build them once; each counts distinct CVE ids matching one bucket.
    count_columns = [
        fn.Count(fn.Distinct(
            Case(CveMetadata.exploits,
                 ((True, CveMetadata.id), )))).alias("exploitable_cves"),
        fn.Count(fn.Distinct(
            Case(CveMetadata.impact_id,
                 ((7, CveMetadata.id), )))).alias("critical_cves"),
        fn.Count(fn.Distinct(
            Case(CveMetadata.impact_id,
                 ((5, CveMetadata.id), )))).alias("important_cves"),
        fn.Count(fn.Distinct(
            Case(None,
                 ((InsightsRule.id.is_null(False),
                   CveMetadata.id), )))).alias("cves_with_rule"),
    ]

    if not DISABLE_ACCOUNT_CACHE and not cyndi_request and cve_cache_from:
        # Fast path: read counts straight from the account-level cache.
        query = (CveAccountCache
                 .select(*count_columns)
                 .join(CveMetadata,
                       on=(CveAccountCache.cve_id == CveMetadata.id))
                 .join(CveRuleMapping, JOIN.LEFT_OUTER,
                       on=(CveAccountCache.cve_id == CveRuleMapping.cve_id))
                 .join(InsightsRule, JOIN.LEFT_OUTER,
                       on=((CveRuleMapping.rule_id == InsightsRule.id) &
                           (InsightsRule.active == True) &
                           (~InsightsRule.rule_only)))
                 .where(CveAccountCache.rh_account_id == rh_account))
    else:
        # Live path: aggregate over the account's unmitigated CVEs on
        # opted-in, non-stale, non-deleted, non-edge systems.
        query = (SystemVulnerabilities
                 .select(*count_columns)
                 .join(CveMetadata,
                       on=(SystemVulnerabilities.cve_id == CveMetadata.id))
                 .join(CveRuleMapping, JOIN.LEFT_OUTER,
                       on=(SystemVulnerabilities.cve_id ==
                           CveRuleMapping.cve_id))
                 .join(InsightsRule, JOIN.LEFT_OUTER,
                       on=((CveRuleMapping.rule_id == InsightsRule.id) &
                           (InsightsRule.active == True) &
                           (~InsightsRule.rule_only)))
                 .join(SystemPlatform,
                       on=((SystemVulnerabilities.system_id ==
                            SystemPlatform.id) &
                           (SystemPlatform.rh_account_id == rh_account) &
                           (SystemPlatform.opt_out == False) &
                           (SystemPlatform.stale == False) &
                           (SystemPlatform.when_deleted.is_null(True)) &
                           (fn.COALESCE(SystemPlatform.host_type, 'null') !=
                            HostType.EDGE)))
                 .where(SystemVulnerabilities.rh_account_id == rh_account)
                 .where(SystemVulnerabilities.when_mitigated.is_null(True)))

    if cyndi_request:
        query = cyndi_join(query)
    query = apply_filters(query, args, filters, {})

    res = query.first()
    return {
        "exploitable_cves": res.exploitable_cves,
        "critical_cves": res.critical_cves,
        "important_cves": res.important_cves,
        "cves_with_rule": res.cves_with_rule
    }
def actual_revert():
    """Revert a set of MAPS.ME edits selected by their Change row ids.

    Reads the ids from the ``objects`` query parameter, closes any note
    changes, then fetches the latest versions of the touched nodes/ways/
    relations from the OSM API, builds an osmChange document (deleting
    freshly-created objects, reverting tags/coords of modified ones),
    opens a changeset, uploads it and redirects to the changeset page.

    Returns a plain error/status string on any early exit.

    NOTE(review): this is Python 2 code (``print`` statement, builtin
    ``reduce``).
    """
    # Require an authenticated OSM session before doing anything.
    if 'osm_token' not in session:
        return 'Not authenticated'
    objects = [int(x) for x in request.args.get('objects').split(',')]
    q = Change.select().where(Change.id << objects)
    changes = [ch for ch in q]
    # Build a list of objects and request latest versions
    nwr_list = {'n': [], 'w': [], 'r': []}  # obj ids by type (node/way/rel)
    ch_list = {}  # 'n<id>'/'w<id>'/'r<id>' -> recorded modification diff
    notes = []
    for ch in changes:
        # 'c' = created, 'm' = modified, 'n' = note; only n/w/r objects
        # can be reverted via the API.
        if ch.action in ('c', 'm') and ch.obj_type in ('n', 'w', 'r'):
            nwr_list[ch.obj_type].append(ch.obj_id)
            if ch.action == 'm':
                ch_list['{0}{1}'.format(ch.obj_type, ch.obj_id)] = json.loads(ch.changes)
        elif ch.action == 'n':
            notes.append(ch.changeset)
    # First, close notes
    for note in notes:
        # We don't care for any errors, notes are not important
        openstreetmap.post('notes/{0}/close?text=Closed+with+MMWatch+Reverter'.format(note))
    # Nothing left to revert once all selected changes were notes.
    if reduce(lambda s, l: s+len(l), nwr_list.values(), 0) == 0:
        return 'All notes have been closed.'
    # Make three requests for all objects from lists
    # For each object, revert unchanged tags and coords, prepare osc
    etree = get_etree()
    osc = etree.Element('osmChange', {'version': '0.6'})
    for typ in ('node', 'way', 'relation'):
        if len(nwr_list[typ[0]]) == 0:
            continue
        # Bulk-fetch current versions, e.g. 'nodes?nodes=1,2,3'.
        resp = openstreetmap.get('{0}s?{0}s={1}'.format(
            typ, ','.join((str(x) for x in nwr_list[typ[0]]))))
        if resp.status != 200:
            return 'Failed to get {0}s: {1} {2}'.format(typ, resp.status, resp.data)
        for obj in resp.data:
            if obj.get('visible') == 'false':
                # Not undeleting objects
                continue
            v = int(obj.get('version'))
            ref = '{0}{1}'.format(typ[0], obj.get('id'))
            if v == 1 and ref not in ch_list:
                # First version that was created, deleting it
                d = etree.SubElement(osc, 'delete')
                etree.SubElement(d, obj.tag, {
                    'id': obj.get('id'),
                    'version': obj.get('version')
                })
            elif v > 1 and ref in ch_list:
                # Reverting tag and coord changes
                m = etree.Element('modify')
                rev = revert_change(obj, ch_list[ref])
                # revert_change returns None when nothing is left to undo
                # (e.g. later edits already replaced the reverted values).
                if rev is not None:
                    m.append(rev)
                    osc.append(m)
    if len(osc) == 0:
        return 'These changes have already been reverted.'
    # Create a changeset
    q = Change.select(fn.Distinct(Change.user)).where(Change.id << objects).tuples()
    names = [ch[0].encode('utf-8') for ch in q]
    comment = 'Reverting MAPS.ME changes by {0}'.format(', '.join(names))
    create_xml = etree.Element('osm')
    ch = etree.SubElement(create_xml, 'changeset')
    etree.SubElement(ch, 'tag', {'k': 'created_by', 'v': 'MMWatch Reverter'})
    etree.SubElement(ch, 'tag', {'k': 'comment', 'v': comment.decode('utf-8')})
    changeset_xml = etree.tostring(create_xml)
    resp = openstreetmap.put('changeset/create', changeset_xml,
                             format=None, content_type='application/xml')
    if resp.status != 200:
        return 'Failed to open a changeset: {0} {1}'.format(resp.status, resp.data)
    changeset_id = int(resp.raw_data)
    # Upload changes
    fill_changeset(osc, changeset_id)
    # NOTE(review): looks like a leftover debug print to stdout — consider
    # removing or routing through a logger.
    print etree.tostring(osc)
    try:
        resp = openstreetmap.post('changeset/{0}/upload'.format(changeset_id),
                                  data=etree.tostring(osc),
                                  format=None, content_type='application/xml')
        if resp.status != 200:
            return 'Failed to upload changes: {0} {1}'.format(resp.status, resp.data)
    finally:
        # Close the changeset
        openstreetmap.put('changeset/{0}/close'.format(changeset_id))
    return redirect('https://www.openstreetmap.org/changeset/{0}'.format(changeset_id))
def domain_show():
    """Print every distinct domain from the Domain table to stdout.

    Always returns True.
    """
    distinct_domains = Domain.select(fn.Distinct(Domain.domain))
    for row in distinct_domains:
        print(row.domain)
    return True
def gen_report(**data):
    """Build a plain-text report of registry-dump changes.

    Keyword args are optional lists keyed by ``url_add``, ``ip_add``,
    ``sub_ip_add``, ``domain_add``, ``url_del``, ``ip_del``,
    ``sub_ip_del``, ``domain_del`` (entries listed individually) plus
    ``id_add`` / ``id_del`` (only their counts are reported).
    Missing keys are treated as empty lists (the original code raised
    TypeError on ``len(None)``).

    Returns the assembled report string.
    """
    domain_count = Domain.select(fn.Count(fn.Distinct(Domain.domain))).scalar()
    url_count = URL.select(fn.Count(fn.Distinct(URL.url))).scalar()
    ip_count = IP.select(fn.Count(fn.Distinct(IP.ip))).scalar()
    id_count = Item.select(fn.Count(fn.Distinct(Item.content_id))).scalar()
    # Timestamp of the last dump, as recorded in the Dump key/value table.
    date_time = datetime.fromtimestamp(
        int(Dump.get(
            Dump.param == 'lastDumpDate').value)).strftime('%Y-%m-%d %H:%M:%S')
    message = 'vigruzki.rkn.gov.ru update: ' + date_time + '\n'
    # One data-driven loop instead of eight copy-pasted blocks; the
    # section order and formatting are identical to the original.
    sections = (
        ('URLs added', 'url_add'),
        ('IPs added', 'ip_add'),
        ('SUBNETs added', 'sub_ip_add'),
        ('DOMAINs added', 'domain_add'),
        ('URLs deleted', 'url_del'),
        ('IPs deleted', 'ip_del'),
        ('SUBNETs deleted', 'sub_ip_del'),
        ('DOMAINs deleted', 'domain_del'),
    )
    for title, key in sections:
        entries = data.get(key) or []
        if entries:
            # Note: the trailing space before the blank line matches the
            # original output format exactly.
            message += '\n' + title + ': \n\n'
            message += ''.join(entry + '\n' for entry in entries)
    message += '\nURLs count: ' + str(url_count) + '\n'
    message += 'IPs count: ' + str(ip_count) + '\n'
    message += 'DOMAINs count: ' + str(domain_count) + '\n'
    message += 'Item count: ' + str(id_count) + '\n'
    id_inform_add = data.get('id_add') or []
    if len(id_inform_add) > 0:
        message += 'Items added: ' + str(len(id_inform_add)) + '\n'
    id_inform_del = data.get('id_del') or []
    if len(id_inform_del) > 0:
        message += 'Items deleted: ' + str(len(id_inform_del)) + '\n'
    return message