def prepare_mail(entry):
    """Build a notification mail (subject + JSON body) for *entry*.

    Searches urlquery for recent reports on the entry's IP. If none exist,
    submits the entry's URL and polls (up to 5 times, 30s apart) for the
    report; otherwise appends every found report to the body.

    :param entry: dict with at least ``entry['ip']['addr']`` and
                  ``entry['url']`` — assumed shape, confirm against caller.
    :return: dict with 'subject' and 'body' keys.
    """
    to_return = {}
    to_return['subject'] = \
        'UrlQuery report for {}'.format(entry.get('ip').get('addr'))
    to_return['body'] = json.dumps(entry, sort_keys=True, indent=4)
    reports = urlquery.search(
        entry.get('ip').get('addr'),
        urlquery_from=datetime.datetime.now() - datetime.timedelta(hours=1))
    # FIXME: the output of a search is undefined
    if reports is None:
        # No recent report: submit the URL and poll until a report exists.
        response = urlquery.submit(entry['url'])
        queue_id = response.get('queue_id')
        report_id = response.get('report_id')
        i = 0
        while report_id is None:
            print('Waiting for {}'.format(entry.get('url').get('addr')))
            time.sleep(30)
            response = urlquery.queue_status(queue_id)
            report_id = response.get('report_id')
            i += 1
            if i >= 5:
                # Give up after ~2.5 minutes; return subject/body built so far.
                return to_return
        # BUG FIX: original read `status['report_id']` but `status` was never
        # defined (NameError); use the report_id obtained from the poll loop.
        full_report = urlquery.report(report_id, include_details=True)
        to_return['body'] += '\n' + json.dumps(full_report, sort_keys=True,
                                               indent=4)
    else:
        for report in reports:
            try:
                full_report = urlquery.report(report['id'],
                                              include_details=True)
                to_return['body'] += '\n' + json.dumps(full_report,
                                                       sort_keys=True,
                                                       indent=4)
            except Exception:
                # Best-effort: log the malformed report and keep going.
                print(report)
    return to_return
def urlquery_query(url, key, query):
    """Return the total alert count urlquery knows for *query*, with caching.

    :param url: urlquery API endpoint, assigned to the module-level client.
    :param key: urlquery API key.
    :param query: the value to search for (also the cache key).
    :return: int total of urlquery/ids/blacklist alert counts across all
             reports, or None on API failure / missing data.
    """
    cached = _cache_get(query, 'urlquery')
    if cached is not None:
        return cached
    try:
        urlquery.url = url
        urlquery.key = key
        response = urlquery.search(query)
    except Exception:
        # BUG FIX: was a bare `except:` which also swallows SystemExit /
        # KeyboardInterrupt; network or API failure means "no data".
        return None
    if response['_response_']['status'] == 'ok':
        if response.get('reports') is not None:
            # Sum the three alert categories across every report.
            total_alert_count = sum(
                r['urlquery_alert_count']
                + r['ids_alert_count']
                + r['blacklist_alert_count']
                for r in response['reports'])
            _cache_set(query, total_alert_count, 'urlquery')
            return total_alert_count
    # Non-ok status or no 'reports' key: nothing to report (was an implicit
    # fall-through in the original; made explicit, same behavior).
    return None