def index(request):
    """Render the main dashboard page.

    Gathers scrapyd job counts, alert statistics, and crawl totals, then
    renders ``dashboard_index.html``.
    """
    SM = ScrapyManager("malspider", "full_domain", "http://0.0.0.0:6802")
    pending_jobs = 0
    running_jobs = 0
    finished_jobs = 0
    scrapyd_connected = 0
    # FIX: the original queried scrapyd twice back-to-back; one call suffices.
    crawl_jobs = SM.get_all_jobs_by_project()
    if crawl_jobs is not None:
        pending_jobs = len(crawl_jobs['pending'])
        running_jobs = len(crawl_jobs['running'])
        finished_jobs = len(crawl_jobs['finished'])
        scrapyd_connected = 1
    unique_alerts = ModelQuery.get_num_unique_alerts()
    alert_count_by_reason = ModelQuery.get_num_alerts_by_reason()
    # FIX: call count() -- the original bound the method object instead of the
    # number and relied on the template engine auto-calling it.
    elems_count = Element.objects.count()
    pages_count = Page.objects.count()
    template = loader.get_template('dashboard_index.html')
    all_orgs = ModelQuery.get_all_organizations()
    top_offenders = ModelQuery.get_top_offenders()
    # NOTE(review): get_full_path() returns only the path + query string, so
    # .hostname is always None and the localhost fallback is always taken --
    # confirm whether request.get_host() was intended. Behavior kept as-is.
    my_url = urlparse(request.get_full_path()).hostname or ''
    if my_url == '':
        scrapyd_url = 'http://localhost:6802'
    else:
        # NOTE(review): this branch omits the http:// scheme -- verify.
        scrapyd_url = my_url + ":6802"
    context = RequestContext(request, {
        'elems_count': elems_count,
        'pages_count': pages_count,
        'pending_jobs': pending_jobs,
        'running_jobs': running_jobs,
        'finished_jobs': finished_jobs,
        'unique_alerts': unique_alerts,
        'alert_count_by_reason': alert_count_by_reason,
        'all_orgs': all_orgs,
        'top_offenders': top_offenders,
        'scrapyd_connected': scrapyd_connected,
        'my_url': scrapyd_url,
    })
    return HttpResponse(template.render(context))
def pages(request, time_frame="last_24_hours"):
    """Render the alert listing page for the given time frame."""
    organizations = ModelQuery.get_all_organizations()
    timeframe_alerts = ModelQuery.get_alerts_by_timeframe(time_frame)
    page_template = loader.get_template("dashboard_pages.html")
    ctx = RequestContext(request, {
        'all_orgs': organizations,
        'time_frame': time_frame,
        'alerts': timeframe_alerts,
    })
    return HttpResponse(page_template.render(ctx))
def page(request, org_id):
    """Render the per-organization detail page (pages + alerts)."""
    organization = ModelQuery.get_org_by_id(org_id)
    if not organization:
        # Unknown org id: render the template with an error message only.
        ctx = RequestContext(request, {
            'error': 'Could not find any analysis for this page id',
        })
    else:
        requested_pages = ModelQuery.get_pages_by_org(org_id)
        org_alerts = ModelQuery.get_alerts_by_org(org_id)
        ctx = RequestContext(request, {
            'org': organization,
            'urls_requested': requested_pages,
            'alerts': org_alerts,
        })
    detail_template = loader.get_template('dashboard_page.html')
    return HttpResponse(detail_template.render(ctx))
def index(request, org_id=None):
    """Render the dashboard; if ``org_id`` is given, schedule a crawl for it.

    When ``org_id`` resolves to an organization, a scrapyd job is scheduled
    for that organization's domain and the resulting job id is shown; an
    unknown non-None ``org_id`` yields an error message instead.
    """
    SM = ScrapyManager("malspider", "full_domain", "http://0.0.0.0:6802")
    pending_jobs = 0
    running_jobs = 0
    finished_jobs = 0
    scrapyd_connected = 0
    # FIX: the original queried scrapyd twice back-to-back; one call suffices.
    crawl_jobs = SM.get_all_jobs_by_project()
    if crawl_jobs is not None:
        pending_jobs = len(crawl_jobs['pending'])
        running_jobs = len(crawl_jobs['running'])
        finished_jobs = len(crawl_jobs['finished'])
        scrapyd_connected = 1
    unique_alerts = ModelQuery.get_num_unique_alerts()
    alert_count_by_reason = ModelQuery.get_num_alerts_by_reason()
    # FIX: call count() -- the original bound the method object instead of the
    # number and relied on the template engine auto-calling it.
    elems_count = Element.objects.count()
    pages_count = Page.objects.count()
    template = loader.get_template('dashboard_index.html')
    all_orgs = ModelQuery.get_all_organizations()
    top_offenders = ModelQuery.get_top_offenders()
    org_obj = ModelQuery.get_org_by_id(org_id)
    jobid = None
    scan_domain = None
    error = None
    if org_obj:
        # NOTE(review): scheduling a crawl as a side effect of a GET view --
        # confirm this is intended (not idempotent).
        output = SM.schedule_job(org_id, org_obj.domain)
        if 'jobid' in output:
            jobid = output['jobid']
            scan_domain = org_obj.domain
    elif org_obj is None and org_id is not None:
        error = "Error: Invalid Organization ID!"
    context = RequestContext(request, {
        'elems_count': elems_count,
        'pages_count': pages_count,
        'pending_jobs': pending_jobs,
        'running_jobs': running_jobs,
        'finished_jobs': finished_jobs,
        'unique_alerts': unique_alerts,
        'alert_count_by_reason': alert_count_by_reason,
        'all_orgs': all_orgs,
        'top_offenders': top_offenders,
        'scrapyd_connected': scrapyd_connected,
        'jobid': jobid,
        'scan_domain': scan_domain,
        'error': error,
    })
    return HttpResponse(template.render(context))
def index(request):
    """Render the main dashboard page.

    Gathers scrapyd job counts, alert statistics, and crawl totals, then
    renders ``dashboard_index.html``.
    """
    SM = ScrapyManager("malspider", "full_domain", "http://0.0.0.0:6802")
    pending_jobs = 0
    running_jobs = 0
    finished_jobs = 0
    scrapyd_connected = 0
    # FIX: the original queried scrapyd twice back-to-back; one call suffices.
    crawl_jobs = SM.get_all_jobs_by_project()
    if crawl_jobs is not None:
        pending_jobs = len(crawl_jobs['pending'])
        running_jobs = len(crawl_jobs['running'])
        finished_jobs = len(crawl_jobs['finished'])
        scrapyd_connected = 1
    unique_alerts = ModelQuery.get_num_unique_alerts()
    alert_count_by_reason = ModelQuery.get_num_alerts_by_reason()
    # FIX: call count() -- the original bound the method object instead of the
    # number and relied on the template engine auto-calling it.
    elems_count = Element.objects.count()
    pages_count = Page.objects.count()
    template = loader.get_template('dashboard_index.html')
    all_orgs = ModelQuery.get_all_organizations()
    top_offenders = ModelQuery.get_top_offenders()
    # NOTE(review): get_full_path() returns only the path + query string, so
    # .hostname is always None and the localhost fallback is always taken --
    # confirm whether request.get_host() was intended. Behavior kept as-is.
    my_url = urlparse(request.get_full_path()).hostname or ''
    if my_url == '':
        scrapyd_url = 'http://localhost:6802'
    else:
        # NOTE(review): this branch omits the http:// scheme -- verify.
        scrapyd_url = my_url + ":6802"
    context = RequestContext(request, {
        'elems_count': elems_count,
        'pages_count': pages_count,
        'pending_jobs': pending_jobs,
        'running_jobs': running_jobs,
        'finished_jobs': finished_jobs,
        'unique_alerts': unique_alerts,
        'alert_count_by_reason': alert_count_by_reason,
        'all_orgs': all_orgs,
        'top_offenders': top_offenders,
        'scrapyd_connected': scrapyd_connected,
        'my_url': scrapyd_url,
    })
    return HttpResponse(template.render(context))
def alert_export_view(request, time_frame="last_24_hours"):
    """Export alerts for the given time frame as a downloadable CSV file."""
    response = HttpResponse(content_type="text/csv")
    response['Content-Disposition'] = 'attachment; filename="malspider_alerts.csv"'
    writer = csv.writer(response)
    writer.writerow(['alert_reason', 'source_page', 'requested_resource', 'raw_html'])
    for alert in ModelQuery.get_alerts_by_timeframe(time_frame):
        # Coerce each field to a UTF-8 byte string for the CSV writer.
        row = [unicode(field).encode("utf-8")
               for field in (alert.reason, alert.page, alert.uri, alert.raw)]
        writer.writerow(row)
    return response
def index(request, org_id=None):
    """Render the dashboard; if ``org_id`` is given, schedule a crawl for it.

    When ``org_id`` resolves to an organization, a scrapyd job is scheduled
    for that organization's domain and the resulting job id is shown; an
    unknown non-None ``org_id`` yields an error message instead.
    """
    SM = ScrapyManager("malspider", "full_domain", "http://0.0.0.0:6802")
    pending_jobs = 0
    running_jobs = 0
    finished_jobs = 0
    scrapyd_connected = 0
    # FIX: the original queried scrapyd twice back-to-back; one call suffices.
    crawl_jobs = SM.get_all_jobs_by_project()
    if crawl_jobs is not None:
        pending_jobs = len(crawl_jobs['pending'])
        running_jobs = len(crawl_jobs['running'])
        finished_jobs = len(crawl_jobs['finished'])
        scrapyd_connected = 1
    unique_alerts = ModelQuery.get_num_unique_alerts()
    alert_count_by_reason = ModelQuery.get_num_alerts_by_reason()
    # FIX: call count() -- the original bound the method object instead of the
    # number and relied on the template engine auto-calling it.
    elems_count = Element.objects.count()
    pages_count = Page.objects.count()
    template = loader.get_template('dashboard_index.html')
    all_orgs = ModelQuery.get_all_organizations()
    top_offenders = ModelQuery.get_top_offenders()
    org_obj = ModelQuery.get_org_by_id(org_id)
    jobid = None
    scan_domain = None
    error = None
    if org_obj:
        # NOTE(review): scheduling a crawl as a side effect of a GET view --
        # confirm this is intended (not idempotent).
        output = SM.schedule_job(org_id, org_obj.domain)
        if 'jobid' in output:
            jobid = output['jobid']
            scan_domain = org_obj.domain
    elif org_obj is None and org_id is not None:
        error = "Error: Invalid Organization ID!"
    context = RequestContext(request, {
        'elems_count': elems_count,
        'pages_count': pages_count,
        'pending_jobs': pending_jobs,
        'running_jobs': running_jobs,
        'finished_jobs': finished_jobs,
        'unique_alerts': unique_alerts,
        'alert_count_by_reason': alert_count_by_reason,
        'all_orgs': all_orgs,
        'top_offenders': top_offenders,
        'scrapyd_connected': scrapyd_connected,
        'jobid': jobid,
        'scan_domain': scan_domain,
        'error': error,
    })
    return HttpResponse(template.render(context))
def alert_export_view(request, time_frame="last_24_hours"):
    """Export alerts for the given time frame as a downloadable CSV file."""
    response = HttpResponse(content_type="text/csv")
    response['Content-Disposition'] = 'attachment; filename="malspider_alerts.csv"'
    writer = csv.writer(response)
    writer.writerow(['alert_reason', 'source_page', 'requested_resource', 'raw_html'])
    for alert in ModelQuery.get_alerts_by_timeframe(time_frame):
        # Coerce each field to a UTF-8 byte string for the CSV writer.
        row = [unicode(field).encode("utf-8")
               for field in (alert.reason, alert.page, alert.uri, alert.raw)]
        writer.writerow(row)
    return response
def pages(request, time_frame="last_24_hours"):
    """Render the alert listing page for the given time frame."""
    organizations = ModelQuery.get_all_organizations()
    timeframe_alerts = ModelQuery.get_alerts_by_timeframe(time_frame)
    page_template = loader.get_template("dashboard_pages.html")
    ctx = RequestContext(request, {
        'all_orgs': organizations,
        'time_frame': time_frame,
        'alerts': timeframe_alerts,
    })
    return HttpResponse(page_template.render(ctx))