def index(request):
    """Render the dashboard landing page.

    Gathers crawl-job counts from the scrapyd daemon, alert/element/page
    statistics from the database, and the scrapyd UI URL derived from the
    host the client used, then renders ``dashboard_index.html``.
    """
    SM = ScrapyManager("malspider", "full_domain", "http://0.0.0.0:6802")

    # Defaults used when scrapyd is unreachable.
    pending_jobs = 0
    running_jobs = 0
    finished_jobs = 0
    scrapyd_connected = 0

    # FIX: the original called get_all_jobs_by_project() twice and discarded
    # the first result; one request to scrapyd is enough.
    crawl_jobs = SM.get_all_jobs_by_project()
    if crawl_jobs is not None:
        pending_jobs = len(crawl_jobs['pending'])
        running_jobs = len(crawl_jobs['running'])
        finished_jobs = len(crawl_jobs['finished'])
        scrapyd_connected = 1

    unique_alerts = ModelQuery.get_num_unique_alerts()
    alert_count_by_reason = ModelQuery.get_num_alerts_by_reason()
    # FIX: .count was passed uncalled (a bound method, not an int); call it
    # so the template receives plain integers.
    elems_count = Element.objects.count()
    pages_count = Page.objects.count()
    template = loader.get_template('dashboard_index.html')
    all_orgs = ModelQuery.get_all_organizations()
    top_offenders = ModelQuery.get_top_offenders()

    # FIX: get_full_path() returns only the path and query string (no
    # netloc), so urlparse(...).hostname was always None and the localhost
    # fallback always ran. get_host() returns the "host[:port]" the client
    # actually used; strip any port before appending scrapyd's.
    my_url = request.get_host().split(':')[0]
    if my_url == '':
        scrapyd_url = 'http://localhost:6802'
    else:
        # NOTE(review): unlike the fallback, this URL carries no scheme —
        # confirm the template tolerates a scheme-less value.
        scrapyd_url = my_url + ":6802"

    context = RequestContext(request, {
        'elems_count': elems_count,
        'pages_count': pages_count,
        'pending_jobs': pending_jobs,
        'running_jobs': running_jobs,
        'finished_jobs': finished_jobs,
        'unique_alerts': unique_alerts,
        'alert_count_by_reason': alert_count_by_reason,
        'all_orgs': all_orgs,
        'top_offenders': top_offenders,
        'scrapyd_connected': scrapyd_connected,
        'my_url': scrapyd_url,
    })
    return HttpResponse(template.render(context))
def index(request, org_id=None):
    """Render the dashboard, optionally scheduling a crawl for one org.

    When ``org_id`` resolves to a known organization, a crawl of its domain
    is scheduled via scrapyd and the resulting job id is shown; an unknown
    ``org_id`` yields an error message instead.
    """
    SM = ScrapyManager("malspider", "full_domain", "http://0.0.0.0:6802")

    # Defaults used when scrapyd is unreachable.
    pending_jobs = 0
    running_jobs = 0
    finished_jobs = 0
    scrapyd_connected = 0

    # FIX: the original called get_all_jobs_by_project() twice and discarded
    # the first result; one request to scrapyd is enough.
    crawl_jobs = SM.get_all_jobs_by_project()
    if crawl_jobs is not None:
        pending_jobs = len(crawl_jobs['pending'])
        running_jobs = len(crawl_jobs['running'])
        finished_jobs = len(crawl_jobs['finished'])
        scrapyd_connected = 1

    unique_alerts = ModelQuery.get_num_unique_alerts()
    alert_count_by_reason = ModelQuery.get_num_alerts_by_reason()
    # FIX: .count was passed uncalled (a bound method, not an int); call it
    # so the template receives plain integers.
    elems_count = Element.objects.count()
    pages_count = Page.objects.count()
    template = loader.get_template('dashboard_index.html')
    all_orgs = ModelQuery.get_all_organizations()
    top_offenders = ModelQuery.get_top_offenders()

    org_obj = ModelQuery.get_org_by_id(org_id)
    jobid = None
    scan_domain = None
    error = None
    if org_obj:
        # NOTE(review): assumes schedule_job always returns a dict — confirm
        # it cannot return None on a scrapyd failure.
        output = SM.schedule_job(org_id, org_obj.domain)
        if 'jobid' in output:
            jobid = output['jobid']
            scan_domain = org_obj.domain
    elif org_obj is None and org_id is not None:
        error = "Error: Invalid Organization ID!"

    context = RequestContext(request, {
        'elems_count': elems_count,
        'pages_count': pages_count,
        'pending_jobs': pending_jobs,
        'running_jobs': running_jobs,
        'finished_jobs': finished_jobs,
        'unique_alerts': unique_alerts,
        'alert_count_by_reason': alert_count_by_reason,
        'all_orgs': all_orgs,
        'top_offenders': top_offenders,
        'scrapyd_connected': scrapyd_connected,
        'jobid': jobid,
        'scan_domain': scan_domain,
        'error': error,
    })
    return HttpResponse(template.render(context))
def index(request):
    """Render the dashboard landing page.

    Collects crawl-job counts from scrapyd, alert/element/page statistics
    from the database, and a scrapyd URL based on the requesting host, then
    renders ``dashboard_index.html``.
    """
    SM = ScrapyManager("malspider", "full_domain", "http://0.0.0.0:6802")

    # Defaults used when scrapyd is unreachable.
    pending_jobs = 0
    running_jobs = 0
    finished_jobs = 0
    scrapyd_connected = 0

    # FIX: the original issued the scrapyd status request twice and threw
    # the first response away; query once.
    crawl_jobs = SM.get_all_jobs_by_project()
    if crawl_jobs is not None:
        pending_jobs = len(crawl_jobs['pending'])
        running_jobs = len(crawl_jobs['running'])
        finished_jobs = len(crawl_jobs['finished'])
        scrapyd_connected = 1

    unique_alerts = ModelQuery.get_num_unique_alerts()
    alert_count_by_reason = ModelQuery.get_num_alerts_by_reason()
    # FIX: .count was passed uncalled (a bound method, not an int); invoke
    # it so the template receives plain integers.
    elems_count = Element.objects.count()
    pages_count = Page.objects.count()
    template = loader.get_template('dashboard_index.html')
    all_orgs = ModelQuery.get_all_organizations()
    top_offenders = ModelQuery.get_top_offenders()

    # FIX: get_full_path() returns only path+query (no netloc), so
    # urlparse(...).hostname was always None and the fallback branch always
    # ran. get_host() yields the "host[:port]" the client used; strip any
    # port before appending scrapyd's.
    my_url = request.get_host().split(':')[0]
    if my_url == '':
        scrapyd_url = 'http://localhost:6802'
    else:
        # NOTE(review): unlike the fallback, this URL carries no scheme —
        # confirm the template tolerates a scheme-less value.
        scrapyd_url = my_url + ":6802"

    context = RequestContext(request, {
        'elems_count': elems_count,
        'pages_count': pages_count,
        'pending_jobs': pending_jobs,
        'running_jobs': running_jobs,
        'finished_jobs': finished_jobs,
        'unique_alerts': unique_alerts,
        'alert_count_by_reason': alert_count_by_reason,
        'all_orgs': all_orgs,
        'top_offenders': top_offenders,
        'scrapyd_connected': scrapyd_connected,
        'my_url': scrapyd_url,
    })
    return HttpResponse(template.render(context))
def index(request, org_id=None):
    """Render the dashboard, optionally scheduling a crawl for one org.

    A valid ``org_id`` schedules a crawl of that organization's domain via
    scrapyd and surfaces the job id; an unrecognized ``org_id`` produces an
    error message.
    """
    SM = ScrapyManager("malspider", "full_domain", "http://0.0.0.0:6802")

    # Defaults used when scrapyd is unreachable.
    pending_jobs = 0
    running_jobs = 0
    finished_jobs = 0
    scrapyd_connected = 0

    # FIX: the original issued the scrapyd status request twice and threw
    # the first response away; query once.
    crawl_jobs = SM.get_all_jobs_by_project()
    if crawl_jobs is not None:
        pending_jobs = len(crawl_jobs['pending'])
        running_jobs = len(crawl_jobs['running'])
        finished_jobs = len(crawl_jobs['finished'])
        scrapyd_connected = 1

    unique_alerts = ModelQuery.get_num_unique_alerts()
    alert_count_by_reason = ModelQuery.get_num_alerts_by_reason()
    # FIX: .count was passed uncalled (a bound method, not an int); invoke
    # it so the template receives plain integers.
    elems_count = Element.objects.count()
    pages_count = Page.objects.count()
    template = loader.get_template('dashboard_index.html')
    all_orgs = ModelQuery.get_all_organizations()
    top_offenders = ModelQuery.get_top_offenders()

    org_obj = ModelQuery.get_org_by_id(org_id)
    jobid = None
    scan_domain = None
    error = None
    if org_obj:
        # NOTE(review): assumes schedule_job always returns a dict — confirm
        # it cannot return None on a scrapyd failure.
        output = SM.schedule_job(org_id, org_obj.domain)
        if 'jobid' in output:
            jobid = output['jobid']
            scan_domain = org_obj.domain
    elif org_obj is None and org_id is not None:
        error = "Error: Invalid Organization ID!"

    context = RequestContext(request, {
        'elems_count': elems_count,
        'pages_count': pages_count,
        'pending_jobs': pending_jobs,
        'running_jobs': running_jobs,
        'finished_jobs': finished_jobs,
        'unique_alerts': unique_alerts,
        'alert_count_by_reason': alert_count_by_reason,
        'all_orgs': all_orgs,
        'top_offenders': top_offenders,
        'scrapyd_connected': scrapyd_connected,
        'jobid': jobid,
        'scan_domain': scan_domain,
        'error': error,
    })
    return HttpResponse(template.render(context))