def runmodules(hostlist=""): import datetime import django, os, sys os.environ["DJANGO_SETTINGS_MODULE"] = "Kraken.settings" sys.path.append("/opt/Kraken") django.setup() from Web_Scout.models import Hosts from importlib import import_module import datetime if not hostlist: hostlist = Hosts.objects.exclude(Module__exact='') total_count = len(hostlist) LogKrakenEvent('Celery', 'Running modules on ' + str(total_count) + ' hosts.', 'info') start_time = datetime.datetime.now() jobs = group(runmodule.s(host.HostID) for host in hostlist) result = jobs.apply_async() while not result.ready(): print 'Failed Tasks? ' + str(result.failed()) print 'Waiting? ' + str(result.waiting()) print 'Completed: ' + str(result.completed_count()) print 'Total: ' + str(total_count) process_percent = int((result.completed_count() / total_count) * 100) sleep(.1) print 'Percentage Complete: ' + str(process_percent) + '%' current_task.update_state(state='PROGRESS', meta={'process_percent': process_percent }) sleep(5) end_time = datetime.datetime.now() total_time = end_time - start_time LogKrakenEvent('Celery', 'Mass Module Execution Complete. Elapsed time: ' + str(total_time) + ' to test ' + str(total_count) + ' interfaces', 'info')
def scan(addresses): from subprocess import Popen import datetime import django os.environ["DJANGO_SETTINGS_MODULE"] = "Kraken.settings" sys.path.append("/opt/Kraken") django.setup() from Web_Scout.models import Addresses, Hosts current_task.update_state(state='SCANNING') timestamp = datetime.datetime.now() initial_host_count = len(Hosts.objects.all()) total_count = len(addresses) # Perform scan jobs = group(nmap_web.s(address) for address in addresses) result = jobs.apply_async() while not result.ready(): process_percent = int((result.completed_count() / total_count) * 100) sleep(.1) print 'Percentage Complete: ' + str(process_percent) + '%' current_task.update_state(state='PROGRESS', meta={'process_percent': process_percent}) sleep(5) for address in addresses: try: filepath = '/opt/Kraken/tmp/' + address.replace('/', '-').replace( '.', '-') + '.xml' print 'deleting ' + filepath # os.remove(filepath) except: print 'No nmap.xml to remove' # Figure out how to tie supplied ranges/hostnames to individual records print 'Checking for stale hosts' try: for host in Addresses.objects.get(AddressID=address.replace( '.', '-').replace('/', '-')).hosts_set.all(): print 'host ' + host.IP + ' found.' if datetime.datetime.strptime( host.LastSeen, '%Y-%m-%d %H:%M:%S.%f') < timestamp: print 'Host is stale' host.Stale = True host.StaleLevel += 1 host.save() else: print 'host ' + host.IP + ' is not stale.' except: LogKrakenEvent( 'Celery', 'Unable to find Address record during stale check.', 'error') post_scan_host_count = len(Hosts.objects.all()) LogKrakenEvent( 'Celery', 'Scanning Complete. ' + str(post_scan_host_count - initial_host_count) + ' new hosts found.', 'info')
def cleardb():
    """Delete every record in the Hosts and Interfaces tables, logging the outcome."""
    import django, os, sys
    os.environ["DJANGO_SETTINGS_MODULE"] = "Kraken.settings"
    sys.path.append("/opt/Kraken")
    django.setup()
    from Web_Scout.models import Hosts, Interfaces
    # Delete all records in the Hosts and Interfaces tables.
    try:
        Hosts.objects.all().delete()
        Interfaces.objects.all().delete()
        LogKrakenEvent('Celery', 'Hosts and interfaces cleared.', 'info')
    except Exception:
        # Narrowed from a bare except so Ctrl-C/SystemExit still propagate.
        LogKrakenEvent('Celery', 'Error clearing database.', 'error')
def removescreenshots():
    """Delete every captured screenshot from the static Web_Scout directory."""
    screenshot_dir = '/opt/Kraken/static/Web_Scout/'
    # os.listdir already returns a list; the previous identity comprehension
    # just copied it. Remove each screenshot in turn.
    for screenshot in os.listdir(screenshot_dir):
        os.remove(os.path.join(screenshot_dir, screenshot))
    LogKrakenEvent('Celery', 'Screenshots deleted.', 'info')
def startscreenshot(overwrite=False): import datetime import django, os, sys os.environ["DJANGO_SETTINGS_MODULE"] = "Kraken.settings" sys.path.append("/opt/Kraken") django.setup() from Web_Scout.models import Hosts, Interfaces def signal_handler(signal, frame): print "[-] Ctrl-C received! Killing Thread(s)..." os._exit(0) signal.signal(signal.SIGINT, signal_handler) start_time = datetime.datetime.now() # Fire up the workers urlQueue = [] total_count = 0 for host in Hosts.objects.all(): for interface in host.interfaces_set.all(): urlQueue.append([interface.Url, interface.IntID]) total_count += 1 jobs = group( getscreenshot.s(item, 20, True, None, overwrite) for item in urlQueue) result = jobs.apply_async() while not result.ready(): print 'Failed Tasks? ' + str(result.failed()) print 'Waiting? ' + str(result.waiting()) print 'Completed: ' + str(result.completed_count()) print 'Total: ' + str(total_count) process_percent = int((result.completed_count() / total_count) * 100) sleep(.1) print 'Percentage Complete: ' + str(process_percent) + '%' current_task.update_state(state='PROGRESS', meta={'process_percent': process_percent}) sleep(30) for interface in Interfaces.objects.all(): if not os.path.exists('/opt/Kraken/static/Web_Scout/' + interface.IntID + '.png'): interface.Retry = True interface.save() shutil.copy( '/opt/Kraken/static/blank.png', '/opt/Kraken/static/Web_Scout/' + interface.IntID + '.png') end_time = datetime.datetime.now() total_time = end_time - start_time number_of_interfaces = Interfaces.objects.all().count() LogKrakenEvent( 'Celery', 'Screenshots Complete. Elapsed time: ' + str(total_time) + ' to screenshot ' + str(number_of_interfaces) + ' interfaces', 'info')
def viewer(request):
    """Mark an interface's host as reviewed, then display it (or redirect out)."""
    record_id = request.GET['destination']
    interface_record = Interfaces.objects.get(IntID=record_id)
    host_record = interface_record.hosts
    # Opening the viewer counts as reviewing the host.
    host_record.Reviewed = True
    host_record.save()
    LogKrakenEvent(request.user, 'Reviewed - ' + host_record.IP + ' (' + host_record.Hostname + ')', 'info')
    # external=true means the user wants to open the live URL directly.
    if request.GET.get('external', '') == 'true':
        return redirect(interface_record.Url)
    return render(request, 'Web_Scout/viewer.html', {'interface': interface_record, 'host': host_record})
def setup(request): if request.method == 'POST': if request.POST.get('action') == 'cleardb': job = tasks.cleardb.delay() try: task = Tasks.objects.get(Task='cleardb') except: task = Tasks() task.Task = 'cleardb' task.Task_Id = job.id task.Count = 0 task.save() LogKrakenEvent(request.user, 'Database Cleared', 'info') return HttpResponse() elif request.POST.get('action') == 'removescreenshots': job = tasks.removescreenshots.delay() try: task = Tasks.objects.get(Task='removescreenshots') except: task = Tasks() task.Task = 'removescreenshots' task.Task_Id = job.id task.Count = 0 task.save() LogKrakenEvent(request.user, 'Screenshots Deleted', 'info') return HttpResponse() elif request.POST.get('action') == 'parse': form = ParseForm(request.POST, request.FILES) if form.is_valid: with open('/opt/Kraken/tmp/nmap.xml', 'wb+') as destination: for chunk in request.FILES["parsefile"].chunks(): destination.write(chunk) job = tasks.nmap_parse.delay('/opt/Kraken/tmp/nmap.xml') try: task = Tasks.objects.get(Task='parse') except: task = Tasks() task.Task = 'parse' task.Task_Id = job.id task.Count = 0 task.save() form = ParseForm() addresses = Addresses.objects.all() return render(request, 'Web_Scout/setup.html', {'addresses':addresses, 'form':form, 'uploaded':False, 'failedupload':False}) else: return render(request, 'Web_Scout/setup.html', {'form':form, 'uploaded':False, 'failedupload':True}) elif request.POST.get('action') == 'screenshot': overwrite = request.POST.get('overwrite') ipPattern = re.compile("^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$") hostnamePattern = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)+([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$") proxy = "" proxyHost = request.POST.get("proxyhost", "") proxyPort = request.POST.get("proxyport", "") if proxyHost != "" and (ipPattern.match(proxyHost) or hostnamePattern.match(proxyHost)) and int(proxyPort) > 0 and int(proxyPort) < 65536: proxy = request.POST.get('proxyhost') + ":" + 
request.POST.get('proxyport') elif proxyHost: return HttpResponse(status=500) if overwrite == 'True': job = tasks.startscreenshot.delay(True, proxy) else: job = tasks.startscreenshot.delay(False, proxy) try: task = Tasks.objects.get(Task='screenshot') except: task = Tasks() task.Task = 'screenshot' task.Task_Id = job.id task.Count = 0 task.save() LogKrakenEvent(request.user, 'Screenshot taking task initiated', 'info') return HttpResponse() elif request.POST.get('action') == 'addurl': raw_list = request.POST.get('address-textarea') address_data = AddUrl(raw_list) json_data = json.dumps(address_data) return HttpResponse(json_data, content_type='application/json') elif request.POST.get('action') == 'runmodules': job = tasks.runmodules.delay() try: task = Tasks.objects.get(Task='runmodules') except: task = Tasks() task.Task = 'runmodules' task.Task_Id = job.id task.Count = 0 task.save() LogKrakenEvent(request.user, 'Running default credential checks.', 'info') return HttpResponse() elif request.POST.get('action') == 'scan': address_list = [] error_message = [] for key,value in request.POST.items(): if str(value) == "on": try: address_object = Addresses.objects.get(AddressID=key) if address_object.Hostname: address_list.append(address_object.Hostname) else: address_list.append(address_object.Address + '/' + address_object.Cidr) except: error_message.append(key + ' not found in database.') continue job = tasks.scan.delay(address_list) try: task = Tasks.objects.get(Task='scan') except: task = Tasks() task.Task = 'scan' task.Task_Id = job.id task.Count = 0 task.save() json_data = json.dumps(error_message) return HttpResponse(json_data, content_type='application/json') elif request.POST.get('action') == 'addaddress': raw_list = request.POST.get('address-textarea') print raw_list address_data = AddAddress(raw_list) json_data = json.dumps(address_data) return HttpResponse(json_data, content_type='application/json') elif request.POST.get('action') == 'addhostname': raw_list = 
request.POST.get('address-textarea') address_data = AddHostname(raw_list) json_data = json.dumps(address_data) return HttpResponse(json_data, content_type='application/json') elif request.POST.get('action') == 'delete': address_list = [] for key,value in request.POST.items(): if str(value) == "on": address_list.append(key) deleted_addresses = DeleteAddress(address_list) json_data = json.dumps(deleted_addresses) return HttpResponse(json_data, content_type='application/json') else: return HttpResponse("Failure.") else: form = ParseForm() addresses = Addresses.objects.all() return render(request, 'Web_Scout/setup.html', {'addresses':addresses, 'form':form, 'uploaded':False, 'failedupload':False})
def index(request):
    """Main host-listing view.

    POST: bulk actions, single-interface module runs, per-host screenshots,
    note/review updates, and host deletion. GET: searchable, sortable,
    paginated listing of hosts that have at least one interface.
    """
    if request.method == 'POST':
        action = request.POST.get('action')
        if action in ["bulknote", "bulkreviewed", "bulkdelete", "bulkscreenshot", "bulkrunmodule"]:
            data = BulkAction(request.POST.items(), action, request.POST.get('note', ''))
            json_data = json.dumps(data)
            return HttpResponse(json_data, content_type='application/json')
        elif action == "runmodule":
            interfaceid = request.POST.get('interfaceid')
            interface_record = Interfaces.objects.get(IntID=interfaceid)
            # Runs synchronously (no .delay) so the outcome can be returned inline.
            result, credentials = tasks.runmodule(interface_record.hosts.HostID)
            data = [result, credentials]
            json_data = json.dumps(data)
            return HttpResponse(json_data, content_type='application/json')
        elif action == "screenshothost":
            hostid = request.POST.get('host')
            host = Hosts.objects.get(HostID=hostid)
            for interface in host.interfaces_set.all():
                item = [interface.Url, interface.IntID]
                tasks.getscreenshot.delay(item, 20, True, None, True)
            return HttpResponse()
        elif action == "note":
            note = request.POST.get('note')
            record = request.POST.get('record')
            default_creds = request.POST.get('default-creds')
            http_auth = request.POST.get('http-auth')
            reviewed = request.POST.get('reviewed')
            interface = Interfaces.objects.get(IntID=record)
            host = interface.hosts
            interface.Notes = note
            interface.HttpAuth = (http_auth == "Yes")
            interface.DefaultCreds = (default_creds == "Yes")
            if reviewed == "Yes":
                host.Reviewed = True
                LogKrakenEvent(request.user, 'Reviewed - ' + host.IP + ' (' + host.Hostname + ')', 'info')
            else:
                host.Reviewed = False
            interface.save()
            host.save()
            return HttpResponse()
        elif action == "deletehost":
            host = request.POST.get('host')
            data = DeleteHost(host)
            json_data = json.dumps(data)
            return HttpResponse(json_data, content_type='application/json')
    else:
        search = request.GET.get('search', '')
        reviewed = request.GET.get('hide_reviewed', '')
        org = request.GET.get('organize_by', 'IP')
        hosts_per_page = request.GET.get('hosts_per_page', '20')
        nav_list = [-10, -9, -8, -7, -6, -5, -4, -3, -2, -1]
        temp_host_array = []
        host_array = []
        if search:
            entry_query = BuildQuery(search, ['IP', 'Hostname', 'Category', 'interfaces__Product'])
            if entry_query:
                temp_host_array = Hosts.objects.all().filter(entry_query).distinct()
            else:
                temp_host_array = ""
        else:
            temp_host_array = Hosts.objects.all()
        if org in ("IP", "Hostname", "Rating"):
            if temp_host_array:
                temp_host_array = temp_host_array.order_by(org)
        if reviewed == 'on':
            if temp_host_array:
                temp_host_array = temp_host_array.exclude(Reviewed=True)
        for host in temp_host_array:
            # exists() issues a cheap EXISTS query instead of materializing
            # every related interface row just to count them.
            if host.interfaces_set.exists():
                host_array.append(host)
        # BUGFIX: a non-numeric hosts_per_page previously crashed int();
        # sanitize once and use the same value for paginator and template
        # (they could previously disagree, e.g. 37 paginated as 20).
        try:
            per_page = int(hosts_per_page)
        except (TypeError, ValueError):
            per_page = 20
        if per_page not in (20, 30, 40, 50, 100):
            per_page = 20
        paginator = Paginator(host_array, per_page)
        # Preserve current query-string filters in the pagination links.
        parameters = ''
        for key, value in request.GET.items():
            if not key == 'page' and not value == "":
                parameters = parameters + '&' + key + '=' + value
        page = request.GET.get('page')
        try:
            hosts = paginator.page(page)
        except PageNotAnInteger:
            hosts = paginator.page(1)
        except EmptyPage:
            hosts = paginator.page(paginator.num_pages)
        return render(request, 'Web_Scout/index.html', {'hosts': hosts, 'nav_list': nav_list, 'pagination_parameters': parameters, 'hosts_per_page': per_page, 'search': search, 'reviewed': reviewed, 'org': org})
def nmap_parse(filepath, targetaddress=''): import xml.etree.cElementTree as ET import os import datetime import django os.environ["DJANGO_SETTINGS_MODULE"] = "Kraken.settings" sys.path.append("/opt/Kraken") django.setup() from Web_Scout.models import Addresses, Hosts, Interfaces # Known HTTP ports. HttpPorts = [ 80, 280, 443, 591, 593, 981, 1311, 2031, 2480, 3181, 4444, 4445, 4567, 4711, 4712, 5104, 5280, 7000, 7001, 7002, 8000, 8008, 8011, 8012, 8013, 8014, 8042, 8069, 8080, 8081, 8243, 8280, 8281, 8443, 8531, 8887, 8888, 9080, 9443, 11371, 12443, 16080, 18091, 18092 ] timestamp = datetime.datetime.now() # Parse Nmap XML using provided path. Uses xml.etree.cElementTree to parse. print('parsing ' + filepath) nmap = ET.parse(filepath) root = nmap.getroot() # Loop through all hosts found. for host in root.findall('host'): print('Host ' + host[1].get('addr') + ' found') # Extract IP address ipaddress = host[1].get('addr') try: # Attempt to locate the each host in the Hosts database table. host_object = Hosts.objects.get(HostID=ipaddress.replace('.', '-')) print host_object print('Existing host') # If found, the host is no longer stale. If a host cannot be reached, # it will not be in the Nmap XML. host_object.Stale = False host_object.StaleLevel = 0 # After the initial scan, which designates each host as 'new', each # subsequent scan will ensure the host is no longer indicated as new. host_object.New = False except: # If the host is not present in the database, create a host record. # targetaddress indicates that this parsing is the result of the scan # functionality. 
When a scan is performed, hosts are created with a # relationship to the address selected to scan if targetaddress: print "Creating new host record under Address" address_record = Addresses.objects.get( AddressID=targetaddress.replace('.', '-').replace( '/', '-')) host_object = address_record.hosts_set.create() host_object.New = True else: host_object = Hosts() host_object.HostID = ipaddress.replace('.', '-') host_object.Rating = "" host_object.IP = ipaddress # Get hostname hostnames = host.find('hostnames') try: host_object.Hostname = hostnames[0].get('name') if not host_object.Hostname: host_object.Hostname = "" except: host_object.Hostname = "" # Timestamp to indicate that the host was seen during this scan. host_object.LastSeen = timestamp host_object.save() # Loop through all ports for each host ports = host.find('ports') for port in ports.findall('port'): # The port must be open and present in the HttpPorts list in order to be added. if port[0].get('state') == 'open' and int( port.get('portid')) in HttpPorts or 'http' in str( port[1].get('extrainfo')) or 'http' in str( port[1].get('product')): print('Port ' + port.get('portid') + ' found.') # All interfaces records are tied to hosts in the database using relational mapping. # Attempt to locate interface record for this host and port combination. try: interface_object = Interfaces.objects.get( host_object.HostID + '-' + port.get('portid')) # If not, create one. except: interface_object = host_object.interfaces_set.create() # Set port Category using the Nmap devicetype value. This can change during the source code-based categorization # the screenshot process performs. 
try: host_object.Category = port[1].get('devicetype') if not host_object.Category: host_object.Category = "" except: host_object.Category = "" # Set host OS try: host_object.OS = port[1].get('ostype') if not host_object.OS: host_object.OS = "" except: host_object.OS = "" # Set port number and name interface_object.Port = port.get('portid') interface_object.Name = port[1].get('name') if not interface_object.Name: interface_object.Name = "" # Set port product. This can change during the source code-based credential # checking the screenshot process performs. try: interface_object.Product = port[1].get('extrainfo') if not interface_object.Product: interface_object.Product = "" except: interface_object.Product = "" # Set port version information try: interface_object.Version = port[1].get('version') if not interface_object.Version: interface_object.Version = "" except: interface_object.Version = "" # Set port database identification interface_object.IntID = host_object.IP.replace( '.', '-') + '-' + interface_object.Port interface_object.Banner = "" # The ImgLink is used on the front end to display screenshots. interface_object.ImgLink = "Web_Scout/" + host_object.IP.replace( '.', '-') + '-' + interface_object.Port + ".png" interface_object.Banner = "" # Determine the corrent URL protocol to assign. # Hostname is preferred over IP due to virtual hosts. 
if host_object.Hostname: if interface_object.Port == "80": interface_object.Url = "http://" + host_object.Hostname elif interface_object.Port == "443" or interface_object.Port == "8443" or interface_object.Port == "12443": interface_object.Url = "https://" + host_object.Hostname else: interface_object.Url = "http://" + host_object.Hostname + ":" + interface_object.Port else: if interface_object.Port == "80": interface_object.Url = "http://" + host_object.IP elif interface_object.Port == "443" or interface_object.Port == "8443" or interface_object.Port == "12443": interface_object.Url = "https://" + host_object.IP else: interface_object.Url = "http://" + host_object.IP + ":" + interface_object.Port # Indicate that this Interface record is create from a port, # rather than a specific URL path. interface_object.Type = 'port' interface_object.save() host_object.save() # Check Hosts and Interfaces table for duplicates and remove them. print('Checking for duplicates.') for row in Interfaces.objects.all(): if Interfaces.objects.filter(IntID=row.IntID).count() > 1: row.delete() for row in Hosts.objects.all(): if Hosts.objects.filter(HostID=row.HostID).count() > 1: row.delete() number_of_hosts = Hosts.objects.all().count() number_of_interfaces = Interfaces.objects.all().count() LogKrakenEvent( 'Celery', 'Parsing Complete. Hosts: ' + str(number_of_hosts) + ', Interfaces: ' + str(number_of_interfaces), 'info')