def hashfiles(request):
    """Hashes page.

    On POST with action "add": store the submitted hashes (textarea or
    uploaded file) under a random server-side name, create the Hashfile
    record and queue the potfile import as a background task.  Always
    renders the node / hash-type / rule / mask / wordlist lists.
    """
    context = {}
    context["Section"] = "Hashes"

    if request.method == 'POST':
        if request.POST["action"] == "add":
            hash_type = int(request.POST["hash_type"])

            # Random name: the upload is never stored under a user-controlled path.
            hashfile_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) + ".hashfile"
            hashfile_path = os.path.join(os.path.dirname(__file__), "..", "Files", "Hashfiles", hashfile_name)

            hashes = request.POST["hashes"]

            # was: f = open(...) ... f.close() — the handle leaked if a write raised
            with open(hashfile_path, 'w') as f:
                if len(hashes) == 0 and "hashfile" in request.FILES:
                    # The uploaded file is only used when the textarea is empty.
                    for chunk in request.FILES['hashfile'].chunks():
                        f.write(chunk.decode('UTF-8', 'backslashreplace'))
                else:
                    f.write(hashes.strip())

            username_included = "username_included" in request.POST

            # line_count / cracked_count start at 0; the import task fills them in.
            hashfile = Hashfile(
                name=request.POST['name'],
                hashfile=hashfile_name,
                hash_type=hash_type,
                line_count=0,
                cracked_count=0,
                username_included=username_included,
            )
            hashfile.save()
            init_hashfile_locks(hashfile)

            # Update the new file with the potfile, this may take a while, but it is processed in a background task
            import_hashfile_task.delay(hashfile.id)

            if hash_type != -1:  # if != plaintext
                messages.success(request, "Hashfile successfully added")
            else:
                messages.success(request, "Plaintext file successfully added")

    context["node_list"] = Node.objects.all()
    context["hash_type_list"] = [{'id': -1, 'name': 'Plaintext'}] + sorted(list(Hashcat.get_hash_types().values()), key=itemgetter('name'))
    context["rule_list"] = [{'name': None}] + sorted(Hashcat.get_rules(detailed=False), key=itemgetter('name'))
    context["mask_list"] = sorted(Hashcat.get_masks(detailed=False), key=itemgetter('name'))
    context["wordlist_list"] = sorted(Hashcat.get_wordlists(detailed=False), key=itemgetter('name'))

    template = loader.get_template('Hashcat/hashes.html')
    return HttpResponse(template.render(context, request))
def api_update_hashfiles(request):
    """API endpoint: re-sync every hashfile against the potfile.

    Returns a JSON ``{"result": "success"}`` document.
    """
    # Parameters are read from either method for symmetry with the other API
    # views, but this endpoint does not currently use them.
    params = request.POST if request.method == "POST" else request.GET

    Hashcat.update_hashfiles()

    return HttpResponse(json.dumps({"result": "success"}), content_type="application/json")
def upload_wordlist(request):
    """Handle the wordlist-upload form, then redirect back to the files page."""
    if request.method == 'POST':
        wordlist_name = request.POST["name"]
        if "file" in request.FILES:
            # Read the whole upload into memory and hand it to the Hashcat wrapper.
            uploaded = request.FILES["file"]
            Hashcat.upload_wordlist(wordlist_name, uploaded.read())

    return redirect('Hashcat:files')
def remove_hashfile_task(hashfile_id):
    """Background task: delete a hashfile, tracking progress via a Task row.

    The Task row exists only so the UI can show the operation in flight; it
    is removed again whether or not the deletion succeeds.
    """
    hashfile = Hashfile.objects.get(id=hashfile_id)

    task = Task(
        time=datetime.datetime.now(),
        message="Removing hash file %s..." % hashfile.name,
    )
    task.save()

    try:
        Hashcat.remove_hashfile(hashfile)
    except Exception:
        # Best-effort: log the failure, never leave the Task marker behind.
        traceback.print_exc()
    finally:
        task.delete()
def import_hashfile_task(hashfile_id):
    """Background task: import a hashfile (or plaintext file) into the DB.

    Progress is surfaced through a short-lived Task row whose message is
    updated at each phase; the row is always deleted at the end.
    """
    hashfile = Hashfile.objects.get(id=hashfile_id)

    task = Task(
        time=datetime.datetime.now(),
        message="Importing hash file %s..." % hashfile.name,
    )
    task.save()

    def _progress(message):
        # Persist the current phase so the UI can display it.
        task.message = message
        task.save()

    try:
        if hashfile.hash_type == -1:  # plaintext
            _progress("Importing plaintext file %s..." % hashfile.name)
            Hashcat.insert_plaintext(hashfile)
        else:
            _progress("Importing hash file %s..." % hashfile.name)
            Hashcat.insert_hashes(hashfile)
            _progress("Comparing hash file %s to potfile..." % hashfile.name)
            Hashcat.compare_potfile(hashfile)
    except Exception:
        traceback.print_exc()
    finally:
        task.delete()
def hashfile(request, hashfile_id, error_msg=''):
    """Render the detail page for a single hashfile."""
    hashfile = get_object_or_404(Hashfile, id=hashfile_id)

    # Recovery percentage; guarded against empty files to avoid division by zero.
    if hashfile.line_count != 0:
        recovered = "%s (%.2f%%)" % (humanize.intcomma(hashfile.cracked_count), hashfile.cracked_count/hashfile.line_count*100)
    else:
        recovered = "0"

    if hashfile.hash_type == -1:
        type_label = "Plaintext"
    else:
        type_label = Hashcat.get_hash_types()[hashfile.hash_type]["name"]

    context = {
        "Section": "Hashfile",
        "hashfile": hashfile,
        "lines": humanize.intcomma(hashfile.line_count),
        "recovered": recovered,
        "hash_type": type_label,
    }

    template = loader.get_template('Hashcat/hashfile.html')
    return HttpResponse(template.render(context, request))
def files(request):
    """Files page: list rules/masks/wordlists and handle removal requests."""
    context = {"Section": "Files"}

    if request.method == 'POST':
        if request.POST["action"] == "remove":
            # Dispatch on the file type; unknown types are silently ignored,
            # matching the original if/elif chain.
            removers = {
                "rule": Hashcat.remove_rule,
                "mask": Hashcat.remove_mask,
                "wordlist": Hashcat.remove_wordlist,
            }
            remover = removers.get(request.POST["filetype"])
            if remover is not None:
                remover(request.POST["filename"])

    context["rule_list"] = Hashcat.get_rules()
    context["mask_list"] = Hashcat.get_masks()
    context["wordlist_list"] = Hashcat.get_wordlists()

    template = loader.get_template('Hashcat/files.html')
    return HttpResponse(template.render(context, request))
def hashfiles(request):
    """Hashes page (synchronous-import variant).

    On POST with action "add": store the submitted hashes (textarea or
    uploaded file) under a random server-side name, create the Hashfile
    record and merge it with the potfile in-request, retrying while the
    database is locked.  Always renders the resource lists.
    """
    context = {}
    context["Section"] = "Hashes"

    if request.method == 'POST':
        if request.POST["action"] == "add":
            hash_type = int(request.POST["hash_type"])

            # Random names: uploads are never stored under a user-controlled path.
            hashfile_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) + ".hashfile"
            hashfile_path = os.path.join(os.path.dirname(__file__), "..", "Files", "Hashfiles", hashfile_name)
            crackedfile_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) + ".crackedfile"
            crackedfile_path = os.path.join(os.path.dirname(__file__), "..", "Files", "Crackedfiles", crackedfile_name)

            hashes = request.POST["hashes"]

            # Plaintext (hash_type == -1) is stored with the cracked files,
            # real hashes with the hash files.
            file_path = hashfile_path if hash_type != -1 else crackedfile_path

            # was: bare open()/close() — the handle leaked if a write raised
            with open(file_path, 'w') as f:
                if len(hashes) == 0 and "hashfile" in request.FILES:
                    # The uploaded file is only used when the textarea is empty.
                    for chunk in request.FILES['hashfile'].chunks():
                        f.write(chunk.decode('UTF-8', 'backslashreplace'))
                else:
                    f.write(hashes.strip())

            username_included = "username_included" in request.POST

            # was: sum(1 for _ in open(...)) — counted lines without closing the file
            with open(file_path, errors="backslashreplace") as f:
                line_count = sum(1 for _ in f)

            hashfile = Hashfile(
                name=request.POST['name'],
                hashfile=hashfile_name,
                crackedfile=crackedfile_name,
                hash_type=hash_type,
                line_count=line_count,
                cracked_count=0,
                username_included=username_included,
            )
            hashfile.save()
            init_hashfile_locks(hashfile)

            # Update the new file with the potfile, this may take a while
            updated = False
            while not updated:
                try:
                    if hash_type != -1:  # if != plaintext
                        Hashcat.compare_potfile(hashfile)
                    else:
                        Hashcat.insert_plaintext(hashfile)
                    updated = True
                except OperationalError:
                    # db locked by a concurrent writer — was a busy-spin with
                    # a bare `pass`; back off briefly before retrying
                    time.sleep(0.1)

            if hash_type != -1:  # if != plaintext
                messages.success(request, "Hashfile successfully added")
            else:
                messages.success(request, "Plaintext file successfully added")

    context["node_list"] = Node.objects.all()
    context["hash_type_list"] = [{'id': -1, 'name': 'Plaintext'}] + sorted(list(Hashcat.get_hash_types().values()), key=itemgetter('name'))
    context["rule_list"] = [{'name': None}] + sorted(Hashcat.get_rules(detailed=False), key=itemgetter('name'))
    context["mask_list"] = sorted(Hashcat.get_masks(detailed=False), key=itemgetter('name'))
    context["wordlist_list"] = sorted(Hashcat.get_wordlists(detailed=False), key=itemgetter('name'))

    template = loader.get_template('Hashcat/hashes.html')
    return HttpResponse(template.render(context, request))
def api_hashfiles(request):
    """DataTables API endpoint: paginated, searchable, sortable hashfile list.

    Reads the standard DataTables request parameters (draw, order, search,
    start, length) from POST or GET, augments each row with per-node session
    status and action buttons, and returns the DataTables JSON envelope.
    """
    if request.method == "POST":
        params = request.POST
    else:
        params = request.GET

    result = {
        "draw": params["draw"],
    }

    # Collect the status of every session from every node; nodes that are
    # down or time out are simply skipped.
    session_status = {}
    for node in Node.objects.all():
        try:
            hashcat_api = HashcatAPI(node.hostname, node.port, node.username, node.password)
            hashcat_info = hashcat_api.get_hashcat_info()
            for session in hashcat_info["sessions"]:
                session_status[session["name"]] = session["status"]
        except ConnectionRefusedError:
            pass
        except requests.exceptions.ConnectTimeout:
            pass

    # Map the DataTables column index onto a model field; non-sortable
    # columns fall back to "name".
    sort_index = ["name", "name", "hash_type", "line_count", "cracked_count", "name", "name", "name"][int(params["order[0][column]"])]
    if params["order[0][dir]"] == "desc":
        sort_index = "-" + sort_index

    hashfile_list = Hashfile.objects.filter(
        name__contains=params["search[value]"]).order_by(
            sort_index)[int(params["start"]):int(params["start"]) + int(params["length"])]

    data = []
    for hashfile in hashfile_list:
        # Per-row action buttons (export / new session / remove).
        buttons = "<a href='%s'><button title='Export cracked results' class='btn btn-info btn-xs' ><span class='glyphicon glyphicon-download-alt'></span></button></a>" % reverse('Hashcat:export_cracked', args=(hashfile.id, ))
        buttons += "<button title='Create new cracking session' style='margin-left: 5px' class='btn btn-primary btn-xs' data-toggle='modal' data-target='#action_new' data-hashfile='%s' data-hashfile_id=%d ><span class='glyphicon glyphicon-plus'></span></button>" % (hashfile.name, hashfile.id)
        buttons += "<button title='Remove hashfile' style='margin-left: 5px' type='button' class='btn btn-danger btn-xs' onClick='hashfile_action(%d, \"%s\")'><span class='glyphicon glyphicon-remove'></span></button>" % (hashfile.id, "remove")
        buttons = "<div style='float: right'>%s</div>" % buttons

        # Count running vs. total sessions; sessions on unreachable nodes
        # have no status entry and are not counted as running.
        running_session_count = 0
        total_session_count = Session.objects.filter(hashfile_id=hashfile.id).count()
        for session in Session.objects.filter(hashfile_id=hashfile.id):
            try:
                if session_status[session.name] == "Running":
                    running_session_count += 1
            except KeyError:
                pass

        data.append({
            "DT_RowId": "row_%d" % hashfile.id,
            "name": "<a href='%s'>%s<a/>" % (reverse('Hashcat:hashfile', args=(hashfile.id, )), hashfile.name),
            "type": "Plaintext" if hashfile.hash_type == -1 else Hashcat.get_hash_types()[hashfile.hash_type]["name"],
            "line_count": humanize.intcomma(hashfile.line_count),
            "cracked": "%s (%.2f%%)" % (humanize.intcomma(hashfile.cracked_count), hashfile.cracked_count / hashfile.line_count * 100) if hashfile.line_count > 0 else "0",
            "username_included": "yes" if hashfile.username_included else "no",
            "sessions_count": "%d / %d" % (running_session_count, total_session_count),
            "buttons": buttons,
        })

    result["data"] = data
    result["recordsTotal"] = Hashfile.objects.all().count()
    result["recordsFiltered"] = Hashfile.objects.filter(name__contains=params["search[value]"]).count()

    # (debug dump of connection.queries removed: it is only populated with
    # DEBUG=True and spammed stdout on every request)

    return HttpResponse(json.dumps(result), content_type="application/json")
def run_search_task(search_id):
    """Background task: run a saved Search and write its results to a CSV file.

    Builds a SELECT over Hashcat_hash from the Search's stored JSON criteria
    (username pattern, hashfile subset, cracked-only), exports the rows
    server-side via INTO OUTFILE to a temporary file, then rewrites that file
    replacing hashfile ids and hash-type ids with human-readable names.
    Progress is tracked with a short-lived Task row.
    """
    search = Search.objects.get(id=search_id)

    task = Task(
        time = datetime.datetime.now(),
        message = "Running search %s..." % search.name
    )
    task.save()

    # Re-running a search replaces any previous output file.
    if os.path.exists(search.output_file):
        os.remove(search.output_file)

    try:
        search.status = "Running"
        search.output_lines = None
        search.processing_time = None
        search.save()

        search_info = json.loads(search.json_search_info)

        start_time = time.time()

        cursor = connection.cursor()

        args = []
        columns = ["hashfile_id", "username", "password", "hash_type", "hash"]
        query = "SELECT %s FROM Hashcat_hash" % ",".join(columns)
        # A WHERE clause is needed if any of the three filters is active.
        if "pattern" in search_info or not "all_hashfiles" in search_info or "ignore_uncracked" in search_info:
            query += " WHERE "
        if "pattern" in search_info:
            # ';'-separated patterns are OR-ed together as LIKE terms.
            query_pattern_list = []
            for pattern in search_info["pattern"].split(';'):
                query_pattern_list.append("username LIKE %s")
                args.append("%" + pattern + "%")
            query += "(" + " OR ".join(query_pattern_list) + ")"

            # " AND " is appended here (inside the pattern branch) only when a
            # further filter will follow — the clause order is significant.
            if not "all_hashfiles" in search_info or "ignore_uncracked" in search_info:
                query += " AND "
        if not "all_hashfiles" in search_info:
            # Restrict to the selected hashfiles; placeholders keep it parameterized.
            query += "hashfile_id IN (%s)" % ','.join(['%s'] * len(search_info["hashfiles"]))
            args += [int(i) for i in search_info["hashfiles"]]

            # Same trick: join to the next filter only if it exists.
            if "ignore_uncracked" in search_info:
                query += " AND "
        if "ignore_uncracked" in search_info:
            query += "password IS NOT NULL"

        # Export goes to a randomly-named temp file the DB server writes itself.
        tmpfile_name = ''.join([random.choice(string.ascii_lowercase) for i in range(16)])
        tmp_file = os.path.join(os.path.dirname(__file__), "..", "Files", "tmp", tmpfile_name)

        # NOTE(review): SELECT ... INTO OUTFILE is MySQL/MariaDB-specific and
        # requires the DB server to share this filesystem — confirm deployment.
        query += " INTO OUTFILE %s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n'"
        args.append(tmp_file)

        # NOTE(review): cursor.execute()'s return value is used as the row
        # count below — this is driver-specific behavior; confirm with the
        # configured DB backend.
        rows = cursor.execute(query, args)
        cursor.close()

        # The outfile only exists if the query produced rows on this host.
        if os.path.exists(tmp_file):
            hash_types_dict = Hashcat.get_hash_types()
            # id -> name lookup for rewriting column 0 of the export.
            hashfile_dict = {}
            for hashfile in Hashfile.objects.all():
                hashfile_dict[hashfile.id] = hashfile.name

            with open(search.output_file, 'w', newline='') as out_csvfile:
                spamwriter = csv.writer(out_csvfile, delimiter=',', quotechar='"',
                                        quoting=csv.QUOTE_MINIMAL)
                with open(tmp_file, 'r', newline='') as csvfile:
                    spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
                    for row in spamreader:
                        # Replace hashfile id with its name; unknown ids pass through.
                        try:
                            row[0] = hashfile_dict[int(row[0])]
                        except KeyError:
                            pass
                        # Replace hash-type id with its name (-1 means plaintext);
                        # malformed/short rows pass through unchanged.
                        try:
                            row[3] = hash_types_dict[int(row[3])]['name'] if int(row[3]) != -1 else "Plaintext"
                        except KeyError:
                            pass
                        except ValueError:
                            pass
                        except IndexError:
                            pass
                        spamwriter.writerow(row)

            os.remove(tmp_file)

        end_time = time.time()

        search.status = "Done"
        search.output_lines = int(rows)
        search.processing_time = int(end_time - start_time)
        search.save()
    except Exception as e:
        traceback.print_exc()
    finally:
        # Always drop the progress marker, even on failure.
        task.delete()
def optimize_potfile():
    """Task entry point: delegate potfile optimization to the Hashcat wrapper."""
    Hashcat.optimize_potfile()
def update_potfile_task():
    """Task entry point: re-sync all hashfiles against the potfile via the Hashcat wrapper."""
    Hashcat.update_hashfiles()
def node(request, node_name, error_msg=""):
    """Node detail page.

    Shows the node's rules/masks/wordlists with a per-file "synchro" flag
    (present on the node with a matching MD5).  A POST with action
    "synchronize" uploads every missing or out-of-date file to the node
    first.  A non-empty error_msg short-circuits to an error render.
    """
    context = {}
    context["Section"] = "Nodes"

    # Error path (also used for re-entry below): render without touching the node.
    if len(error_msg) != 0:
        context["error_message"] = error_msg
        template = loader.get_template('Nodes/node.html')
        return HttpResponse(template.render(context, request))

    node_item = get_object_or_404(Node, name=node_name)

    context["node_name"] = node_item.name
    context["hostname"] = node_item.hostname
    context["port"] = node_item.port

    def _is_synced(local, remote):
        # A local file is in sync when the node has it and the MD5s match.
        return local["name"] in remote and remote[local["name"]]["md5"] == local["md5"]

    if request.method == 'POST':
        if request.POST["action"] == "synchronize":
            hashcat_api = HashcatAPI(node_item.hostname, node_item.port,
                                     node_item.username, node_item.password)
            node_data = hashcat_api.get_hashcat_info()

            # was: three copy-pasted loops with duplicated missing/md5-mismatch
            # branches; one table-driven loop uploads any out-of-sync file.
            for file_list, remote_key, upload in (
                    (Hashcat.get_rules(), "rules", hashcat_api.upload_rule),
                    (Hashcat.get_masks(), "masks", hashcat_api.upload_mask),
                    (Hashcat.get_wordlists(), "wordlists", hashcat_api.upload_wordlist)):
                for item in file_list:
                    if not _is_synced(item, node_data[remote_key]):
                        # was: open(...).read() with no close — with releases the handle
                        with open(item["path"], 'rb') as f:
                            upload(item["name"], f.read())

    hashcat_api = HashcatAPI(node_item.hostname, node_item.port,
                             node_item.username, node_item.password)
    node_data = hashcat_api.get_hashcat_info()

    if node_data["response"] == "error":
        # Re-render through the error path above.
        return node(request, node_name, error_msg=node_data["message"])

    rule_list = Hashcat.get_rules()
    mask_list = Hashcat.get_masks()
    wordlist_list = Hashcat.get_wordlists()

    # Annotate every local file with its sync state on this node.
    for rule in rule_list:
        rule["synchro"] = _is_synced(rule, node_data["rules"])
    for mask in mask_list:
        mask["synchro"] = _is_synced(mask, node_data["masks"])
    for wordlist in wordlist_list:
        wordlist["synchro"] = _is_synced(wordlist, node_data["wordlists"])

    hash_type_list = sorted(node_data["hash_types"], key=itemgetter('id'))

    context["version"] = node_data["version"]
    context["rule_list"] = rule_list
    context["mask_list"] = mask_list
    context["wordlist_list"] = wordlist_list
    context["hash_type_list"] = hash_type_list

    template = loader.get_template('Nodes/node.html')
    return HttpResponse(template.render(context, request))
def api_upload_file(request):
    """API endpoint (HTTP Basic auth): upload a hashfile, wordlist, rule or mask.

    Hashfiles are stored under a random server-side name and imported by a
    background task; the other types are handed to the matching Hashcat
    helper.  Returns a JSON ``{"result": ...}`` document, or 401 when the
    credentials are missing, malformed or wrong.
    """
    auth_header = request.META.get('HTTP_AUTHORIZATION', '')
    token_type, _, credentials = auth_header.partition(' ')
    try:
        # was: a malformed/absent Basic header raised here and produced a 500;
        # binascii.Error, bad UTF-8 and a missing ':' all subclass ValueError.
        username, password = base64.b64decode(credentials).decode().split(':')
    except ValueError:
        return HttpResponse(status=401)

    try:
        user = User.objects.get(username=username)
    except User.DoesNotExist:
        return HttpResponse(status=401)

    if token_type != 'Basic' or not user.check_password(password):
        return HttpResponse(status=401)

    if request.method == "POST":
        params = request.POST
    else:
        return HttpResponse(json.dumps({"result": "error", "value": "Only POST accepted"}), content_type="application/json")

    # Validate the required multipart fields up front.
    if not 'name' in params:
        return HttpResponse(json.dumps({"result": "error", "value": "Please specify the uploaded file name"}), content_type="application/json")
    if not 'type' in params:
        return HttpResponse(json.dumps({"result": "error", "value": "Please specify the uploaded file type"}), content_type="application/json")
    if not 'file' in request.FILES:
        return HttpResponse(json.dumps({"result": "error", "value": "Please upload a file"}), content_type="application/json")

    # (debug print(params) removed — it leaked request data to stdout)

    if params['type'] == 'hashfile':
        if not 'hash_type' in params:
            return HttpResponse(json.dumps({"result": "error", "value": "Please specify the hash type"}), content_type="application/json")

        hash_type = int(params["hash_type"])

        # Random name: the upload is never stored under a user-controlled path.
        hashfile_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) + ".hashfile"
        hashfile_path = os.path.join(os.path.dirname(__file__), "..", "Files", "Hashfiles", hashfile_name)

        # was: bare open()/close() — the handle leaked if a write raised
        with open(hashfile_path, 'w') as f:
            for chunk in request.FILES['file'].chunks():
                f.write(chunk.decode('UTF-8', 'backslashreplace'))

        username_included = "username_included" in params

        # line_count / cracked_count start at 0; the import task fills them in.
        # (params is request.POST here, so params['name'] == request.POST['name'].)
        hashfile = Hashfile(
            name=params['name'],
            hashfile=hashfile_name,
            hash_type=hash_type,
            line_count=0,
            cracked_count=0,
            username_included=username_included,
        )
        hashfile.save()
        init_hashfile_locks(hashfile)

        # Update the new file with the potfile, this may take a while, but it is processed in a background task
        import_hashfile_task.delay(hashfile.id)
    elif params['type'] == 'wordlist':
        Hashcat.upload_wordlist(params['name'], request.FILES["file"].read())
    elif params['type'] == 'rule':
        Hashcat.upload_rule(params['name'], request.FILES["file"].read())
    elif params['type'] == 'mask':
        Hashcat.upload_mask(params['name'], request.FILES["file"].read())

    return HttpResponse(json.dumps({"result": "success"}), content_type="application/json")