def import_hashfile_task(hashfile_id):
    """Import an uploaded hash file (hashes or plaintext) into the database.

    A temporary ``Task`` row is created so the UI can show progress; the
    ``finally`` clause guarantees it is removed even if the import fails.

    :param hashfile_id: primary key of the ``Hashfile`` row to import
    """
    hashfile = Hashfile.objects.get(id=hashfile_id)
    task = Task(
        time=datetime.datetime.now(),
        message="Importing hash file %s..." % hashfile.name,
    )
    task.save()
    try:
        # hash_type == -1 marks a plaintext file rather than real hashes.
        if hashfile.hash_type != -1:
            # The "Importing hash file ..." message was already stored at
            # construction time, so the original redundant re-save is dropped.
            Hashcat.insert_hashes(hashfile)
            task.message = "Comparing hash file %s to potfile..." % hashfile.name
            task.save()
            Hashcat.compare_potfile(hashfile)
        else:
            task.message = "Importing plaintext file %s..." % hashfile.name
            task.save()
            Hashcat.insert_plaintext(hashfile)
    except Exception:
        # Best-effort import: log the failure, never leave a stale Task row.
        traceback.print_exc()
    finally:
        task.delete()
def cleanup_tasks(sender, instance, **kwargs):
    """Signal handler: clear leftover Task rows and abort interrupted searches."""
    # Delete task entries one by one so any per-model delete logic still runs.
    for stale_task in Task.objects.all():
        stale_task.delete()

    # A search still flagged "Starting"/"Running" at this point was
    # interrupted; mark it "Aborted" so it is not shown as in progress.
    interrupted = Search.objects.filter(status__in=["Starting", "Running"])
    for search in interrupted:
        search.status = "Aborted"
        search.save()
def remove_hashfile_task(hashfile_id):
    """Remove a hash file, displaying a transient Task entry while working.

    :param hashfile_id: primary key of the ``Hashfile`` row to remove
    """
    hashfile = Hashfile.objects.get(id=hashfile_id)
    progress = Task(
        time=datetime.datetime.now(),
        message="Removing hash file %s..." % hashfile.name,
    )
    progress.save()
    try:
        Hashcat.remove_hashfile(hashfile)
    except Exception as e:
        # Best effort: report the error but always clean up the Task row.
        traceback.print_exc()
    finally:
        progress.delete()
def run_search_task(search_id):
    """Run a saved search and export the matching hashes to a CSV file.

    Builds a SQL query from the JSON criteria stored on the ``Search`` row,
    dumps matching rows via MySQL ``SELECT ... INTO OUTFILE`` to a temp file,
    then rewrites that file as the final CSV, translating hashfile ids and
    hash-type ids to human-readable names.  Search status/metrics are updated
    on the row; a transient ``Task`` entry shows progress and is always
    removed in ``finally``.

    :param search_id: primary key of the ``Search`` row to execute
    """
    search = Search.objects.get(id=search_id)
    task = Task(
        time=datetime.datetime.now(),
        message="Running search %s..." % search.name,
    )
    task.save()

    if os.path.exists(search.output_file):
        os.remove(search.output_file)

    try:
        search.status = "Running"
        search.output_lines = None
        search.processing_time = None
        search.save()

        search_info = json.loads(search.json_search_info)
        start_time = time.time()

        cursor = connection.cursor()
        args = []
        columns = ["hashfile_id", "username", "password", "hash_type", "hash"]
        query = "SELECT %s FROM Hashcat_hash" % ",".join(columns)

        # BUG FIX: the original appended " AND " based on membership tests
        # alone, producing invalid SQL such as "WHERE  AND hashfile_id IN(..)"
        # whenever the pattern filter was absent.  Collect each filter and
        # join them once instead.
        where_clauses = []

        if "pattern" in search_info:
            # Each ';'-separated pattern becomes a parameterized LIKE term.
            pattern_terms = []
            for pattern in search_info["pattern"].split(';'):
                pattern_terms.append("username LIKE %s")
                args.append("%" + pattern + "%")
            where_clauses.append("(" + " OR ".join(pattern_terms) + ")")

        if not "all_hashfiles" in search_info:
            placeholders = ','.join(['%s'] * len(search_info["hashfiles"]))
            where_clauses.append("hashfile_id IN (%s)" % placeholders)
            args += [int(i) for i in search_info["hashfiles"]]

        if "ignore_uncracked" in search_info:
            where_clauses.append("password IS NOT NULL")

        if where_clauses:
            query += " WHERE " + " AND ".join(where_clauses)

        # Random server-side temp file for the INTO OUTFILE dump.
        tmpfile_name = ''.join(random.choice(string.ascii_lowercase) for _ in range(16))
        tmp_file = os.path.join(os.path.dirname(__file__), "..", "Files", "tmp", tmpfile_name)
        # NOTE(review): the OUTFILE path is passed as a query parameter; this
        # relies on the driver's client-side interpolation — confirm against
        # the MySQL client library in use.
        query += " INTO OUTFILE %s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n'"
        args.append(tmp_file)

        rows = cursor.execute(query, args)
        cursor.close()

        if os.path.exists(tmp_file):
            hash_types_dict = Hashcat.get_hash_types()
            hashfile_dict = {hashfile.id: hashfile.name for hashfile in Hashfile.objects.all()}

            # Rewrite the raw dump, mapping ids to display names.
            with open(search.output_file, 'w', newline='') as out_csvfile:
                spamwriter = csv.writer(out_csvfile, delimiter=',', quotechar='"',
                                        quoting=csv.QUOTE_MINIMAL)
                with open(tmp_file, 'r', newline='') as csvfile:
                    spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
                    for row in spamreader:
                        try:
                            # Column 0: hashfile id -> hashfile name.
                            row[0] = hashfile_dict[int(row[0])]
                        except KeyError:
                            pass
                        try:
                            # Column 3: hash type id -> type name (-1 = plaintext).
                            row[3] = hash_types_dict[int(row[3])]['name'] if int(row[3]) != -1 else "Plaintext"
                        except (KeyError, ValueError, IndexError):
                            pass
                        spamwriter.writerow(row)
            os.remove(tmp_file)

        end_time = time.time()
        search.status = "Done"
        search.output_lines = int(rows)
        search.processing_time = int(end_time - start_time)
        search.save()
    except Exception:
        traceback.print_exc()
    finally:
        task.delete()
def task_delete(request, pk):
    """Delete the Task identified by ``pk``, then redirect to the index page."""
    get_object_or_404(Task, pk=pk).delete()
    return redirect('/')
# NOTE(review): legacy Python 2 / old-Celery module (`except Exception, exc`
# syntax, `celery.decorators` / `celery.task.sets` imports) — left untouched
# apart from comments.
from upload_queue import models
from celery.decorators import task
from celery.task.sets import subtask


@task(max_retries=5, ignore_result=True)
def upload_file(upload_task_id, to_property, to_path, callback=None, **kwargs):
    """Celery task that uploads a file."""
    # Look up the queued upload and the file-field attribute it targets.
    task = models.UploadTask.objects.get(pk=upload_task_id)
    field = getattr(task.to_instance, to_property)
    try:
        # Replace any previously stored file before saving the new content.
        if field.name:
            field.delete()
        field.save(to_path, task.local_file.file)
    except Exception, exc:
        # On failure, re-queue this task (up to max_retries) with the
        # original arguments; retry() raises, so the cleanup below is skipped.
        upload_file.retry(args=[upload_task_id, to_property, to_path, callback], kwargs=kwargs, exc=exc)
    # Upload succeeded: drop the temporary local copy and the queue entry.
    task.local_file.delete()
    task.delete()
    if callback:
        # Fire the optional follow-up subtask asynchronously.
        subtask(callback).delay()


class PendingUpload(object):
    # Value object holding the arguments for a deferred upload_file call.

    def __init__(self, upload_task_id, to_property, to_path, callback=None):
        self.upload_task_id = upload_task_id
        self.to_property = to_property
        self.to_path = to_path
        self.callback = callback

    def queue(self):
        # Schedule the actual upload on a Celery worker.
        upload_file.delay(self.upload_task_id, self.to_property, self.to_path, callback=self.callback)


# NOTE(review): the definition below is truncated in this view (its docstring
# is unclosed); the remainder lies outside the visible chunk.
def prepare_upload(to_instance, to_property, to_path, src_file, callback=None):
    """ Call this function to prepare to schedule an upload to be performed