def edit_job(job_id):
    """Edit an existing fuzz job.

    GET renders the edit form for the job; POST applies the submitted
    changes and redirects to the job overview.  Users without permission
    on the job's owner are flashed a message and redirected away.
    """
    # TODO prefill form with current values
    job = Job.objects.get(id=job_id)
    if not job:
        flask.abort(400, description="Invalid job ID")
    if not can_do_stuff_with_job(current_user, job.owner):
        flask.flash('You are not allowed to edit this job')
        return flask.redirect("/jobs/show")
    if flask.request.method == 'POST':
        data = flask.request.form
        engine = data.get('mutation_engine')
        # BUG FIX: the original tested `fuzzer == "afl" or "syzkaller"`,
        # which is always truthy (non-empty string literal), so EVERY
        # fuzzer got engine forced to 'external'.  Only afl and syzkaller
        # bring their own mutation engine.
        if data.get('fuzzer') in ("afl", "syzkaller"):
            engine = 'external'
        Job.objects(id=job_id).update(**{
            'name': data.get('name'),
            'description': data.get('description'),
            'fuzzer': data.get('fuzzer'),
            'mutation_engine': engine,
            'verifier': data.get('verifier'),
        })
        return flask.redirect("/jobs/show")
    engines = [x['name'] for x in f3c_global_config.mutation_engines]
    fuzzers = [x['name'] for x in f3c_global_config.fuzzers]
    verifiers = [x['name'] for x in f3c_global_config.verifiers]
    return flask.render_template('jobs_edit.html',
                                 job=job,
                                 engines=engines,
                                 fuzzers=fuzzers,
                                 verifiers=verifiers)
def api_create_job():
    """Create a new fuzz job from a JSON request body.

    Returns a JSON string with 'success' and, on validation failure,
    a 'msg' describing which check failed.
    """
    # TODO check if a job with this name does already exist
    data = flask.request.get_json()
    if not data:
        return json.dumps({
            'success': False,
            'msg': 'no json document provided'
        })
    # TODO sanitize data
    if not data.get('name'):
        return json.dumps({
            'success': False,
            'msg': "No fuzz job name specified"
        })
    if not data.get('description'):
        return json.dumps({
            'success': False,
            'msg': "No fuzz job description specified"
        })
    # BUG FIX: the original checked data.get('engine'), a key the payload
    # never carries (the field used everywhere else is 'mutation_engine'),
    # so this conflict check could never trigger.
    if data.get('fuzzer') == "afl" and data.get('mutation_engine') is not None:
        return json.dumps({
            'success': False,
            'msg': "The fuzzer afl contains a mutation engine. No need to select a mutation engine"
        })
    if data.get('samples') is None or data.get('fuzzing_target') is None:
        return json.dumps({
            'success': False,
            'msg': "Please provide a fuzzing target AND some initial test cases."
        })
    # Binary payloads arrive base64-encoded in the JSON document.
    if data.get('firmware_root') is not None:
        firmware_root = base64.b64decode(data.get('firmware_root'))
    else:
        firmware_root = None
    new_job = Job(name=data.get('name'),
                  description=data.get('description'),
                  maximum_samples=int(data.get('maximum_samples')),
                  archived=False,
                  enabled=True,
                  maximum_iteration=int(data.get('maximum_iteration')),
                  timeout=int(data.get('timeout')),
                  date=datetime.datetime.now().strftime('%Y-%m-%d'),
                  mutation_engine=data.get('mutation_engine'),
                  fuzzer=data.get('fuzzer'),
                  samples=base64.b64decode(data.get('samples')),
                  fuzzing_target=base64.b64decode(data.get('fuzzing_target')),
                  cmd_args=data.get('cmd_args'),
                  verifier=data.get('verifier'),
                  firmware_root=firmware_root)
    new_job.save()
    return json.dumps({'success': True})
def add_job():
    """Render the job-creation form (GET) or create a new fuzz job (POST)."""
    # TODO check if job with this name already exists
    if flask.request.method == 'GET':
        engines = [entry['name'] for entry in f3c_global_config.mutation_engines]
        fuzzers = [entry['name'] for entry in f3c_global_config.fuzzers]
        verifiers = [entry['name'] for entry in f3c_global_config.verifiers]
        return flask.render_template("jobs_add.html",
                                     engines=engines,
                                     fuzzers=fuzzers,
                                     verifiers=verifiers)

    form = flask.request.form
    uploads = flask.request.files

    # afl ships its own mutation engine, so the stored engine is 'external'.
    engine = form.get('mutation_engine')
    if form.get('fuzzer') == "afl":
        engine = 'external'

    if 'fuzzing_target' not in uploads:
        flask.flash('Please provide a fuzzing target.')
        return flask.redirect('/jobs/add')

    if mutation_engine_requires_samples(engine) and 'samples' not in uploads:
        flask.flash(
            'If mutation engine is not external then you must provide some initial test cases.'
        )
        return flask.redirect('/jobs/add')

    samples = uploads['samples'].stream.read() if 'samples' in uploads else None
    firmware_root = (uploads['firmware_root'].stream.read()
                     if 'firmware_root' in uploads else None)

    new_job = Job(name=form.get('name'),
                  description=form.get('description'),
                  maximum_samples=f3c_global_config.maximum_samples,
                  archived=False,
                  enabled=True,
                  maximum_iteration=int(form.get('maximum_iteration')),
                  timeout=int(form.get('timeout')),
                  date=datetime.datetime.now().strftime('%Y-%m-%d'),
                  mutation_engine=engine,
                  fuzzer=form.get('fuzzer'),
                  verifier=form.get('verifier'),
                  samples=samples,
                  fuzzing_target=uploads['fuzzing_target'].stream.read(),
                  cmd_args=form.get('cmd_args'),
                  firmware_root=firmware_root,
                  owner=User.objects.get(email=current_user.email))
    new_job.save()
    return flask.redirect("/jobs/show")
def add_job():
    """Render the job-creation form (GET) or validate and create a job (POST),
    aborting with HTTP 400 on any invalid input."""
    if flask.request.method == 'GET':
        engines = [x['name'] for x in f3c_global_config.mutation_engines]
        fuzzers = [x['name'] for x in f3c_global_config.fuzzers]
        return flask.render_template("jobs_add.html",
                                     engines=engines,
                                     fuzzers=fuzzers)
    data = flask.request.form
    files = flask.request.files
    if not data.get('name'):
        flask.abort(400, description="No fuzz job name specified")
    elif not data.get('description'):
        flask.abort(400, description="No fuzz job description specified")
    # BUG FIX: the original read data.get('engine'); the form field is named
    # 'mutation_engine' (see the Job constructor below), so the afl/engine
    # conflict could never be detected.
    if data.get('fuzzer') == "afl" and data.get('mutation_engine') is not None:
        flask.abort(400, description="The fuzzer afl contains a mutation engine. No need to select a mutation engine")
    if 'samples' not in files or 'fuzzing_target' not in files:
        flask.abort(400, description="Please provide a fuzzing target AND some initial test cases.")
    firmware_root = None
    if 'firmware_root' in files:
        firmware_root = files['firmware_root'].stream.read()
    new_job = Job(name=data.get('name'),
                  description=data.get('description'),
                  maximum_samples=int(data.get('maximum_samples')),
                  archived=False,
                  enabled=True,
                  maximum_iteration=int(data.get('maximum_iteration')),
                  timeout=int(data.get('timeout')),
                  date=datetime.datetime.now().strftime('%Y-%m-%d'),
                  mutation_engine=data.get('mutation_engine'),
                  fuzzer=data.get('fuzzer'),
                  samples=files['samples'].stream.read(),
                  fuzzing_target=files['fuzzing_target'].stream.read(),
                  firmware_root=firmware_root)
    new_job.save()
    return flask.redirect("/jobs/show")
def get_job_information_from_Statistic_table(self, selected_job):
    """Return the first Statistic document whose joined Job is named *selected_job*.

    Raises IndexError when no matching statistic exists.
    """
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {'$match': {'relation.name': selected_job}},
    ]
    job_statistic = Statistic.objects.aggregate(*pipeline)
    return list(job_statistic)[0]
def calculate_number_of_crashes_for_selected_job(self, selected_job):
    """Count all crashes belonging to the job named *selected_job*."""
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {'$match': {'relation.name': selected_job}},
    ]
    matching_crashes = Crash.objects.aggregate(*pipeline)
    # Consume the cursor without materializing a throwaway list.
    return sum(1 for _ in matching_crashes)
def get_original_and_crash_test_case_of_crash(crash):
    """Return (original, crashing) test cases of *crash*, base64-encoded.

    The original sample is only returned when the job's mutation engine
    supports diffing; otherwise the first element is None.
    """
    crash_test_case = crash.test_case
    original_test_case = list(Job.objects(id=crash.job_id))[0]["samples"]
    encoded_crash_test_case = base64.b64encode(crash_test_case).decode('ascii')
    if not testcase_can_be_diffed(crash.job_id):
        return None, encoded_crash_test_case
    if original_test_case.startswith(b'PK'):
        # 'PK' magic bytes: the stored samples blob is a zip archive, so
        # extract the member the crashing input was derived from.
        original_test_case = get_original_crash_test_case_of_zipfile(
            crash_test_case, original_test_case)
    encoded_original_test_case = base64.b64encode(original_test_case).decode('ascii')
    return encoded_original_test_case, encoded_crash_test_case
def calculate_number_of_unique_and_exploitable_crashes_for_selected_job(self, selected_job):
    """Count distinct crash hashes classified EXPLOITABLE for *selected_job*."""
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {'$match': {'relation.name': selected_job,
                    'exploitability': 'EXPLOITABLE'}},
        # one output row per unique crash hash
        {'$group': {'_id': '$crash_hash'}},
    ]
    unique_exploitable = Crash.objects.aggregate(*pipeline)
    return sum(1 for _ in unique_exploitable)
def get_crashes_of_date_for_selected_job(date, selected_job):
    """Return a cursor over crashes of *selected_job* within the 24h window starting at *date*."""
    join_jobs = {'$lookup': {'from': Job._get_collection_name(),
                             'localField': 'job_id',
                             'foreignField': '_id',
                             'as': 'relation'}}
    same_day = {"$match": {"relation.name": selected_job,
                           "date": {"$gte": date,
                                    "$lt": date + timedelta(days=1)}}}
    return Crash.objects.aggregate(join_jobs, same_day)
def calculate_number_of_unique_crashes_for_selected_job(self, selected_job):
    """Count distinct crash hashes observed for the job named *selected_job*."""
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {"$match": {"relation.name": selected_job}},
        # collapse duplicates: one row per crash hash
        {"$group": {"_id": "$crash_hash"}},
    ]
    return sum(1 for _ in Crash.objects.aggregate(*pipeline))
def calculate_different_crash_signals_for_selected_job(self, selected_job):
    """Map each crash signal seen for *selected_job* to its occurrence count."""
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {'$match': {'relation.name': selected_job}},
        {'$group': {'_id': '$crash_signal', 'quantity': {'$sum': 1}}},
    ]
    grouped = Crash.objects.aggregate(*pipeline)
    return {entry['_id']: entry['quantity'] for entry in grouped}
def calculate_different_crash_signals_for_selected_job(self, selected_job):
    """Return {crash_signal: occurrence count} for the job named *selected_job*."""
    join_stage = {'$lookup': {'from': Job._get_collection_name(),
                              'localField': 'job_id',
                              'foreignField': '_id',
                              'as': 'relation'}}
    match_stage = {"$match": {"relation.name": selected_job}}
    group_stage = {"$group": {"_id": "$crash_signal", "quantity": {"$sum": 1}}}
    counts_by_signal = {}
    for row in Crash.objects.aggregate(join_stage, match_stage, group_stage):
        counts_by_signal[row["_id"]] = row["quantity"]
    return counts_by_signal
def calculate_all_crashes_per_time_interval_for_selected_job(self, selected_job):
    """Return (crashes per interval, iterations per interval) over all crashes of *selected_job*."""
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {'$match': {'relation.name': selected_job}},
        {'$sort': {'date': 1}},
        # only the fields the bucketing helper needs
        {'$project': {'date': 1, 'iteration': 1}},
    ]
    ordered_crashes = list(Crash.objects.aggregate(*pipeline))
    return self.calculate_crashes_and_iterations_per_time_interval_for_selected_job(ordered_crashes)
def calculate_all_unique_crashes_per_time_interval_for_selected_job(self, selected_job):
    """Bucket the first occurrence of each unique crash hash of *selected_job* into time intervals."""
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {"$match": {"relation.name": selected_job}},
        # one row per crash hash, stamped with its earliest sighting
        {"$group": {"_id": "$crash_hash", "date": {"$min": "$date"}}},
        {"$sort": {"date": 1}},
    ]
    first_sightings = list(Crash.objects.aggregate(*pipeline))
    return self.calculate_crashes_per_time_interval(first_sightings)
def calculate_last_24_hours_crashes_per_time_interval_for_selected_job(self, selected_job):
    """Return (crashes per interval, iterations per interval) for *selected_job* over the past 24h.

    Side effect: decrements self.crash_counter by the number of crashes found.
    """
    cutoff = self.date_now - timedelta(days=1)
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {'$match': {'relation.name': selected_job, 'date': {'$gte': cutoff}}},
        {'$sort': {'date': 1}},
        {'$project': {'date': 1, 'iteration': 1}},
    ]
    recent_crashes = list(Crash.objects.aggregate(*pipeline))
    # NOTE(review): counter decrement presumably compensates for counting
    # these crashes elsewhere — confirm against the callers.
    self.crash_counter -= len(recent_crashes)
    return self.calculate_crashes_and_iterations_per_time_interval_for_selected_job(recent_crashes)
def calculate_last_24_hours_unique_crashes_per_time_interval_for_selected_job(self, selected_job):
    """Bucket unique crash hashes of *selected_job* first seen in the past 24h into time intervals.

    Side effect: decrements self.crash_counter by the number of unique crashes found.
    """
    cutoff = self.date_now - timedelta(days=1)
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {"$match": {"relation.name": selected_job, "date": {"$gte": cutoff}}},
        # one row per crash hash with its earliest sighting in the window
        {"$group": {"_id": "$crash_hash", "date": {"$min": "$date"}}},
        {"$sort": {"date": 1}},
    ]
    recent_unique = list(Crash.objects.aggregate(*pipeline))
    # NOTE(review): counter decrement presumably compensates for counting
    # these crashes elsewhere — confirm against the callers.
    self.crash_counter -= len(recent_unique)
    return self.calculate_crashes_per_time_interval(recent_unique)
def get_unique_exploitable_crashes_of_date_for_selected_job(date, selected_job):
    """Return a cursor of distinct EXPLOITABLE crash hashes of *selected_job* on *date* (24h window)."""
    day_window = {"$gte": date, "$lt": date + timedelta(days=1)}
    pipeline = [
        {'$lookup': {'from': Job._get_collection_name(),
                     'localField': 'job_id',
                     'foreignField': '_id',
                     'as': 'relation'}},
        {"$match": {"relation.name": selected_job,
                    "exploitability": "EXPLOITABLE",
                    "date": day_window}},
        # collapse duplicates: one row per crash hash
        {"$group": {"_id": "$crash_hash"}},
    ]
    return Crash.objects.aggregate(*pipeline)
def get_job_name_of_job_id(job_id):
    """Look up the name of the job whose id is *job_id* (IndexError if absent)."""
    matching_jobs = list(Job.objects(id=job_id))
    return matching_jobs[0]["name"]
def _get_active_jobs(self):
    """Return the queryset of all currently enabled jobs."""
    return Job.objects(enabled=True)
def testcase_can_be_diffed(job_id):
    """True iff the job's mutation engine ('radamsa') supports diffing test cases."""
    job = list(Job.objects(id=job_id))[0]
    return job["mutation_engine"] == "radamsa"