Example #1
    def createReport(self, index, z, point):
        """Create report file.

        Prints error when report txt file cannot be opened for writing.

        """
        try:
            try:
                # python 3.x
                report = open(self.reportFileName, 'w', encoding='utf-8')
            except TypeError:
                # python 2.x: the built-in open() does not accept encoding=
                report = codecs.open(self.reportFileName,
                                     'w',
                                     encoding='utf-8')
        except IOError as e:
            self.computeMessage.emit(
                u'Error', u'Unable to open {} for writing. Reason: {}'.format(
                    self.reportFileName, e), 'CRITICAL')
            return

        if index == 0:
            report_text = (u'/{}BQM2'.format(z),
                           u'/MGRS:{}{ls}'.format(point, ls=os.linesep))
        elif index == 1:
            report_text = (u'/{}CGH'.format(z),
                           u'/MGRS:{}{ls}'.format(point, ls=os.linesep))

        for line in report_text:
            report.write(line)

        report.close()
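
The nested try/except at the top of this method exists only because the built-in open() on Python 2 does not accept an encoding argument. A minimal sketch of the same file-opening step using io.open, which takes encoding= on both Python 2.6+ and Python 3 (a hypothetical simplification with io imported at module level, not the plugin's actual code):

        try:
            # io.open accepts encoding= on both Python 2.6+ and Python 3,
            # so no codecs fallback is needed.
            report = io.open(self.reportFileName, 'w', encoding='utf-8')
        except IOError as e:
            self.computeMessage.emit(
                u'Error', u'Unable to open {} for writing. Reason: {}'.format(
                    self.reportFileName, e), 'CRITICAL')
            return
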
Example #2
def decompile(path):
	"""
	Convert a DEX file to a JAR (containing class files), then decompile the class files to near-original Java code using three different decompilers and select the best available output.
	"""
	common.pathToDEX = path
	pathToDex2jar = common.rootDir + "/lib/dex2jar/dex2jar.sh"
	sp = subprocess.Popen([pathToDex2jar, common.pathToDEX], shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
	output, error = sp.communicate()
	common.pathToJar = common.pathToDEX.rsplit(".",1)[0] + "_dex2jar.jar"
	dirname, extension = common.pathToJar.rsplit(".",1)
	zf = zipfile.ZipFile(common.pathToJar)

	#Total number of class files that need to be decompiled
	total_files = len(zf.namelist())
	report.write("totalfiles", total_files)
	common.count = len([s for s in zf.namelist() if ((".class" in s) and ("$" not in s))])

	pub.subscribe(decompiler_update, 'decompile')

	thread0 = Process(name='clear', target=clear, args = ())
	thread1 = Process(name='jdcore', target=jdcore, args = (zf.filename,dirname))
	thread2 = Process(name='cfr', target=cfr, args = (zf.filename,dirname))
	thread3 = Process(name='procyon', target=procyon, args = (zf.filename,dirname))

	thread0.start()
	thread0.join()

	progressbar1.start()
	progressbar2.start()
	progressbar3.start()


	thread1.start()
	thread2.start()
	thread3.start()
	thread1.join(0)
	thread2.join(0)
	thread3.join(0)

	with common.term.cbreak():
		val = None
		while val not in (u'c', u'C'):
			with common.term.location(0,common.term.height-3):
				print "Decompilation may hang/take too long (usually happens when the source is obfuscated)."
				print "At any time," + common.term.bold_underline_red_on_white('Press C to continue') + " and QARK will attempt to run SCA on whatever was decompiled."
				val = common.term.inkey(timeout=1)
				if not (thread1.is_alive() or thread2.is_alive() or thread3.is_alive()):
					break

	if thread1.is_alive():
		thread1.terminate()
	if thread2.is_alive():
		thread2.terminate()
	if thread3.is_alive():
		thread3.terminate()

	#Go back to the bottom of the screen
	with common.term.location(0,common.term.height):
		print ""

	g1 = grep_1(dirname, "// Byte code:")
	g2 = grep_1(dirname+"1", "// This method has failed to decompile.")
	g3 = grep_1(dirname+"2", "// This method could not be decompiled.")

	#print list(set(g1) - set(g2))
	logger.info("Trying to improve accuracy of the decompiled files")
	restored = 0
	try:
		for filename in g1:
			relative_filename = str(filename).split(dirname)[1]
			if any(relative_filename in s for s in g2):
				if any(relative_filename in s for s in g3):
					logger.debug("Failed to reconstruct: " + relative_filename)
				else:
					shutil.copy(dirname+"2"+relative_filename, filename)
					restored = restored +1
			else:
				shutil.copy(dirname+"1"+relative_filename, filename)
				restored = restored +1
	except Exception as e:
		print e.message
	report.write("restorestats","Restored " + str(restored) + " file(s) out of " + str(len(g1)) + " corrupt file(s)")
	logger.info("Restored " + str(restored) + " file(s) out of " + str(len(g1)) + " corrupt file(s)")
	logger.debug("Deleting redundant decompiled files")
	try:
		shutil.rmtree(dirname+"1")
		logger.debug("Deleted " + dirname+"1")
		shutil.rmtree(dirname+"2")
		logger.debug("Deleted " + dirname+"2")
	except Exception as e:
		logger.debug("Unable to delete redundant decompiled files (no impact on scan results): " + str(e))
Example #3
def run_dense_graph_benchmark(Nlist, dtype, pkg):
    results = []
    for N in Nlist:
        W = dense_graph_random(N, dtype)
        searcher = pkg.dense_graph_bf_searcher(W, dtype=dtype)
        result = benchmark.search(searcher)
        results.append((N, ) + result)

    return results


def run_bipartite_graph_benchmark(Nlist, dtype, pkg):
    results = []
    for N in Nlist:
        b0, b1, W = bipartite_graph_random(N / 2, N / 2, dtype)
        searcher = pkg.bipartite_graph_bf_searcher(b0, b1, W, dtype=dtype)
        result = benchmark.search(searcher)
        results.append((N, ) + result)

    return results


if __name__ == '__main__':
    # Nlist = [ 8, 16, 20, 24, 28, 32, 36 ]
    benchmark.duration = 10.
    Nlist = [8, 16, 20]

    results = run_dense_graph_benchmark(Nlist, np.float32, sq.cpu)
    report.write('dense_graph_bf_searcher.csv', results)

    results = run_bipartite_graph_benchmark(Nlist, np.float32, sq.cpu)
    report.write('bipartite_graph_bf_searcher.csv', results)
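
Here report appears to be a small local helper that dumps the benchmark rows to a CSV file; each row built above is a tuple starting with the problem size N. A minimal sketch of such a writer under that assumption (hypothetical, not the project's actual module):

import csv

def write(csv_path, results):
    # Each result row is a tuple whose first element is the problem size N.
    with open(csv_path, 'w', newline='') as fh:
        writer = csv.writer(fh)
        for row in results:
            writer.writerow(row)
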
Example #4
        elif k == "trace":
            trace.enabled = eval(v)
        elif k == "dir":
            output = path(v)
        elif k == "file":
            res_fname = path(v)
        elif k == "series":
            series_fname = path(v)
        else:
            args[k] = eval(v)
    except:
        pass # none = arg


m = model(**args)
m.run()

r = m.calc_results()

#print results
report.printr(r)
report.write(r, output/res_fname)
report.write_series(m, output/series_fname)

#from guppy import hpy
#h = hpy()
#print h.heap()
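
The eval(v) calls in this fragment execute arbitrary expressions taken from the argument values. Where only plain Python literals (numbers, strings, tuples) are expected, ast.literal_eval performs the same conversion without executing code; a hedged alternative sketch (a suggested hardening, not the original script's behaviour):

import ast

def parse_value(v):
    # Accept Python literals (numbers, strings, tuples, ...) only;
    # fall back to the raw string for anything else.
    try:
        return ast.literal_eval(v)
    except (ValueError, SyntaxError):
        return v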


Example #5
def decompile(path):
    """
    Convert a DEX file to a JAR (containing class files), then decompile the class files to near-original Java code using three different decompilers and select the best available output.
    """
    common.pathToDEX = path
    pathToDex2jar = common.rootDir + "/lib/dex2jar/dex2jar.sh"
    sp = subprocess.Popen([pathToDex2jar, common.pathToDEX],
                          shell=False,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
    output, error = sp.communicate()
    common.pathToJar = common.pathToDEX.rsplit(".", 1)[0] + "_dex2jar.jar"
    dirname, extension = common.pathToJar.rsplit(".", 1)
    zf = zipfile.ZipFile(common.pathToJar)

    #Total number of class files that need to be decompiled
    total_files = len(zf.namelist())
    report.write("totalfiles", total_files)
    common.count = len(
        [s for s in zf.namelist() if ((".class" in s) and ("$" not in s))])

    pub.subscribe(decompiler_update, 'decompile')

    thread0 = Process(name='clear', target=clear, args=())
    thread1 = Process(name='jdcore',
                      target=jdcore,
                      args=(zf.filename, dirname))
    thread2 = Process(name='cfr', target=cfr, args=(zf.filename, dirname))
    thread3 = Process(name='procyon', target=procyon, args=(zf.filename, dirname))

    thread0.start()
    thread0.join()

    progressbar1.start()
    progressbar2.start()
    progressbar3.start()

    thread1.start()
    thread2.start()
    thread3.start()
    thread1.join(0)
    thread2.join(0)
    thread3.join(0)

    with common.term.cbreak():
        val = None
        while val not in (u'c', u'C'):
            with common.term.location(0, common.term.height - 3):
                print "Decompilation may hang/take too long (usually happens when the source is obfuscated)."
                print "At any time," + common.term.bold_underline_red_on_white(
                    'Press C to continue'
                ) + " and QARK will attempt to run SCA on whatever was decompiled."
                val = common.term.inkey(timeout=1)
                if not (thread1.is_alive() or thread2.is_alive()
                        or thread3.is_alive()):
                    break

    if thread1.is_alive():
        thread1.terminate()
    if thread2.is_alive():
        thread2.terminate()
    if thread3.is_alive():
        thread3.terminate()

    #Go back to the bottom of the screen
    with common.term.location(0, common.term.height):
        print ""

    g1 = grep_1(dirname, "// Byte code:")
    g2 = grep_1(dirname + "1", "// This method has failed to decompile.")
    g3 = grep_1(dirname + "2", "// This method could not be decompiled.")

    #print list(set(g1) - set(g2))
    logger.info("Trying to improve accuracy of the decompiled files")
    restored = 0
    try:
        for filename in g1:
            relative_filename = str(filename).split(dirname)[1]
            if any(relative_filename in s for s in g2):
                if any(relative_filename in s for s in g3):
                    logger.debug("Failed to reconstruct: " + relative_filename)
                else:
                    shutil.copy(dirname + "2" + relative_filename, filename)
                    restored = restored + 1
            else:
                shutil.copy(dirname + "1" + relative_filename, filename)
                restored = restored + 1
    except Exception as e:
        print e.message
    report.write(
        "restorestats", "Restored " + str(restored) + " file(s) out of " +
        str(len(g1)) + " corrupt file(s)")
    logger.info("Restored " + str(restored) + " file(s) out of " +
                str(len(g1)) + " corrupt file(s)")
    logger.debug("Deleting redundant decompiled files")
    try:
        shutil.rmtree(dirname + "1")
        logger.debug("Deleted " + dirname + "1")
        shutil.rmtree(dirname + "2")
        logger.debug("Deleted " + dirname + "2")
    except Exception as e:
        logger.debug(
            "Unable to delete redundant decompiled files (no impact on scan results): "
            + str(e))
Example #6
    chars_counter += len(news_text)

    orig_path = get_path(news_date, title, 'txt', version='original_text')
    text_lemma_path = get_path(news_date, title, 'txt', version='lemm_text')
    xml_path = get_path(news_date, title, 'xml', version='xml')
    html_path = get_path(news_date, title, 'html', version='html')

    # save the article text, but without the header for now, so that mystem
    # does not have to process extra data
    save(news_text, orig_path)

    # needed to create the directories recursively; otherwise there would be
    # nowhere to save the lemmatisation output
    save('', text_lemma_path)
    save('', xml_path)

    lemmatisation(orig_path, xml_path, 'xml')
    lemmatisation(orig_path, text_lemma_path, 'text')

    # re-save the article text, now with the header, since lemmatisation is finished
    text = get_original_text(news_text, title, news_date, news_link, author)
    save(text, orig_path)

    # save the html page
    save(raw_news_page.decode('utf8'), html_path)

    # add the news item's data to the report
    report.write(orig_path, title, news_date, news_link, author)
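
The two save('', ...) calls above exist only to force the target directories into existence before the lemmatisation output is written, as the comment explains. A minimal sketch of what such a save helper presumably looks like (hypothetical, inferred from its use here):

import codecs
import os

def save(text, file_path):
    # Create the parent directories recursively, then write the text as UTF-8.
    directory = os.path.dirname(file_path)
    if directory and not os.path.isdir(directory):
        os.makedirs(directory)
    with codecs.open(file_path, 'w', encoding='utf-8') as fh:
        fh.write(text)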


Example #7
def main(args):
    # Load binary and source flow graphs
    exe = fparser.Executable(args.bin_json,
                             args.dwarf_json,
                             args.src_csv,
                             args.optime_csv,
                             simplify=(not args.no_simplify))
    log.debug("Optime_csv={}".format(args.optime_csv))

    ##########################
    # Load external loop info
    ##########################
    annot_file = None
    if args.annot_file is not None:
        with open(args.annot_file, 'r') as fp:
            annot_file = json.load(fp)
        log.info("External annot file loaded.")
        log.info(json.dumps(annot_file, indent=4))

    #########################
    # Process flows pairwise
    #########################
    time_all_begin = time.time()
    funcs_all = set()
    funcs_mapped = dict()
    n_tot = n_grp = n_prec = 0
    for bFlow, sFlow in exe.get_flow_pairs():
        funcs_all.add(bFlow.name)
        log.debug("Mapping {} ({}) to {} ({}).".format(bFlow.name, bFlow.file,
                                                       sFlow.name, sFlow.file))
        if args.render_graphs:
            do_render_flows(bFlow=bFlow, sFlow=sFlow)

        annot_func = None
        if annot_file is not None and bFlow.name in annot_file:
            annot_func = annot_file[bFlow.name]

        ##########
        # mapping
        ##########
        try:
            full_map, rpt = do_mapping(bFlow=bFlow,
                                       sFlow=sFlow,
                                       annot_func=annot_func,
                                       hom_order=args.hom_order,
                                       mapper_name=args.mapper,
                                       do_render=args.render_graphs,
                                       trust_dbg=args.trust_dbg_info)
            funcs_mapped[bFlow.name] = full_map
            precise_map = _get_last_precise_map(full_map)
            stats = precise_map.calc_statistics()
            if stats is not None:
                n_grp += stats.data.get('graphs', 0)
                n_tot += stats.data.get('total', 0)
                n_prec += stats.data.get('mapped', 0)
        except AssertionError:
            full_map = rpt = None
            log.error("Failed to match flow {}.".format(bFlow.name),
                      exc_info=True)
            # exit(2)

        ##########
        # outputs
        ##########
        # writes the mapping CSV and JSON reports
        if full_map:
            report.write(bFlow=bFlow,
                         sFlow=sFlow,
                         reportdata=rpt,
                         mapping=full_map)

            if args.render_graphs:
                full_map.name = bFlow.name
                try:
                    do_render_mapping(bFlow=bFlow,
                                      sFlow=sFlow,
                                      annot_file=annot_file,
                                      hierarchical_map=full_map)
                except AssertionError:
                    log.warning("Error during rendering of mapping {}".format(
                        full_map.name))
                    import traceback
                    traceback.print_exc()

    #####################
    # Overall Statistics
    #####################
    funcs_unmapped = funcs_all.difference(set(funcs_mapped.keys()))
    time_all_end = time.time()
    percent_precise = (100. * n_prec) / n_tot if n_tot > 0 else 100.
    log.info("Statistics: {} subgraphs with {} nodes, {:.2f}% precise".format(
        n_grp, n_tot, percent_precise))
    log.info("Mapped {} out of {} functions in {:.2f} seconds".format(
        len(funcs_mapped), len(funcs_all), time_all_end - time_all_begin))
    if len(funcs_unmapped) > 0:
        log.warning("Unmapped functions: {}".format(", ".join(
            sorted(list(funcs_unmapped)))))
    # --
    return len(funcs_unmapped)
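
Taken together, the examples show that report.write is not a single shared API: each project defines its own local report helper, so the accepted arguments differ from example to example. The call shapes seen above, side by side:

report.write("totalfiles", total_files)                                    # Examples #2 and #5
report.write('dense_graph_bf_searcher.csv', results)                      # Example #3
report.write(r, output/res_fname)                                         # Example #4
report.write(orig_path, title, news_date, news_link, author)              # Example #6
report.write(bFlow=bFlow, sFlow=sFlow, reportdata=rpt, mapping=full_map)  # Example #7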