import logging

import clang.cindex

from utils import new_logger
# CTokenizer and logger_formatter are project helpers defined elsewhere.

DEC_CODE_DIR = "decompiled_code"
ABSTRACTED_DEC_CODE_DIR = "abstracted_decompiled_code"
MAPPING_NAME = "mapping.json"
FUNC_NAME_BLACKLIST = ["start"]
COMPILE_DB_DIR = "/home/kylebot/Desktop/courses/CSE576/datasets/compiled_bin_dataset/compile-db"

##################### INIT ###########################
src_code_dir = None
abstracted_code_dir = None
dec_code_dir = None
mapping_path = None
id_cnt = 0
mapping = {}

clang.cindex.Config.set_library_path('/usr/local/lib/')

logger = new_logger("Dataset")
logger.setLevel("INFO")
handler = logging.FileHandler("log.txt", mode='a', delay=False)
handler.setLevel("DEBUG")
handler.setFormatter(logger_formatter)
logger.addHandler(handler)


def abstract_code(code, build_dir=None, build_src_path=None):
    """Tokenize and re-emit code, preferring a build-aware tokenizer when possible."""
    try:
        c_tokenizer = CTokenizer(build_dir=build_dir, build_src_path=build_src_path)
        tokens = c_tokenizer.tokenize(code)
    except Exception:  # pylint: disable=broad-except
        # fall back to a context-free tokenizer if the build context is unusable
        c_tokenizer = CTokenizer()
        tokens = c_tokenizer.tokenize(code)
    return c_tokenizer.detokenize(tokens)
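
# Usage sketch (assumed, not from the original file): abstract a single
# decompiled function without a compile database, so CTokenizer falls back to
# its context-free mode. The sample snippet is purely illustrative.
if __name__ == "__main__":
    sample_func = "int add(int a, int b) { return a + b; }"
    print(abstract_code(sample_func))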
""" cloud master for parallel processing """ import os import json import logging from threading import Lock import colorlog from flask import Flask, request from utils import new_logger from shared_func import get_all_items, process_task_result, populate #####------initialization-----##### logger = new_logger("MASTER") logger.setLevel("INFO") app = Flask(__name__) app.extensions["lock"] = Lock() app.extensions["all_tasks"] = [] app.extensions["processing_tasks"] = {} app.extensions["completed_tasks"] = [] app.extensions["errored_tasks"] = [] app.extensions["next_task_id"] = 0 app.extensions["missed_tasks"] = [] #####------ views----------------------##### @app.route('/') def index():
# Python 3.5.2 :: Anaconda 4.2.0 (x86_64)
# Python 3.7.4
import pdb
import sys
import argparse
from abc import ABC, abstractmethod
from collections import deque
from inspect import cleandoc

import numpy as np

from utils import new_logger

log = new_logger()


# TODO: implement save and load for model
def new_model(learning_rate=0.001):  # the 0.001 default is an assumption
    """Initialize new Keras model with fixed architecture"""
    from keras.models import Sequential
    from keras.layers import Dense
    from keras.optimizers import Adam

    model = Sequential()
    model.add(Dense(units=16, activation='relu', input_dim=9))  # 9-dimensional input
    model.add(Dense(units=16, activation='relu'))
    model.add(Dense(9, activation='linear'))                    # 9 linear outputs
    model.compile(loss='mse', optimizer=Adam(lr=learning_rate))
    return model


def new_memory():
    """Create a bounded replay memory."""
    memory = deque(maxlen=2000)
    return memory
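
# A minimal usage sketch of the two factories above; the state shape and the
# replay-tuple layout are assumptions, not taken from the original code.
if __name__ == "__main__":
    model = new_model()                # uses the assumed 0.001 learning rate
    memory = new_memory()
    state = np.zeros((1, 9))           # one 9-dimensional input vector
    q_values = model.predict(state)    # shape (1, 9): one value per output unit
    memory.append((state, q_values))
    log.info("q_values: %s", q_values)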
""" user specified functions """ import os import json import base64 import subprocess import jsonlines from placeholder import IDATokenizer from utils import new_logger, tmp_path, tmp_ida_cxt logger = new_logger("Zipper") #####--------Configuration----------###### DATASET_PATH = "/home/kylebot/Desktop/courses/CSE576/datasets/dataset_v2" #####--------MASTER FUNCTIONS-------###### def get_all_items(): """ return a list of items """ # use this code for better debugging #with open('./state.json') as f: # state = json.load(f) #links = [x[1] for x in state['targets']] pkg_paths = [ os.path.join(DATASET_PATH, x) for x in os.listdir(DATASET_PATH) ]
import sys
import time
import json
import logging

import requests
import colorlog

from utils import new_logger
from shared_func import process_task

#####---------Configuration-----------#####
NO_TASK_STR = "NOTASK"
# GET_REQ_URL (the master endpoint polled below) is assumed to be defined
# elsewhere in the configuration.

#####---------Initialization----------#####
logger = new_logger("SLAVE")
logger.setLevel("INFO")

#####---------Internal Functions-------#####
def get_next_task():
    """ retrieve the next task from the master """
    r = None
    try:
        r = requests.get(GET_REQ_URL)
        return json.loads(r.text)
    except Exception as e:  # pylint: disable=broad-except
        # a "NOTASK" reply is expected and not worth logging
        if r is None or r.text != NO_TASK_STR:
            logger.exception(e)
        return None
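
# A worker-loop sketch built on get_next_task(); run_slave, the 5-second retry
# interval, and the single-argument process_task(task) call are assumptions.
def run_slave():
    """poll the master until it stops handing out tasks"""
    while True:
        task = get_next_task()
        if task is None:
            # either the master answered NOTASK or the request failed; back off
            time.sleep(5)
            continue
        process_task(task)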