Example No. 1
def lambda_handler(event, context):
    user_str = 'aaa'
    exe_file = 'hello'
    try:
        # API Gateway GET method
        if event['httpMethod'] == 'GET':
            user_str = event['queryStringParameters']['str']
            exe_file = event['queryStringParameters']['bin']
        # API Gateway POST method
        elif event['httpMethod'] == 'POST':
            data = json.loads(event['body'])
            user_str = data['str']
            exe_file = data['bin']
    except KeyError:
        # direct invocation
        user_str = event['str']
        exe_file = event['bin']

    user_str = '\"' + user_str + '\"'
    exe = Executable('executables/' + exe_file)
    result = exe.run(user_str)
    print('OUT: {}\nERR: {}\nRET: {}'.format(exe.stdout, exe.stderr,
                                             exe.returncode))
    out = {
        "headers": {
            "content-type": "application/json",
            "Access-Control-Allow-Origin": "*"
        },
        "body": exe.stdout,
        "statusCode": 200
    }
    return out
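For reference, a minimal sketch of calling the handler above through its direct-invocation branch (no API Gateway). The event keys 'str' and 'bin' match the KeyError fallback in the code; the module name and the presence of an executables/hello binary are assumptions for illustration only.

# Hypothetical local test; assumes the code above lives in handler.py and
# that the Executable class and an executables/hello binary are available.
from handler import lambda_handler

event = {'str': 'world', 'bin': 'hello'}   # direct-invocation event shape
response = lambda_handler(event, None)     # context is not used in this branch
print(response['statusCode'], response['body'])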
Example No. 2
def hatanaka_decompress(local_file):
    """Hatanaka decompresses a local file using the CRX2RNX program
    Outputs data to new file with correct name under same directory as input

    Input:
        local_file  path to Hatanaka compressed RINEX file

    Returns:
        new_name    name of created decompressed RINEX file
    """
    # Check if CRX2RNX is already in /tmp, which persists across warm Lambda invocations
    if os.path.isfile('/tmp/CRX2RNX'):
        CRX2RNX = Executable('/tmp/CRX2RNX', True)

    else:
        CRX2RNX = Executable('lib/executables/CRX2RNX')

    rinex_data = CRX2RNX.run('{} -'.format(local_file))

    if CRX2RNX.returncode > 0:
        raise Exception('CRX2RNX failed with error code {}: {}'.format(
            CRX2RNX.returncode, CRX2RNX.stderr))

    # RINEX 3 file extension changes from crx to rnx when decompressed
    new_name = local_file.replace('.crx', '.rnx')
    
    # Hatanaka compressed RINEX 2 files are suffixed with d, replace with o
    if new_name == local_file:
        new_name = local_file[:-1] + 'o'

    with open(new_name, 'w') as out_file:
        out_file.write(rinex_data)

    return new_name
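A rough usage sketch for the function above, assuming it is importable and that a Hatanaka-compressed RINEX file is already on local disk; the paths are invented.

# Hypothetical call; the input path is a placeholder.
decompressed = hatanaka_decompress('/tmp/session/station001.crx')
print(decompressed)  # expected: '/tmp/session/station001.rnx'
# A RINEX 2 file such as '/tmp/session/stat0010.17d' would instead come back
# renamed to '/tmp/session/stat0010.17o'.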
Example No. 3
	def __init__(self, network_interface, src_ip, target_ip):
		Observable.__init__(self)
		Executable.__init__(self, self.ping_executable)
		self.network_interface = network_interface
		self.src_ip = src_ip
		self.target_ip = target_ip
		print "# Setting up loop for pinging " + target_ip + " (with " + src_ip + " on " + network_interface + ") "
Example No. 4
	def __init__(self, network_interface, src_ip, target_ip):
		Observable.__init__(self)
		Executable.__init__(self, self.ping_executable)
		self.network_interface = network_interface
		self.src_ip = src_ip
		self.target_ip = target_ip
		print "# Setting up loop for pinging " + target_ip + " (with " + src_ip + " on " + network_interface + ") "
Example No. 5
def get_executable(f):
    if Executable.isElf(f):
        return ElfExecutable(f)
    elif Executable.isMacho(f):
        # return MachoExecutable(f)
        return None
    else:
        return None
Example No. 6
File: app.py  Project: dekior/disasm
def get_executable(f):
    if Executable.isElf(f):
        return ElfExecutable(f)
    elif Executable.isMacho(f):
        # return MachoExecutable(f)
        return None
    else:
        return None
Example No. 7
 def it_has_sections(self):
     executable = Executable()
     text = executable.new_section(".text")
     executable |should| have(1).sections
     data = executable.new_section(".data")
     executable |should| have(2).sections
     executable.sections |should| include(text)
     executable.sections |should| include(data)
Example No. 8
class TestRandomIndexing(TestCase):
    def setUp(self):
        self.exec_var = Executable()
        self.file_path = ["../latimes/la010189"]

    def test_exec_random_indexing(self):
        self.exec_var = Executable()
        self.file_path = ["../latimes/la010189"]
        self.exec_var.indexing(self.file_path)
        self.exec_var.random_indexing("washington", 10)
Example No. 9
 def __extract_command(self, name, dictionary):
     command = dictionary.pop(name, None)
     if command is not None:
         self[name] = Executable(command=command.get('command'),
                                 options=command.get('options', []),
                                 arguments=command.get('arguments'),
                                 filters=command.get('filters'))
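A sketch of the dictionary shape this method appears to expect: each entry is itself a dict whose keys mirror the Executable(...) keyword arguments used above. The command name and values are illustrative only.

# Hypothetical input for self.__extract_command('build', dictionary)
dictionary = {
    'build': {
        'command': 'make',       # becomes Executable(command=...)
        'options': ['-j4'],      # defaults to [] when the key is absent
        'arguments': None,       # passed through as-is
        'filters': None,
    }
}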
Example No. 10
def main():
    current_app.logger.info("Received request")
    msg = '---HEADERS---\n{}\n--BODY--\n{}\n-----\n'.format(
        request.headers, request.get_data())
    current_app.logger.info(msg)

    # Get request body
    reqStr = request.get_data().decode("utf-8")
    current_app.logger.info(reqStr)

    # Call executable binary
    exe = Executable(os.path.join(MYPATH, 'hello'))
    result = exe.run(reqStr)
    current_app.logger.info('OUT: {}\nERR: {}\nRET: {}'.format(
        exe.stdout, exe.stderr, exe.returncode))

    return result, 200
Example No. 11
def lambda_handler(event, context):
    # Get the file object and bucket names from the event
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(
        event['Records'][0]['s3']['object']['key']).decode('utf8')

    print('Quality Check: Key {}'.format(key))

    status, file_type, data_type, year, day = key.split('/')[:5]

    if data_type == 'nav':
        nav_file = os.path.basename(key)
        if nav_file[:4].lower() == 'brdc' and nav_file[-4:] == 'n.gz':
            triggerQCFromNav(year, day, context.function_name, bucket)

        else:
            print('Do not Quality Check using non-Broadcast Navigation data')

        return

    # Use AWS request ID from context object for unique directory
    session_id = context.aws_request_id
    local_path = '/tmp/{}'.format(session_id)

    if not os.path.exists(local_path):
        os.makedirs(local_path)

    try:
        response = S3.get_object(Bucket=bucket, Key=key)

    except Exception as err:
        # This should only fail more than once if permissions are incorrect
        print('Error: Failed to get object {} from bucket {}.'.format(
            key, bucket))
        raise err

    # Decompress Observation file and store locally
    filename, extension = os.path.splitext(os.path.basename(key))
    local_file = os.path.join(local_path, filename)

    file_data = zlib.decompress(response['Body'].read(), 15+32)
    with open(local_file, 'wb') as out_file:
        out_file.write(file_data)

    # Parse RINEX file
    rinex_obs = RINEXData(local_file)

    # Attempt to get Broadcast Navigation file from archive
    nav_file = getBRDCNavFile(bucket, rinex_obs.start_time, local_path)
    if nav_file is None:
        print('Daily BRDC file does not yet exist for {}/{}'.format(
            year, day))
        return

    # Hatanaka decompress RINEX file if needed
    if rinex_obs.compressed:
        rinex_obs.local_file = hatanaka_decompress(rinex_obs.local_file)

    # Generate an Anubis XML config file
    anubis_config, result_file = generateQCConfig(
        rinex_obs, nav_file, local_path)

    # Run Anubis with the generated config file as input
    anubis = Executable('lib/executables/anubis-2.0.1')
    anubis_log = anubis.run('-x {}'.format(anubis_config))
    if anubis.returncode > 0:
        print('Anubis errored with return code {}: {}\n{}'.format(
            anubis.returncode, anubis.stderr, anubis.stdout))
        return

    # Parse results of Anubis
    parseQCResult(result_file, key)

    # Delete tmp working space and Anubis copy to resolve Lambda disk 
    # space allocation issue
    shutil.rmtree(local_path)

    return
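For context, a minimal sketch of the S3 event this handler reads. The bucket name and key are invented, but the key follows the status/file_type/data_type/year/day/... layout that the split above expects.

# Hypothetical S3 put event for reasoning about the handler above.
event = {
    'Records': [{
        's3': {
            'bucket': {'name': 'example-gnss-archive'},
            'object': {'key': 'public/obs/daily/2017/123/station001.17o.gz'},
        }
    }]
}
# key.split('/')[:5] -> status='public', file_type='obs', data_type='daily',
#                       year='2017', day='123'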
Example No. 12
	def __init__(self, network_interface, src_ip, target_ip, mac_address):
		Executable.__init__(self, self.arp_executable)
		self.network_interface = network_interface
		self.src_ip = src_ip
		self.target_ip = target_ip
		self.mac_address = mac_address
Example No. 13
 def setUp(self):
     self.exec_var = Executable()
     self.file_path = ["../latimes/la010189"]
Example No. 14
	def execute(self):
		print("INTERFACE: {} SRC: {} Target: {} MAC: {}".format(
			self.network_interface, self.src_ip, self.target_ip, self.mac_address))
		Executable.execute(self, self.network_interface, self.src_ip, self.target_ip, self.mac_address)
Example No. 15
class ExecutableTests(unittest.TestCase):
    def setUp(self):
        self.command_mock = CommandMock.create()
        self.executable = Executable()

    def test_execute_stores_the_command(self):
        self.executable.execute(self.command_mock)
        self.assertEqual(self.executable.command, self.command_mock)

    def test_execute_calls_display_on_the_command_supplied_as_argument(self):
        self.executable.execute(self.command_mock)
        self.command_mock.display.assert_called_with(self.executable.items(),
                                                     self.executable.callback)

    def test_execute_calls_display_on_the_stored_command_if_None_is_passed(
            self):
        self.executable.command = self.command_mock
        self.executable.execute()
        self.command_mock.display.assert_called_with(self.executable.items(),
                                                     self.executable.callback)

    def test_reexecute_calls_execute_only_if_renavigate_is_set_on_the_command(
            self):
        self.executable.execute = MagicMock()
        self.executable.command = self.command_mock
        self.executable.reexecute()
        self.executable.command.renavigate = False
        self.executable.reexecute()
        self.assertEqual(self.executable.execute.call_count, 1)
Example No. 16
 def setUp(self):
     self.command_mock = CommandMock.create()
     self.executable = Executable()
Example No. 17
class Handler:
    def __init__(self, builder):
        self.builder = builder

    indexation_parameters = {
        "has_stemming": False,
        "has_stop_words_removal": False,
        "has_compression": False,
        "files_list": [],
        "ignore_case": False,
        "date_weight": 1,
        "title_weight": 1,
        "use_weights": False,
        "memory_limit": 50
    }

    query_parameters = {
        "algorithm": "NAIVE",
        "results_number": 5,
        "query": "",
        "similar_words_number": 5
    }

    backend = Executable()

    @staticmethod
    def get_filelist_from_folderpath(folderpath):
        filenameslist = []

        # Get all file names in folder at first level only
        for (dirpath, dirnames, filenames) in walk(folderpath):
            filenameslist.extend(filenames)
            break

        # Concatenate them with folder path
        filelist = [folderpath + "/" + filename for filename in filenameslist]

        return filelist

    @staticmethod
    def get_document_from_DOCID_and_filepath(docid, filepath):
        documentViews = []

        with open(filepath) as file:
            document_string = ""
            for line in file:
                document_string = "{}\n{}".format(document_string, line)
                if "</DOC>" in line:
                    documentView = et.fromstring(document_string)

                    for el in documentView.findall('DOC'):
                        print('-------------------')
                        for ch in el.getchildren():
                            print('{:>15}: {:<30}'.format(ch.tag, ch.text))

                    documentViews.append(documentView)
                    document_string = ""

    def toggle_stemming(self, button):
        print("Toggle stemming to " + str(button.get_active()))
        self.indexation_parameters["has_stemming"] = button.get_active()

    def toggle_stop_words_removal(self, button):
        print("Toggle stop words removal to " + str(button.get_active()))
        self.indexation_parameters[
            "has_stop_words_removal"] = button.get_active()

    def toggle_ignore_case(self, button):
        print("Toggle ignore case to " + str(button.get_active()))
        self.indexation_parameters["ignore_case"] = button.get_active()

    def toggle_compression(self, button):
        print("Toggle compression to " + str(button.get_active()))
        self.indexation_parameters["has_compression"] = button.get_active()

    def memory_limit_changed(self, spinbutton):
        print("Memory Limit changed to " + str(spinbutton.get_value_as_int()))
        self.indexation_parameters[
            "memory_limit"] = spinbutton.get_value_as_int()

    def toggle_weights_use(self, button):
        print("Toggle weights use to " + str(button.get_active()))
        self.indexation_parameters["use_weights"] = button.get_active()

        title_weight_grid = self.builder.get_object("title_weight_grid")
        title_weight_grid.set_visible(button.get_active())

        date_weight_grid = self.builder.get_object("date_weight_grid")
        date_weight_grid.set_visible(button.get_active())

    def title_weight_changed(self, spinbutton):
        print("Title weight changed to " + str(spinbutton.get_value_as_int()))
        self.indexation_parameters[
            "title_weight"] = spinbutton.get_value_as_int()

    def date_weight_changed(self, spinbutton):
        print("Date weight changed to " + str(spinbutton.get_value_as_int()))
        self.indexation_parameters[
            "date_weight"] = spinbutton.get_value_as_int()

    def open_file_chooser(self, button):
        print("Open file chooser")
        main_window = self.builder.get_object("main_window")
        dialog = Gtk.FileChooserDialog(
            "Please choose a folder", main_window,
            Gtk.FileChooserAction.SELECT_FOLDER,
            (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, "Select",
             Gtk.ResponseType.OK))
        dialog.set_default_size(800, 400)

        response = dialog.run()
        if response == Gtk.ResponseType.OK:
            folder_path = dialog.get_filename()
            print("Select clicked")
            print("Folder selected: " + folder_path)
            choose_folder_label = self.builder.get_object(
                "choose_folder_label")
            choose_folder_label.set_text(folder_path)

            # Get all files in that folder
            self.indexation_parameters[
                "files_list"] = self.get_filelist_from_folderpath(folder_path)
            print(self.indexation_parameters["files_list"])

        elif response == Gtk.ResponseType.CANCEL:
            print("Cancel clicked")

        dialog.destroy()

    def call_backend_indexation(self):
        print("Indexation started !")
        self.backend.indexing(
            files=self.indexation_parameters["files_list"],
            ignore_stop_words=self.indexation_parameters[
                "has_stop_words_removal"],
            stemming=self.indexation_parameters["has_stemming"],
            use_vbytes=self.indexation_parameters["has_compression"],
            ignore_case=self.indexation_parameters["ignore_case"],
            date_weight=self.indexation_parameters["date_weight"],
            title_weight=self.indexation_parameters["title_weight"],
            use_weights=self.indexation_parameters["use_weights"],
            memory_limit=self.indexation_parameters["memory_limit"])
        GLib.idle_add(self.on_indexation_complete)

    def start_indexation(self, button):
        print("Start indexation")

        if len(self.indexation_parameters["files_list"]) <= 0:
            print("No file has been specified !")
            return

        button.set_sensitive(False)

        loading_box = self.builder.get_object("loading_box")
        loading_box.set_visible(True)

        indexation_statistics_box = self.builder.get_object(
            "indexation_statistics_box")
        query_box = self.builder.get_object("query_box")
        results_box = self.builder.get_object("results_box")
        similar_words_box = self.builder.get_object("similar_words_box")
        similar_words_box.set_visible(False)
        indexation_statistics_box.set_visible(False)
        query_box.set_visible(False)
        results_box.set_visible(False)

        thread = threading.Thread(target=self.call_backend_indexation)
        thread.daemon = True
        thread.start()

    def on_indexation_complete(self):
        print("Indexation complete !")

        # When indexation is finished, change this to get the vocabulary from the inverted file
        # Eventually handle the exception if the vocabulary does not exist
        vocabulary = self.backend.inv_file.get_terms()

        liststore = Gtk.ListStore(str)
        for s in vocabulary:
            liststore.append([s])

        completion = Gtk.EntryCompletion()
        completion.set_model(liststore)
        completion.set_text_column(0)

        entry = self.builder.get_object("search_entry")
        entry.set_completion(completion)

        loading_box = self.builder.get_object("loading_box")
        indexation_statistics_box = self.builder.get_object(
            "indexation_statistics_box")
        query_box = self.builder.get_object("query_box")
        start_indexation_button = self.builder.get_object(
            "start_indexation_button")

        loading_box.set_visible(False)

        indexation_stats = StatsControl.last_indexing()

        indexation_start_time_tofill = self.builder.get_object(
            "indexation_start_time_tofill")
        indexation_start_time_tofill.set_text("{:%H:%M:%S.%f}".format(
            indexation_stats.start_time))

        indexation_end_time_tofill = self.builder.get_object(
            "indexation_end_time_tofill")
        indexation_end_time_tofill.set_text("{:%H:%M:%S.%f}".format(
            indexation_stats.finish_time))

        indexation_total_time_tofill = self.builder.get_object(
            "indexation_total_time_tofill")
        indexation_total_time_tofill.set_text("{}".format(
            indexation_stats.total_time))

        indexation_file_size_tofill = self.builder.get_object(
            "indexation_file_size_tofill")
        indexation_file_size_tofill.set_text(str(indexation_stats.file_size))

        indexation_statistics_box.set_visible(True)

        query_box.set_visible(True)

        start_indexation_button.set_sensitive(True)

    def algo_combo_changed(self, combobox):
        print("Algo combo changed to " + combobox.get_active_text())
        self.query_parameters["algorithm"] = combobox.get_active_text()

    def results_number_changed(self, spinbutton):
        print("Results number changed to " +
              str(spinbutton.get_value_as_int()))
        self.query_parameters["results_number"] = spinbutton.get_value_as_int()

    def similar_words_number_changed(self, spinbutton):
        print("Similar words number changed to " +
              str(spinbutton.get_value_as_int()))
        self.query_parameters[
            "similar_words_number"] = spinbutton.get_value_as_int()

    def search_changed(self, searchentry):
        query = searchentry.get_text()
        print("Search changed to " + query)
        self.query_parameters["query"] = query

        start_query_button = self.builder.get_object("start_query_button")
        display_similar_words_button = self.builder.get_object(
            "display_similar_words_button")

        if query == "":
            start_query_button.set_sensitive(False)
            display_similar_words_button.set_sensitive(False)
        elif len(query.split()) == 1:
            start_query_button.set_sensitive(True)
            display_similar_words_button.set_sensitive(True)
        else:
            start_query_button.set_sensitive(True)
            display_similar_words_button.set_sensitive(False)

    def call_backend_query(self):
        print("Query started !")

        results = self.backend.query(
            query=self.query_parameters["query"],
            algorithm=self.query_parameters["algorithm"],
            number_of_results=self.query_parameters["results_number"])
        GLib.idle_add(self.on_query_complete, results)

    def start_query(self, button):
        print("Start query")

        if self.query_parameters["query"] == "":
            print("No query has been specified !")
            return

        button.set_sensitive(False)

        loading_box = self.builder.get_object("loading_box")
        loading_box.set_visible(True)

        print("Dict : {}".format(self.query_parameters))

        thread = threading.Thread(target=self.call_backend_query)
        thread.daemon = True
        thread.start()

    def on_query_complete(self, results):
        print("Query complete !")

        query_stats = StatsControl.last_query()

        loading_box = self.builder.get_object("loading_box")
        loading_box.set_visible(False)

        start_time_tofill = self.builder.get_object("start_time_tofill")
        start_time_tofill.set_text("{:%H:%M:%S.%f}".format(
            query_stats.start_time))

        end_time_tofill = self.builder.get_object("end_time_tofill")
        end_time_tofill.set_text("{:%H:%M:%S.%f}".format(
            query_stats.finish_time))

        total_time_tofill = self.builder.get_object("total_time_tofill")
        total_time_tofill.set_text("{}".format(query_stats.total_time))

        pl_accesses_tofill = self.builder.get_object("pl_accesses_tofill")
        pl_accesses_tofill.set_text(str(query_stats.pl_accesses))

        disk_accesses_tofill = self.builder.get_object("disk_accesses_tofill")
        disk_accesses_tofill.set_text(str(query_stats.memory_accesses))

        results_text = "\t Score     |\tDOCID   |\t   File path \n"
        for result in results:
            results_text += ("\t{:8.5f} |\t{:8} |\t{}".format(
                result[1], result[0], result[2])) + "\n"

        print("results" + results_text)

        results_textview = self.builder.get_object("results_textview")
        results_textview_buffer = results_textview.get_buffer()
        results_textview_buffer.set_text(results_text)

        results_box = self.builder.get_object("results_box")
        results_box.set_visible(True)

        start_query_button = self.builder.get_object("start_query_button")
        start_query_button.set_sensitive(True)

    def call_backend_similar_search(self):
        print("Similar search started !")

        results = self.backend.random_indexing(
            choice_key=self.query_parameters["query"],
            top_results=self.query_parameters["similar_words_number"])
        GLib.idle_add(self.on_similar_search_complete, results)

    def start_similar_search(self, button):
        print("Start query")

        query = self.query_parameters["query"]

        # Just in case, check if the query is only a single word, otherwise random indexing querying doesn't work
        if len(query.split()) > 1 or query == "":
            print("Can only find similar words to a single word !")
            return

        button.set_sensitive(False)

        similar_words_box = self.builder.get_object("similar_words_box")
        similar_words_box.set_visible(False)

        thread = threading.Thread(target=self.call_backend_similar_search)
        thread.daemon = True
        thread.start()

    def on_similar_search_complete(self, results):
        print("Query complete !")

        display_similar_words_button = self.builder.get_object(
            "display_similar_words_button")
        display_similar_words_button.set_sensitive(True)

        results_text = str(results)

        similar_words_textview = self.builder.get_object(
            "similar_words_textview")
        similar_words_textview_buffer = similar_words_textview.get_buffer()
        similar_words_textview_buffer.set_text(results_text)

        similar_words_box = self.builder.get_object("similar_words_box")
        similar_words_box.set_visible(True)
Example No. 18
 def __init__(self, network_interface, src_ip, target_ip):
     Executable.__init__(self, self.arp_executable)
     self.network_interface = network_interface
     self.src_ip = src_ip
     self.target_ip = target_ip
     print "# Setting up loop for arp spoofing " + target_ip + " (with " + src_ip + " on " + network_interface + ") "
Example No. 19
 def __init__(self, binary_filename, arch_obj):
     self.binary = Executable(binary_filename, arch_obj)
     self.arch = arch_obj
Example No. 20
def get_filelist_from_folderpath(folderpath):
    filenameslist = []

    # Get all file names in folder at first level only
    for (dirpath, dirnames, filenames) in walk(folderpath):
        filenameslist.extend(filenames)
        break

    # Concatenate them with folder path
    filelist = [folderpath + "/" + filename for filename in filenameslist]

    return filelist


file_paths = get_filelist_from_folderpath("latests")

exe = Executable()

algorithm = DEFAULT_ALGORITHM
number_of_results = DEFAULT_NUMBER_OF_RESULTS

memorylimit = 200

exe.indexing(file_paths, memory_limit=memorylimit)

print(SC.last_indexing())

try:
    in_res = int(input("Number of results desired ? ").strip())
    number_of_results = in_res
except ValueError:
    print("Non-int value entered using default {}".format(
 def setUp(self):
     self.command_mock = CommandMock.create()
     self.executable = Executable()
Example No. 22
MYPATH = os.path.dirname(os.path.realpath(__file__))


def main():
    current_app.logger.info("Received request")
    msg = '---HEADERS---\n{}\n--BODY--\n{}\n-----\n'.format(
        request.headers, request.get_data())
    current_app.logger.info(msg)

    # Get request body
    reqStr = request.get_data().decode("utf-8")
    current_app.logger.info(reqStr)

    # Call executable binary
    exe = Executable(os.path.join(MYPATH, 'hello'))
    result = exe.run(reqStr)
    current_app.logger.info('OUT: {}\nERR: {}\nRET: {}'.format(
        exe.stdout, exe.stderr, exe.returncode))

    return result, 200


if __name__ == "__main__":
    reqStr = "haha"
    exe = Executable('./hello')
    result = exe.run(reqStr)
    print('OUT: {}\nERR: {}\nRET: {}'.format(exe.stdout, exe.stderr,
                                             exe.returncode))
    print(result)
Example No. 23
 def test_exec_random_indexing(self):
     self.exec_var = Executable()
     self.file_path = ["../latimes/la010189"]
     self.exec_var.indexing(self.file_path)
     self.exec_var.random_indexing("washington", 10)
Example No. 24
	def __init__(self, network_interface, src_ip, target_ip):
		Executable.__init__(self, self.arp_executable)
		self.network_interface = network_interface
		self.src_ip = src_ip
		self.target_ip = target_ip
		print "# Setting up loop for arp spoofing " + target_ip + " (with " + src_ip + " on " + network_interface + ") "
Example No. 25
 def it_is_structured_text(self):
     executable = Executable()
     text = executable.new_section(".text")
     text.new_function("_start")
     str(executable) |should| equal_to("section .text\n\n_start:\n\n\tret")
Example No. 26
class Patcher(object):
    def __init__(self, binary_filename, arch_obj):
        self.binary = Executable(binary_filename, arch_obj)
        self.arch = arch_obj

    def pad_nops(self, data, padding):
        '''
        param data: the original data to be padded
        param padding: the modulus that len(data) should be divisible by
        returns: the original data with NOPs appended after it,
                 so that the byte count is divisible by padding
                 (e.g. 6 bytes padded to a modulus of 4 gains 2 NOP bytes)
        notes: currently only one-byte NOPs are supported
        '''
        nop = self.arch.get_nop()
        nop_size = len(nop)
        data_size = len(data)
        if padding % nop_size != 0:
            raise PaddingError('padding %d is not divisible by nop_size (%d)' % (padding, nop_size))
        size_to_pad = padding - (data_size % padding)
        if size_to_pad % nop_size != 0:
            raise PaddingError('cannot pad data_size %d to %d, because size_to_pad = %d and nop_size = %d' % (
                data_size, data_size + size_to_pad, size_to_pad, nop_size))

        if data_size % padding != 0:
            data += nop * (size_to_pad // nop_size)
        # Sanity check - the padded data should now be an exact multiple of padding
        if len(data) % padding:
            raise PaddingError('data was padded but is still not aligned to the padding - a programmer error')
        tracer.trace('length of new data after padding is %d, new_data = %s' % (len(data), data.encode('hex')))
        return data

    def get_branch_data(self, source_address, dest_address):
        '''
        Returns: (branch_data, overriden_data, overriden_data_disas)
        '''
        #get branch to hook_glue
        branch = self.arch.get_branch(source_address, dest_address)

        #get overriden data
        branch_size = len(branch)
        overriden_data_disas_to_compare = self.arch.disas(self.binary.filename,
                source_address, source_address + branch_size + self.arch.padding_modulu * PADDING_MULTIPLE)
        got_disas_match = False
        for i in xrange(PADDING_MULTIPLE):
            overriden_data_disas = self.arch.disas(self.binary.filename,
                    source_address, source_address + branch_size + self.arch.padding_modulu * i)
            if overriden_data_disas in overriden_data_disas_to_compare:
                got_disas_match = True
                break
        if not got_disas_match:
            raise FailedToGetDisasMatch('Failed to get a disas match of overriden_data_disas in overriden_data_disas_to_compare')
        branch = branch + self.arch.get_nop() * (self.arch.padding_modulu * i)
        overriden_data = self.binary.get_data(source_address, len(branch))

        tracer.trace('branch = %s, len = %d' % (branch.encode('hex'), len(branch)))
        tracer.trace('overriden_data = %s, length = %d\n%s\n' % (overriden_data.encode('hex'),
                len(overriden_data), overriden_data_disas))

        return branch, overriden_data, overriden_data_disas
    
    def get_hook_glue_data(self, hook_exe, hook_symbol_to_jump_to, overriden_data_disas, hook_glue_address, return_address):
        '''
        Returns hook_glue
        '''
        #get hook_glue
        registers = self.arch.registers
        #dump registers
        hook_glue = self.arch.get_registers_dumper(registers)
        #nop padding
        hook_glue = self.pad_nops(hook_glue, self.arch.padding_modulu)
        #call hook_data
        hook_glue += self.arch.get_call(hook_glue_address + len(hook_glue),
                                        hook_exe.get_symbol_by_name(hook_symbol_to_jump_to).virtual_address)
        #load back registers
        hook_glue += self.arch.get_registers_loader(registers)
        #do original code
        relocated_overriden_data = self.arch.relocate(overriden_data_disas,
                    hook_glue_address + len(hook_glue))
        tracer.trace('relocated_overriden_data - %s, len - %d, address - 0x%x' % (relocated_overriden_data.encode('hex'),
                len(relocated_overriden_data), hook_glue_address + len(hook_glue)))
        hook_glue += relocated_overriden_data
        #jump back after hook
        hook_glue += self.arch.get_branch(hook_glue_address + len(hook_glue), return_address)
        tracer.trace('hook_glue: %s, len - %d, at address - 0x%x, ends at - 0x%x' % (hook_glue.encode('hex'),
                len(hook_glue), hook_glue_address, hook_glue_address + len(hook_glue)))

        return hook_glue

    def get_patch(self,
             #hook
             hook_address,
             #hook glue
             hook_glue_address,
             #hook data
             hook_filename,
             hook_symbols_to_paste,
             hook_symbol_to_jump_to=None):
        '''
        This function will patch self.binary by generating a branch to a hook glue:
        a stub that backs up the registers, jumps to your hook code, restores the registers,
        runs the hook-overriden code and then jumps back to just after the hook.
        param hook_address: the branch address in the target executable to jump from
        param hook_glue_address: where the hook glue will be
        param hook_symbol_to_jump_to: the name of symbol in hook_filename to jump to
        param hook_filename: your hook code file name
        param hook_symbols_to_paste: which symbols from your hook code elf will be injected
        return value: patch_table - a list of PatchEntry's
        '''

        #preparations
        if hook_symbol_to_jump_to is None:
            hook_symbol_to_jump_to = 'main'
        tracer.trace('Hooking %s at 0x%x with %s, hook_glue at 0x%x, hook_symbol_to_jump_to is %s' % (
            self.binary.filename, hook_address, hook_filename, hook_glue_address, hook_symbol_to_jump_to))
        hook_exe = Executable(hook_filename, self.arch)

        branch, overriden_data, overriden_data_disas = self.get_branch_data(hook_address, hook_glue_address)
        hook_glue = self.get_hook_glue_data(hook_exe,
                                            hook_symbol_to_jump_to,
                                            overriden_data_disas,
                                            hook_glue_address,
                                            hook_address + len(branch))

        #prepare patch table
        patch_table = []

        for sym in hook_exe.symbols:
            if sym.name in hook_symbols_to_paste:
                patch_table.append(PatchEntry(sym.virtual_address, sym.data, 'hook_symbol_%s' % (sym.name),
                                              self.binary.get_data(sym.virtual_address, len(sym.data)), 'hook symbol'))

        patch_table.append(PatchEntry(hook_glue_address, hook_glue, 'hook_glue', self.binary.get_data(hook_glue_address, len(hook_glue)),
                                      'hook glue'))

        patch_table.append(PatchEntry(hook_address, branch, 'branch', overriden_data, 'branch to hook_glue'))

        return patch_table

    def write_binary_to_file(self, output_filepath):
        with open(output_filepath, 'wb') as f:
            f.write(self.binary.build())

    def patch_binary(self, patch_table, output_filepath):
        '''
        This function will apply patch table on self.binary and will save it to output_filepath
        Note that this function will change self.binary
        if you want to undo the operation of this function - call undo_patch_binary()
        param patch_table: a list of PatchEntry's
        param output_filepath: a filepath to write the patched binary to
        '''
        for patch_entry in patch_table:
            tracer.trace('pasting patch %s' % (str(patch_entry)))
            self.binary.set_data(patch_entry.virtual_address, patch_entry.data)

        self.write_binary_to_file(output_filepath)

    def create_undo_table(self, patch_table):
        '''
        patch_entry.original_data will be patch data,
        and patch_entry.data will be original_data
        '''
        undo_table = copy.deepcopy(patch_table)
        for undo_entry in undo_table:
            undo_entry.original_data, undo_entry.data = undo_entry.data, undo_entry.original_data
        return undo_table

    def undo_patch_binary(self, patch_table):
        '''
        See doc of patch_binary
        '''
        for undo_entry in self.create_undo_table(patch_table):
            tracer.trace('binary undo %s' % (str(undo_entry)))
            self.binary.set_data(undo_entry.virtual_address, undo_entry.data)

    def hot_patch(self, patch_table, hot_patcher, should_read_original_data=True):
        '''
        This function will apply patch table by hot patching with a hot_patcher object.
        Note that this function will change a running thing (can be a process or what-ever you need it to be),
        if you want to undo the operation of this function - call undo_hot_patch()
        Note2: if you want to hot patch slowly then pass a partial patch_table (the same for undo_hot_patch)
        param patch_table: a list of PatchEntry's
        param hot_patcher: an object of a class that inherits HotPatcher (which is abstract class)
        param should_read_original_data: if you pass True - the original data will be read before patching
        '''
        for patch_entry in patch_table:
            tracer.trace('hot patch %s' % (str(patch_entry)))
            if should_read_original_data:
                tracer.trace('reading original data')
                patch_entry.original_data = hot_patcher.read(patch_entry.virtual_address, patch_entry.size)
            hot_patcher.write(patch_entry.virtual_address, patch_entry.data)

    def undo_hot_patch(self, patch_table, hot_patcher):
        for undo_entry in self.create_undo_table(patch_table):
            tracer.trace('hot undo %s' % (str(undo_entry)))
            hot_patcher.write(undo_entry.virtual_address, undo_entry.data)
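To tie the pieces together, a rough sketch of how this class seems intended to be driven. The arch object, file names and addresses are placeholders, not values from the original project.

# Hypothetical driver for the Patcher above; Arch and all constants are stand-ins.
arch = Arch()                                    # some concrete architecture helper
patcher = Patcher('target.elf', arch)            # binary to hook
patch_table = patcher.get_patch(
    hook_address=0x400123,                       # where the branch is planted
    hook_glue_address=0x400800,                  # free space for the glue stub
    hook_filename='hook.elf',                    # compiled hook code
    hook_symbols_to_paste=['my_hook'],           # symbols copied into the target
    hook_symbol_to_jump_to='my_hook')            # entry point of the hook
patcher.patch_binary(patch_table, 'target_patched.elf')
# patcher.undo_patch_binary(patch_table) would revert the in-memory copy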