def run(self, extra_args, binary):
    """Generate a NASM source embedding the input file into .rodata and assemble it.

    Parses ``--includes`` out of *extra_args* into ``self._incl_dirs``, writes a
    ``<output>.asm`` wrapper (symbol + ``<symbol>Size`` dword), then hands it to
    ``do_run`` together with *binary*.
    """
    src = self.resolve_path(common.get(self.input, 0))
    symbol = self._prefix + os.path.basename(common.stripext(src))
    nbytes = os.path.getsize(src)
    asm_path = self.resolve_path(common.get(self.output, 0) + '.asm')

    parser = argparse.ArgumentParser(prog='rodata.py', add_help=False)
    parser.add_argument('--includes', help='module\'s addincls', nargs='*', required=False)
    parsed = parser.parse_args(extra_args)
    self._incl_dirs = parsed.includes

    lines = [
        'global ' + symbol + '\n',
        'global ' + symbol + 'Size' + '\n',
        'SECTION .rodata ALIGN=16\n',
        symbol + ':\nincbin "' + src + '"\n',
        'align 4, db 0\n',
        symbol + 'Size:\ndd ' + str(nbytes) + '\n',
    ]
    # ELF targets additionally get explicit symbol sizes.
    if self._fmt.startswith('elf'):
        lines.append('size ' + symbol + ' ' + str(nbytes) + '\n')
        lines.append('size ' + symbol + 'Size 4\n')

    with open(asm_path, 'w') as out:
        out.writelines(lines)
    return self.do_run(binary, asm_path)
def run(self, interpeter):
    """Run the interpreter command on the first three inputs, piping stdout to the output."""
    resolved_inputs = [self.resolve_path(common.get(self.input, i)) for i in range(3)]
    self.call(interpeter + resolved_inputs + ['dontuse'],
              stdout=common.get(self.output, 0))
def run(self, binary):
    """Write a minimal NASM wrapper that incbin's the input into .rodata, then assemble it."""
    src = self.resolve_path(common.get(self.input, 0))
    symbol = self._prefix + os.path.basename(common.stripext(src))
    asm_path = self.resolve_path(common.get(self.output, 0) + '.asm')
    with open(asm_path, 'w') as out:
        out.write('global ' + symbol + '\n')
        out.write('global ' + symbol + 'Size' + '\n')
        out.write('SECTION .rodata\n')
        out.write(symbol + ':\nincbin "' + src + '"\n')
        out.write(symbol + 'Size:\ndd ' + str(os.path.getsize(src)) + '\n')
    return self.do_run(binary, asm_path)
def run_with_map_size_and_proportion_of_capacity(map_size, proportion_of_capacity):
    """Repeatedly put and delete one entry sized at *proportion_of_capacity* of capacity.

    Creates a database of *map_size* bytes, then for up to 20 iterations puts an
    entry of ``available_size * proportion_of_capacity`` bytes and deletes it
    again, printing progress; stops early on the first failure.

    Fix: the original removed the database directory without ever closing the
    database handle — the sibling ``can_delete_entries`` closes it first. Cleanup
    now runs in a ``finally`` so the handle is closed (and the directory removed)
    even if an unexpected error escapes the loop.
    """
    database, database_location = create_database(map_size)
    try:
        available_size = calculate_largest_possible_entry(database, map_size)
        content = bytearray(int(available_size * proportion_of_capacity))
        iterations = 20
        for i in range(iterations):
            try:
                put(EXAMPLE_KEY, content, database)
                print("Put content of size equal to %d%% actual capacity (%d bytes) in a database of %d bytes" % ((100 * proportion_of_capacity), available_size, map_size))
            except Exception as e:
                print("Exception adding content on iteration %d: %s" % (i + 1, e))
                break
            try:
                deleted = delete(EXAMPLE_KEY, database)
                assert deleted
                assert get(EXAMPLE_KEY, database) is None
                print("Content deleted successfully (iteration %d/%d)" % (i + 1, iterations))
            except Exception as e:
                print("Exception deleting content on iteration %d: %s" % (i + 1, e))
                break
    finally:
        # Close the handle before deleting its backing directory.
        database.close()
        shutil.rmtree(database_location)
def do_run(self, binary, path):
    """Run *binary* on *path* via stdout2stderr, emitting C++ next to the first output."""
    include_flags = []
    for inc in self._incl_dirs:
        include_flags += ['-I', self.resolve_path(inc)]
    out_dir = os.path.dirname(self.resolve_path(common.get(self.output, 0)))
    self.call(common.get_interpreter_path()
              + ['$S/build/scripts/stdout2stderr.py', binary, '--cpp']
              + include_flags
              + ['-o', out_dir, path])
def run(self, extra_args, omniidl, omnicpp):
    """Run omniidl (with omnicpp preprocessor) in the output directory.

    When header generation is enabled, the generated skeleton ``.cc`` files are
    renamed to ``.h``. *extra_args* is accepted for interface compatibility but
    not used here.
    """
    out = get(self.output, 0)
    self.call([omniidl, '-Y', omnicpp] + self._flags + [self._path],
              cwd=os.path.dirname(out))
    if self._genh:
        for stem in ('DynSK', 'SK'):
            self.call(['mv', self._prefix + stem + '.cc', self._prefix + stem + '.h'])
def run(self, omniidl, omnicpp):
    """Invoke omniidl with the configured flags; optionally turn skeleton .cc files into headers."""
    output_file = get(self.output, 0)
    working_dir = os.path.dirname(output_file)
    command = [omniidl, '-Y', omnicpp] + self._flags + [self._path]
    self.call(command, cwd=working_dir)
    if self._genh:
        self.call(['mv', self._prefix + 'DynSK.cc', self._prefix + 'DynSK.h'])
        self.call(['mv', self._prefix + 'SK.cc', self._prefix + 'SK.h'])
def do_run(self, binary, path):
    """Invoke *binary* on *path* with the configured flags, output path and include dirs.

    Fix: the original rebound ``self._incl_dirs`` to the ``-I``-prefixed,
    resolved list, mutating shared state — a second call on the same instance
    would produce doubled ``-I-I…``/re-resolved arguments. Build the flag list
    in a local instead, leaving ``self._incl_dirs`` untouched.
    """
    include_flags = ['-I' + self.resolve_path(x) for x in self._incl_dirs]
    self.call([binary] + self._flags
              + ['-o', self.resolve_path(common.get(self.output, 0)),
                 '-outdir', self.resolve_path(self._bindir)]
              + include_flags
              + [self.resolve_path(path)])
def can_delete_entries(map_size, size_buffer, files_to_write_initially, files_to_write_after_delete):
    """Check that space freed by deletes can be reused.

    Fills the database, verifies and deletes every initial entry, then tries to
    write a second batch into the reclaimed space. Returns a
    ``(succeeded, largest_entry_size)`` tuple; the database is always closed and
    its directory removed.
    """
    database, database_location = create_database(map_size)
    largest_entry_size = calculate_largest_possible_entry(database, map_size)
    available_size = largest_entry_size - size_buffer
    assert available_size > 0
    add_files_with_total_size(files_to_write_initially, available_size, database)
    for index in range(files_to_write_initially):
        key = "%s_%d" % (EXAMPLE_KEY, index)
        assert get(key, database) is not None
        delete(key, database)
        assert get(key, database) is None
    try:
        add_files_with_total_size(files_to_write_after_delete, available_size, database)
        return True, largest_entry_size
    except MapFullError:
        return False, largest_entry_size
    finally:
        database.close()
        shutil.rmtree(database_location)
def run(self, binary):
    """Emit an aligned .rodata NASM wrapper for the input file and assemble it."""
    src = self.resolve_path(common.get(self.input, 0))
    sym = self._prefix + os.path.basename(common.stripext(src))
    nbytes = os.path.getsize(src)
    asm = self.resolve_path(common.get(self.output, 0) + '.asm')
    with open(asm, 'w') as out:
        out.write('global ' + sym + '\n')
        out.write('global ' + sym + 'Size' + '\n')
        out.write('SECTION .rodata ALIGN=16\n')
        out.write(sym + ':\nincbin "' + src + '"\n')
        out.write('align 4, db 0\n')
        out.write(sym + 'Size:\ndd ' + str(nbytes) + '\n')
        # ELF targets also declare explicit symbol sizes.
        if self._fmt.startswith('elf'):
            out.write('size ' + sym + ' ' + str(nbytes) + '\n')
            out.write('size ' + sym + 'Size 4\n')
    return self.do_run(binary, asm)
def do_run(self, binary, path):
    """Run the generator through stdout2stderr with include flags, writing beside the output."""
    flags = [tok for d in self._incl_dirs for tok in ('-I', self.resolve_path(d))]
    target_dir = os.path.dirname(self.resolve_path(common.get(self.output, 0)))
    command = common.get_interpreter_path()
    command = command + ['$S/build/scripts/stdout2stderr.py', binary, '--cpp']
    command = command + flags + ['-o', target_dir, path]
    self.call(command)
def do_run(self, binary, path):
    """Run the schema compiler (mutable C++ output, schema + binary modes) on *path*."""
    include_args = [tok for d in self._incl_dirs for tok in ('-I', self.resolve_path(d))]
    out_dir = os.path.dirname(self.resolve_path(common.get(self.output, 0)))
    argv = common.get_interpreter_path()
    argv += ['$S/build/scripts/stdout2stderr.py', binary,
             '--cpp', '--keep-prefix', '--gen-mutable', '--schema', '-b']
    argv += self.extra_arguments()
    argv += include_args
    argv += ['-o', out_dir, path]
    self.call(argv)
def run_with_map_size(map_size):
    # Stress put/delete under a concurrent reader: each iteration writes a
    # full-size entry, starts a reader thread that pauses mid-get, then deletes
    # the entry while that reader may still be holding a snapshot.
    database, database_location = create_database(map_size)
    # NOTE(review): map_size / 50 is a float on Python 3 — presumably
    # calculate_usable_bytes truncates/handles it, since the result is later
    # passed to bytearray(); confirm.
    available_size = calculate_usable_bytes(database, map_size / 50)
    for i in range(100):
        key = str(i)
        print("Starting %s" % i)
        put(key, bytearray(available_size), database)
        # The reader thread is deliberately not joined; the short sleep gives
        # it time to begin its get before the delete below races it.
        Thread(target=paused_get, args=(key, database)).start()
        sleep(0.01)
        deleted = delete(key, database)
        assert deleted
        assert get(key, database) is None
    # NOTE(review): unlike the sibling helpers, the database is never closed
    # and database_location is never removed — possibly intentional because
    # reader threads may still be running; verify.
def do_run(self, binary, path):
    """Assemble *path* with the configured output format, platform defines and includes."""
    defines = []
    for name in self._platform:
        defines += ['-D', name]
    includes = []
    for d in self._incl_dirs:
        includes += ['-I', self.resolve_path(d)]
    self.call([binary, '-f', self._fmt]
              + defines
              + ['-D', '_' + self._type + '_', '-D_YASM_']
              + self._flags
              + includes
              + ['-o', common.get(self.output, 0), path])
def do_run(self, binary, path):
    """Run *binary* over *path* with flags, output file, outdir and include dirs.

    Fix: the original overwrote ``self._incl_dirs`` with the ``-I``-prefixed,
    resolved paths, so invoking the method twice on one instance would prepend
    ``-I`` (and resolve) a second time. A local list keeps the instance state
    intact.
    """
    incl_args = ['-I' + self.resolve_path(d) for d in self._incl_dirs]
    self.call([binary] + self._flags
              + ['-o', self.resolve_path(common.get(self.output, 0)),
                 '-outdir', self.resolve_path(self._bindir)]
              + incl_args
              + [self.resolve_path(path)])
def do_run(self, binary, path):
    """Build the assembler command line (format, platform defines, includes) and run it."""
    platform_defs = [tok for name in self._platform for tok in ('-D', name)]
    include_args = [tok for d in self._incl_dirs for tok in ('-I', self.resolve_path(d))]
    argv = [binary, '-f', self._fmt]
    argv += platform_defs
    argv += ['-D', '_' + self._type + '_', '-D_YASM_']
    argv += self._flags
    argv += include_args
    argv += ['-o', common.get(self.output, 0), path]
    self.call(argv)
def run(self, interpeter):
    """Call the interpreter with the three resolved inputs, redirecting stdout to the output."""
    first = self.resolve_path(common.get(self.input, 0))
    second = self.resolve_path(common.get(self.input, 1))
    third = self.resolve_path(common.get(self.input, 2))
    self.call(interpeter + [first, second, third, 'dontuse'],
              stdout=common.get(self.output, 0))