def report_missing_symbols(all_implemented, pre):
  """Warn about explicitly exported symbols that were never implemented.

  A symbol counts as present if it is in `all_implemented` (compiled code) or
  defined as a JS function in the generated glue `pre`.  Also enforces the
  `main` entry-point policy according to the current settings.
  """
  # Start from everything the user explicitly exported but the compiled code
  # did not provide.
  unresolved = set(shared.Settings.USER_EXPORTED_FUNCTIONS) - all_implemented
  for symbol in sorted(unresolved):
    if f'function {symbol}(' in pre:
      continue
    diagnostics.warning('undefined', f'undefined exported symbol: "{symbol}"')
  # Special handling for the `_main` symbol
  if shared.Settings.STANDALONE_WASM:
    # standalone mode doesn't use main, and it always reports missing entry
    # point at link time, so we never expect _main in the export list here.
    return
  if shared.Settings.IGNORE_MISSING_MAIN:
    # The default mode for emscripten is to ignore the missing main function,
    # allowing maximum compatibility.
    return
  if shared.Settings.EXPECT_MAIN and '_main' not in all_implemented:
    # For compatibility with the output of wasm-ld we use the same wording
    # here as if wasm-ld had failed (i.e. in LLD_REPORT_UNDEFINED mode).
    exit_with_error('entry symbol not defined (pass --no-entry to suppress): main')
def create_sending(invoke_funcs, metadata):
  """Build the JS object literal of symbols sent (imported) to the wasm module."""
  em_js_funcs = set(metadata['emJsFuncs'].keys())
  mangled_declares = [asmjs_mangle(d) for d in metadata['declares']]
  mangled_externs = [asmjs_mangle(e) for e in metadata['globalImports']]
  send_items = set(invoke_funcs) | set(mangled_declares) | set(mangled_externs) | em_js_funcs

  def fix_import_name(g):
    # Unlike fastcomp, the wasm backend doesn't use the '_' prefix for native
    # symbols.  Emscripten currently expects symbols to start with '_', so the
    # prefix was added artificially to the output of emscripten-wasm-finalize
    # and is stripped again here.  EM_JS functions (which, rarely, may have a
    # legitimate '_' prefix) are left untouched.
    if g.startswith('_') and g not in em_js_funcs:
      return g[1:]
    return g

  send_items_map = OrderedDict()
  for name in send_items:
    key = fix_import_name(name)
    if key in send_items_map:
      exit_with_error('duplicate symbol in exports to wasm: %s', name)
    send_items_map[key] = name

  add_standard_wasm_imports(send_items_map)

  entries = ('"' + k + '": ' + send_items_map[k] for k in sorted(send_items_map.keys()))
  return '{\n ' + ',\n '.join(entries) + '\n}'
def report_missing_symbols(js_library_funcs):
  """Warn about exported symbols defined neither natively nor as JS library functions.

  Also enforces the `main` entry-point policy according to the current settings.
  """
  # Everything defined: mangled native wasm exports plus JS library functions.
  native = set(asmjs_mangle(e) for e in settings.WASM_EXPORTS)
  defined_symbols = native.union(js_library_funcs)
  for sym in sorted(set(settings.USER_EXPORTED_FUNCTIONS) - defined_symbols):
    diagnostics.warning('undefined', f'undefined exported symbol: "{sym}"')
  # Special handling for the `_main` symbol
  if settings.STANDALONE_WASM:
    # standalone mode doesn't use main, and it always reports missing entry
    # point at link time, so we never expect _main in the export list here.
    return
  if settings.IGNORE_MISSING_MAIN:
    # The default mode for emscripten is to ignore the missing main function,
    # allowing maximum compatibility.
    return
  if settings.EXPECT_MAIN and 'main' not in settings.WASM_EXPORTS:
    # For compatibility with the output of wasm-ld we use the same wording
    # here as if wasm-ld had failed (i.e. in LLD_REPORT_UNDEFINED mode).
    exit_with_error('entry symbol not defined (pass --no-entry to suppress): main')
def report_missing_symbols(all_implemented, pre):
  """Warn about explicitly exported functions that were never implemented.

  A symbol counts as present if it is in `all_implemented` (compiled code) or
  defined as a JS function in the generated glue `pre`.  Also enforces the
  `main` entry-point policy according to the current settings.
  """
  # the initial list of missing functions are those that the user explicitly
  # exported but that were not implemented in compiled code
  missing = list(set(shared.Settings.USER_EXPORTED_FUNCTIONS) - all_implemented)
  for requested in missing:
    if ('function ' + asstr(requested)) in pre:
      continue
    # special-case malloc, EXPORTED by default for internal use, but we bake in a
    # trivial allocator and warn at runtime if used in ASSERTIONS
    # BUGFIX: this previously compared the whole `missing` list against the
    # string '_malloc', which is never true, so the special case never fired.
    if requested == '_malloc':
      continue
    diagnostics.warning('undefined', 'undefined exported function: "%s"', requested)
  # Special handling for the `_main` symbol
  if shared.Settings.STANDALONE_WASM:
    # standalone mode doesn't use main, and it always reports missing entry
    # point at link time.  In this mode we never expect _main in the export list.
    return
  if shared.Settings.IGNORE_MISSING_MAIN:
    # The default mode for emscripten is to ignore the missing main function,
    # allowing maximum compatibility.
    return
  if shared.Settings.EXPECT_MAIN and '_main' not in all_implemented:
    # For compatibility with the output of wasm-ld we use the same wording here
    # in our error message as if wasm-ld had failed (i.e. in
    # LLD_REPORT_UNDEFINED mode).
    exit_with_error('entry symbol not defined (pass --no-entry to suppress): main')
def generate_minimal_runtime_html(target, options, js_target, target_basename):
  """Emit the MINIMAL_RUNTIME HTML page for the given build outputs."""
  logger.debug('generating HTML for minimal runtime')
  shell = utils.read_file(options.shell_path)
  # No extra files need downloading in a SINGLE_FILE build; otherwise inject
  # the loader statement for the .js/.wasm files.
  download_stmt = '' if settings.SINGLE_FILE else generate_minimal_runtime_load_statement(target_basename)
  shell = shell.replace('{{{ DOWNLOAD_JS_AND_WASM_FILES }}}', download_stmt)
  temp_files = shared.configuration.get_temp_files()
  with temp_files.get_file(suffix='.js') as shell_temp:
    utils.write_file(shell_temp, shell)
    shell = shared.read_and_preprocess(shell_temp)
  if re.search(r'{{{\s*SCRIPT\s*}}}', shell):
    shared.exit_with_error('--shell-file "' + options.shell_path + '": MINIMAL_RUNTIME uses a different kind of HTML page shell file than the traditional runtime! Please see $EMSCRIPTEN/src/shell_minimal_runtime.html for a template to use as a basis.')
  for placeholder, replacement in (('{{{ TARGET_BASENAME }}}', target_basename),
                                   ('{{{ EXPORT_NAME }}}', settings.EXPORT_NAME),
                                   ('{{{ PTHREAD_WORKER_FILE }}}', settings.PTHREAD_WORKER_FILE)):
    shell = shell.replace(placeholder, replacement)
  # In a SINGLE_FILE build, embed the main .js file into the .html output.
  if settings.SINGLE_FILE:
    js_contents = utils.read_file(js_target)
    shared.try_delete(js_target)
  else:
    js_contents = ''
  shell = shell.replace('{{{ JS_CONTENTS_IN_SINGLE_FILE_BUILD }}}', js_contents)
  shell = line_endings.convert_line_endings(shell, '\n', options.output_eol)
  # Force UTF-8 output for consistency across platforms and with the web.
  with open(target, 'wb') as f:
    f.write(shell.encode('utf-8'))
def create_sending_wasm(invoke_funcs, forwarded_json, metadata):
  """Build the JS object literal of symbols sent (imported) to the wasm module."""
  extra_funcs = []
  if shared.Settings.SAFE_HEAP:
    extra_funcs += ['segfault', 'alignfault']
  em_js_funcs = list(metadata['emJsFuncs'].keys())
  declared_items = ['_' + item for item in metadata['declares']]
  send_items = set(extra_funcs + invoke_funcs + em_js_funcs + declared_items)

  def fix_import_name(g):
    # Math_* helpers import under their bare JS Math member name.
    if g.startswith('Math_'):
      return g.split('_')[1]
    # Unlike fastcomp, the wasm backend doesn't use the '_' prefix for native
    # symbols.  Emscripten currently expects symbols to start with '_', so the
    # prefix was added artificially to the output of emscripten-wasm-finalize
    # and is stripped again here.  EM_JS functions (which, rarely, may have a
    # legitimate '_' prefix) are left untouched.
    if g.startswith('_') and g not in metadata['emJsFuncs']:
      return g[1:]
    return g

  send_items_map = OrderedDict()
  for name in send_items:
    key = fix_import_name(name)
    if key in send_items_map:
      exit_with_error('duplicate symbol in exports to wasm: %s', name)
    send_items_map[key] = name

  add_standard_wasm_imports(send_items_map)

  entries = ('"' + k + '": ' + send_items_map[k] for k in sorted(send_items_map.keys()))
  return '{ ' + ', '.join(entries) + ' }'
def build(src, result_libs, args=None):
  """Compile `src` with emcc so the requested cached libraries get built.

  src: C/C++ source text to compile.
  result_libs: cache library names that must exist after the build.
  args: optional extra emcc flags (a fresh list is used internally).

  BUGFIX: the previous signature used a mutable default (`args=[]`) and then
  mutated it with `+=`, leaking flags across calls and into caller-owned lists.
  """
  args = list(args) if args else []
  if not shared.Settings.WASM_OBJECT_FILES:
    args += ['-s', 'WASM_OBJECT_FILES=0']
  if shared.Settings.RELOCATABLE:
    args += ['-s', 'RELOCATABLE']
  # build in order to generate the libraries
  # do it all in a temp dir where everything will be cleaned up
  temp_dir = temp_files.get_dir()
  cpp = os.path.join(temp_dir, 'src.cpp')
  # use a context manager so the source file handle is closed promptly
  with open(cpp, 'w') as f:
    f.write(src)
  temp_js = os.path.join(temp_dir, 'out.js')
  if force:
    for lib in result_libs:
      shared.Cache.erase_file(lib)
  try:
    shared.Building.emcc(cpp, args, output_filename=temp_js)
  except subprocess.CalledProcessError as e:
    shared.exit_with_error("embuilder: emcc command failed with %d: '%s'", e.returncode, ' '.join(e.cmd))
  for lib in result_libs:
    if not os.path.exists(shared.Cache.get_path(lib)):
      shared.exit_with_error('not seeing that requested library %s has been built because file %s does not exist' % (lib, shared.Cache.get_path(lib)))
def eval_ctors_wasm(js, wasm_file, num):
  """Run wasm-ctor-eval on `wasm_file` to execute static constructors at build time.

  Returns (num_successful, js): how many of the first `num` ctors were
  evaluated away, and the JS glue rewritten so only the remaining ctors are
  registered in __ATINIT__.
  """
  # Locate the ctor-registration region in the JS glue and the ctor names.
  ctors_start, ctors_end, all_ctors, ctors = find_ctors_data(js, num)
  cmd = [os.path.join(binaryen_bin, 'wasm-ctor-eval'), wasm_file, '-o', wasm_file, '--ctors=' + ','.join(ctors)]
  cmd += extra_args
  if debug_info:
    cmd += ['-g']
  logging.debug('wasm ctor cmd: ' + str(cmd))
  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
  try:
    err = jsrun.timeout_run(proc, timeout=10, full_output=True, throw_on_failure=False)
  except Exception as e:
    # Only a timeout is tolerated (treated as "no ctors evaluated"); any other
    # failure propagates.
    if 'Timed out' not in str(e):
      raise
    logging.debug('ctors timed out\n')
    return 0, js
  if proc.returncode != 0:
    shared.exit_with_error('unexpected error while trying to eval ctors:\n' + err)
  # wasm-ctor-eval reports one 'success on' line per ctor it evaluated.
  num_successful = err.count('success on')
  logging.debug(err)
  if len(ctors) == num_successful:
    # Everything was evaluated away; no ctor registration is needed at all.
    new_ctors = ''
  else:
    # Re-register only the ctors that were not successfully evaluated.
    elements = []
    for ctor in all_ctors[num_successful:]:
      elements.append('{ func: function() { %s() } }' % ctor)
    new_ctors = '__ATINIT__.push(' + ', '.join(elements) + ');'
  # Splice the (possibly empty) registration back into the JS glue.
  js = js[:ctors_start] + new_ctors + js[ctors_end:]
  return num_successful, js
def create_optimizer():
  """Compile the native asm.js optimizer, trying several C++ compilers in turn."""
  shared.logging.debug('building native optimizer: ' + name)
  output = shared.Cache.get_path(name)
  shared.try_delete(output)
  sources = [shared.path_from_root('tools', 'optimizer', src) for src in
             ('parser.cpp', 'simple_ast.cpp', 'optimizer.cpp',
              'optimizer-shared.cpp', 'optimizer-main.cpp')]
  # try our clang first, otherwise hope for a system compiler in the path
  for compiler in (shared.CLANG_CXX, 'g++', 'clang++'):
    shared.logging.debug(' using ' + compiler)
    cmd = [compiler] + sources + ['-O3', '-std=c++11', '-fno-exceptions', '-fno-rtti', '-o', output] + args
    try:
      shared.run_process(cmd, stdout=log_output, stderr=log_output)
    except Exception as e:
      logging.debug(str(e))
      continue  # perhaps a later compiler will succeed
    # success
    return output
  shared.exit_with_error('Failed to build native optimizer')
def load_metadata_wasm(metadata_raw, DEBUG):
  """Parse and validate the JSON metadata emitted by wasm-emscripten-finalize."""
  try:
    metadata_json = json.loads(metadata_raw)
  except Exception:
    logger.error('emscript: failure to parse metadata output from wasm-emscripten-finalize. raw output is: \n' + metadata_raw)
    raise

  # Defaults for every key we understand; any other key is fatal below.
  metadata = {
      'declares': [],
      'externs': [],
      'staticBump': 0,
      'tableSize': 0,
      'exports': [],
      'namedGlobals': {},
      'emJsFuncs': {},
      'asmConsts': {},
      'invokeFuncs': [],
      'features': [],
      'mainReadsParams': 1,
  }
  # Keys emitted by older tool versions; silently ignored nowadays.
  legacy_keys = {'implementedFunctions', 'initializers', 'simd'}

  assert 'tableSize' in metadata_json.keys()
  for key, value in metadata_json.items():
    if key in legacy_keys:
      continue
    if key not in metadata:
      exit_with_error('unexpected metadata key received from wasm-emscripten-finalize: %s', key)
    metadata[key] = value

  # Support older metadata when asmConsts values were lists. We only use the
  # first element nowadays.
  # TODO(sbc): remove this once binaryen has been changed to only emit the
  # single element
  metadata['asmConsts'] = {k: (v[0] if type(v) is list else v) for k, v in metadata['asmConsts'].items()}

  if DEBUG:
    logger.debug("Metadata parsed: " + pprint.pformat(metadata))

  # Calculate the subset of exports that were explicitly marked with llvm.used.
  # These are any exports that were not requested on the command line and are
  # not known auto-generated system functions.
  mangled = [asmjs_mangle(e) for e in metadata['exports'] if treat_as_user_function(e)]
  building.user_requested_exports += [e for e in mangled if e not in shared.Settings.EXPORTED_FUNCTIONS]

  return metadata
def load_metadata_wasm(metadata_raw, DEBUG):
  """Parse and validate the JSON metadata emitted by wasm-emscripten-finalize."""
  try:
    metadata_json = json.loads(metadata_raw)
  except Exception:
    logger.error('emscript: failure to parse metadata output from wasm-emscripten-finalize. raw output is: \n' + metadata_raw)
    raise

  # Defaults for every key we understand; any other key is fatal below.
  metadata = {
      'declares': [],
      'externs': [],
      'staticBump': 0,
      'tableSize': 0,
      'exports': [],
      'namedGlobals': {},
      'emJsFuncs': {},
      'asmConsts': {},
      'invokeFuncs': [],
      'features': [],
      'mainReadsParams': 1,
  }
  # Keys emitted by older tool versions; silently ignored nowadays.
  legacy_keys = {'implementedFunctions', 'initializers', 'simd'}

  assert 'tableSize' in metadata_json.keys()
  for key, value in metadata_json.items():
    if key in legacy_keys:
      continue
    # json.loads returns `unicode` for strings, but other code in this file
    # generally works with utf8-encoded `str` objects, and they don't always
    # mix well, e.g. s.replace(x, y) will blow up if `s` is a utf8 str
    # containing non-ascii and either x or y are unicode objects.
    # TODO(sbc): Remove this encoding if we switch to unicode elsewhere
    # (specifically the glue returned from compile_settings)
    if type(value) == list:
      value = [asstr(v) for v in value]
    if key not in metadata:
      exit_with_error('unexpected metadata key received from wasm-emscripten-finalize: %s', key)
    metadata[key] = value

  if DEBUG:
    logger.debug("Metadata parsed: " + pprint.pformat(metadata))

  # Calculate the subset of exports that were explicitly marked with llvm.used.
  # These are any exports that were not requested on the command line and are
  # not known auto-generated system functions.
  mangled = [asmjs_mangle(e) for e in metadata['exports'] if treat_as_user_function(e)]
  building.user_requested_exports += [e for e in mangled if e not in shared.Settings.EXPORTED_FUNCTIONS]

  return metadata
def run():
  """emar entry point: forward to llvm-ar, hashing colliding member basenames."""
  if shared.Settings.WASM_BACKEND:
    # The wasm backend doesn't suffer from the same problem as fastcomp, so it
    # doesn't need the filename hashing.
    cmd = [shared.LLVM_AR] + sys.argv[1:]
    return shared.run_process(cmd, stdin=sys.stdin, check=False).returncode
  try:
    args = substitute_response_files(sys.argv)
  except IOError as e:
    shared.exit_with_error(e)
  newargs = [shared.LLVM_AR] + args[1:]
  tmpdir = None
  response_filename = None
  # The 3-argument form of ar doesn't involve other files. For example
  # 'ar x libfoo.a'.
  if len(newargs) > 3:
    tmpdir = tempfile.mkdtemp(prefix='emar-')
    cmd = newargs[1]
    if 'r' in cmd or 'q' in cmd:
      # We are adding files to the archive.  Normally the output file is arg 2,
      # except when the 'a' or 'b' modifiers are used, in which case it's arg 3.
      out_arg_index = 3 if ('a' in cmd or 'b' in cmd) else 2
      # Add a hash to colliding basenames, to make them unique.
      for j in range(out_arg_index + 1, len(newargs)):
        orig_name = newargs[j]
        full_name = os.path.abspath(orig_name)
        basename = os.path.basename(full_name)
        digest = hashlib.md5(full_name.encode('utf-8')).hexdigest()[:8]
        parts = basename.split('.')
        parts[0] += '_' + digest
        full_newname = os.path.join(tmpdir, '.'.join(parts))
        shutil.copyfile(orig_name, full_newname)
        newargs[j] = full_newname
    if shared.DEBUG:
      print('emar:', sys.argv, ' ==> ', newargs, file=sys.stderr)
    # Pass the member list via a response file to avoid command-line limits.
    response_filename = create_response_file(newargs[3:], shared.get_emscripten_temp_dir())
    newargs = newargs[:3] + ['@' + response_filename]
  if shared.DEBUG:
    print('emar:', sys.argv, ' ==> ', newargs, file=sys.stderr)
  rtn = shared.run_process(newargs, stdin=sys.stdin, check=False).returncode
  if tmpdir:
    shutil.rmtree(tmpdir)
    shared.try_delete(response_filename)
  return rtn
def run():
  """emar entry point: forward to llvm-ar, hashing colliding member basenames."""
  try:
    args = substitute_response_files(sys.argv)
  except IOError as e:
    shared.exit_with_error(e)
  newargs = [shared.LLVM_AR] + args[1:]
  to_delete = []
  # The 3-argument form of ar doesn't involve other files. For example
  # 'ar x libfoo.a'.
  if len(newargs) > 3:
    cmd = newargs[1]
    if 'r' in cmd or 'q' in cmd:
      # We are adding files to the archive.  Normally the output file is arg 2,
      # except when the 'a' or 'b' modifiers are used, in which case it's arg 3.
      out_arg_index = 3 if ('a' in cmd or 'b' in cmd) else 2
      # Add a hash to colliding basenames, to make them unique.
      for j in range(out_arg_index + 1, len(newargs)):
        orig_name = newargs[j]
        full_name = os.path.abspath(orig_name)
        dirname = os.path.dirname(full_name)
        basename = os.path.basename(full_name)
        digest = hashlib.md5(full_name.encode('utf-8')).hexdigest()[:8]
        parts = basename.split('.')
        parts[0] += '_' + digest
        full_newname = os.path.join(dirname, '.'.join(parts))
        try:
          shutil.copyfile(orig_name, full_newname)
          newargs[j] = full_newname
          to_delete.append(full_newname)
        except Exception:
          # it is ok to fail here; we just don't get hashing
          pass
    if shared.DEBUG:
      print('emar:', sys.argv, ' ==> ', newargs, file=sys.stderr)
    # Pass the member list via a response file to avoid command-line limits.
    response_filename = create_response_file(newargs[3:], shared.get_emscripten_temp_dir())
    to_delete.append(response_filename)
    newargs = newargs[:3] + ['@' + response_filename]
  if shared.DEBUG:
    print('emar:', sys.argv, ' ==> ', newargs, file=sys.stderr)
  rtn = shared.run_process(newargs, stdin=sys.stdin, check=False).returncode
  for path in to_delete:
    shared.try_delete(path)
  return rtn
def c_to_s(c):
  """Map a wasm2c runtime type name (e.g. 'WASM_RT_I32') to its one-letter sig char."""
  mapping = {
      'WASM_RT_I32': 'i',
      'WASM_RT_I64': 'j',
      'WASM_RT_F32': 'f',
      'WASM_RT_F64': 'd',
  }
  if c not in mapping:
    exit_with_error('invalid wasm2c type element:' + str(c))
  return mapping[c]
def s_to_c(s):
  """Map a one-letter signature char (v/i/j/f/d) to its wasm2c C type name."""
  mapping = {
      'v': 'void',
      'i': 'u32',
      'j': 'u64',
      'f': 'f32',
      'd': 'f64',
  }
  if s not in mapping:
    exit_with_error('invalid sig element:' + str(s))
  return mapping[s]
def build(src, result_libs, args=None):
  """Compile `src` with emcc so the requested cached libraries get built.

  src: C/C++ source text to compile.
  result_libs: cache library names that must exist after the build.
  args: optional extra emcc flags.

  BUGFIX: the previous signature used a mutable default argument (`args=[]`),
  a shared object across calls; also the source file handle was never closed.
  """
  if args is None:
    args = []
  # build in order to generate the libraries
  # do it all in a temp dir where everything will be cleaned up
  temp_dir = temp_files.get_dir()
  cpp = os.path.join(temp_dir, 'src.cpp')
  # use a context manager so the source file handle is closed promptly
  with open(cpp, 'w') as f:
    f.write(src)
  temp_js = os.path.join(temp_dir, 'out.js')
  shared.Building.emcc(cpp, args, output_filename=temp_js)
  # verify the build succeeded
  if not os.path.exists(temp_js):
    shared.exit_with_error('failed to build file')
  for lib in result_libs:
    if not os.path.exists(shared.Cache.get_path(lib)):
      shared.exit_with_error('not seeing that requested library %s has been built because file %s does not exist' % (lib, shared.Cache.get_path(lib)))
def __init__(self):
  """Compute the static / stack / dynamic-memory layout from the current settings.

  Raises via exit_with_error if the configured INITIAL_MEMORY cannot hold the
  static data plus the stack.
  """
  # Note: if RELOCATABLE, then only relative sizes can be computed, and we don't
  # actually write out any absolute memory locations ({{{ STACK_BASE }}}
  # does not exist, etc.)
  # Memory layout:
  # * first the static globals
  self.global_base = shared.Settings.GLOBAL_BASE    # start of static data
  self.static_bump = shared.Settings.STATIC_BUMP    # size of static data
  # * then the stack (up on fastcomp, down on upstream)
  self.stack_low = align_memory(self.global_base + self.static_bump)
  self.stack_high = align_memory(self.stack_low + shared.Settings.TOTAL_STACK)
  # stack_base/stack_max express the (downward) growth direction: base is the
  # high end, max is the low end.
  self.stack_base = self.stack_high
  self.stack_max = self.stack_low
  # * then dynamic memory begins
  self.dynamic_base = align_memory(self.stack_high)
  if self.dynamic_base >= shared.Settings.INITIAL_MEMORY:
    exit_with_error('Memory is not large enough for static data (%d) plus the stack (%d), please increase INITIAL_MEMORY (%d) to at least %d' % (self.static_bump, shared.Settings.TOTAL_STACK, shared.Settings.INITIAL_MEMORY, self.dynamic_base))
def read_ports():
  """Import every port module in ports_dir, validate it, and register it."""
  expected_attrs = ['get', 'clear', 'process_args', 'show', 'needed']
  for filename in os.listdir(ports_dir):
    if not filename.endswith('.py') or filename == '__init__.py':
      continue
    modname = os.path.splitext(filename)[0]
    port = __import__(modname, globals(), level=1)
    ports.append(port)
    port.name = modname
    ports_by_name[port.name] = port
    # Every port must provide the expected interface.
    for attr in expected_attrs:
      assert hasattr(port, attr), 'port %s is missing %s' % (port, attr)
    # Fill in optional attributes with no-op defaults.
    if not hasattr(port, 'process_dependencies'):
      port.process_dependencies = lambda x: 0
    if not hasattr(port, 'deps'):
      port.deps = []
    # Dependencies must refer to ports already registered.
    for dep in port.deps:
      if dep not in ports_by_name:
        exit_with_error('unknown dependency in port: %s' % dep)
def generate_minimal_runtime_html(target, options, js_target, target_basename, asm_target, wasm_binary_target, memfile, optimizer):
  """Emit the MINIMAL_RUNTIME HTML page for the given build outputs.

  BUGFIX: file handles from `open(...).read()` / `open(...).write()` were
  never explicitly closed; use context managers so they are released promptly
  on all Python implementations.
  """
  logger.debug('generating HTML for minimal runtime')
  with open(options.shell_path, 'r') as f:
    shell = f.read()
  shell = shell.replace('{{{ DOWNLOAD_JS_AND_WASM_FILES }}}', generate_minimal_runtime_load_statement(target_basename))
  temp_files = shared.configuration.get_temp_files()
  with temp_files.get_file(suffix='.js') as shell_temp:
    with open(shell_temp, 'w') as f:
      f.write(shell)
    shell = shared.read_and_preprocess(shell_temp)
  if re.search(r'{{{\s*SCRIPT\s*}}}', shell):
    shared.exit_with_error('--shell-file "' + options.shell_path + '": MINIMAL_RUNTIME uses a different kind of HTML page shell file than the traditional runtime! Please see $EMSCRIPTEN/src/shell_minimal_runtime.html for a template to use as a basis.')
  shell = shell.replace('{{{ TARGET_BASENAME }}}', target_basename)
  shell = shell.replace('{{{ EXPORT_NAME }}}', shared.Settings.EXPORT_NAME)
  shell = line_endings.convert_line_endings(shell, '\n', options.output_eol)
  with open(target, 'wb') as f:
    f.write(shared.asbytes(shell))
def main():
  """embuilder entry point: parse flags and build/verify the requested targets.

  Returns 0 on success, 1 on an unknown build target.
  """
  global force
  parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, epilog=get_help())
  parser.add_argument('--lto', action='store_true', help='build bitcode object for LTO')
  parser.add_argument('--pic', action='store_true', help='build relocatable objects for suitable for dynamic linking')
  parser.add_argument('--force', action='store_true', help='force rebuild of target (by removing it first)')
  parser.add_argument('operation', help='currently only "build" is supported')
  parser.add_argument('targets', nargs='+', help='see below')
  args = parser.parse_args()
  if args.operation != 'build':
    shared.exit_with_error('unfamiliar operation: ' + args.operation)
  # process flags
  # Check sanity so that if settings file has changed, the cache is cleared here.
  # Otherwise, the cache will clear in an emcc process, which is invoked while building
  # a system library into the cache, causing trouble.
  shared.check_sanity()
  if args.lto:
    shared.Settings.LTO = "full"
    # Reconfigure the cache dir to reflect the change
    shared.reconfigure_cache()
  if args.pic:
    shared.Settings.RELOCATABLE = 1
    # Reconfigure the cache dir to reflect the change
    shared.reconfigure_cache()
  if args.force:
    force = True
  # process tasks: the special pseudo-targets expand to predefined task lists
  libname = system_libs.Ports.get_lib_name
  auto_tasks = False
  tasks = args.targets
  if 'SYSTEM' in tasks:
    tasks = SYSTEM_TASKS
    auto_tasks = True
  elif 'USER' in tasks:
    tasks = USER_TASKS
    auto_tasks = True
  elif 'MINIMAL' in tasks:
    tasks = MINIMAL_TASKS
    auto_tasks = True
  elif 'ALL' in tasks:
    tasks = SYSTEM_TASKS + USER_TASKS
    auto_tasks = True
  if auto_tasks:
    if shared.Settings.WASM_BACKEND:
      skip_tasks = []
      if shared.Settings.RELOCATABLE:
        # we don't support PIC + pthreads yet
        for task in SYSTEM_TASKS + USER_TASKS:
          if '-mt' in task:
            skip_tasks.append(task)
          if 'pthread' in task and 'stub' not in task:
            skip_tasks.append(task)
        print('Skipping building of %s, because we don\'t support threads and PIC code.' % ', '.join(skip_tasks))
      # cocos2d: must be ported, errors on
      # "Cannot recognize the target platform; are you targeting an unsupported platform?"
      skip_tasks += ['cocos2d']
      tasks = [x for x in tasks if x not in skip_tasks]
    else:
      # fastcomp only: decide whether to also build the native optimizer
      if os.environ.get('EMSCRIPTEN_NATIVE_OPTIMIZER'):
        print('Skipping building of native-optimizer; EMSCRIPTEN_NATIVE_OPTIMIZER is environment.')
      elif shared.EMSCRIPTEN_NATIVE_OPTIMIZER:
        print('Skipping building of native-optimizer; EMSCRIPTEN_NATIVE_OPTIMIZER set in .emscripten config.')
      else:
        tasks += ['native_optimizer']
  print('Building targets: %s' % ' '.join(tasks))
  # NOTE: several branches below temporarily toggle a shared.Settings flag
  # around build_port() and restore it afterwards, so later tasks see defaults.
  for what in tasks:
    logger.info('building and verifying ' + what)
    if what in SYSTEM_LIBRARIES:
      library = SYSTEM_LIBRARIES[what]
      if force:
        library.erase()
      library.get_path()
    elif what == 'struct_info':
      if force:
        shared.Cache.erase_file('generated_struct_info.json')
      emscripten.generate_struct_info()
    elif what == 'native_optimizer':
      if force:
        shared.Cache.erase_file('optimizer.2.exe')
      js_optimizer.get_native_optimizer()
    elif what == 'icu':
      build_port('icu', libname('libicuuc'))
    elif what == 'zlib':
      shared.Settings.USE_ZLIB = 1
      build_port('zlib', 'libz.a')
      shared.Settings.USE_ZLIB = 0
    elif what == 'bzip2':
      build_port('bzip2', 'libbz2.a')
    elif what == 'bullet':
      build_port('bullet', libname('libbullet'))
    elif what == 'vorbis':
      build_port('vorbis', libname('libvorbis'))
    elif what == 'ogg':
      build_port('ogg', libname('libogg'))
    elif what == 'libjpeg':
      build_port('libjpeg', libname('libjpeg'))
    elif what == 'libpng':
      build_port('libpng', libname('libpng'))
    elif what == 'sdl2':
      build_port('sdl2', libname('libSDL2'))
    elif what == 'sdl2-mt':
      shared.Settings.USE_PTHREADS = 1
      build_port('sdl2', libname('libSDL2-mt'))
      shared.Settings.USE_PTHREADS = 0
    elif what == 'sdl2-gfx':
      build_port('sdl2_gfx', libname('libSDL2_gfx'))
    elif what == 'sdl2-image':
      build_port('sdl2_image', libname('libSDL2_image'))
    elif what == 'sdl2-image-png':
      shared.Settings.SDL2_IMAGE_FORMATS = ["png"]
      build_port('sdl2_image', libname('libSDL2_image_png'))
      shared.Settings.SDL2_IMAGE_FORMATS = []
    elif what == 'sdl2-image-jpg':
      shared.Settings.SDL2_IMAGE_FORMATS = ["jpg"]
      build_port('sdl2_image', libname('libSDL2_image_jpg'))
      shared.Settings.SDL2_IMAGE_FORMATS = []
    elif what == 'sdl2-net':
      build_port('sdl2_net', libname('libSDL2_net'))
    elif what == 'sdl2-mixer':
      old_formats = shared.Settings.SDL2_MIXER_FORMATS
      shared.Settings.SDL2_MIXER_FORMATS = []
      build_port('sdl2_mixer', libname('libSDL2_mixer'))
      shared.Settings.SDL2_MIXER_FORMATS = old_formats
    elif what == 'sdl2-mixer-ogg':
      old_formats = shared.Settings.SDL2_MIXER_FORMATS
      shared.Settings.SDL2_MIXER_FORMATS = ["ogg"]
      build_port('sdl2_mixer', libname('libSDL2_mixer_ogg'))
      shared.Settings.SDL2_MIXER_FORMATS = old_formats
    elif what == 'sdl2-mixer-mp3':
      old_formats = shared.Settings.SDL2_MIXER_FORMATS
      shared.Settings.SDL2_MIXER_FORMATS = ["mp3"]
      build_port('sdl2_mixer', libname('libSDL2_mixer_mp3'))
      shared.Settings.SDL2_MIXER_FORMATS = old_formats
    elif what == 'freetype':
      build_port('freetype', 'libfreetype.a')
    elif what == 'harfbuzz':
      build_port('harfbuzz', 'libharfbuzz.a')
    elif what == 'harfbuzz-mt':
      shared.Settings.USE_PTHREADS = 1
      build_port('harfbuzz', 'libharfbuzz-mt.a')
      shared.Settings.USE_PTHREADS = 0
    elif what == 'sdl2-ttf':
      build_port('sdl2_ttf', libname('libSDL2_ttf'))
    elif what == 'cocos2d':
      build_port('cocos2d', libname('libcocos2d'))
    elif what == 'regal':
      build_port('regal', libname('libregal'))
    elif what == 'regal-mt':
      shared.Settings.USE_PTHREADS = 1
      build_port('regal', libname('libregal-mt'))
      shared.Settings.USE_PTHREADS = 0
    elif what == 'boost_headers':
      build_port('boost_headers', libname('libboost_headers'))
    else:
      logger.error('unfamiliar build target: ' + what)
      return 1
  logger.info('...success')
  return 0
def main():
  """embuilder entry point: parse flags and build/verify the requested targets.

  Returns 0 on success, 1 on an unknown build target.
  """
  global force
  parser = argparse.ArgumentParser(description=__doc__, usage=get_usage())
  parser.add_argument('--lto', action='store_true', help='build bitcode object for LTO')
  parser.add_argument('--pic', action='store_true', help='build relocatable objects for suitable for dynamic linking')
  parser.add_argument('--force', action='store_true', help='force rebuild of target (by removing it first)')
  parser.add_argument('operation', help='currently only "build" is supported')
  parser.add_argument('targets', nargs='+', help='see above')
  args = parser.parse_args()
  if args.operation != 'build':
    shared.exit_with_error('unfamiliar operation: ' + args.operation)
  # process flags
  # Check sanity so that if settings file has changed, the cache is cleared here.
  # Otherwise, the cache will clear in an emcc process, which is invoked while building
  # a system library into the cache, causing trouble.
  shared.check_sanity()
  if args.lto:
    shared.Settings.WASM_OBJECT_FILES = 0
    # Reconfigure the cache dir to reflect the change
    shared.reconfigure_cache()
  if args.pic:
    shared.Settings.RELOCATABLE = 1
    # Reconfigure the cache dir to reflect the change
    shared.reconfigure_cache()
  if args.force:
    force = True
  # process tasks: the special pseudo-targets expand to predefined task lists
  libname = shared.static_library_name
  auto_tasks = False
  tasks = args.targets
  if 'SYSTEM' in tasks:
    tasks = SYSTEM_TASKS
    auto_tasks = True
  elif 'USER' in tasks:
    tasks = USER_TASKS
    auto_tasks = True
  elif 'ALL' in tasks:
    tasks = SYSTEM_TASKS + USER_TASKS
    auto_tasks = True
  if auto_tasks:
    if shared.Settings.WASM_BACKEND:
      skip_tasks = []
      if shared.Settings.RELOCATABLE:
        # we don't support PIC + pthreads yet
        skip_tasks += [task for task in SYSTEM_TASKS + USER_TASKS if '-mt' in task or 'thread' in task]
      # cocos2d: must be ported, errors on
      # "Cannot recognize the target platform; are you targeting an unsupported platform?"
      skip_tasks += ['cocos2d']
      print('Skipping building of %s, because WebAssembly does not support pthreads.' % ', '.join(skip_tasks))
      tasks = [x for x in tasks if x not in skip_tasks]
    else:
      # fastcomp only: decide whether to also build the native optimizer
      if os.environ.get('EMSCRIPTEN_NATIVE_OPTIMIZER'):
        print('Skipping building of native-optimizer; EMSCRIPTEN_NATIVE_OPTIMIZER is environment.')
      elif shared.EMSCRIPTEN_NATIVE_OPTIMIZER:
        print('Skipping building of native-optimizer; EMSCRIPTEN_NATIVE_OPTIMIZER set in .emscripten config.')
      else:
        tasks += ['native_optimizer']
  print('Building targets: %s' % ' '.join(tasks))
  # Each branch below either builds a known system library or forwards the
  # appropriate -s flags to build_port() / build().
  for what in tasks:
    logger.info('building and verifying ' + what)
    if what in SYSTEM_LIBRARIES:
      library = SYSTEM_LIBRARIES[what]
      if force:
        library.erase()
      library.get_path()
    elif what == 'struct_info':
      build(C_BARE, ['generated_struct_info.json'])
    elif what == 'native_optimizer':
      build(C_BARE, ['optimizer.2.exe'], ['-O2', '-s', 'WASM=0'])
    elif what == 'icu':
      build_port('icu', libname('libicuuc'), ['-s', 'USE_ICU=1'], '#include "unicode/ustring.h"')
    elif what == 'zlib':
      build_port('zlib', 'libz.a', ['-s', 'USE_ZLIB=1'])
    elif what == 'bzip2':
      build_port('bzip2', 'libbz2.a', ['-s', 'USE_BZIP2=1'])
    elif what == 'bullet':
      build_port('bullet', libname('libbullet'), ['-s', 'USE_BULLET=1'])
    elif what == 'vorbis':
      build_port('vorbis', libname('libvorbis'), ['-s', 'USE_VORBIS=1'])
    elif what == 'ogg':
      build_port('ogg', libname('libogg'), ['-s', 'USE_OGG=1'])
    elif what == 'libjpeg':
      build_port('libjpeg', libname('libjpeg'), ['-s', 'USE_LIBJPEG=1'])
    elif what == 'libpng':
      build_port('libpng', libname('libpng'), ['-s', 'USE_ZLIB=1', '-s', 'USE_LIBPNG=1'])
    elif what == 'sdl2':
      build_port('sdl2', libname('libSDL2'), ['-s', 'USE_SDL=2'])
    elif what == 'sdl2-mt':
      build_port('sdl2', libname('libSDL2-mt'), ['-s', 'USE_SDL=2', '-s', 'USE_PTHREADS=1'])
    elif what == 'sdl2-gfx':
      build_port('sdl2-gfx', libname('libSDL2_gfx'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'USE_SDL_GFX=2'])
    elif what == 'sdl2-image':
      build_port('sdl2-image', libname('libSDL2_image'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2'])
    elif what == 'sdl2-image-png':
      build_port('sdl2-image', libname('libSDL2_image_png'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'SDL2_IMAGE_FORMATS=["png"]'])
    elif what == 'sdl2-image-jpg':
      build_port('sdl2-image', libname('libSDL2_image_jpg'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'SDL2_IMAGE_FORMATS=["jpg"]'])
    elif what == 'sdl2-net':
      build_port('sdl2-net', libname('libSDL2_net'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_NET=2'])
    elif what == 'sdl2-mixer':
      build_port('sdl2-mixer', libname('libSDL2_mixer'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_MIXER=2', '-s', 'USE_VORBIS=1'])
    elif what == 'freetype':
      build_port('freetype', 'libfreetype.a', ['-s', 'USE_FREETYPE=1'])
    elif what == 'harfbuzz':
      build_port('harfbuzz', 'libharfbuzz.a', ['-s', 'USE_HARFBUZZ=1'])
    elif what == 'harfbuzz-mt':
      build_port('harfbuzz-mt', 'libharfbuzz-mt.a', ['-s', 'USE_HARFBUZZ=1', '-s', 'USE_PTHREADS=1'])
    elif what == 'sdl2-ttf':
      build_port('sdl2-ttf', libname('libSDL2_ttf'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_TTF=2', '-s', 'USE_FREETYPE=1'])
    elif what == 'binaryen':
      build_port('binaryen', None, ['-s', 'WASM=1'])
    elif what == 'cocos2d':
      build_port('cocos2d', libname('libcocos2d'), ['-s', 'USE_COCOS2D=3', '-s', 'USE_ZLIB=1', '-s', 'USE_LIBPNG=1', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0'])
    elif what == 'regal':
      build_port('regal', libname('libregal'), ['-s', 'USE_REGAL=1'])
    elif what == 'regal-mt':
      build_port('regal', libname('libregal'), ['-s', 'USE_REGAL=1', '-s', 'USE_PTHREADS=1', '-pthread'])
    elif what == 'boost_headers':
      build_port('boost_headers', libname('libboost_headers'), ['-s', 'USE_BOOST_HEADERS=1'])
    elif what == 'libsockets':
      build('''
        #include <sys/socket.h>
        int main() {
          return socket(0,0,0);
        }
      ''', [libname('libsockets')])
    elif what == 'libsockets_proxy':
      build('''
        #include <sys/socket.h>
        int main() {
          return socket(0,0,0);
        }
      ''', [libname('libsockets_proxy')], ['-s', 'PROXY_POSIX_SOCKETS=1', '-s', 'USE_PTHREADS=1', '-s', 'PROXY_TO_PTHREAD=1'])
    else:
      logger.error('unfamiliar build target: ' + what)
      return 1
  logger.info('...success')
  return 0
def load_metadata_wasm(metadata_raw, DEBUG):
    """Parse and validate the JSON metadata emitted by wasm-emscripten-finalize.

    Args:
      metadata_raw: raw JSON text produced by the finalize tool.
      DEBUG: when truthy, log the parsed metadata.

    Returns:
      A metadata dict seeded with defaults for every known key and overlaid
      with the tool's values.  Unknown keys are a fatal error
      (exit_with_error); unparsable JSON is logged and re-raised.

    Side effects:
      Appends user-requested exports discovered in the binary to
      building.user_requested_exports.
    """
    try:
        metadata_json = json.loads(metadata_raw)
    except Exception:
        logger.error('emscript: failure to parse metadata output from wasm-emscripten-finalize. raw output is: \n' + metadata_raw)
        raise
    # Defaults for every key the tool may emit; also serves as the schema of
    # accepted keys (anything else aborts below).
    metadata = {
        'aliases': {},
        'declares': [],
        'implementedFunctions': [],
        'externs': [],
        'simd': False,  # Obsolete, always False
        'maxGlobalAlign': 0,
        'staticBump': 0,
        'tableSize': 0,
        'initializers': [],
        'exports': [],
        'namedGlobals': {},
        'emJsFuncs': {},
        'asmConsts': {},
        'invokeFuncs': [],
        'features': [],
        'mainReadsParams': 1,
    }
    assert 'tableSize' in metadata_json.keys()
    for key, value in metadata_json.items():
        # json.loads returns `unicode` for strings but other code in this file
        # generally works with utf8 encoded `str` objects, and they don't always
        # mix well. e.g. s.replace(x, y) will blow up if `s` is a utf8 str
        # containing non-ascii and either x or y are unicode objects.
        # TODO(sbc): Remove this encoding if we switch to unicode elsewhere
        # (specifically the glue returned from compile_settings)
        if isinstance(value, list):
            value = [asstr(v) for v in value]
        if key not in metadata:
            exit_with_error('unexpected metadata key received from wasm-emscripten-finalize: %s', key)
        metadata[key] = value
    if not shared.Settings.MINIMAL_RUNTIME:
        # In regular runtime initializers call the global var version of the export, so they get the mangled name.
        # In MINIMAL_RUNTIME, the initializers are called directly off the export object for minimal code size.
        metadata['initializers'] = [asmjs_mangle(i) for i in metadata['initializers']]
    if DEBUG:
        logger.debug("Metadata parsed: " + pprint.pformat(metadata))
    # Calculate the subset of exports that were explicitly marked with llvm.used.
    # These are any exports that were not requested on the command line and are
    # not known auto-generated system functions.
    unexpected_exports = [e for e in metadata['exports'] if treat_as_user_function(e)]
    unexpected_exports = [asmjs_mangle(e) for e in unexpected_exports]
    unexpected_exports = [e for e in unexpected_exports if e not in shared.Settings.EXPORTED_FUNCTIONS]
    building.user_requested_exports += unexpected_exports
    # With the wasm backend the set of implemented functions is identical to
    # the set of exports.  Set this key here simply so that the shared code
    # that handles it keeps working.
    metadata['implementedFunctions'] = [asmjs_mangle(x) for x in metadata['exports']]
    return metadata
def main():
    """Command-line entry point: build and verify the requested cached
    system libraries and ports.

    Usage: <prog> build TARGET...  (or SYSTEM / USER / ALL meta-targets).
    Returns 0 on success, 1 on an unknown build target.
    """
    if len(sys.argv) < 2 or sys.argv[1] in ['-v', '-help', '--help', '-?', '?']:
        print_help()
        return 0
    operation = sys.argv[1]
    if operation != 'build':
        shared.exit_with_error('unfamiliar operation: ' + operation)
    # process flags
    args = sys.argv[2:]

    def is_flag(arg):
        # flags are the '--'-prefixed arguments; everything else is a target
        return arg.startswith('--')

    # Check sanity so that if settings file has changed, the cache is cleared here.
    # Otherwise, the cache will clear in an emcc process, which is invoked while building
    # a system library into the cache, causing trouble.
    shared.check_sanity()
    for arg in args:
        if is_flag(arg):
            arg = arg[2:]
            if arg == 'lto':
                shared.Settings.WASM_OBJECT_FILES = 0
            elif arg == 'pic':
                shared.Settings.RELOCATABLE = 1
            # Reconfigure the cache dir to reflect the change
            shared.reconfigure_cache()
    args = [a for a in args if not is_flag(a)]
    # process tasks
    libname = shared.static_library_name
    auto_tasks = False
    tasks = args
    if 'SYSTEM' in tasks:
        tasks = SYSTEM_TASKS
        auto_tasks = True
    elif 'USER' in tasks:
        tasks = USER_TASKS
        auto_tasks = True
    elif 'ALL' in tasks:
        tasks = SYSTEM_TASKS + USER_TASKS
        auto_tasks = True
    if auto_tasks:
        if shared.Settings.WASM_BACKEND:
            skip_tasks = []
            if shared.Settings.RELOCATABLE:
                # we don't support PIC + pthreads yet
                skip_tasks += [task for task in SYSTEM_TASKS + USER_TASKS if '-mt' in task or 'thread' in task]
            # cocos2d: must be ported, errors on
            # "Cannot recognize the target platform; are you targeting an unsupported platform?"
            skip_tasks += ['cocos2d']
            print('Skipping building of %s, because WebAssembly does not support pthreads.' % ', '.join(skip_tasks))
            tasks = [x for x in tasks if x not in skip_tasks]
        else:
            if os.environ.get('EMSCRIPTEN_NATIVE_OPTIMIZER'):
                print('Skipping building of native-optimizer; EMSCRIPTEN_NATIVE_OPTIMIZER is environment.')
            elif shared.EMSCRIPTEN_NATIVE_OPTIMIZER:
                print('Skipping building of native-optimizer; EMSCRIPTEN_NATIVE_OPTIMIZER set in .emscripten config.')
            else:
                tasks += ['native_optimizer']
    print('Building targets: %s' % ' '.join(tasks))
    for what in tasks:
        logger.info('building and verifying ' + what)
        if what in SYSTEM_LIBRARIES:
            library = SYSTEM_LIBRARIES[what]
            library.get_path()
        elif what == 'struct_info':
            build(C_BARE, ['generated_struct_info.json'])
        elif what == 'native_optimizer':
            build(C_BARE, ['optimizer.2.exe'], ['-O2', '-s', 'WASM=0'])
        elif what == 'icu':
            build_port('icu', libname('libicuuc'), ['-s', 'USE_ICU=1'])
        elif what == 'zlib':
            build_port('zlib', 'libz.a', ['-s', 'USE_ZLIB=1'])
        elif what == 'bzip2':
            build_port('bzip2', 'libbz2.a', ['-s', 'USE_BZIP2=1'])
        elif what == 'bullet':
            build_port('bullet', libname('libbullet'), ['-s', 'USE_BULLET=1'])
        elif what == 'vorbis':
            build_port('vorbis', libname('libvorbis'), ['-s', 'USE_VORBIS=1'])
        elif what == 'ogg':
            build_port('ogg', libname('libogg'), ['-s', 'USE_OGG=1'])
        elif what == 'libjpeg':
            build_port('libjpeg', libname('libjpeg'), ['-s', 'USE_LIBJPEG=1'])
        elif what == 'libpng':
            build_port('libpng', libname('libpng'), ['-s', 'USE_ZLIB=1', '-s', 'USE_LIBPNG=1'])
        elif what == 'sdl2':
            build_port('sdl2', libname('libSDL2'), ['-s', 'USE_SDL=2'])
        elif what == 'sdl2-mt':
            build_port('sdl2', libname('libSDL2-mt'), ['-s', 'USE_SDL=2', '-s', 'USE_PTHREADS=1'])
        elif what == 'sdl2-gfx':
            build_port('sdl2-gfx', libname('libSDL2_gfx'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'USE_SDL_GFX=2'])
        elif what == 'sdl2-image':
            build_port('sdl2-image', libname('libSDL2_image'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2'])
        elif what == 'sdl2-image-png':
            # NOTE(review): this and the -jpg variant reuse the plain
            # 'libSDL2_image' output name, so they overwrite each other (and the
            # plain sdl2-image build) in the cache — confirm whether distinct
            # names (e.g. libSDL2_image_png) were intended.
            build_port('sdl2-image', libname('libSDL2_image'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'SDL2_IMAGE_FORMATS=["png"]'])
        elif what == 'sdl2-image-jpg':
            build_port('sdl2-image', libname('libSDL2_image'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'SDL2_IMAGE_FORMATS=["jpg"]'])
        elif what == 'sdl2-net':
            build_port('sdl2-net', libname('libSDL2_net'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_NET=2'])
        elif what == 'sdl2-mixer':
            build_port('sdl2-mixer', 'libSDL2_mixer.a', ['-s', 'USE_SDL=2', '-s', 'USE_SDL_MIXER=2', '-s', 'USE_VORBIS=1'])
        elif what == 'freetype':
            build_port('freetype', 'libfreetype.a', ['-s', 'USE_FREETYPE=1'])
        elif what == 'harfbuzz':
            build_port('harfbuzz', 'libharfbuzz.a', ['-s', 'USE_HARFBUZZ=1'])
        elif what == 'sdl2-ttf':
            build_port('sdl2-ttf', libname('libSDL2_ttf'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_TTF=2', '-s', 'USE_FREETYPE=1'])
        elif what == 'binaryen':
            build_port('binaryen', None, ['-s', 'WASM=1'])
        elif what == 'cocos2d':
            build_port('cocos2d', libname('libcocos2d'), ['-s', 'USE_COCOS2D=3', '-s', 'USE_ZLIB=1', '-s', 'USE_LIBPNG=1', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0'])
        elif what == 'regal':
            build_port('regal', libname('libregal'), ['-s', 'USE_REGAL=1'])
        elif what == 'boost_headers':
            build_port('boost_headers', libname('libboost_headers'), ['-s', 'USE_BOOST_HEADERS=1'])
        else:
            logger.error('unfamiliar build target: ' + what)
            return 1
        logger.info('...success')
    return 0
def main():
    """Command-line entry point: build or clear cached system libraries and
    ports, with per-target and overall timing.

    Supports 'build' and 'clear' operations plus the SYSTEM / USER / MINIMAL /
    MINIMAL_PIC / ALL meta-targets.  Returns 0 on success, 1 on an unknown
    build target.
    """
    all_build_start_time = time.time()
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter,
                                     epilog=get_help())
    parser.add_argument('--lto', action='store_const', const='full', help='build bitcode object for LTO')
    # '--lto=thin' is registered as its own option name so both spellings work;
    # it stores into the same `lto` destination.
    parser.add_argument('--lto=thin', dest='lto', action='store_const', const='thin', help='build bitcode object for ThinLTO')
    parser.add_argument('--pic', action='store_true', help='build relocatable objects for suitable for dynamic linking')
    parser.add_argument('--force', action='store_true', help='force rebuild of target (by removing it first)')
    parser.add_argument('--verbose', action='store_true', help='show build commands')
    parser.add_argument('--wasm64', action='store_true', help='use wasm64 architecture')
    parser.add_argument('operation', help='currently only "build" and "clear" are supported')
    parser.add_argument('targets', nargs='+', help='see below')
    args = parser.parse_args()
    if args.operation not in ('build', 'clear'):
        shared.exit_with_error('unfamiliar operation: ' + args.operation)
    # process flags
    # Check sanity so that if settings file has changed, the cache is cleared here.
    # Otherwise, the cache will clear in an emcc process, which is invoked while building
    # a system library into the cache, causing trouble.
    shared.check_sanity()
    if args.lto:
        settings.LTO = args.lto
    if args.verbose:
        shared.PRINT_STAGES = True
    if args.pic:
        settings.RELOCATABLE = 1
    if args.wasm64:
        settings.MEMORY64 = 2
        # drop emmalloc variants from the minimal set under wasm64
        MINIMAL_TASKS[:] = [t for t in MINIMAL_TASKS if 'emmalloc' not in t]
    do_build = args.operation == 'build'
    do_clear = args.operation == 'clear'
    if args.force:
        # --force means: clear first, then (re)build
        do_clear = True
    # process tasks
    auto_tasks = False
    tasks = args.targets
    system_libraries, system_tasks = get_system_tasks()
    if 'SYSTEM' in tasks:
        tasks = system_tasks
        auto_tasks = True
    elif 'USER' in tasks:
        tasks = PORTS
        auto_tasks = True
    elif 'MINIMAL' in tasks:
        tasks = MINIMAL_TASKS
        auto_tasks = True
    elif 'MINIMAL_PIC' in tasks:
        tasks = MINIMAL_PIC_TASKS
        auto_tasks = True
    elif 'ALL' in tasks:
        tasks = system_tasks + PORTS
        auto_tasks = True
    if auto_tasks:
        # There are some ports that we don't want to build as part
        # of ALL since the are not well tested or widely used:
        skip_tasks = ['cocos2d']
        tasks = [x for x in tasks if x not in skip_tasks]
    print('Building targets: %s' % ' '.join(tasks))
    for what in tasks:
        # map legacy target names onto their current spelling
        for old, new in legacy_prefixes.items():
            if what.startswith(old):
                what = what.replace(old, new)
        if do_build:
            logger.info('building ' + what)
        else:
            logger.info('clearing ' + what)
        start_time = time.time()
        if what in system_libraries:
            library = system_libraries[what]
            if do_clear:
                library.erase()
            if do_build:
                library.get_path()
        elif what == 'sysroot':
            if do_clear:
                shared.Cache.erase_file('sysroot_install.stamp')
            if do_build:
                system_libs.ensure_sysroot()
        elif what == 'struct_info':
            if do_clear:
                emscripten.clear_struct_info()
            if do_build:
                emscripten.generate_struct_info()
        elif what in PORTS:
            if do_clear:
                clear_port(what)
            if do_build:
                build_port(what)
        else:
            logger.error('unfamiliar build target: ' + what)
            return 1
        time_taken = time.time() - start_time
        logger.info('...success. Took %s(%.2fs)' % (('%02d:%02d mins ' % (time_taken // 60, time_taken % 60) if time_taken >= 60 else ''), time_taken))
    if len(tasks) > 1:
        all_build_time_taken = time.time() - all_build_start_time
        logger.info('Built %d targets in %s(%.2fs)' % (len(tasks), ('%02d:%02d mins ' % (all_build_time_taken // 60, all_build_time_taken % 60) if all_build_time_taken >= 60 else ''), all_build_time_taken))
    return 0
def main():
    """Command-line entry point: build and verify cached system libraries
    and ports (argparse variant; only the 'build' operation is supported).

    Port variants are selected by temporarily toggling the relevant
    shared.Settings values around each build_port() call; the toggle order
    is therefore load-bearing.  Returns 0 on success, 1 on an unknown target.
    """
    global force
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter,
                                     epilog=get_help())
    parser.add_argument('--lto', action='store_true', help='build bitcode object for LTO')
    parser.add_argument('--pic', action='store_true', help='build relocatable objects for suitable for dynamic linking')
    parser.add_argument('--force', action='store_true', help='force rebuild of target (by removing it first)')
    parser.add_argument('operation', help='currently only "build" is supported')
    parser.add_argument('targets', nargs='+', help='see below')
    args = parser.parse_args()
    if args.operation != 'build':
        shared.exit_with_error('unfamiliar operation: ' + args.operation)
    # process flags
    # Check sanity so that if settings file has changed, the cache is cleared here.
    # Otherwise, the cache will clear in an emcc process, which is invoked while building
    # a system library into the cache, causing trouble.
    shared.check_sanity()
    if args.lto:
        shared.Settings.LTO = "full"
    if args.pic:
        shared.Settings.RELOCATABLE = 1
    if args.force:
        force = True
    # process tasks
    auto_tasks = False
    tasks = args.targets
    if 'SYSTEM' in tasks:
        tasks = SYSTEM_TASKS
        auto_tasks = True
    elif 'USER' in tasks:
        tasks = USER_TASKS
        auto_tasks = True
    elif 'MINIMAL' in tasks:
        tasks = MINIMAL_TASKS
        auto_tasks = True
    elif 'ALL' in tasks:
        tasks = SYSTEM_TASKS + USER_TASKS
        auto_tasks = True
    if auto_tasks:
        # cocos2d: must be ported, errors on
        # "Cannot recognize the target platform; are you targeting an unsupported platform?"
        skip_tasks = ['cocos2d']
        tasks = [x for x in tasks if x not in skip_tasks]
    print('Building targets: %s' % ' '.join(tasks))
    for what in tasks:
        logger.info('building and verifying ' + what)
        if what in SYSTEM_LIBRARIES:
            library = SYSTEM_LIBRARIES[what]
            if force:
                library.erase()
            library.get_path()
        elif what == 'sysroot':
            if force:
                shared.Cache.erase_file('sysroot_install.stamp')
            system_libs.ensure_sysroot()
        elif what == 'struct_info':
            if force:
                shared.Cache.erase_file('generated_struct_info.json')
            emscripten.generate_struct_info()
        elif what == 'icu':
            build_port('icu', 'libicuuc.a')
        elif what == 'zlib':
            shared.Settings.USE_ZLIB = 1
            build_port('zlib', 'libz.a')
            shared.Settings.USE_ZLIB = 0
        elif what == 'bzip2':
            build_port('bzip2', 'libbz2.a')
        elif what == 'bullet':
            build_port('bullet', 'libbullet.a')
        elif what == 'vorbis':
            build_port('vorbis', 'libvorbis.a')
        elif what == 'ogg':
            build_port('ogg', 'libogg.a')
        elif what == 'giflib':
            build_port('giflib', 'libgif.a')
        elif what == 'libjpeg':
            build_port('libjpeg', 'libjpeg.a')
        elif what == 'libpng':
            build_port('libpng', 'libpng.a')
        elif what == 'sdl2':
            build_port('sdl2', 'libSDL2.a')
        elif what == 'sdl2-mt':
            # pthreads variant: toggle the setting only for this build
            shared.Settings.USE_PTHREADS = 1
            build_port('sdl2', 'libSDL2-mt.a')
            shared.Settings.USE_PTHREADS = 0
        elif what == 'sdl2-gfx':
            build_port('sdl2_gfx', 'libSDL2_gfx.a')
        elif what == 'sdl2-image':
            build_port('sdl2_image', 'libSDL2_image.a')
        elif what == 'sdl2-image-png':
            shared.Settings.SDL2_IMAGE_FORMATS = ["png"]
            build_port('sdl2_image', 'libSDL2_image_png.a')
            shared.Settings.SDL2_IMAGE_FORMATS = []
        elif what == 'sdl2-image-jpg':
            shared.Settings.SDL2_IMAGE_FORMATS = ["jpg"]
            build_port('sdl2_image', 'libSDL2_image_jpg.a')
            shared.Settings.SDL2_IMAGE_FORMATS = []
        elif what == 'sdl2-net':
            build_port('sdl2_net', 'libSDL2_net.a')
        elif what == 'sdl2-mixer':
            # save/restore rather than assume the default format list
            old_formats = shared.Settings.SDL2_MIXER_FORMATS
            shared.Settings.SDL2_MIXER_FORMATS = []
            build_port('sdl2_mixer', 'libSDL2_mixer.a')
            shared.Settings.SDL2_MIXER_FORMATS = old_formats
        elif what == 'sdl2-mixer-ogg':
            old_formats = shared.Settings.SDL2_MIXER_FORMATS
            shared.Settings.SDL2_MIXER_FORMATS = ["ogg"]
            build_port('sdl2_mixer', 'libSDL2_mixer_ogg.a')
            shared.Settings.SDL2_MIXER_FORMATS = old_formats
        elif what == 'sdl2-mixer-mp3':
            old_formats = shared.Settings.SDL2_MIXER_FORMATS
            shared.Settings.SDL2_MIXER_FORMATS = ["mp3"]
            build_port('sdl2_mixer', 'libSDL2_mixer_mp3.a')
            shared.Settings.SDL2_MIXER_FORMATS = old_formats
        elif what == 'freetype':
            build_port('freetype', 'libfreetype.a')
        elif what == 'harfbuzz':
            build_port('harfbuzz', 'libharfbuzz.a')
        elif what == 'harfbuzz-mt':
            shared.Settings.USE_PTHREADS = 1
            build_port('harfbuzz', 'libharfbuzz-mt.a')
            shared.Settings.USE_PTHREADS = 0
        elif what == 'sdl2-ttf':
            build_port('sdl2_ttf', 'libSDL2_ttf.a')
        elif what == 'cocos2d':
            build_port('cocos2d', 'libcocos2d.a')
        elif what == 'regal':
            build_port('regal', 'libregal.a')
        elif what == 'regal-mt':
            shared.Settings.USE_PTHREADS = 1
            build_port('regal', 'libregal-mt.a')
            shared.Settings.USE_PTHREADS = 0
        elif what == 'boost_headers':
            build_port('boost_headers', 'libboost_headers.a')
        else:
            logger.error('unfamiliar build target: ' + what)
            return 1
        logger.info('...success')
    return 0
def run_on_js(filename, passes, extra_info=None, just_split=False, just_concat=False):
    """Run the JS optimizer passes over the generated-functions section of
    an emscripten output file.

    The file is split at well-known markers into pre / functions / post,
    the functions are chunked and optimized (possibly in parallel via the
    node-based optimizer), then re-assembled.  Returns the path of the new
    '<filename>.jo.js' output file.

    Args:
      filename: path to the JS file to optimize (must contain the
        EMSCRIPTEN start/end markers and the generated-functions suffix).
      passes: a pass name or list of pass names; 'closure'/'cleanup'/
        'minifyNames' are handled specially here rather than forwarded.
      extra_info: optional dict serialized into each chunk as // EXTRA_INFO.
      just_split: keep one chunk per function (used for source maps).
      just_concat: skip the size-sort when re-assembling the output.
    """
    with ToolchainProfiler.profile_block('js_optimizer.split_markers'):
        if not isinstance(passes, list):
            passes = [passes]
        js = open(filename).read()
        if os.linesep != '\n':
            js = js.replace(os.linesep, '\n')  # we assume \n in the splitting code
        # Find suffix
        suffix_marker = '// EMSCRIPTEN_GENERATED_FUNCTIONS'
        suffix_start = js.find(suffix_marker)
        suffix = ''
        if suffix_start >= 0:
            suffix_end = js.find('\n', suffix_start)
            suffix = js[suffix_start:suffix_end] + '\n'
            # if there is metadata, we will run only on the generated
            # functions. If there isn't, we will run on everything.
        # Find markers
        start_funcs = js.find(start_funcs_marker)
        end_funcs = js.rfind(end_funcs_marker)
        if start_funcs < 0 or end_funcs < start_funcs or not suffix:
            shared.exit_with_error('Invalid input file. Did not contain appropriate markers. (start_funcs: %s, end_funcs: %s, suffix_start: %s' % (start_funcs, end_funcs, suffix_start))
        minify_globals = 'minifyNames' in passes
        if minify_globals:
            # global minification is done here via the Minifier; the optimizer
            # itself only sees 'minifyLocals'
            passes = [p if p != 'minifyNames' else 'minifyLocals' for p in passes]
            start_asm = js.find(start_asm_marker)
            end_asm = js.rfind(end_asm_marker)
            assert (start_asm >= 0) == (end_asm >= 0)
        closure = 'closure' in passes
        if closure:
            passes = [p for p in passes if p != 'closure']  # we will do it manually
        cleanup = 'cleanup' in passes
        if cleanup:
            passes = [p for p in passes if p != 'cleanup']  # we will do it manually
    if not minify_globals:
        with ToolchainProfiler.profile_block('js_optimizer.no_minify_globals'):
            pre = js[:start_funcs + len(start_funcs_marker)]
            post = js[end_funcs + len(end_funcs_marker):]
            js = js[start_funcs + len(start_funcs_marker):end_funcs]
            if 'asm' not in passes:
                # can have Module[..] and inlining prevention code, push those to post
                class Finals(object):
                    # lines moved out of the function section, to be appended to post
                    buf = []

                def process(line):
                    if len(line) and (line.startswith(('Module[', 'if (globalScope)')) or line.endswith('["X"]=1;')):
                        Finals.buf.append(line)
                        return False
                    return True
                js = '\n'.join(filter(process, js.split('\n')))
                post = '\n'.join(Finals.buf) + '\n' + post
            post = end_funcs_marker + post
    else:
        with ToolchainProfiler.profile_block('js_optimizer.minify_globals'):
            # We need to split out the asm shell as well, for minification
            pre = js[:start_asm + len(start_asm_marker)]
            post = js[end_asm:]
            asm_shell = js[start_asm + len(start_asm_marker):start_funcs + len(start_funcs_marker)] + '''
EMSCRIPTEN_FUNCS();
''' + js[end_funcs + len(end_funcs_marker):end_asm + len(end_asm_marker)]
            js = js[start_funcs + len(start_funcs_marker):end_funcs]
            # we assume there is a maximum of one new name per line
            minifier = Minifier(js)

            def check_symbol_mapping(p):
                # consume the pseudo-passes that only configure the minifier
                if p.startswith('symbolMap='):
                    minifier.symbols_file = p.split('=', 1)[1]
                    return False
                if p == 'profilingFuncs':
                    minifier.profiling_funcs = True
                    return False
                return True
            passes = list(filter(check_symbol_mapping, passes))
            asm_shell_pre, asm_shell_post = minifier.minify_shell(asm_shell, 'minifyWhitespace' in passes).split('EMSCRIPTEN_FUNCS();')
            asm_shell_post = asm_shell_post.replace('});', '})')
            pre += asm_shell_pre + '\n' + start_funcs_marker
            post = end_funcs_marker + asm_shell_post + post
            minify_info = minifier.serialize()
            if extra_info:
                for key, value in extra_info.items():
                    assert key not in minify_info or value == minify_info[key], [key, value, minify_info[key]]
                    minify_info[key] = value
            # if DEBUG:
            #   print >> sys.stderr, 'minify info:', minify_info
    with ToolchainProfiler.profile_block('js_optimizer.remove_suffix_and_split'):
        # remove suffix if no longer needed
        if suffix and 'last' in passes:
            suffix_start = post.find(suffix_marker)
            suffix_end = post.find('\n', suffix_start)
            post = post[:suffix_start] + post[suffix_end:]
        total_size = len(js)
        funcs = split_funcs(js, just_split)
        js = None
    with ToolchainProfiler.profile_block('js_optimizer.split_to_chunks'):
        # if we are making source maps, we want our debug numbering to start from the
        # top of the file, so avoid breaking the JS into chunks
        cores = building.get_num_cores()
        if not just_split:
            intended_num_chunks = int(round(cores * NUM_CHUNKS_PER_CORE))
            chunk_size = min(MAX_CHUNK_SIZE, max(MIN_CHUNK_SIZE, total_size / intended_num_chunks))
            chunks = chunkify(funcs, chunk_size)
        else:
            # keep same chunks as before
            chunks = [f[1] for f in funcs]
        chunks = [chunk for chunk in chunks if len(chunk)]
        if DEBUG and len(chunks):
            print('chunkification: num funcs:', len(funcs), 'actual num chunks:', len(chunks), 'chunk size range:', max(map(len, chunks)), '-', min(map(len, chunks)), file=sys.stderr)
        funcs = None
        if len(chunks):
            # each chunk carries the suffix marker plus optional EXTRA_INFO
            serialized_extra_info = suffix_marker + '\n'
            if minify_globals:
                serialized_extra_info += '// EXTRA_INFO:' + json.dumps(minify_info)
            elif extra_info:
                serialized_extra_info += '// EXTRA_INFO:' + json.dumps(extra_info)
            with ToolchainProfiler.profile_block('js_optimizer.write_chunks'):
                def write_chunk(chunk, i):
                    temp_file = temp_files.get('.jsfunc_%d.js' % i).name
                    with open(temp_file, 'w') as f:
                        f.write(chunk)
                        f.write(serialized_extra_info)
                    return temp_file
                filenames = [write_chunk(chunks[i], i) for i in range(len(chunks))]
        else:
            filenames = []
    with ToolchainProfiler.profile_block('run_optimizer'):
        if len(filenames):
            commands = [config.NODE_JS + [JS_OPTIMIZER, f, 'noPrintMetadata'] + passes for f in filenames]
            cores = min(cores, len(filenames))
            if len(chunks) > 1 and cores >= 2:
                # We can parallelize
                if DEBUG:
                    print('splitting up js optimization into %d chunks, using %d cores (total: %.2f MB)' % (len(chunks), cores, total_size / (1024 * 1024.)), file=sys.stderr)
                with ToolchainProfiler.profile_block('optimizer_pool'):
                    pool = building.get_multiprocessing_pool()
                    filenames = pool.map(run_on_chunk, commands, chunksize=1)
            else:
                # We can't parallize, but still break into chunks to avoid uglify/node memory issues
                if len(chunks) > 1 and DEBUG:
                    print('splitting up js optimization into %d chunks' % (len(chunks)), file=sys.stderr)
                filenames = [run_on_chunk(command) for command in commands]
        else:
            filenames = []
        for filename in filenames:
            temp_files.note(filename)
    with ToolchainProfiler.profile_block('split_closure_cleanup'):
        if closure or cleanup:
            # run on the shell code, everything but what we js-optimize
            start_asm = '// EMSCRIPTEN_START_ASM\n'
            end_asm = '// EMSCRIPTEN_END_ASM\n'
            cl_sep = 'wakaUnknownBefore(); var asm=wakaUnknownAfter(wakaGlobal,wakaEnv,wakaBuffer)\n'
            with temp_files.get_file('.cl.js') as cle:
                pre_1, pre_2 = pre.split(start_asm)
                post_1, post_2 = post.split(end_asm)
                with open(cle, 'w') as f:
                    f.write(pre_1)
                    f.write(cl_sep)
                    f.write(post_2)
            cld = cle
            if closure:
                if DEBUG:
                    print('running closure on shell code', file=sys.stderr)
                cld = building.closure_compiler(cld, pretty='minifyWhitespace' not in passes)
                temp_files.note(cld)
            elif cleanup:
                if DEBUG:
                    print('running cleanup on shell code', file=sys.stderr)
                acorn_passes = ['JSDCE']
                if 'minifyWhitespace' in passes:
                    acorn_passes.append('minifyWhitespace')
                cld = building.acorn_optimizer(cld, acorn_passes)
                temp_files.note(cld)
            coutput = open(cld).read()
            coutput = coutput.replace('wakaUnknownBefore();', start_asm)
            after = 'wakaUnknownAfter'
            start = coutput.find(after)
            end = coutput.find(')', start)
            # If the closure comment to suppress useless code is present, we need to look one
            # brace past it, as the first is in there. Otherwise, the first brace is the
            # start of the function body (what we want).
            USELESS_CODE_COMMENT = '/** @suppress {uselessCode} */ '
            USELESS_CODE_COMMENT_BODY = 'uselessCode'
            brace = pre_2.find('{') + 1
            has_useless_code_comment = False
            if pre_2[brace:brace + len(USELESS_CODE_COMMENT_BODY)] == USELESS_CODE_COMMENT_BODY:
                brace = pre_2.find('{', brace) + 1
                has_useless_code_comment = True
            pre = coutput[:start] + '(' + (USELESS_CODE_COMMENT if has_useless_code_comment else '') + 'function(global,env,buffer) {\n' + pre_2[brace:]
            post = post_1 + end_asm + coutput[end + 1:]
    with ToolchainProfiler.profile_block('write_pre'):
        filename += '.jo.js'
        temp_files.note(filename)
        f = open(filename, 'w')
        f.write(pre)
        pre = None
    with ToolchainProfiler.profile_block('sort_or_concat'):
        if not just_concat:
            # sort functions by size, to make diffing easier and to improve aot times
            funcses = []
            for out_file in filenames:
                funcses.append(split_funcs(open(out_file).read(), False))
            funcs = [item for sublist in funcses for item in sublist]
            funcses = None
            if not os.environ.get('EMCC_NO_OPT_SORT'):
                funcs.sort(key=lambda x: (len(x[1]), x[0]), reverse=True)
            if 'last' in passes and len(funcs):
                count = funcs[0][1].count('\n')
                if count > 3000:
                    print('warning: Output contains some very large functions (%s lines in %s), consider building source files with -Os or -Oz)' % (count, funcs[0][0]), file=sys.stderr)
            for func in funcs:
                f.write(func[1])
            funcs = None
        else:
            # just concat the outputs
            for out_file in filenames:
                f.write(open(out_file).read())
    with ToolchainProfiler.profile_block('write_post'):
        f.write('\n')
        f.write(post)
        # No need to write suffix: if there was one, it is inside post which exists when suffix is there
        f.write('\n')
        f.close()
    return filename
def main():
    """Command-line entry point: build and verify cached system libraries
    and ports, with per-target and overall timing (build-only variant).

    Returns 0 on success, 1 on an unknown build target.
    """
    global force
    all_build_start_time = time.time()
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter,
                                     epilog=get_help())
    parser.add_argument('--lto', action='store_true', help='build bitcode object for LTO')
    parser.add_argument('--pic', action='store_true', help='build relocatable objects for suitable for dynamic linking')
    parser.add_argument('--force', action='store_true', help='force rebuild of target (by removing it first)')
    parser.add_argument('operation', help='currently only "build" is supported')
    parser.add_argument('targets', nargs='+', help='see below')
    args = parser.parse_args()
    if args.operation != 'build':
        shared.exit_with_error('unfamiliar operation: ' + args.operation)
    # process flags
    # Check sanity so that if settings file has changed, the cache is cleared here.
    # Otherwise, the cache will clear in an emcc process, which is invoked while building
    # a system library into the cache, causing trouble.
    shared.check_sanity()
    if args.lto:
        settings.LTO = "full"
    if args.pic:
        settings.RELOCATABLE = 1
    if args.force:
        force = True
    # process tasks
    auto_tasks = False
    tasks = args.targets
    if 'SYSTEM' in tasks:
        tasks = SYSTEM_TASKS
        auto_tasks = True
    elif 'USER' in tasks:
        tasks = PORTS
        auto_tasks = True
    elif 'MINIMAL' in tasks:
        tasks = MINIMAL_TASKS
        auto_tasks = True
    elif 'ALL' in tasks:
        tasks = SYSTEM_TASKS + PORTS
        auto_tasks = True
    if auto_tasks:
        # cocos2d: must be ported, errors on
        # "Cannot recognize the target platform; are you targeting an unsupported platform?"
        skip_tasks = ['cocos2d']
        tasks = [x for x in tasks if x not in skip_tasks]
    print('Building targets: %s' % ' '.join(tasks))
    for what in tasks:
        # map legacy target names onto their current spelling
        for old, new in legacy_prefixes.items():
            if what.startswith(old):
                what = what.replace(old, new)
        logger.info('building and verifying ' + what)
        start_time = time.time()
        if what in SYSTEM_LIBRARIES:
            library = SYSTEM_LIBRARIES[what]
            if force:
                library.erase()
            library.get_path()
        elif what == 'sysroot':
            if force:
                shared.Cache.erase_file('sysroot_install.stamp')
            system_libs.ensure_sysroot()
        elif what == 'struct_info':
            if force:
                shared.Cache.erase_file('generated_struct_info.json')
            emscripten.generate_struct_info()
        elif what in PORTS:
            build_port(what)
        else:
            logger.error('unfamiliar build target: ' + what)
            return 1
        time_taken = time.time() - start_time
        logger.info('...success. Took %s(%.2fs)' % (('%02d:%02d mins ' % (time_taken // 60, time_taken % 60) if time_taken >= 60 else ''), time_taken))
    if len(tasks) > 1:
        all_build_time_taken = time.time() - all_build_start_time
        logger.info('Built %d targets in %s(%.2fs)' % (len(tasks), ('%02d:%02d mins ' % (all_build_time_taken // 60, all_build_time_taken % 60) if all_build_time_taken >= 60 else ''), all_build_time_taken))
    return 0
def main():
    """Command-line entry point: build and verify cached system libraries
    and ports (variant that builds libraries from inline C/C++ snippets).

    Usage: <prog> build TARGET...  (or SYSTEM / USER / ALL meta-targets).
    Returns 0 on success, 1 on an unknown build target.
    """
    if len(sys.argv) < 2 or sys.argv[1] in ['-v', '-help', '--help', '-?', '?']:
        print_help()
        return 0
    operation = sys.argv[1]
    if operation != 'build':
        shared.exit_with_error('unfamiliar operation: ' + operation)
    # process flags
    args = sys.argv[2:]

    def is_flag(arg):
        # flags are the '--'-prefixed arguments; everything else is a target
        return arg.startswith('--')

    for arg in args:
        if is_flag(arg):
            arg = arg[2:]
            if arg == 'lto':
                shared.Settings.WASM_OBJECT_FILES = 0
            # Reconfigure the cache dir to reflect the change
            shared.reconfigure_cache()
    args = [a for a in args if not is_flag(a)]
    # process tasks
    libname = shared.static_library_name
    auto_tasks = False
    tasks = args
    if 'SYSTEM' in tasks:
        tasks = SYSTEM_TASKS
        auto_tasks = True
    elif 'USER' in tasks:
        tasks = USER_TASKS
        auto_tasks = True
    elif 'ALL' in tasks:
        tasks = SYSTEM_TASKS + USER_TASKS
        auto_tasks = True
    if auto_tasks:
        if shared.Settings.WASM_BACKEND:
            skip_tasks = [task for task in SYSTEM_TASKS + USER_TASKS if '-mt' in task or 'thread' in task]
            # cocos2d: must be ported, errors on
            # "Cannot recognize the target platform; are you targeting an unsupported platform?"
            skip_tasks += ['cocos2d']
            print('Skipping building of %s, because WebAssembly does not support pthreads.' % ', '.join(skip_tasks))
            tasks = [x for x in tasks if x not in skip_tasks]
        else:
            if os.environ.get('EMSCRIPTEN_NATIVE_OPTIMIZER'):
                print('Skipping building of native-optimizer; EMSCRIPTEN_NATIVE_OPTIMIZER is environment.')
            elif shared.EMSCRIPTEN_NATIVE_OPTIMIZER:
                print('Skipping building of native-optimizer; EMSCRIPTEN_NATIVE_OPTIMIZER set in .emscripten config.')
            else:
                tasks += ['native_optimizer']
    print('Building targets: %s' % ' '.join(tasks))
    for what in tasks:
        logger.info('building and verifying ' + what)
        # Each branch compiles a tiny program that pulls in the library we
        # want cached, or delegates to build_port() for ports.
        if what == 'compiler-rt':
            build('''
                int main() {
                  double _Complex a, b, c;
                  c = a / b;
                  return 0;
                }
            ''', ['libcompiler_rt.a'])
        elif what == 'libc':
            build(C_WITH_MALLOC, [libname('libc')])
        elif what == 'libc-extras':
            build('''
                extern char **environ;
                int main() {
                  return (int)environ;
                }
            ''', [libname('libc-extras')])
        elif what == 'struct_info':
            build(C_BARE, ['generated_struct_info.json'])
        elif what == 'emmalloc':
            build(C_WITH_MALLOC, [libname('libemmalloc')], ['-s', 'MALLOC="emmalloc"'])
        elif what == 'emmalloc_debug':
            build(C_WITH_MALLOC, [libname('libemmalloc_debug')], ['-s', 'MALLOC="emmalloc"', '-g'])
        elif what.startswith('dlmalloc'):
            # dlmalloc variants are encoded in the target name suffixes
            cmd = ['-s', 'MALLOC="dlmalloc"']
            if '_debug' in what:
                cmd += ['-g']
            if '_noerrno' in what:
                cmd += ['-s', 'SUPPORT_ERRNO=0']
            if '_threadsafe' in what:
                cmd += ['-s', 'USE_PTHREADS=1']
            if '_tracing' in what:
                cmd += ['-s', 'EMSCRIPTEN_TRACING=1']
            build(C_WITH_MALLOC, [libname('lib' + what)], cmd)
        elif what in ('libc-mt', 'pthreads'):
            build(C_WITH_MALLOC, [libname('libc-mt'), libname('libpthreads')], ['-s', 'USE_PTHREADS=1'])
        elif what == 'libc-wasm':
            build(C_WITH_STDLIB, [libname('libc-wasm')], ['-s', 'WASM=1'])
        elif what == 'libc++':
            build(CXX_WITH_STDLIB, ['libc++.a'], ['-s', 'DISABLE_EXCEPTION_CATCHING=0'])
        elif what == 'libc++_noexcept':
            build(CXX_WITH_STDLIB, ['libc++_noexcept.a'])
        elif what == 'libc++abi':
            build('''
                struct X { int x; virtual void a() {} };
                struct Y : X { int y; virtual void a() { y = 10; }};
                int main(int argc, char **argv) {
                  Y* y = dynamic_cast<Y*>((X*)argv[1]);
                  y->a();
                  return y->y;
                }
            ''', [libname('libc++abi')])
        elif what == 'gl' or what.startswith('gl-'):
            opts = []
            if '-mt' in what:
                opts += ['-s', 'USE_PTHREADS=1']
            if '-emu' in what:
                opts += ['-s', 'LEGACY_GL_EMULATION=1']
            if '-webgl2' in what:
                opts += ['-s', 'USE_WEBGL2=1']
            build('''
                extern "C" { extern void* emscripten_GetProcAddress(const char *x); }
                int main() {
                  return int(emscripten_GetProcAddress("waka waka"));
                }
            ''', [libname('lib' + what)], opts)
        elif what == 'native_optimizer':
            build(C_BARE, ['optimizer.2.exe'], ['-O2', '-s', 'WASM=0'])
        elif what == 'compiler_rt_wasm':
            if shared.Settings.WASM_BACKEND:
                build(C_BARE, ['libcompiler_rt_wasm.a'], ['-s', 'WASM=1'])
            else:
                logger.warning('compiler_rt_wasm not built when using JSBackend')
        elif what == 'html5':
            build('''
                #include <stdlib.h>
                #include "emscripten/key_codes.h"
                int main() {
                  return emscripten_compute_dom_pk_code(NULL);
                }
            ''', [libname('libhtml5')])
        elif what == 'pthreads_stub':
            build('''
                #include <emscripten/threading.h>
                int main() {
                  return emscripten_is_main_runtime_thread();
                }
            ''', [libname('libpthreads_stub')])
        elif what == 'al':
            build('''
                #include "AL/al.h"
                int main() {
                  alGetProcAddress(0);
                  return 0;
                }
            ''', [libname('libal')])
        elif what == 'icu':
            build_port('icu', libname('libicuuc'), ['-s', 'USE_ICU=1'])
        elif what == 'zlib':
            build_port('zlib', 'libz.a', ['-s', 'USE_ZLIB=1'])
        elif what == 'bullet':
            build_port('bullet', libname('libbullet'), ['-s', 'USE_BULLET=1'])
        elif what == 'vorbis':
            build_port('vorbis', libname('libvorbis'), ['-s', 'USE_VORBIS=1'])
        elif what == 'ogg':
            build_port('ogg', libname('libogg'), ['-s', 'USE_OGG=1'])
        elif what == 'libpng':
            build_port('libpng', libname('libpng'), ['-s', 'USE_ZLIB=1', '-s', 'USE_LIBPNG=1'])
        elif what == 'sdl2':
            build_port('sdl2', libname('libSDL2'), ['-s', 'USE_SDL=2'])
        elif what == 'sdl2-gfx':
            build_port('sdl2-gfx', libname('libSDL2_gfx'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'USE_SDL_GFX=2'])
        elif what == 'sdl2-image':
            build_port('sdl2-image', libname('libSDL2_image'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2'])
        elif what == 'sdl2-image-png':
            # NOTE(review): output name matches plain sdl2-image — the two
            # cache entries overwrite each other; confirm intended.
            build_port('sdl2-image', libname('libSDL2_image'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'SDL2_IMAGE_FORMATS=["png"]'])
        elif what == 'sdl2-net':
            build_port('sdl2-net', libname('libSDL2_net'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_NET=2'])
        elif what == 'sdl2-mixer':
            build_port('sdl2-mixer', 'libSDL2_mixer.a', ['-s', 'USE_SDL=2', '-s', 'USE_SDL_MIXER=2', '-s', 'USE_VORBIS=1'])
        elif what == 'freetype':
            build_port('freetype', 'libfreetype.a', ['-s', 'USE_FREETYPE=1'])
        elif what == 'harfbuzz':
            build_port('harfbuzz', 'libharfbuzz.a', ['-s', 'USE_HARFBUZZ=1'])
        elif what == 'sdl2-ttf':
            build_port('sdl2-ttf', libname('libSDL2_ttf'), ['-s', 'USE_SDL=2', '-s', 'USE_SDL_TTF=2', '-s', 'USE_FREETYPE=1'])
        elif what == 'binaryen':
            build_port('binaryen', None, ['-s', 'WASM=1'])
        elif what == 'cocos2d':
            build_port('cocos2d', libname('libcocos2d'), ['-s', 'USE_COCOS2D=3', '-s', 'USE_ZLIB=1', '-s', 'USE_LIBPNG=1', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0'])
        elif what == 'regal':
            build_port('regal', libname('libregal'), ['-s', 'USE_REGAL=1'])
        elif what == 'libc-sockets':
            build('''
                #include <sys/socket.h>
                int main() {
                  return socket(0,0,0);
                }
            ''', [static_library_name('libc-sockets')])
        elif what == 'libc-sockets-proxy':
            build('''
                #include <sys/socket.h>
                int main() {
                  return socket(0,0,0);
                }
            ''', [static_library_name('libc-sockets-proxy')], ['-s', 'PROXY_POSIX_SOCKETS=1', '-s', 'USE_PTHREADS=1', '-s', 'PROXY_TO_PTHREAD=1'])
        else:
            logger.error('unfamiliar build target: ' + what)
            return 1
        logger.info('...success')
    return 0
def emscript(in_wasm, out_wasm, outfile_js, memfile, DEBUG):
    """Generate the JS glue that accompanies a finalized wasm binary.

    Overview:
      * Run wasm-emscripten-finalize to extract metadata and modify the
        binary to use emscripten's wasm<->JS ABI.
      * Use the metadata to generate the JS glue that goes with the wasm.
    """
    if settings.SINGLE_FILE:
        # placeholder strings for JS glue, to be replaced with subresource
        # locations in do_binaryen
        settings.WASM_BINARY_FILE = '<<< WASM_BINARY_FILE >>>'
    else:
        # set file locations, so that JS glue can find what it needs
        settings.WASM_BINARY_FILE = shared.JS.escape_for_js_string(
            os.path.basename(out_wasm))

    metadata = finalize_wasm(in_wasm, out_wasm, memfile, DEBUG)

    update_settings_glue(metadata, DEBUG)

    if settings.SIDE_MODULE:
        # Side modules get no JS glue at all; EM_ASM/EM_JS cannot work there.
        if metadata['asmConsts']:
            exit_with_error('EM_ASM is not supported in side modules')
        if metadata['emJsFuncs']:
            exit_with_error('EM_JS is not supported in side modules')
        logger.debug('emscript: skipping remaining js glue generation')
        return

    if DEBUG:
        logger.debug('emscript: js compiler glue')
        start_time = time.time()

    # memory and global initializers
    if settings.RELOCATABLE:
        dylink_mem_size = align_memory(
            webassembly.parse_dylink_section(in_wasm).mem_size)
        set_memory(dylink_mem_size)
        logger.debug('stack_base: %d, stack_max: %d, heap_base: %d',
                     settings.STACK_BASE, settings.STACK_MAX,
                     settings.HEAP_BASE)

    glue, forwarded_data = compile_settings()
    if DEBUG:
        logger.debug(' emscript: glue took %s seconds' % (time.time() - start_time))
        start_time = time.time()

    forwarded_json = json.loads(forwarded_data)

    pre, post = glue.split('// EMSCRIPTEN_END_FUNCS')

    exports = metadata['exports']
    if settings.ASYNCIFY:
        exports += [
            'asyncify_start_unwind', 'asyncify_stop_unwind',
            'asyncify_start_rewind', 'asyncify_stop_rewind'
        ]

    report_missing_symbols(forwarded_json['libraryFunctions'])

    if not outfile_js:
        logger.debug('emscript: skipping remaining js glue generation')
        return

    if settings.MINIMAL_RUNTIME:
        # In MINIMAL_RUNTIME, atinit exists in the postamble part
        post = apply_static_code_hooks(forwarded_json, post)
    else:
        # In regular runtime, atinits etc. exist in the preamble part
        pre = apply_static_code_hooks(forwarded_json, pre)

    # Splice the EM_ASM constant table and EM_JS functions into the body.
    asm_consts = create_asm_consts(metadata)
    em_js_funcs = create_em_js(metadata)
    asm_const_pairs = ['%s: %s' % (key, value) for key, value in asm_consts]
    asm_const_map = 'var ASM_CONSTS = {\n ' + ', \n '.join(asm_const_pairs) + '\n};\n'
    pre = pre.replace(
        '// === Body ===',
        ('// === Body ===\n\n' + asm_const_map + '\n'.join(em_js_funcs) + '\n'))

    with open(outfile_js, 'w') as out:
        out.write(normalize_line_endings(pre))
        pre = None  # release the (potentially large) preamble string

        invoke_funcs = metadata['invokeFuncs']
        sending = create_sending(invoke_funcs, metadata)
        receiving = create_receiving(exports)

        if settings.MINIMAL_RUNTIME:
            if settings.DECLARE_ASM_MODULE_EXPORTS:
                post = compute_minimal_runtime_initializer_and_exports(
                    post, exports, receiving)
            receiving = ''

        module = create_module(sending, receiving, invoke_funcs, metadata)

        write_output_file(out, module)

        out.write(normalize_line_endings(post))
        module = None
def do_wasm2c(infile):
    """Convert a wasm binary into a single hermetic C file via wasm2c.

    Runs the wasm2c tool on `infile`, then bundles the generated C together
    with the wasm2c runtime headers/sources and our support code into one
    self-contained .wasm.c file, and synthesizes the `invoke_*` trampolines
    (setjmp/longjmp-based) that emscripten's exception/longjmp support needs.

    Side effects: writes `<infile>.wasm.c` and appends to `<infile>.wasm.h`.
    """
    assert Settings.STANDALONE_WASM
    WASM2C = NODE_JS + [path_from_root('node_modules', 'wasm2c', 'wasm2c.js')]
    WASM2C_DIR = path_from_root('node_modules', 'wasm2c')
    c_file = unsuffixed(infile) + '.wasm.c'
    h_file = unsuffixed(infile) + '.wasm.h'
    cmd = WASM2C + [infile, '-o', c_file]
    check_call(cmd)
    total = '''\
/*
 * This file was generated by emcc+wasm2c. To compile it, use something like
 *
 *   $CC FILE.c -O2 -lm -DWASM_RT_MAX_CALL_STACK_DEPTH=8000
 */
'''
    SEP = '\n/* ==================================== */\n'

    def bundle_file(total, filename):
        # Append `filename`'s contents (with a provenance comment) to the
        # accumulated output.
        with open(filename) as f:
            total += '// ' + filename + '\n' + f.read() + SEP
        return total

    # hermeticize the C file, by bundling in the wasm2c/ includes
    headers = [
        (WASM2C_DIR, 'wasm-rt.h'),
        (WASM2C_DIR, 'wasm-rt-impl.h'),
        ('', h_file)
    ]
    for header in headers:
        total = bundle_file(total, os.path.join(header[0], header[1]))
    # add the wasm2c output
    with open(c_file) as read_c:
        c = read_c.read()
    total += c + SEP
    # add the wasm2c runtime
    total = bundle_file(total, os.path.join(WASM2C_DIR, 'wasm-rt-impl.c'))
    # add the support code
    support_files = ['base']
    if Settings.AUTODEBUG:
        support_files.append('autodebug')
    if Settings.EXPECT_MAIN:
        # TODO: add an option for direct OS access. For now, do that when building
        # an executable with main, as opposed to a library
        support_files.append('os')
        support_files.append('main')
    else:
        support_files.append('os_sandboxed')
        support_files.append('reactor')
        # for a reactor, also append wasmbox_* API definitions
        with open(h_file, 'a') as f:
            f.write('''
// wasmbox_* API
// TODO: optional prefixing
extern void wasmbox_init(void);
''')
    for support_file in support_files:
        total = bundle_file(total, path_from_root('tools', 'wasm2c', support_file + '.c'))
    # remove #includes of the headers we bundled
    for header in headers:
        total = total.replace('#include "%s"\n' % header[1],
                              '/* include of %s */\n' % header[1])

    # generate the necessary invokes
    invokes = []
    # The function-type table is derived from `total`, which is not modified
    # inside the loop below, so compute it once up front instead of re-parsing
    # the whole bundled source for every invoke signature. (Was previously
    # recomputed per iteration.)
    all_func_types = get_func_types(total)

    def arg_name(i):
        # positional argument name for slot i of an invoke trampoline
        return 'a' + str(i)

    for sig in re.findall(r"\/\* import\: 'env' 'invoke_(\w+)' \*\/", total):
        # wabt's mangled signature inserts the function-pointer index ('i')
        # after the return type
        wabt_sig = sig[0] + 'i' + sig[1:]
        typed_args = [s_to_c(sig[i]) + ' ' + arg_name(i) for i in range(1, len(sig))]
        full_typed_args = ['u32 fptr'] + typed_args
        types = [s_to_c(sig[i]) for i in range(1, len(sig))]
        args = [arg_name(i) for i in range(1, len(sig))]
        c_func_type = s_to_c(sig[0]) + ' (*)(' + (', '.join(types) if types else 'void') + ')'
        if sig not in all_func_types:
            exit_with_error('could not find signature ' + sig + ' in function types ' + str(all_func_types))
        type_index = all_func_types[sig]

        invokes.append(r'''
IMPORT_IMPL(%(return_type)s, Z_envZ_invoke_%(sig)sZ_%(wabt_sig)s, (%(full_typed_args)s), {
  VERBOSE_LOG("invoke\n"); // waka
  u32 sp = Z_stackSaveZ_iv();
  if (next_setjmp >= MAX_SETJMP_STACK) {
    abort_with_message("too many nested setjmps");
  }
  u32 id = next_setjmp++;
  int result = setjmp(setjmp_stack[id]);
  %(declare_return)s
  if (result == 0) {
    %(receive)sCALL_INDIRECT(w2c___indirect_function_table, %(c_func_type)s, %(type_index)s, fptr %(args)s);
    /* if we got here, no longjmp or exception happened, we returned normally */
  } else {
    /* A longjmp or an exception took us here. */
    Z_stackRestoreZ_vi(sp);
    Z_setThrewZ_vii(1, 0);
  }
  next_setjmp--;
  %(return)s
});
''' % {
            'return_type': s_to_c(sig[0]) if sig[0] != 'v' else 'void',
            'sig': sig,
            'wabt_sig': wabt_sig,
            'full_typed_args': ', '.join(full_typed_args),
            'type_index': type_index,
            'c_func_type': c_func_type,
            'args': (', ' + ', '.join(args)) if args else '',
            'declare_return': (s_to_c(sig[0]) + ' returned_value = 0;') if sig[0] != 'v' else '',
            'receive': 'returned_value = ' if sig[0] != 'v' else '',
            'return': 'return returned_value;' if sig[0] != 'v' else ''
        })
    total += '\n'.join(invokes)
    # write out the final file
    with open(c_file, 'w') as out:
        out.write(total)