Example 1
def compile_settings():
    stderr_file = os.environ.get('EMCC_STDERR_FILE')
    if stderr_file:
        stderr_file = os.path.abspath(stderr_file)
        logger.info('logging stderr in js compiler phase into %s' %
                    stderr_file)
        stderr_file = open(stderr_file, 'w')

    # Only the names of the legacy settings are used by the JS compiler
    # so we can reduce the size of serialized json by simplifying this
    # otherwise complex value.
    settings['LEGACY_SETTINGS'] = [l[0] for l in settings['LEGACY_SETTINGS']]

    # Save settings to a file to work around v8 issue 1579
    with shared.configuration.get_temp_files().get_file(
            '.json') as settings_file:
        with open(settings_file, 'w') as s:
            json.dump(settings.dict(), s, sort_keys=True, indent=2)

        # Call js compiler
        env = os.environ.copy()
        env['EMCC_BUILD_DIR'] = os.getcwd()
        out = shared.run_js_tool(path_from_root('src/compiler.js'),
                                 [settings_file],
                                 stdout=subprocess.PIPE,
                                 stderr=stderr_file,
                                 cwd=path_from_root('src'),
                                 env=env)
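    # compiler.js prints the generated JS glue followed by a
    # '//FORWARDED_DATA:' marker and the metadata it forwards back to the
    # python driver; the split below separates the two.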
    assert '//FORWARDED_DATA:' in out, 'Did not receive forwarded data in pre output - process failed?'
    glue, forwarded_data = out.split('//FORWARDED_DATA:')
    return glue, forwarded_data
Example 2
def compile_settings():
    stderr_file = os.environ.get('EMCC_STDERR_FILE')
    if stderr_file:
        stderr_file = os.path.abspath(stderr_file)
        logger.info('logging stderr in js compiler phase into %s' %
                    stderr_file)
        stderr_file = open(stderr_file, 'w')

    # Save settings to a file to work around v8 issue 1579
    with shared.configuration.get_temp_files().get_file(
            '.txt') as settings_file:
        with open(settings_file, 'w') as s:
            json.dump(settings.dict(), s, sort_keys=True)

        # Call js compiler
        env = os.environ.copy()
        env['EMCC_BUILD_DIR'] = os.getcwd()
        out = shared.run_js_tool(path_from_root('src/compiler.js'),
                                 [settings_file],
                                 stdout=subprocess.PIPE,
                                 stderr=stderr_file,
                                 cwd=path_from_root('src'),
                                 env=env)
    assert '//FORWARDED_DATA:' in out, 'Did not receive forwarded data in pre output - process failed?'
    glue, forwarded_data = out.split('//FORWARDED_DATA:')
    return glue, forwarded_data
Example 3
def get_clang_native_args():
  if MACOS:
    return ['-isystem', path_from_root('system', 'include', 'libcxx')]
  elif os.name == 'nt':
    # TODO: If Windows.h et al. are needed, will need to add something like '-isystemC:/Program
    # Files (x86)/Microsoft SDKs/Windows/v7.1A/Include'.
    return ['-DWIN32']
  else:
    return []
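
A minimal usage sketch (illustrative only; CLANG_CC is a hypothetical
stand-in for however the caller locates its native clang, and subprocess is
assumed to be imported):

def compile_native(src):
  # Compile a host-side source file, adding the platform-specific flags.
  cmd = [CLANG_CC, '-c', src] + get_clang_native_args()
  subprocess.check_call(cmd)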
Example 4
def compare_metadata(metadata, pymetadata):
    if sorted(metadata.keys()) != sorted(pymetadata.keys()):
        print(sorted(metadata.keys()))
        print(sorted(pymetadata.keys()))
        exit_with_error('metadata keys mismatch')
    for key in metadata:
        old = metadata[key]
        new = pymetadata[key]
        if key == 'features':
            old = sorted(old)
            new = sorted(new)
        if old != new:
            print(key)
            open(path_from_root('first.txt'), 'w').write(pprint.pformat(old))
            open(path_from_root('second.txt'), 'w').write(pprint.pformat(new))
            print(pprint.pformat(old))
            print(pprint.pformat(new))
            exit_with_error('metadata mismatch')
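
For illustration, the 'features' key is compared order-insensitively, so a
call like the following (hypothetical inputs) passes without an error:

compare_metadata({'features': ['simd', 'bulk-memory']},
                 {'features': ['bulk-memory', 'simd']})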
Example 5
def run():
    if len(sys.argv) < 2 or sys.argv[1] in ('--version', '--help'):
        print('''\
emcmake is a helper for cmake, setting various environment
variables so that emcc etc. are used. Typical usage:

  emcmake cmake [FLAGS]
''',
              file=sys.stderr)
        return 1

    args = sys.argv[1:]

    def has_substr(args, substr):
        return any(substr in s for s in args)

    # Append the Emscripten toolchain file if the user didn't specify one.
    if not has_substr(args, '-DCMAKE_TOOLCHAIN_FILE'):
        args.append('-DCMAKE_TOOLCHAIN_FILE=' + utils.path_from_root(
            'cmake', 'Modules', 'Platform', 'Emscripten.cmake'))

    if not has_substr(args, '-DCMAKE_CROSSCOMPILING_EMULATOR'):
        node_js = config.NODE_JS[0]
        args.append(f'-DCMAKE_CROSSCOMPILING_EMULATOR={node_js}')

    # On Windows specify MinGW Makefiles or ninja if we have them and no other
    # toolchain was specified, to keep CMake from pulling in a native Visual
    # Studio, or Unix Makefiles.
    if utils.WINDOWS and '-G' not in args:
        if utils.which('mingw32-make'):
            args += ['-G', 'MinGW Makefiles']
        elif utils.which('ninja'):
            args += ['-G', 'Ninja']
        else:
            print(
                'emcmake: no compatible cmake generator found; Please install ninja or mingw32-make, or specify a generator explicitly using -G',
                file=sys.stderr)
            return 1

    # CMake has a requirement that it wants sh.exe off PATH if MinGW Makefiles
    # is being used. This happens quite often, so do this automatically on
    # behalf of the user. See
    # http://www.cmake.org/Wiki/CMake_MinGW_Compiler_Issues
    if utils.WINDOWS and 'MinGW Makefiles' in args:
        env = building.remove_sh_exe_from_path(os.environ)
    else:
        env = None

    print('configure: ' + shared.shlex_join(args), file=sys.stderr)
    try:
        shared.check_call(args, env=env)
        return 0
    except CalledProcessError as e:
        return e.returncode
Example 6
def run():
    if len(sys.argv) < 2 or sys.argv[1] in ('--version', '--help'):
        print('''\
emcmake is a helper for cmake, setting various environment
variables so that emcc etc. are used. Typical usage:

  emcmake cmake [FLAGS]
''',
              file=sys.stderr)
        return 1

    args = sys.argv[1:]

    def has_substr(args, substr):
        return any(substr in s for s in args)

    # Append the Emscripten toolchain file if the user didn't specify one.
    if not has_substr(args, '-DCMAKE_TOOLCHAIN_FILE'):
        args.append(
            '-DCMAKE_TOOLCHAIN_FILE=' +
            utils.path_from_root('cmake/Modules/Platform/Emscripten.cmake'))

    if not has_substr(args, '-DCMAKE_CROSSCOMPILING_EMULATOR'):
        node_js = config.NODE_JS[0]
        args.append(f'-DCMAKE_CROSSCOMPILING_EMULATOR={node_js}')

    # On Windows specify MinGW Makefiles or ninja if we have them and no other
    # toolchain was specified, to keep CMake from pulling in a native Visual
    # Studio, or Unix Makefiles.
    if utils.WINDOWS and '-G' not in args:
        if utils.which('mingw32-make'):
            args += ['-G', 'MinGW Makefiles']
        elif utils.which('ninja'):
            args += ['-G', 'Ninja']
        else:
            print(
                'emcmake: no compatible cmake generator found; Please install ninja or mingw32-make, or specify a generator explicitly using -G',
                file=sys.stderr)
            return 1

    print('configure: ' + shared.shlex_join(args), file=sys.stderr)
    try:
        shared.check_call(args)
        return 0
    except CalledProcessError as e:
        return e.returncode
Example 7
def check_engine(engine):
  if type(engine) is list:
    engine_path = engine[0]
  else:
    engine_path = engine
  global WORKING_ENGINES
  if engine_path not in WORKING_ENGINES:
    logging.debug('Checking JS engine %s' % engine)
    try:
      output = run_js(utils.path_from_root('tests/hello_world.js'), engine, skip_check=True)
      if 'hello, world!' in output:
        WORKING_ENGINES[engine_path] = True
      else:
        WORKING_ENGINES[engine_path] = False
    except Exception as e:
      logging.info('Checking JS engine %s failed. Check your config file. Details: %s' % (str(engine), str(e)))
      WORKING_ENGINES[engine_path] = False
  return WORKING_ENGINES[engine_path]
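
A usage sketch (illustrative; config.NODE_JS is the configured Node command
used elsewhere in these examples):

if not check_engine(config.NODE_JS):
  sys.exit('no working JS engine found - check your config file')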
Example 8
def run():
  if len(sys.argv) < 2 or sys.argv[1] in ('--version', '--help'):
    print('''\
emcmake is a helper for cmake, setting various environment
variables so that emcc etc. are used. Typical usage:

  emcmake cmake [FLAGS]
''', file=sys.stderr)
    return 1

  args = sys.argv[1:]

  def has_substr(args, substr):
    return any(substr in s for s in args)

  # Append the Emscripten toolchain file if the user didn't specify one.
  if not has_substr(args, '-DCMAKE_TOOLCHAIN_FILE'):
    args.append('-DCMAKE_TOOLCHAIN_FILE=' + utils.path_from_root('cmake/Modules/Platform/Emscripten.cmake'))

  if not has_substr(args, '-DCMAKE_CROSSCOMPILING_EMULATOR'):
    node_js = config.NODE_JS[0]
    # In order to allow cmake to run code built with pthreads we need to pass some extra flags to node.
    # Note that we also need --experimental-wasm-bulk-memory which is true by default and hence not added here
    # See https://github.com/emscripten-core/emscripten/issues/15522
    args.append(f'-DCMAKE_CROSSCOMPILING_EMULATOR={node_js};--experimental-wasm-threads')

  # On Windows specify MinGW Makefiles or ninja if we have them and no other
  # toolchain was specified, to keep CMake from pulling in a native Visual
  # Studio, or Unix Makefiles.
  if utils.WINDOWS and not any(arg.startswith('-G') for arg in args):
    if utils.which('mingw32-make'):
      args += ['-G', 'MinGW Makefiles']
    elif utils.which('ninja'):
      args += ['-G', 'Ninja']
    else:
      print('emcmake: no compatible cmake generator found; Please install ninja or mingw32-make, or specify a generator explicitly using -G', file=sys.stderr)
      return 1

  print('configure: ' + shared.shlex_join(args), file=sys.stderr)
  try:
    shared.check_call(args)
    return 0
  except CalledProcessError as e:
    return e.returncode
Example 9
def main(args):
    global QUIET

    default_json_files = [
        utils.path_from_root('src/struct_info.json'),
        utils.path_from_root('src/struct_info_internal.json'),
        utils.path_from_root('src/struct_info_cxx.json'),
    ]
    parser = argparse.ArgumentParser(
        description='Generate JSON infos for structs.')
    parser.add_argument('json',
                        nargs='*',
                        default=default_json_files,
                        help='JSON file with a list of structs and their '
                             'fields (defaults to src/struct_info.json)')
    parser.add_argument('-q',
                        dest='quiet',
                        action='store_true',
                        default=False,
                        help='Don\'t output anything besides error messages.')
    parser.add_argument('-o',
                        dest='output',
                        metavar='path',
                        default=None,
                        help='Path to the JSON file that will be written. If '
                             'omitted, the generated data will be printed to '
                             'stdout.')
    parser.add_argument('-I',
                        dest='includes',
                        metavar='dir',
                        action='append',
                        default=[],
                        help='Add directory to include search path')
    parser.add_argument('-D',
                        dest='defines',
                        metavar='define',
                        action='append',
                        default=[],
                        help='Pass a define to the preprocessor')
    parser.add_argument('-U',
                        dest='undefines',
                        metavar='undefine',
                        action='append',
                        default=[],
                        help='Pass an undefine to the preprocessor')
    parser.add_argument('--wasm64',
                        action='store_true',
                        help='use wasm64 architecture')
    args = parser.parse_args(args)

    QUIET = args.quiet

    # Avoid parsing problems due to gcc-specific syntax.
    cflags = ['-D_GNU_SOURCE']

    if args.wasm64:
        settings.MEMORY64 = 2

    # Add the user options to the list as well.
    for path in args.includes:
        cflags.append('-I' + path)

    for arg in args.defines:
        cflags.append('-D' + arg)

    for arg in args.undefines:
        cflags.append('-U' + arg)

    internal_cflags = [
        '-I' + utils.path_from_root('system/lib/libc/musl/src/internal'),
        '-I' + utils.path_from_root('system/lib/libc/musl/src/include'),
        '-I' + utils.path_from_root('system/lib/pthread/'),
        '-I' + utils.path_from_root('system/lib/wasmfs/'),
    ]

    cxxflags = [
        '-I' + utils.path_from_root('system/lib/libcxxabi/src'),
        '-D__USING_EMSCRIPTEN_EXCEPTIONS__',
    ]

    # Look for structs in all passed headers.
    info = {'defines': {}, 'structs': {}}

    for f in args.json:
        # This is a JSON file, parse it.
        header_files = parse_json(f)
        # Inspect all collected structs.
        if 'internal' in f:
            use_cflags = cflags + internal_cflags
        elif 'cxx' in f:
            use_cflags = cflags + cxxflags
        else:
            use_cflags = cflags
        info_fragment = inspect_code(header_files, use_cflags)
        merge_info(info, info_fragment)

    output_json(info, args.output)
    return 0
Example 10
# University of Illinois/NCSA Open Source License.  Both these licenses can be
# found in the LICENSE file.

"""WebIDL binder

https://emscripten.org/docs/porting/connecting_cpp_and_javascript/WebIDL-Binder.html
"""

import os
import sys

sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from tools import shared, utils

sys.path.append(utils.path_from_root('third_party'))
sys.path.append(utils.path_from_root('third_party/ply'))

import WebIDL

# CHECKS='FAST' will skip most argument type checks in the wrapper methods for
#                  performance (~3x faster than default).
# CHECKS='ALL' will do extensive argument type checking (~5x slower than default).
#                 This will catch invalid numbers, invalid pointers, invalid strings, etc.
# Anything else defaults to legacy mode for backward compatibility.
CHECKS = os.environ.get('IDL_CHECKS', 'DEFAULT')
# DEBUG=1 will print debug info in render_function
DEBUG = os.environ.get('IDL_VERBOSE') == '1'
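# Illustrative: running the binder with IDL_CHECKS=ALL and IDL_VERBOSE=1 in
# the environment selects the strict checking mode and enables the debug
# prints below.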

if DEBUG:
  print("Debug print ON, CHECKS=%s" % CHECKS)
Example 11
import logging
import os
import random
import sys
import unittest

# Setup

__rootpath__ = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(__rootpath__)

import jsrun
import parallel_testsuite
import common
from tools import shared, config, utils

sys.path.append(utils.path_from_root('third_party/websockify'))

logger = logging.getLogger("runner")

# The core test modes
core_test_modes = [
    'wasm0',
    'wasm1',
    'wasm2',
    'wasm3',
    'wasms',
    'wasmz',
    'strict',
    'wasm2js0',
    'wasm2js1',
    'wasm2js2',
Example 12
import json
import os
import re
import shutil
import sys

__scriptdir__ = os.path.dirname(os.path.abspath(__file__))
__rootdir__ = os.path.dirname(__scriptdir__)
sys.path.append(__rootdir__)

from tools.toolchain_profiler import ToolchainProfiler
from tools.utils import path_from_root
from tools import building, config, shared, utils

configuration = shared.configuration
temp_files = configuration.get_temp_files()


ACORN_OPTIMIZER = path_from_root('tools/acorn-optimizer.js')

NUM_CHUNKS_PER_CORE = 3
MIN_CHUNK_SIZE = int(os.environ.get('EMCC_JSOPT_MIN_CHUNK_SIZE') or 512 * 1024) # configuring this is just for debugging purposes
MAX_CHUNK_SIZE = int(os.environ.get('EMCC_JSOPT_MAX_CHUNK_SIZE') or 5 * 1024 * 1024)

WINDOWS = sys.platform.startswith('win')

DEBUG = os.environ.get('EMCC_DEBUG')

func_sig = re.compile(r'function ([_\w$]+)\(')
func_sig_json = re.compile(r'\["defun", ?"([_\w$]+)",')
import_sig = re.compile(r'(var|const) ([_\w$]+ *=[^;]+);')
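# For illustration: func_sig matches a JS function header such as
# "function _main(" (capturing "_main"), func_sig_json matches the JSON-AST
# form '["defun", "name",', and import_sig matches top-level var/const
# declarations like 'var x = ...;'.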


def split_funcs(js, just_split=False):
Example 13
def do_wasm2c(infile):
    assert settings.STANDALONE_WASM
    WASM2C = config.NODE_JS + [path_from_root('node_modules/wasm2c/wasm2c.js')]
    WASM2C_DIR = path_from_root('node_modules/wasm2c')
    c_file = unsuffixed(infile) + '.wasm.c'
    h_file = unsuffixed(infile) + '.wasm.h'
    cmd = WASM2C + [infile, '-o', c_file]
    check_call(cmd)
    total = '''\
/*
 * This file was generated by emcc+wasm2c. To compile it, use something like
 *
 *   $CC FILE.c -O2 -lm -DWASM_RT_MAX_CALL_STACK_DEPTH=8000
 */
'''
    SEP = '\n/* ==================================== */\n'

    def bundle_file(filename):
        nonlocal total
        with open(filename) as f:
            total += '// ' + filename + '\n' + f.read() + SEP

    # hermeticize the C file, by bundling in the wasm2c/ includes
    headers = [(WASM2C_DIR, 'wasm-rt.h'), (WASM2C_DIR, 'wasm-rt-impl.h'),
               ('', h_file)]
    for header in headers:
        bundle_file(os.path.join(header[0], header[1]))
    # add the wasm2c output
    bundle_file(c_file)
    # add the wasm2c runtime
    bundle_file(os.path.join(WASM2C_DIR, 'wasm-rt-impl.c'))
    # add the support code
    support_files = ['base.c']
    if settings.AUTODEBUG:
        support_files.append('autodebug.c')
    if settings.EXPECT_MAIN:
        # TODO: add an option for direct OS access. For now, do that when building
        #       an executable with main, as opposed to a library
        support_files.append('os.c')
        support_files.append('main.c')
    else:
        support_files.append('os_sandboxed.c')
        support_files.append('reactor.c')
        # for a reactor, also append wasmbox_* API definitions
        with open(h_file, 'a') as f:
            f.write('''
// wasmbox_* API
// TODO: optional prefixing
extern void wasmbox_init(void);
''')
    for support_file in support_files:
        bundle_file(path_from_root(f'tools/wasm2c/{support_file}'))
    # remove #includes of the headers we bundled
    for header in headers:
        total = total.replace('#include "%s"\n' % header[1],
                              '/* include of %s */\n' % header[1])
    # generate the necessary invokes
    invokes = []
    for sig in re.findall(r"\/\* import\: 'env' 'invoke_(\w+)' \*\/", total):
        all_func_types = get_func_types(total)

        def name(i):
            return 'a' + str(i)

        wabt_sig = sig[0] + 'i' + sig[1:]
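        # sig encodes the wasm signature with the return type first (e.g.
        # 'vii' returns void and takes two i32s); wabt_sig inserts an extra
        # 'i' for the function-pointer index argument that invoke_* receives.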
        typed_args = [
            s_to_c(sig[i]) + ' ' + name(i) for i in range(1, len(sig))
        ]
        full_typed_args = ['u32 fptr'] + typed_args
        types = [s_to_c(sig[i]) for i in range(1, len(sig))]
        args = [name(i) for i in range(1, len(sig))]
        c_func_type = s_to_c(
            sig[0]) + ' (*)(' + (', '.join(types) if types else 'void') + ')'
        if sig not in all_func_types:
            exit_with_error('could not find signature ' + sig +
                            ' in function types ' + str(all_func_types))
        type_index = all_func_types[sig]

        invokes.append(
            r'''
IMPORT_IMPL(%(return_type)s, Z_envZ_invoke_%(sig)sZ_%(wabt_sig)s, (%(full_typed_args)s), {
  VERBOSE_LOG("invoke\n"); // waka
  u32 sp = WASM_RT_ADD_PREFIX(Z_stackSaveZ_iv)();
  if (next_setjmp >= MAX_SETJMP_STACK) {
    abort_with_message("too many nested setjmps");
  }
  u32 id = next_setjmp++;
  int result = setjmp(setjmp_stack[id]);
  %(declare_return)s
  if (result == 0) {
    %(receive)sCALL_INDIRECT(w2c___indirect_function_table, %(c_func_type)s, %(type_index)s, fptr %(args)s);
    /* if we got here, no longjmp or exception happened, we returned normally */
  } else {
    /* A longjmp or an exception took us here. */
    WASM_RT_ADD_PREFIX(Z_stackRestoreZ_vi)(sp);
    WASM_RT_ADD_PREFIX(Z_setThrewZ_vii)(1, 0);
  }
  next_setjmp--;
  %(return)s
});
''' % {
                'return_type': s_to_c(sig[0]) if sig[0] != 'v' else 'void',
                'sig': sig,
                'wabt_sig': wabt_sig,
                'full_typed_args': ', '.join(full_typed_args),
                'type_index': type_index,
                'c_func_type': c_func_type,
                'args': (', ' + ', '.join(args)) if args else '',
                'declare_return': (s_to_c(sig[0]) + ' returned_value = 0;')
                                  if sig[0] != 'v' else '',
                'receive': 'returned_value = ' if sig[0] != 'v' else '',
                'return': 'return returned_value;' if sig[0] != 'v' else ''
            })

    total += '\n'.join(invokes)

    # adjust sandboxing
    TRAP_OOB = 'TRAP(OOB)'
    assert total.count(TRAP_OOB) == 2
    if settings.WASM2C_SANDBOXING == 'full':
        pass  # keep it
    elif settings.WASM2C_SANDBOXING == 'none':
        total = total.replace(TRAP_OOB, '{}')
    elif settings.WASM2C_SANDBOXING == 'mask':
        assert not settings.ALLOW_MEMORY_GROWTH
        assert (settings.INITIAL_MEMORY &
                (settings.INITIAL_MEMORY - 1)) == 0, 'power of 2'
        total = total.replace(TRAP_OOB, '{}')
        MEM_ACCESS = '[addr]'
        assert total.count(MEM_ACCESS) == 3, '2 from wasm2c, 1 from runtime'
        total = total.replace(MEM_ACCESS,
                              '[addr & %d]' % (settings.INITIAL_MEMORY - 1))
    else:
        exit_with_error('bad sandboxing')

    # adjust prefixing: emit simple output that works with multiple libraries,
    # each compiled into its own single .c file, by adding 'static' in some places
    # TODO: decide on the proper pattern for this in an upstream discussion in
    #       wasm2c; another option would be to prefix all these things.
    for rep in [
            'uint32_t wasm_rt_register_func_type(',
            'void wasm_rt_trap(',
            'void wasm_rt_allocate_memory(',
            'uint32_t wasm_rt_grow_memory(',
            'void wasm_rt_allocate_table(',
            'jmp_buf g_jmp_buf',
            'uint32_t g_func_type_count',
            'FuncType* g_func_types',
            'uint32_t wasm_rt_call_stack_depth',
            'uint32_t g_saved_call_stack_depth',
    ]:
        # remove 'extern' from declaration
        total = total.replace('extern ' + rep, rep)
        # add 'static' to implementation
        old = total
        total = total.replace(rep, 'static ' + rep)
        assert old != total, f'did not find "{rep}"'

    # write out the final file
    with open(c_file, 'w') as out:
        out.write(total)
Example 14
#!/usr/bin/env python3
"""Wrapping the scons invocation, EMSCRIPTEN_TOOL_PATH is set in the process
environment, and can be used to locate the emscripten SCons Tool.

Example:

# Load emscripten Tool
my_env = Environment(tools=['emscripten'], toolpath=[os.environ['EMSCRIPTEN_TOOL_PATH']])
"""

import os
import subprocess
import sys
from tools import utils

tool_path = utils.path_from_root(
    'tools/scons/site_scons/site_tools/emscripten')

env = os.environ.copy()
env['EMSCRIPTEN_TOOL_PATH'] = tool_path

sys.exit(subprocess.call(sys.argv[1:], env=env))
Example 15
def main():
  data_files = []
  export_name = 'Module'
  leading = ''
  has_preloaded = False
  plugins = []
  jsoutput = None
  from_emcc = False
  force = True
  # If set to True, IndexedDB (IDBFS in library_idbfs.js) is used to locally
  # cache VFS XHR so that subsequent page loads can read the data from the
  # offline cache instead.
  use_preload_cache = False
  indexeddb_name = 'EM_PRELOAD_CACHE'
  # If set to True, the package metadata is stored separately from js-output
  # file which makes js-output file immutable to the package content changes.
  # If set to False, the package metadata is stored inside the js-output file
  # which makes js-output file to mutate on each invocation of this packager tool.
  separate_metadata = False
  lz4 = False
  use_preload_plugins = False
  support_node = True

  for arg in sys.argv[2:]:
    if arg == '--preload':
      has_preloaded = True
      leading = 'preload'
    elif arg == '--embed':
      leading = 'embed'
    elif arg == '--exclude':
      leading = 'exclude'
    elif arg == '--no-force':
      force = False
      leading = ''
    elif arg == '--use-preload-cache':
      use_preload_cache = True
      leading = ''
    elif arg.startswith('--indexedDB-name'):
      indexeddb_name = arg.split('=', 1)[1] if '=' in arg else None
      leading = ''
    elif arg == '--no-heap-copy':
      print('ignoring legacy flag --no-heap-copy (that is the only mode supported now)')
      leading = ''
    elif arg == '--separate-metadata':
      separate_metadata = True
      leading = ''
    elif arg == '--lz4':
      lz4 = True
      leading = ''
    elif arg == '--use-preload-plugins':
      use_preload_plugins = True
      leading = ''
    elif arg == '--no-node':
      support_node = False
      leading = ''
    elif arg.startswith('--js-output'):
      jsoutput = arg.split('=', 1)[1] if '=' in arg else None
      leading = ''
    elif arg.startswith('--export-name'):
      if '=' in arg:
        export_name = arg.split('=', 1)[1]
      leading = ''
    elif arg.startswith('--from-emcc'):
      from_emcc = True
      leading = ''
    elif arg.startswith('--plugin'):
      with open(arg.split('=', 1)[1]) as f:
        plugin = f.read()
      eval(plugin) # should append itself to plugins
      leading = ''
    elif leading == 'preload' or leading == 'embed':
      mode = leading
      # position of @ if we're doing 'src@dst'. '__' is used to keep the index
      # same with the original if they escaped with '@@'.
      at_position = arg.replace('@@', '__').find('@')
      # '@@' in input string means there is an actual @ character, a single '@'
      # means the 'src@dst' notation.
      uses_at_notation = (at_position != -1)
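      # For example: 'assets@/data' yields srcpath='assets', dstpath='/data',
      # while the escaped 'file@@name.txt' yields srcpath == dstpath ==
      # 'file@name.txt' with no explicit destination.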

      if uses_at_notation:
        srcpath = arg[0:at_position].replace('@@', '@') # split around the @
        dstpath = arg[at_position + 1:].replace('@@', '@')
      else:
        # Use source path as destination path.
        srcpath = dstpath = arg.replace('@@', '@')
      if os.path.isfile(srcpath) or os.path.isdir(srcpath):
        data_files.append({'srcpath': srcpath, 'dstpath': dstpath, 'mode': mode,
                           'explicit_dst_path': uses_at_notation})
      else:
        print('error: ' + arg + ' does not exist', file=sys.stderr)
        return 1
    elif leading == 'exclude':
      excluded_patterns.append(arg)
    else:
      print('Unknown parameter:', arg, file=sys.stderr)
      return 1

  if (not force) and not data_files:
    has_preloaded = False
  if not has_preloaded or jsoutput is None:
    assert not separate_metadata, (
       'cannot separate-metadata without both --preloaded files '
       'and a specified --js-output')

  if not from_emcc:
    print('Remember to build the main file with  -s FORCE_FILESYSTEM=1  '
          'so that it includes support for loading this file package',
          file=sys.stderr)

  if jsoutput and os.path.abspath(jsoutput) == os.path.abspath(data_target):
    print('error: TARGET should not be the same as --js-output',
          file=sys.stderr)
    return 1

  ret = ''
  # emcc will add this to the output itself, so it is only needed for
  # standalone calls
  if not from_emcc:
    ret = '''
  var Module = typeof %(EXPORT_NAME)s !== 'undefined' ? %(EXPORT_NAME)s : {};
  ''' % {"EXPORT_NAME": export_name}

  ret += '''
  if (!Module.expectedDataFileDownloads) {
    Module.expectedDataFileDownloads = 0;
  }
  Module.expectedDataFileDownloads++;
  (function() {
   var loadPackage = function(metadata) {
  '''

  code = '''
      function assert(check, msg) {
        if (!check) throw msg + new Error().stack;
      }
  '''

  new_data_files = []
  for file_ in data_files:
    if not should_ignore(file_['srcpath']):
      if os.path.isdir(file_['srcpath']):
        add(file_['mode'], file_['srcpath'], file_['dstpath'])
      else:
        new_data_files.append(file_)
  data_files = [file_ for file_ in new_data_files
                if not os.path.isdir(file_['srcpath'])]
  if len(data_files) == 0:
    print('Nothing to do!', file=sys.stderr)
    sys.exit(1)

  # Absolutize paths, and check that they make sense
  # os.getcwd() always returns the hard path with any symbolic links resolved,
  # even if we cd'd into a symbolic link.
  curr_abspath = os.path.abspath(os.getcwd())

  for file_ in data_files:
    if not file_['explicit_dst_path']:
      # This file was not defined with src@dst, so we inferred the destination
      # from the source. In that case, we require that the destination not be
      # under the current location
      path = file_['dstpath']
      # Use os.path.realpath to resolve any symbolic links to hard paths,
      # to match the structure in curr_abspath.
      abspath = os.path.realpath(os.path.abspath(path))
      if DEBUG:
          print(path, abspath, curr_abspath, file=sys.stderr)
      if not abspath.startswith(curr_abspath):
        print('Error: Embedding "%s" which is below the current directory '
              '"%s". This is invalid since the current directory becomes the '
              'root that the generated code will see' % (path, curr_abspath),
              file=sys.stderr)
        sys.exit(1)
      file_['dstpath'] = abspath[len(curr_abspath) + 1:]
      if os.path.isabs(path):
        print('Warning: Embedding an absolute file/directory name "%s" to the '
              'virtual filesystem. The file will be made available in the '
              'relative path "%s". You can use the explicit syntax '
              '--preload-file srcpath@dstpath to explicitly specify the target '
              'location the absolute source path should be directed to.'
              % (path, file_['dstpath']), file=sys.stderr)

  for file_ in data_files:
    # name in the filesystem, native and emulated
    file_['dstpath'] = file_['dstpath'].replace(os.path.sep, '/')
    # If user has submitted a directory name as the destination but omitted
    # the destination filename, use the filename from source file
    if file_['dstpath'].endswith('/'):
      file_['dstpath'] = file_['dstpath'] + os.path.basename(file_['srcpath'])
    # make destination path always relative to the root
    file_['dstpath'] = posixpath.normpath(os.path.join('/', file_['dstpath']))
    if DEBUG:
      print('Packaging file "%s" to VFS in path "%s".'
            % (file_['srcpath'],  file_['dstpath']), file=sys.stderr)

  # Remove duplicates (can occur naively, for example preload dir/, preload dir/subdir/)
  seen = {}

  def was_seen(name):
    if seen.get(name):
        return True
    seen[name] = 1
    return False

  data_files = [file_ for file_ in data_files if not was_seen(file_['dstpath'])]

  if AV_WORKAROUND:
    random.shuffle(data_files)

  # Apply plugins
  for file_ in data_files:
    for plugin in plugins:
      plugin(file_)

  metadata = {'files': []}

  # Set up folders
  partial_dirs = []
  for file_ in data_files:
    dirname = os.path.dirname(file_['dstpath'])
    dirname = dirname.lstrip('/') # absolute paths start with '/', remove that
    if dirname != '':
      parts = dirname.split('/')
      for i in range(len(parts)):
        partial = '/'.join(parts[:i + 1])
        if partial not in partial_dirs:
          code += ('''Module['FS_createPath'](%s, %s, true, true);\n'''
                   % (json.dumps('/' + '/'.join(parts[:i])), json.dumps(parts[i])))
          partial_dirs.append(partial)
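  # e.g. a dstpath of '/a/b/c.txt' emits FS_createPath('/', 'a') and then
  # FS_createPath('/a', 'b'), creating each directory level exactly once.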

  if has_preloaded:
    # Bundle all datafiles into one archive. Avoids doing lots of simultaneous
    # XHRs which has overhead.
    start = 0
    with open(data_target, 'wb') as data:
      for file_ in data_files:
        file_['data_start'] = start
        with open(file_['srcpath'], 'rb') as f:
          curr = f.read()
        file_['data_end'] = start + len(curr)
        if AV_WORKAROUND:
            curr += b'\x00'
        start += len(curr)
        data.write(curr)

    # TODO: sha256sum on data_target
    if start > 256 * 1024 * 1024:
      print('warning: file packager is creating an asset bundle of %d MB. '
            'this is very large, and browsers might have trouble loading it. '
            'see https://hacks.mozilla.org/2015/02/synchronous-execution-and-filesystem-access-in-emscripten/'
            % (start / (1024 * 1024)), file=sys.stderr)

    create_preloaded = '''
          Module['FS_createPreloadedFile'](this.name, null, byteArray, true, true, function() {
            Module['removeRunDependency']('fp ' + that.name);
          }, function() {
            if (that.audio) {
              Module['removeRunDependency']('fp ' + that.name); // workaround for chromium bug 124926 (still no audio with this, but at least we don't hang)
            } else {
              err('Preloading file ' + that.name + ' failed');
            }
          }, false, true); // canOwn this data in the filesystem, it is a slice into the heap that will never change
  '''
    create_data = '''
          Module['FS_createDataFile'](this.name, null, byteArray, true, true, true); // canOwn this data in the filesystem, it is a slice into the heap that will never change
          Module['removeRunDependency']('fp ' + that.name);
  '''

    if not lz4:
        # Data requests - for getting a block of data out of the big archive - have
        # a similar API to XHRs
        code += '''
          /** @constructor */
          function DataRequest(start, end, audio) {
            this.start = start;
            this.end = end;
            this.audio = audio;
          }
          DataRequest.prototype = {
            requests: {},
            open: function(mode, name) {
              this.name = name;
              this.requests[name] = this;
              Module['addRunDependency']('fp ' + this.name);
            },
            send: function() {},
            onload: function() {
              var byteArray = this.byteArray.subarray(this.start, this.end);
              this.finish(byteArray);
            },
            finish: function(byteArray) {
              var that = this;
      %s
              this.requests[this.name] = null;
            }
          };
      %s
        ''' % (create_preloaded if use_preload_plugins else create_data, '''
              var files = metadata['files'];
              for (var i = 0; i < files.length; ++i) {
                new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio'] || 0).open('GET', files[i]['filename']);
              }
      ''')

  counter = 0
  for file_ in data_files:
    filename = file_['dstpath']
    dirname = os.path.dirname(filename)
    basename = os.path.basename(filename)
    if file_['mode'] == 'embed':
      # Embed
      data = base64_encode(utils.read_binary(file_['srcpath']))
      code += '''var fileData%d = '%s';\n''' % (counter, data)
      code += ('''Module['FS_createDataFile']('%s', '%s', decodeBase64(fileData%d), true, true, false);\n'''
               % (dirname, basename, counter))
      counter += 1
    elif file_['mode'] == 'preload':
      # Preload
      counter += 1

      metadata_el = {
        'filename': file_['dstpath'],
        'start': file_['data_start'],
        'end': file_['data_end'],
      }
      if filename[-4:] in AUDIO_SUFFIXES:
        metadata_el['audio'] = 1

      metadata['files'].append(metadata_el)
    else:
      assert 0

  if has_preloaded:
    if not lz4:
      # Get the big archive and split it up
      use_data = '''
          // Reuse the bytearray from the XHR as the source for file reads.
          DataRequest.prototype.byteArray = byteArray;
    '''
      use_data += '''
            var files = metadata['files'];
            for (var i = 0; i < files.length; ++i) {
              DataRequest.prototype.requests[files[i].filename].onload();
            }
      '''
      use_data += ("          Module['removeRunDependency']('datafile_%s');\n"
                   % shared.JS.escape_for_js_string(data_target))

    else:
      # LZ4FS usage
      temp = data_target + '.orig'
      shutil.move(data_target, temp)
      meta = shared.run_js_tool(utils.path_from_root('tools/lz4-compress.js'),
                                [utils.path_from_root('third_party/mini-lz4.js'),
                                temp, data_target], stdout=PIPE)
      os.unlink(temp)
      use_data = '''
            var compressedData = %s;
            compressedData['data'] = byteArray;
            assert(typeof Module['LZ4'] === 'object', 'LZ4 not present - was your app built with  -s LZ4=1  ?');
            Module['LZ4'].loadPackage({ 'metadata': metadata, 'compressedData': compressedData }, %s);
            Module['removeRunDependency']('datafile_%s');
      ''' % (meta, "true" if use_preload_plugins else "false", shared.JS.escape_for_js_string(data_target))

    package_uuid = uuid.uuid4()
    package_name = data_target
    remote_package_size = os.path.getsize(package_name)
    remote_package_name = os.path.basename(package_name)
    ret += r'''
      var PACKAGE_PATH = '';
      if (typeof window === 'object') {
        PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.toString().substring(0, window.location.pathname.toString().lastIndexOf('/')) + '/');
      } else if (typeof process === 'undefined' && typeof location !== 'undefined') {
        // web worker
        PACKAGE_PATH = encodeURIComponent(location.pathname.toString().substring(0, location.pathname.toString().lastIndexOf('/')) + '/');
      }
      var PACKAGE_NAME = '%s';
      var REMOTE_PACKAGE_BASE = '%s';
      if (typeof Module['locateFilePackage'] === 'function' && !Module['locateFile']) {
        Module['locateFile'] = Module['locateFilePackage'];
        err('warning: you defined Module.locateFilePackage, that has been renamed to Module.locateFile (using your locateFilePackage for now)');
      }
      var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;
    ''' % (shared.JS.escape_for_js_string(data_target),
           shared.JS.escape_for_js_string(remote_package_name))
    metadata['remote_package_size'] = remote_package_size
    metadata['package_uuid'] = str(package_uuid)
    ret += '''
      var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
      var PACKAGE_UUID = metadata['package_uuid'];
    '''

    if use_preload_cache:
      code += r'''
        var indexedDB;
        if (typeof window === 'object') {
          indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
        } else if (typeof location !== 'undefined') {
          // worker
          indexedDB = self.indexedDB;
        } else {
          throw 'using IndexedDB to cache data can only be done on a web page or in a web worker';
        }
        var IDB_RO = "readonly";
        var IDB_RW = "readwrite";
        var DB_NAME = "''' + indexeddb_name + '''";
        var DB_VERSION = 1;
        var METADATA_STORE_NAME = 'METADATA';
        var PACKAGE_STORE_NAME = 'PACKAGES';
        function openDatabase(callback, errback) {
          try {
            var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
          } catch (e) {
            return errback(e);
          }
          openRequest.onupgradeneeded = function(event) {
            var db = event.target.result;

            if(db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
              db.deleteObjectStore(PACKAGE_STORE_NAME);
            }
            var packages = db.createObjectStore(PACKAGE_STORE_NAME);

            if(db.objectStoreNames.contains(METADATA_STORE_NAME)) {
              db.deleteObjectStore(METADATA_STORE_NAME);
            }
            var metadata = db.createObjectStore(METADATA_STORE_NAME);
          };
          openRequest.onsuccess = function(event) {
            var db = event.target.result;
            callback(db);
          };
          openRequest.onerror = function(error) {
            errback(error);
          };
        };

        // This is needed as chromium has a limit on per-entry files in IndexedDB
        // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
        // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
        // We set the chunk size to 64MB to stay well-below the limit
        var CHUNK_SIZE = 64 * 1024 * 1024;

        function cacheRemotePackage(
          db,
          packageName,
          packageData,
          packageMeta,
          callback,
          errback
        ) {
          var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
          var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
          var chunkSliceStart = 0;
          var nextChunkSliceStart = 0;
          var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
          var finishedChunks = 0;
          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            nextChunkSliceStart += CHUNK_SIZE;
            var putPackageRequest = packages.put(
              packageData.slice(chunkSliceStart, nextChunkSliceStart),
              'package/' + packageName + '/' + chunkId
            );
            chunkSliceStart = nextChunkSliceStart;
            putPackageRequest.onsuccess = function(event) {
              finishedChunks++;
              if (finishedChunks == chunkCount) {
                var transaction_metadata = db.transaction(
                  [METADATA_STORE_NAME],
                  IDB_RW
                );
                var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
                var putMetadataRequest = metadata.put(
                  {
                    'uuid': packageMeta.uuid,
                    'chunkCount': chunkCount
                  },
                  'metadata/' + packageName
                );
                putMetadataRequest.onsuccess = function(event) {
                  callback(packageData);
                };
                putMetadataRequest.onerror = function(error) {
                  errback(error);
                };
              }
            };
            putPackageRequest.onerror = function(error) {
              errback(error);
            };
          }
        }

        /* Check if there's a cached package, and if so whether it's the latest available */
        function checkCachedPackage(db, packageName, callback, errback) {
          var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
          var metadata = transaction.objectStore(METADATA_STORE_NAME);
          var getRequest = metadata.get('metadata/' + packageName);
          getRequest.onsuccess = function(event) {
            var result = event.target.result;
            if (!result) {
              return callback(false, null);
            } else {
              return callback(PACKAGE_UUID === result['uuid'], result);
            }
          };
          getRequest.onerror = function(error) {
            errback(error);
          };
        }

        function fetchCachedPackage(db, packageName, metadata, callback, errback) {
          var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
          var packages = transaction.objectStore(PACKAGE_STORE_NAME);

          var chunksDone = 0;
          var totalSize = 0;
          var chunkCount = metadata['chunkCount'];
          var chunks = new Array(chunkCount);

          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            var getRequest = packages.get('package/' + packageName + '/' + chunkId);
            getRequest.onsuccess = function(event) {
              // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
              if (chunkCount == 1) {
                callback(event.target.result);
              } else {
                chunksDone++;
                totalSize += event.target.result.byteLength;
                chunks.push(event.target.result);
                if (chunksDone == chunkCount) {
                  if (chunksDone == 1) {
                    callback(event.target.result);
                  } else {
                    var tempTyped = new Uint8Array(totalSize);
                    var byteOffset = 0;
                    for (var chunkId in chunks) {
                      var buffer = chunks[chunkId];
                      tempTyped.set(new Uint8Array(buffer), byteOffset);
                      byteOffset += buffer.byteLength;
                      buffer = undefined;
                    }
                    chunks = undefined;
                    callback(tempTyped.buffer);
                    tempTyped = undefined;
                  }
                }
              }
            };
            getRequest.onerror = function(error) {
              errback(error);
            };
          }
        }
      '''

    # add Node.js support code, if necessary
    node_support_code = ''
    if support_node:
      node_support_code = r'''
        if (typeof process === 'object' && typeof process.versions === 'object' && typeof process.versions.node === 'string') {
          require('fs').readFile(packageName, function(err, contents) {
            if (err) {
              errback(err);
            } else {
              callback(contents.buffer);
            }
          });
          return;
        }
      '''
    ret += r'''
      function fetchRemotePackage(packageName, packageSize, callback, errback) {
        %(node_support_code)s
        var xhr = new XMLHttpRequest();
        xhr.open('GET', packageName, true);
        xhr.responseType = 'arraybuffer';
        xhr.onprogress = function(event) {
          var url = packageName;
          var size = packageSize;
          if (event.total) size = event.total;
          if (event.loaded) {
            if (!xhr.addedTotal) {
              xhr.addedTotal = true;
              if (!Module.dataFileDownloads) Module.dataFileDownloads = {};
              Module.dataFileDownloads[url] = {
                loaded: event.loaded,
                total: size
              };
            } else {
              Module.dataFileDownloads[url].loaded = event.loaded;
            }
            var total = 0;
            var loaded = 0;
            var num = 0;
            for (var download in Module.dataFileDownloads) {
            var data = Module.dataFileDownloads[download];
              total += data.total;
              loaded += data.loaded;
              num++;
            }
            total = Math.ceil(total * Module.expectedDataFileDownloads/num);
            if (Module['setStatus']) Module['setStatus']('Downloading data... (' + loaded + '/' + total + ')');
          } else if (!Module.dataFileDownloads) {
            if (Module['setStatus']) Module['setStatus']('Downloading data...');
          }
        };
        xhr.onerror = function(event) {
          throw new Error("NetworkError for: " + packageName);
        }
        xhr.onload = function(event) {
          if (xhr.status == 200 || xhr.status == 304 || xhr.status == 206 || (xhr.status == 0 && xhr.response)) { // file URLs can return 0
            var packageData = xhr.response;
            callback(packageData);
          } else {
            throw new Error(xhr.statusText + " : " + xhr.responseURL);
          }
        };
        xhr.send(null);
      };

      function handleError(error) {
        console.error('package error:', error);
      };
    ''' % {'node_support_code': node_support_code}

    code += r'''
      function processPackageData(arrayBuffer) {
        assert(arrayBuffer, 'Loading data file failed.');
        assert(arrayBuffer instanceof ArrayBuffer, 'bad input to processPackageData');
        var byteArray = new Uint8Array(arrayBuffer);
        var curr;
        %s
      };
      Module['addRunDependency']('datafile_%s');
    ''' % (use_data, shared.JS.escape_for_js_string(data_target))
    # use basename because from the browser's point of view,
    # we need to find the datafile in the same dir as the html file

    code += r'''
      if (!Module.preloadResults) Module.preloadResults = {};
    '''

    if use_preload_cache:
      code += r'''
        function preloadFallback(error) {
          console.error(error);
          console.error('falling back to default preload behavior');
          fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
        };

        openDatabase(
          function(db) {
            checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
              function(useCached, metadata) {
                Module.preloadResults[PACKAGE_NAME] = {fromCache: useCached};
                if (useCached) {
                  fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
                } else {
                  fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
                    function(packageData) {
                      cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
                        function(error) {
                          console.error(error);
                          processPackageData(packageData);
                        });
                    }
                  , preloadFallback);
                }
              }
            , preloadFallback);
          }
        , preloadFallback);

        if (Module['setStatus']) Module['setStatus']('Downloading...');
      '''
    else:
      # Not using preload cache, so we might as well start the xhr ASAP,
      # potentially before JS parsing of the main codebase if it's after us.
      # Only tricky bit is the fetch is async, but also when runWithFS is called
      # is async, so we handle both orderings.
      ret += r'''
        var fetchedCallback = null;
        var fetched = Module['getPreloadedPackage'] ? Module['getPreloadedPackage'](REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE) : null;

        if (!fetched) fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, function(data) {
          if (fetchedCallback) {
            fetchedCallback(data);
            fetchedCallback = null;
          } else {
            fetched = data;
          }
        }, handleError);
      '''

      code += r'''
        Module.preloadResults[PACKAGE_NAME] = {fromCache: false};
        if (fetched) {
          processPackageData(fetched);
          fetched = null;
        } else {
          fetchedCallback = processPackageData;
        }
      '''

  ret += '''
    function runWithFS() {
  '''
  ret += code
  ret += '''
    }
    if (Module['calledRun']) {
      runWithFS();
    } else {
      if (!Module['preRun']) Module['preRun'] = [];
      Module["preRun"].push(runWithFS); // FS is not initialized yet, wait for it
    }
  '''

  if separate_metadata:
      _metadata_template = '''
    Module['removeRunDependency']('%(metadata_file)s');
   }

   function runMetaWithFS() {
    Module['addRunDependency']('%(metadata_file)s');
    var REMOTE_METADATA_NAME = Module['locateFile'] ? Module['locateFile']('%(metadata_file)s', '') : '%(metadata_file)s';
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
     if (xhr.readyState === 4 && xhr.status === 200) {
       loadPackage(JSON.parse(xhr.responseText));
     }
    }
    xhr.open('GET', REMOTE_METADATA_NAME, true);
    xhr.overrideMimeType('application/json');
    xhr.send(null);
   }

   if (Module['calledRun']) {
    runMetaWithFS();
   } else {
    if (!Module['preRun']) Module['preRun'] = [];
    Module["preRun"].push(runMetaWithFS);
   }
  ''' % {'metadata_file': os.path.basename(jsoutput + '.metadata')}

  else:
      _metadata_template = '''
   }
   loadPackage(%s);
  ''' % json.dumps(metadata)

  ret += '''%s
  })();
  ''' % _metadata_template

  if force or len(data_files):
    if jsoutput is None:
      print(ret)
    else:
      # Overwrite the old jsoutput file (if exists) only when its content
      # differs from the current generated one, otherwise leave the file
      # untouched preserving its old timestamp
      if os.path.isfile(jsoutput):
        with open(jsoutput) as f:
          old = f.read()
        if old != ret:
          with open(jsoutput, 'w') as f:
            f.write(ret)
      else:
        with open(jsoutput, 'w') as f:
          f.write(ret)
      if separate_metadata:
        with open(jsoutput + '.metadata', 'w') as f:
          json.dump(metadata, f, separators=(',', ':'))

  return 0
Example 16
def generate_js(data_target, data_files, metadata):
    # emcc will add this to the output itself, so it is only needed for
    # standalone calls
    if options.from_emcc:
        ret = ''
    else:
        ret = '''
  var Module = typeof %(EXPORT_NAME)s !== 'undefined' ? %(EXPORT_NAME)s : {};\n''' % {
            "EXPORT_NAME": options.export_name
        }

    ret += '''
  if (!Module.expectedDataFileDownloads) {
    Module.expectedDataFileDownloads = 0;
  }

  Module.expectedDataFileDownloads++;
  (function() {
    // When running as a pthread, FS operations are proxied to the main thread, so we don't need to
    // fetch the .data bundle on the worker
    if (Module['ENVIRONMENT_IS_PTHREAD']) return;
    var loadPackage = function(metadata) {\n'''

    code = '''
      function assert(check, msg) {
        if (!check) throw msg + new Error().stack;
      }\n'''

    # Set up folders
    partial_dirs = []
    for file_ in data_files:
        dirname = os.path.dirname(file_.dstpath)
        dirname = dirname.lstrip('/')  # absolute paths start with '/', remove that
        if dirname != '':
            parts = dirname.split('/')
            for i in range(len(parts)):
                partial = '/'.join(parts[:i + 1])
                if partial not in partial_dirs:
                    code += (
                        '''Module['FS_createPath'](%s, %s, true, true);\n''' %
                        (json.dumps('/' + '/'.join(parts[:i])),
                         json.dumps(parts[i])))
                    partial_dirs.append(partial)

    if options.has_preloaded:
        # Bundle all datafiles into one archive. Avoids doing lots of simultaneous
        # XHRs which has overhead.
        start = 0
        with open(data_target, 'wb') as data:
            for file_ in data_files:
                file_.data_start = start
                with open(file_.srcpath, 'rb') as f:
                    curr = f.read()
                file_.data_end = start + len(curr)
                if AV_WORKAROUND:
                    curr += b'\x00'
                start += len(curr)
                data.write(curr)

        if start > 256 * 1024 * 1024:
            err('warning: file packager is creating an asset bundle of %d MB. '
                'this is very large, and browsers might have trouble loading it. '
                'see https://hacks.mozilla.org/2015/02/synchronous-execution-and-filesystem-access-in-emscripten/'
                % (start / (1024 * 1024)))

        create_preloaded = '''
          Module['FS_createPreloadedFile'](this.name, null, byteArray, true, true, function() {
            Module['removeRunDependency']('fp ' + that.name);
          }, function() {
            if (that.audio) {
              Module['removeRunDependency']('fp ' + that.name); // workaround for chromium bug 124926 (still no audio with this, but at least we don't hang)
            } else {
              err('Preloading file ' + that.name + ' failed');
            }
          }, false, true); // canOwn this data in the filesystem, it is a slice into the heap that will never change\n'''
        create_data = '''// canOwn this data in the filesystem, it is a slice into the heap that will never change
          Module['FS_createDataFile'](this.name, null, byteArray, true, true, true);
          Module['removeRunDependency']('fp ' + that.name);'''

        if not options.lz4:
            # Data requests - for getting a block of data out of the big archive - have
            # a similar API to XHRs
            code += '''
      /** @constructor */
      function DataRequest(start, end, audio) {
        this.start = start;
        this.end = end;
        this.audio = audio;
      }
      DataRequest.prototype = {
        requests: {},
        open: function(mode, name) {
          this.name = name;
          this.requests[name] = this;
          Module['addRunDependency']('fp ' + this.name);
        },
        send: function() {},
        onload: function() {
          var byteArray = this.byteArray.subarray(this.start, this.end);
          this.finish(byteArray);
        },
        finish: function(byteArray) {
          var that = this;
          %s
          this.requests[this.name] = null;
        }
      };

      var files = metadata['files'];
      for (var i = 0; i < files.length; ++i) {
        new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio'] || 0).open('GET', files[i]['filename']);
      }\n''' % (create_preloaded
                if options.use_preload_plugins else create_data)
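
            # DataRequest mirrors just enough of the XHR surface (open/send/onload)
            # that slices of the bundle can be handed to the preload machinery as
            # if each file had been fetched individually.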

    if options.has_embedded and not options.obj_output:
        err('--obj-output is recommended when using --embed. It outputs an '
            'object file for linking directly into your application, which is '
            'more efficient than JS encoding.')

    for counter, file_ in enumerate(data_files):
        filename = file_.dstpath
        dirname = os.path.dirname(filename)
        basename = os.path.basename(filename)
        if file_.mode == 'embed':
            if not options.obj_output:
                # Embed (only needed when not generating object file output)
                data = base64_encode(utils.read_binary(file_.srcpath))
                code += "      var fileData%d = '%s';\n" % (counter, data)
                # canOwn this data in the filesystem (i.e. there is no need to create a copy in the FS layer).
                code += (
                    "      Module['FS_createDataFile']('%s', '%s', decodeBase64(fileData%d), true, true, true);\n"
                    % (dirname, basename, counter))
        elif file_.mode == 'preload':
            # Preload
            metadata_el = {
                'filename': file_.dstpath,
                'start': file_.data_start,
                'end': file_.data_end,
            }
            if filename[-4:] in AUDIO_SUFFIXES:
                metadata_el['audio'] = 1

            metadata['files'].append(metadata_el)
        else:
            assert False, 'unexpected file mode: %s' % file_.mode

    if options.has_preloaded:
        if not options.lz4:
            # Get the big archive and split it up
            use_data = '''// Reuse the bytearray from the XHR as the source for file reads.
          DataRequest.prototype.byteArray = byteArray;
          var files = metadata['files'];
          for (var i = 0; i < files.length; ++i) {
            DataRequest.prototype.requests[files[i].filename].onload();
          }\n'''
            use_data += (
                "          Module['removeRunDependency']('datafile_%s');\n" %
                js_manipulation.escape_for_js_string(data_target))
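
            # The matching addRunDependency('datafile_...') is emitted further
            # down, next to the processPackageData definition.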

        else:
            # LZ4FS usage
            temp = data_target + '.orig'
            shutil.move(data_target, temp)
            meta = shared.run_js_tool(
                utils.path_from_root('tools/lz4-compress.js'), [
                    utils.path_from_root('third_party/mini-lz4.js'), temp,
                    data_target
                ],
                stdout=PIPE)
            os.unlink(temp)
            use_data = '''var compressedData = %s;
            compressedData['data'] = byteArray;
            assert(typeof Module['LZ4'] === 'object', 'LZ4 not present - was your app built with -sLZ4?');
            Module['LZ4'].loadPackage({ 'metadata': metadata, 'compressedData': compressedData }, %s);
            Module['removeRunDependency']('datafile_%s');''' % (
                meta, "true" if options.use_preload_plugins else "false",
                js_manipulation.escape_for_js_string(data_target))
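
            # The compressed bundle replaces data_target on disk; at runtime the
            # LZ4 runtime (linked in via -sLZ4) decompresses chunks lazily as the
            # virtual FS reads them.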

        package_name = data_target
        remote_package_size = os.path.getsize(package_name)
        remote_package_name = os.path.basename(package_name)
        ret += '''
      var PACKAGE_PATH = '';
      if (typeof window === 'object') {
        PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.toString().substring(0, window.location.pathname.toString().lastIndexOf('/')) + '/');
      } else if (typeof process === 'undefined' && typeof location !== 'undefined') {
        // web worker
        PACKAGE_PATH = encodeURIComponent(location.pathname.toString().substring(0, location.pathname.toString().lastIndexOf('/')) + '/');
      }
      var PACKAGE_NAME = '%s';
      var REMOTE_PACKAGE_BASE = '%s';
      if (typeof Module['locateFilePackage'] === 'function' && !Module['locateFile']) {
        Module['locateFile'] = Module['locateFilePackage'];
        err('warning: you defined Module.locateFilePackage, which has been renamed to Module.locateFile (using your locateFilePackage for now)');
      }
      var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;\n''' % (
            js_manipulation.escape_for_js_string(data_target),
            js_manipulation.escape_for_js_string(remote_package_name))
        metadata['remote_package_size'] = remote_package_size
        ret += '''var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];\n'''

        if options.use_preload_cache:
            # Set the id to a hash of the preloaded data, so that caches survive over multiple builds
            # if the data has not changed.
            data = utils.read_binary(data_target)
            package_uuid = 'sha256-' + hashlib.sha256(data).hexdigest()
            metadata['package_uuid'] = str(package_uuid)
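
            # e.g. 'sha256-9f86d081884c7d65...': a stable content hash, so the
            # IndexedDB cache keeps being hit across rebuilds as long as the
            # bundle bytes are unchanged.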

            # Not a raw string: the trailing \n at the end of this block must
            # expand to a real newline in the generated JS.
            code += '''
        var PACKAGE_UUID = metadata['package_uuid'];
        var indexedDB;
        if (typeof window === 'object') {
          indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
        } else if (typeof location !== 'undefined') {
          // worker
          indexedDB = self.indexedDB;
        } else {
          throw 'using IndexedDB to cache data can only be done on a web page or in a web worker';
        }
        var IDB_RO = "readonly";
        var IDB_RW = "readwrite";
        var DB_NAME = "''' + options.indexeddb_name + '''";
        var DB_VERSION = 1;
        var METADATA_STORE_NAME = 'METADATA';
        var PACKAGE_STORE_NAME = 'PACKAGES';
        function openDatabase(callback, errback) {
          try {
            var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
          } catch (e) {
            return errback(e);
          }
          openRequest.onupgradeneeded = function(event) {
            var db = /** @type {IDBDatabase} */ (event.target.result);

            if (db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
              db.deleteObjectStore(PACKAGE_STORE_NAME);
            }
            var packages = db.createObjectStore(PACKAGE_STORE_NAME);

            if (db.objectStoreNames.contains(METADATA_STORE_NAME)) {
              db.deleteObjectStore(METADATA_STORE_NAME);
            }
            var metadata = db.createObjectStore(METADATA_STORE_NAME);
          };
          openRequest.onsuccess = function(event) {
            var db = /** @type {IDBDatabase} */ (event.target.result);
            callback(db);
          };
          openRequest.onerror = function(error) {
            errback(error);
          };
        };

        // This is needed as Chromium has a limit on the size of each entry in IndexedDB
        // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
        // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
        // We set the chunk size to 64MB to stay well below the limit
        var CHUNK_SIZE = 64 * 1024 * 1024;
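        // Chunks are stored under keys 'package/<packageName>/<chunkId>'; the
        // chunk count lives separately under 'metadata/<packageName>'.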

        function cacheRemotePackage(
          db,
          packageName,
          packageData,
          packageMeta,
          callback,
          errback
        ) {
          var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
          var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
          var chunkSliceStart = 0;
          var nextChunkSliceStart = 0;
          var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
          var finishedChunks = 0;
          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            nextChunkSliceStart += CHUNK_SIZE;
            var putPackageRequest = packages.put(
              packageData.slice(chunkSliceStart, nextChunkSliceStart),
              'package/' + packageName + '/' + chunkId
            );
            chunkSliceStart = nextChunkSliceStart;
            putPackageRequest.onsuccess = function(event) {
              finishedChunks++;
              if (finishedChunks == chunkCount) {
                var transaction_metadata = db.transaction(
                  [METADATA_STORE_NAME],
                  IDB_RW
                );
                var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
                var putMetadataRequest = metadata.put(
                  {
                    'uuid': packageMeta.uuid,
                    'chunkCount': chunkCount
                  },
                  'metadata/' + packageName
                );
                putMetadataRequest.onsuccess = function(event) {
                  callback(packageData);
                };
                putMetadataRequest.onerror = function(error) {
                  errback(error);
                };
              }
            };
            putPackageRequest.onerror = function(error) {
              errback(error);
            };
          }
        }

        /* Check if there's a cached package, and if so whether it's the latest available */
        function checkCachedPackage(db, packageName, callback, errback) {
          var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
          var metadata = transaction.objectStore(METADATA_STORE_NAME);
          var getRequest = metadata.get('metadata/' + packageName);
          getRequest.onsuccess = function(event) {
            var result = event.target.result;
            if (!result) {
              return callback(false, null);
            } else {
              return callback(PACKAGE_UUID === result['uuid'], result);
            }
          };
          getRequest.onerror = function(error) {
            errback(error);
          };
        }

        function fetchCachedPackage(db, packageName, metadata, callback, errback) {
          var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
          var packages = transaction.objectStore(PACKAGE_STORE_NAME);

          var chunksDone = 0;
          var totalSize = 0;
          var chunkCount = metadata['chunkCount'];
          var chunks = new Array(chunkCount);

          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            var getRequest = packages.get('package/' + packageName + '/' + chunkId);
            getRequest.onsuccess = function(event) {
              // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
              if (chunkCount == 1) {
                callback(event.target.result);
              } else {
                chunksDone++;
                totalSize += event.target.result.byteLength;
                chunks.push(event.target.result);
                if (chunksDone == chunkCount) {
                  if (chunksDone == 1) {
                    callback(event.target.result);
                  } else {
                    var tempTyped = new Uint8Array(totalSize);
                    var byteOffset = 0;
                    for (var chunkId in chunks) {
                      var buffer = chunks[chunkId];
                      tempTyped.set(new Uint8Array(buffer), byteOffset);
                      byteOffset += buffer.byteLength;
                      buffer = undefined;
                    }
                    chunks = undefined;
                    callback(tempTyped.buffer);
                    tempTyped = undefined;
                  }
                }
              }
            };
            getRequest.onerror = function(error) {
              errback(error);
            };
          }
        }\n'''

        # add Node.js support code, if necessary
        node_support_code = ''
        if options.support_node:
            node_support_code = '''
        if (typeof process === 'object' && typeof process.versions === 'object' && typeof process.versions.node === 'string') {
          require('fs').readFile(packageName, function(err, contents) {
            if (err) {
              errback(err);
            } else {
              callback(contents.buffer);
            }
          });
          return;
        }'''.strip()
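
            # Under Node.js there is no XMLHttpRequest, so this snippet makes
            # fetchRemotePackage short-circuit to fs.readFile and hand the
            # Buffer's underlying ArrayBuffer to the same success callback.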
        ret += '''
      function fetchRemotePackage(packageName, packageSize, callback, errback) {
        %(node_support_code)s
        var xhr = new XMLHttpRequest();
        xhr.open('GET', packageName, true);
        xhr.responseType = 'arraybuffer';
        xhr.onprogress = function(event) {
          var url = packageName;
          var size = packageSize;
          if (event.total) size = event.total;
          if (event.loaded) {
            if (!xhr.addedTotal) {
              xhr.addedTotal = true;
              if (!Module.dataFileDownloads) Module.dataFileDownloads = {};
              Module.dataFileDownloads[url] = {
                loaded: event.loaded,
                total: size
              };
            } else {
              Module.dataFileDownloads[url].loaded = event.loaded;
            }
            var total = 0;
            var loaded = 0;
            var num = 0;
            for (var download in Module.dataFileDownloads) {
              var data = Module.dataFileDownloads[download];
              total += data.total;
              loaded += data.loaded;
              num++;
            }
            total = Math.ceil(total * Module.expectedDataFileDownloads / num);
            if (Module['setStatus']) Module['setStatus']('Downloading data... (' + loaded + '/' + total + ')');
          } else if (!Module.dataFileDownloads) {
            if (Module['setStatus']) Module['setStatus']('Downloading data...');
          }
        };
        xhr.onerror = function(event) {
          throw new Error("NetworkError for: " + packageName);
        };
        xhr.onload = function(event) {
          if (xhr.status == 200 || xhr.status == 304 || xhr.status == 206 || (xhr.status == 0 && xhr.response)) { // file URLs can return 0
            var packageData = xhr.response;
            callback(packageData);
          } else {
            throw new Error(xhr.statusText + " : " + xhr.responseURL);
          }
        };
        xhr.send(null);
      };

      function handleError(error) {
        console.error('package error:', error);
      };\n''' % {
            'node_support_code': node_support_code
        }

        code += '''
      function processPackageData(arrayBuffer) {
        assert(arrayBuffer, 'Loading data file failed.');
        assert(arrayBuffer instanceof ArrayBuffer, 'bad input to processPackageData');
        var byteArray = new Uint8Array(arrayBuffer);
        var curr;
        %s
      };
      Module['addRunDependency']('datafile_%s');\n''' % (
            use_data, js_manipulation.escape_for_js_string(data_target))
        # use basename because from the browser's point of view,
        # we need to find the datafile in the same dir as the html file

        code += '''
      if (!Module.preloadResults) Module.preloadResults = {};\n'''

        if options.use_preload_cache:
            code += '''
        function preloadFallback(error) {
          console.error(error);
          console.error('falling back to default preload behavior');
          fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
        };

        openDatabase(
          function(db) {
            checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
              function(useCached, metadata) {
                Module.preloadResults[PACKAGE_NAME] = {fromCache: useCached};
                if (useCached) {
                  fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
                } else {
                  fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
                    function(packageData) {
                      cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
                        function(error) {
                          console.error(error);
                          processPackageData(packageData);
                        });
                    }
                  , preloadFallback);
                }
              }
            , preloadFallback);
          }
        , preloadFallback);

        if (Module['setStatus']) Module['setStatus']('Downloading...');\n'''
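
            # Cache flow: open IndexedDB, compare the stored uuid with
            # PACKAGE_UUID, serve from the cache on a match, otherwise download
            # and cache the package; any failure falls back to a plain fetch via
            # preloadFallback.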
        else:
            # Not using the preload cache, so we might as well start the XHR ASAP,
            # potentially before JS parsing of the main codebase if it's after us.
            # The only tricky bit: both the fetch and the call to runWithFS are
            # async, so we handle both orderings.
            ret += '''
      var fetchedCallback = null;
      var fetched = Module['getPreloadedPackage'] ? Module['getPreloadedPackage'](REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE) : null;

      if (!fetched) fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, function(data) {
        if (fetchedCallback) {
          fetchedCallback(data);
          fetchedCallback = null;
        } else {
          fetched = data;
        }
      }, handleError);\n'''

            code += '''
      Module.preloadResults[PACKAGE_NAME] = {fromCache: false};
      if (fetched) {
        processPackageData(fetched);
        fetched = null;
      } else {
        fetchedCallback = processPackageData;
      }\n'''
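
            # Both orderings are handled: if the XHR already finished, 'fetched'
            # holds the data and is processed immediately; otherwise runWithFS
            # parks processPackageData in 'fetchedCallback' for the XHR handler
            # to call.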

    ret += '''
    function runWithFS() {\n'''
    ret += code
    ret += '''
    }
    if (Module['calledRun']) {
      runWithFS();
    } else {
      if (!Module['preRun']) Module['preRun'] = [];
      Module["preRun"].push(runWithFS); // FS is not initialized yet, wait for it
    }\n'''

    if options.separate_metadata:
        _metadata_template = '''
    Module['removeRunDependency']('%(metadata_file)s');
  }

  function runMetaWithFS() {
    Module['addRunDependency']('%(metadata_file)s');
    var REMOTE_METADATA_NAME = Module['locateFile'] ? Module['locateFile']('%(metadata_file)s', '') : '%(metadata_file)s';
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
      if (xhr.readyState === 4 && xhr.status === 200) {
        loadPackage(JSON.parse(xhr.responseText));
      }
    };
    xhr.open('GET', REMOTE_METADATA_NAME, true);
    xhr.overrideMimeType('application/json');
    xhr.send(null);
  }

  if (Module['calledRun']) {
    runMetaWithFS();
  } else {
    if (!Module['preRun']) Module['preRun'] = [];
    Module["preRun"].push(runMetaWithFS);
  }\n''' % {
            'metadata_file': os.path.basename(options.jsoutput + '.metadata')
        }
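
        # With --separate-metadata the file table is not inlined: it is fetched
        # as '<jsoutput>.metadata' next to the generated JS and handed to
        # loadPackage once the JSON arrives.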

    else:
        _metadata_template = '''
    }
    loadPackage(%s);\n''' % json.dumps(metadata)

    ret += '''%s
  })();\n''' % _metadata_template

    return ret
Exemplo n.º 17
0
    shared.try_delete(filename)
    try:
        shared.run_process(
            [COMP, '-m32', opts, fullname, '-o', filename + '1'] +
            CSMITH_CFLAGS + ['-w'])  # + shared.get_cflags()
    except CalledProcessError:
        print('Failed to compile natively using clang')
        notes['invalid'] += 1
        continue

    shared.run_process(
        [COMP, '-m32', opts, '-emit-llvm', '-c', fullname, '-o',
         filename + '.bc'] + CSMITH_CFLAGS + shared.get_cflags() + ['-w'])
    shared.run_process(
        [utils.path_from_root('tools/nativize_llvm.py'), filename + '.bc'],
        stderr=PIPE)
    shutil.move(filename + '.bc.run', filename + '2')
    shared.run_process([COMP, fullname, '-o', filename + '3'] + CSMITH_CFLAGS +
                       ['-w'])
    print('3) Run natively')
    try:
        correct1 = shared.timeout_run(
            Popen([filename + '1'], stdout=PIPE, stderr=PIPE), 3)
        if 'Segmentation fault' in correct1 or len(correct1) < 10:
            raise Exception('segfault')
        correct2 = shared.timeout_run(
            Popen([filename + '2'], stdout=PIPE, stderr=PIPE), 3)
        if 'Segmentation fault' in correct2 or len(correct2) < 10:
            raise Exception('segfault')
        correct3 = shared.timeout_run(