Example #1
def emscript(in_wasm, out_wasm, outfile_js, memfile, DEBUG):
    # Overview:
    #   * Run wasm-emscripten-finalize to extract metadata and modify the binary
    #     to use emscripten's wasm<->JS ABI
    #   * Use the metadata to generate the JS glue that goes with the wasm

    if settings.SINGLE_FILE:
        # placeholder strings for JS glue, to be replaced with subresource locations in do_binaryen
        settings.WASM_BINARY_FILE = '<<< WASM_BINARY_FILE >>>'
    else:
        # set file locations, so that JS glue can find what it needs
        settings.WASM_BINARY_FILE = js_manipulation.escape_for_js_string(
            os.path.basename(out_wasm))

    metadata = finalize_wasm(in_wasm, out_wasm, memfile, DEBUG)

    update_settings_glue(metadata, DEBUG)

    if settings.SIDE_MODULE:
        if metadata['asmConsts']:
            exit_with_error('EM_ASM is not supported in side modules')
        if metadata['emJsFuncs']:
            exit_with_error('EM_JS is not supported in side modules')
        logger.debug('emscript: skipping remaining js glue generation')
        return

    if DEBUG:
        logger.debug('emscript: js compiler glue')
        t = time.time()

    # memory and global initializers

    if settings.RELOCATABLE:
        dylink_sec = webassembly.parse_dylink_section(in_wasm)
        static_bump = align_memory(dylink_sec.mem_size)
        set_memory(static_bump)
        logger.debug('stack_base: %d, stack_max: %d, heap_base: %d',
                     settings.STACK_BASE, settings.STACK_MAX,
                     settings.HEAP_BASE)

        # When building relocatable output (e.g. MAIN_MODULE) the reported table
        # size does not include the reserved slot at zero for the null pointer.
        # So we need to offset the elements by 1.
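        # For example (illustrative numbers): if the dylink section reports a
        # table_size of 10, INITIAL_TABLE becomes 11, since slot 0 is reserved
        # for the null function pointer.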
        if settings.INITIAL_TABLE == -1:
            settings.INITIAL_TABLE = dylink_sec.table_size + 1

    glue, forwarded_data = compile_settings()
    if DEBUG:
        logger.debug('  emscript: glue took %s seconds' % (time.time() - t))
        t = time.time()

    forwarded_json = json.loads(forwarded_data)

    pre, post = glue.split('// EMSCRIPTEN_END_FUNCS')

    exports = metadata['exports']

    if settings.ASYNCIFY:
        exports += [
            'asyncify_start_unwind', 'asyncify_stop_unwind',
            'asyncify_start_rewind', 'asyncify_stop_rewind'
        ]

    report_missing_symbols(forwarded_json['libraryFunctions'])

    if not outfile_js:
        logger.debug('emscript: skipping remaining js glue generation')
        return

    if settings.MINIMAL_RUNTIME:
        # In MINIMAL_RUNTIME, atinit exists in the postamble part
        post = apply_static_code_hooks(forwarded_json, post)
    else:
        # In regular runtime, atinits etc. exist in the preamble part
        pre = apply_static_code_hooks(forwarded_json, pre)

    asm_consts = create_asm_consts(metadata)
    em_js_funcs = create_em_js(metadata)
    asm_const_pairs = ['%s: %s' % (key, value) for key, value in asm_consts]
    asm_const_map = 'var ASM_CONSTS = {\n  ' + ',\n  '.join(
        asm_const_pairs) + '\n};\n'
    pre = pre.replace('// === Body ===',
                      ('// === Body ===\n\n' + asm_const_map +
                       '\n'.join(em_js_funcs) + '\n'))
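    # Illustrative only (hypothetical entry): with asm_consts containing
    # (1024, '() => { alert("hi"); }'), the injected block would look roughly like:
    #   var ASM_CONSTS = {
    #     1024: () => { alert("hi"); }
    #   };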

    with open(outfile_js, 'w') as out:
        out.write(normalize_line_endings(pre))
        pre = None

        invoke_funcs = metadata['invokeFuncs']
        sending = create_sending(invoke_funcs, metadata)
        receiving = create_receiving(exports)

        if settings.MINIMAL_RUNTIME:
            if settings.DECLARE_ASM_MODULE_EXPORTS:
                post = compute_minimal_runtime_initializer_and_exports(
                    post, exports, receiving)
            receiving = ''

        module = create_module(sending, receiving, invoke_funcs, metadata)

        write_output_file(out, module)

        out.write(normalize_line_endings(post))
        module = None
Example #2
def generate_js(data_target, data_files, metadata):
    # emcc will add this to the output itself, so it is only needed for
    # standalone calls
    if options.from_emcc:
        ret = ''
    else:
        ret = '''
  var Module = typeof %(EXPORT_NAME)s !== 'undefined' ? %(EXPORT_NAME)s : {};\n''' % {
            "EXPORT_NAME": options.export_name
        }

    ret += '''
  if (!Module.expectedDataFileDownloads) {
    Module.expectedDataFileDownloads = 0;
  }

  Module.expectedDataFileDownloads++;
  (function() {
    // When running as a pthread, FS operations are proxied to the main thread, so we don't need to
    // fetch the .data bundle on the worker
    if (Module['ENVIRONMENT_IS_PTHREAD']) return;
    var loadPackage = function(metadata) {\n'''

    code = '''
      function assert(check, msg) {
        if (!check) throw msg + new Error().stack;
      }\n'''

    # Set up folders
    partial_dirs = []
    for file_ in data_files:
        dirname = os.path.dirname(file_.dstpath)
        dirname = dirname.lstrip(
            '/')  # absolute paths start with '/', remove that
        if dirname != '':
            parts = dirname.split('/')
            for i in range(len(parts)):
                partial = '/'.join(parts[:i + 1])
                if partial not in partial_dirs:
                    code += (
                        '''Module['FS_createPath'](%s, %s, true, true);\n''' %
                        (json.dumps('/' + '/'.join(parts[:i])),
                         json.dumps(parts[i])))
                    partial_dirs.append(partial)
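    # Illustrative example (hypothetical path): a file destined for
    # 'assets/img/a.png' emits, in order:
    #   Module['FS_createPath']("/", "assets", true, true);
    #   Module['FS_createPath']("/assets", "img", true, true);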

    if options.has_preloaded:
        # Bundle all datafiles into one archive. Avoids doing lots of simultaneous
        # XHRs which has overhead.
        start = 0
        with open(data_target, 'wb') as data:
            for file_ in data_files:
                file_.data_start = start
                with open(file_.srcpath, 'rb') as f:
                    curr = f.read()
                file_.data_end = start + len(curr)
                if AV_WORKAROUND:
                    curr += b'\x00'  # curr is bytes read from the file; append a bytes NUL
                start += len(curr)
                data.write(curr)
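        # Illustrative offsets (ignoring the AV_WORKAROUND padding): two files of
        # 100 and 50 bytes get (data_start, data_end) of (0, 100) and (100, 150);
        # the generated JS slices the downloaded ArrayBuffer with these offsets.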

        if start > 256 * 1024 * 1024:
            err('warning: file packager is creating an asset bundle of %d MB. '
                'this is very large, and browsers might have trouble loading it. '
                'see https://hacks.mozilla.org/2015/02/synchronous-execution-and-filesystem-access-in-emscripten/'
                % (start / (1024 * 1024)))

        create_preloaded = '''
          Module['FS_createPreloadedFile'](this.name, null, byteArray, true, true, function() {
            Module['removeRunDependency']('fp ' + that.name);
          }, function() {
            if (that.audio) {
              Module['removeRunDependency']('fp ' + that.name); // workaround for chromium bug 124926 (still no audio with this, but at least we don't hang)
            } else {
              err('Preloading file ' + that.name + ' failed');
            }
          }, false, true); // canOwn this data in the filesystem, it is a slice into the heap that will never change\n'''
        create_data = '''// canOwn this data in the filesystem, it is a slice into the heap that will never change
          Module['FS_createDataFile'](this.name, null, byteArray, true, true, true);
          Module['removeRunDependency']('fp ' + that.name);'''

        if not options.lz4:
            # Data requests - for getting a block of data out of the big archive - have
            # a similar API to XHRs
            code += '''
      /** @constructor */
      function DataRequest(start, end, audio) {
        this.start = start;
        this.end = end;
        this.audio = audio;
      }
      DataRequest.prototype = {
        requests: {},
        open: function(mode, name) {
          this.name = name;
          this.requests[name] = this;
          Module['addRunDependency']('fp ' + this.name);
        },
        send: function() {},
        onload: function() {
          var byteArray = this.byteArray.subarray(this.start, this.end);
          this.finish(byteArray);
        },
        finish: function(byteArray) {
          var that = this;
          %s
          this.requests[this.name] = null;
        }
      };

      var files = metadata['files'];
      for (var i = 0; i < files.length; ++i) {
        new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio'] || 0).open('GET', files[i]['filename']);
      }\n''' % (create_preloaded
                if options.use_preload_plugins else create_data)

    if options.has_embedded and not options.obj_output:
        err('--obj-output is recommended when using --embed.  This outputs an '
            'object file for linking directly into your application, which is '
            'more efficient than JS encoding')

    for counter, file_ in enumerate(data_files):
        filename = file_.dstpath
        dirname = os.path.dirname(filename)
        basename = os.path.basename(filename)
        if file_.mode == 'embed':
            if not options.obj_output:
                # Embed (only needed when not generating object file output)
                data = base64_encode(utils.read_binary(file_.srcpath))
                code += "      var fileData%d = '%s';\n" % (counter, data)
                # canOwn this data in the filesystem (i.e. there is no need to create a copy in the FS layer).
                code += (
                    "      Module['FS_createDataFile']('%s', '%s', decodeBase64(fileData%d), true, true, true);\n"
                    % (dirname, basename, counter))
        elif file_.mode == 'preload':
            # Preload
            metadata_el = {
                'filename': file_.dstpath,
                'start': file_.data_start,
                'end': file_.data_end,
            }
            if filename[-4:] in AUDIO_SUFFIXES:
                metadata_el['audio'] = 1

            metadata['files'].append(metadata_el)
        else:
            assert 0

    if options.has_preloaded:
        if not options.lz4:
            # Get the big archive and split it up
            use_data = '''// Reuse the bytearray from the XHR as the source for file reads.
          DataRequest.prototype.byteArray = byteArray;
          var files = metadata['files'];
          for (var i = 0; i < files.length; ++i) {
            DataRequest.prototype.requests[files[i].filename].onload();
          }'''
            use_data += (
                "          Module['removeRunDependency']('datafile_%s');\n" %
                js_manipulation.escape_for_js_string(data_target))

        else:
            # LZ4FS usage
            temp = data_target + '.orig'
            shutil.move(data_target, temp)
            meta = shared.run_js_tool(
                utils.path_from_root('tools/lz4-compress.js'), [
                    utils.path_from_root('third_party/mini-lz4.js'), temp,
                    data_target
                ],
                stdout=PIPE)
            os.unlink(temp)
            use_data = '''var compressedData = %s;
            compressedData['data'] = byteArray;
            assert(typeof Module['LZ4'] === 'object', 'LZ4 not present - was your app built with -sLZ4?');
            Module['LZ4'].loadPackage({ 'metadata': metadata, 'compressedData': compressedData }, %s);
            Module['removeRunDependency']('datafile_%s');''' % (
                meta, "true" if options.use_preload_plugins else "false",
                js_manipulation.escape_for_js_string(data_target))

        package_name = data_target
        remote_package_size = os.path.getsize(package_name)
        remote_package_name = os.path.basename(package_name)
        ret += '''
      var PACKAGE_PATH = '';
      if (typeof window === 'object') {
        PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.toString().substring(0, window.location.pathname.toString().lastIndexOf('/')) + '/');
      } else if (typeof process === 'undefined' && typeof location !== 'undefined') {
        // web worker
        PACKAGE_PATH = encodeURIComponent(location.pathname.toString().substring(0, location.pathname.toString().lastIndexOf('/')) + '/');
      }
      var PACKAGE_NAME = '%s';
      var REMOTE_PACKAGE_BASE = '%s';
      if (typeof Module['locateFilePackage'] === 'function' && !Module['locateFile']) {
        Module['locateFile'] = Module['locateFilePackage'];
        err('warning: you defined Module.locateFilePackage, that has been renamed to Module.locateFile (using your locateFilePackage for now)');
      }
      var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;\n''' % (
            js_manipulation.escape_for_js_string(data_target),
            js_manipulation.escape_for_js_string(remote_package_name))
        metadata['remote_package_size'] = remote_package_size
        ret += '''var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];\n'''

        if options.use_preload_cache:
            # Set the id to a hash of the preloaded data, so that caches survive over multiple builds
            # if the data has not changed.
            data = utils.read_binary(data_target)
            package_uuid = 'sha256-' + hashlib.sha256(data).hexdigest()
            metadata['package_uuid'] = str(package_uuid)
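            # e.g. 'sha256-9f86d081884c...' (illustrative digest); the key changes
            # only when the packed bytes change, so IndexedDB entries keyed on it
            # survive rebuilds that produce identical data.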

            code += r'''
        var PACKAGE_UUID = metadata['package_uuid'];
        var indexedDB;
        if (typeof window === 'object') {
          indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
        } else if (typeof location !== 'undefined') {
          // worker
          indexedDB = self.indexedDB;
        } else {
          throw 'using IndexedDB to cache data can only be done on a web page or in a web worker';
        }
        var IDB_RO = "readonly";
        var IDB_RW = "readwrite";
        var DB_NAME = "''' + options.indexeddb_name + '''";
        var DB_VERSION = 1;
        var METADATA_STORE_NAME = 'METADATA';
        var PACKAGE_STORE_NAME = 'PACKAGES';
        function openDatabase(callback, errback) {
          try {
            var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
          } catch (e) {
            return errback(e);
          }
          openRequest.onupgradeneeded = function(event) {
            var db = /** @type {IDBDatabase} */ (event.target.result);

            if (db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
              db.deleteObjectStore(PACKAGE_STORE_NAME);
            }
            var packages = db.createObjectStore(PACKAGE_STORE_NAME);

            if (db.objectStoreNames.contains(METADATA_STORE_NAME)) {
              db.deleteObjectStore(METADATA_STORE_NAME);
            }
            var metadata = db.createObjectStore(METADATA_STORE_NAME);
          };
          openRequest.onsuccess = function(event) {
            var db = /** @type {IDBDatabase} */ (event.target.result);
            callback(db);
          };
          openRequest.onerror = function(error) {
            errback(error);
          };
        };

        // This is needed as chromium has a limit on per-entry files in IndexedDB
        // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
        // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
        // We set the chunk size to 64MB to stay well-below the limit
        var CHUNK_SIZE = 64 * 1024 * 1024;
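        // For example, a 150 MB package is stored as Math.ceil(150 / 64) = 3 chunks.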

        function cacheRemotePackage(
          db,
          packageName,
          packageData,
          packageMeta,
          callback,
          errback
        ) {
          var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
          var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
          var chunkSliceStart = 0;
          var nextChunkSliceStart = 0;
          var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
          var finishedChunks = 0;
          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            nextChunkSliceStart += CHUNK_SIZE;
            var putPackageRequest = packages.put(
              packageData.slice(chunkSliceStart, nextChunkSliceStart),
              'package/' + packageName + '/' + chunkId
            );
            chunkSliceStart = nextChunkSliceStart;
            putPackageRequest.onsuccess = function(event) {
              finishedChunks++;
              if (finishedChunks == chunkCount) {
                var transaction_metadata = db.transaction(
                  [METADATA_STORE_NAME],
                  IDB_RW
                );
                var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
                var putMetadataRequest = metadata.put(
                  {
                    'uuid': packageMeta.uuid,
                    'chunkCount': chunkCount
                  },
                  'metadata/' + packageName
                );
                putMetadataRequest.onsuccess = function(event) {
                  callback(packageData);
                };
                putMetadataRequest.onerror = function(error) {
                  errback(error);
                };
              }
            };
            putPackageRequest.onerror = function(error) {
              errback(error);
            };
          }
        }

        /* Check if there's a cached package, and if so whether it's the latest available */
        function checkCachedPackage(db, packageName, callback, errback) {
          var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
          var metadata = transaction.objectStore(METADATA_STORE_NAME);
          var getRequest = metadata.get('metadata/' + packageName);
          getRequest.onsuccess = function(event) {
            var result = event.target.result;
            if (!result) {
              return callback(false, null);
            } else {
              return callback(PACKAGE_UUID === result['uuid'], result);
            }
          };
          getRequest.onerror = function(error) {
            errback(error);
          };
        }

        function fetchCachedPackage(db, packageName, metadata, callback, errback) {
          var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
          var packages = transaction.objectStore(PACKAGE_STORE_NAME);

          var chunksDone = 0;
          var totalSize = 0;
          var chunkCount = metadata['chunkCount'];
          var chunks = new Array(chunkCount);

          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            var getRequest = packages.get('package/' + packageName + '/' + chunkId);
            getRequest.onsuccess = function(event) {
              // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
              if (chunkCount == 1) {
                callback(event.target.result);
              } else {
                chunksDone++;
                totalSize += event.target.result.byteLength;
                chunks.push(event.target.result);
                if (chunksDone == chunkCount) {
                  if (chunksDone == 1) {
                    callback(event.target.result);
                  } else {
                    var tempTyped = new Uint8Array(totalSize);
                    var byteOffset = 0;
                    for (var chunkId in chunks) {
                      var buffer = chunks[chunkId];
                      tempTyped.set(new Uint8Array(buffer), byteOffset);
                      byteOffset += buffer.byteLength;
                      buffer = undefined;
                    }
                    chunks = undefined;
                    callback(tempTyped.buffer);
                    tempTyped = undefined;
                  }
                }
              }
            };
            getRequest.onerror = function(error) {
              errback(error);
            };
          }
        }\n'''

        # add Node.js support code, if necessary
        node_support_code = ''
        if options.support_node:
            node_support_code = '''
        if (typeof process === 'object' && typeof process.versions === 'object' && typeof process.versions.node === 'string') {
          require('fs').readFile(packageName, function(err, contents) {
            if (err) {
              errback(err);
            } else {
              callback(contents.buffer);
            }
          });
          return;
        }'''.strip()
        ret += '''
      function fetchRemotePackage(packageName, packageSize, callback, errback) {
        %(node_support_code)s
        var xhr = new XMLHttpRequest();
        xhr.open('GET', packageName, true);
        xhr.responseType = 'arraybuffer';
        xhr.onprogress = function(event) {
          var url = packageName;
          var size = packageSize;
          if (event.total) size = event.total;
          if (event.loaded) {
            if (!xhr.addedTotal) {
              xhr.addedTotal = true;
              if (!Module.dataFileDownloads) Module.dataFileDownloads = {};
              Module.dataFileDownloads[url] = {
                loaded: event.loaded,
                total: size
              };
            } else {
              Module.dataFileDownloads[url].loaded = event.loaded;
            }
            var total = 0;
            var loaded = 0;
            var num = 0;
            for (var download in Module.dataFileDownloads) {
              var data = Module.dataFileDownloads[download];
              total += data.total;
              loaded += data.loaded;
              num++;
            }
            total = Math.ceil(total * Module.expectedDataFileDownloads/num);
            if (Module['setStatus']) Module['setStatus']('Downloading data... (' + loaded + '/' + total + ')');
          } else if (!Module.dataFileDownloads) {
            if (Module['setStatus']) Module['setStatus']('Downloading data...');
          }
        };
        xhr.onerror = function(event) {
          throw new Error("NetworkError for: " + packageName);
        }
        xhr.onload = function(event) {
          if (xhr.status == 200 || xhr.status == 304 || xhr.status == 206 || (xhr.status == 0 && xhr.response)) { // file URLs can return 0
            var packageData = xhr.response;
            callback(packageData);
          } else {
            throw new Error(xhr.statusText + " : " + xhr.responseURL);
          }
        };
        xhr.send(null);
      };

      function handleError(error) {
        console.error('package error:', error);
      };\n''' % {
            'node_support_code': node_support_code
        }

        code += '''
      function processPackageData(arrayBuffer) {
        assert(arrayBuffer, 'Loading data file failed.');
        assert(arrayBuffer instanceof ArrayBuffer, 'bad input to processPackageData');
        var byteArray = new Uint8Array(arrayBuffer);
        var curr;
        %s
      };
      Module['addRunDependency']('datafile_%s');\n''' % (
            use_data, js_manipulation.escape_for_js_string(data_target))
        # use basename because from the browser's point of view,
        # we need to find the datafile in the same dir as the html file

        code += '''
      if (!Module.preloadResults) Module.preloadResults = {};\n'''

        if options.use_preload_cache:
            code += '''
        function preloadFallback(error) {
          console.error(error);
          console.error('falling back to default preload behavior');
          fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
        };

        openDatabase(
          function(db) {
            checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
              function(useCached, metadata) {
                Module.preloadResults[PACKAGE_NAME] = {fromCache: useCached};
                if (useCached) {
                  fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
                } else {
                  fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
                    function(packageData) {
                      cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
                        function(error) {
                          console.error(error);
                          processPackageData(packageData);
                        });
                    }
                  , preloadFallback);
                }
              }
            , preloadFallback);
          }
        , preloadFallback);

        if (Module['setStatus']) Module['setStatus']('Downloading...');\n'''
        else:
            # Not using the preload cache, so we might as well start the xhr ASAP,
            # potentially before JS parsing of the main codebase if it's after us.
            # The only tricky bit is that both the fetch and the call to runWithFS
            # happen asynchronously, so we handle both orderings.
            ret += '''
      var fetchedCallback = null;
      var fetched = Module['getPreloadedPackage'] ? Module['getPreloadedPackage'](REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE) : null;

      if (!fetched) fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, function(data) {
        if (fetchedCallback) {
          fetchedCallback(data);
          fetchedCallback = null;
        } else {
          fetched = data;
        }
      }, handleError);\n'''

            code += '''
      Module.preloadResults[PACKAGE_NAME] = {fromCache: false};
      if (fetched) {
        processPackageData(fetched);
        fetched = null;
      } else {
        fetchedCallback = processPackageData;
      }\n'''

    ret += '''
    function runWithFS() {\n'''
    ret += code
    ret += '''
    }
    if (Module['calledRun']) {
      runWithFS();
    } else {
      if (!Module['preRun']) Module['preRun'] = [];
      Module["preRun"].push(runWithFS); // FS is not initialized yet, wait for it
    }\n'''

    if options.separate_metadata:
        _metadata_template = '''
    Module['removeRunDependency']('%(metadata_file)s');
  }

  function runMetaWithFS() {
    Module['addRunDependency']('%(metadata_file)s');
    var REMOTE_METADATA_NAME = Module['locateFile'] ? Module['locateFile']('%(metadata_file)s', '') : '%(metadata_file)s';
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
     if (xhr.readyState === 4 && xhr.status === 200) {
       loadPackage(JSON.parse(xhr.responseText));
     }
    }
    xhr.open('GET', REMOTE_METADATA_NAME, true);
    xhr.overrideMimeType('application/json');
    xhr.send(null);
  }

  if (Module['calledRun']) {
    runMetaWithFS();
  } else {
    if (!Module['preRun']) Module['preRun'] = [];
    Module["preRun"].push(runMetaWithFS);
  }\n''' % {
            'metadata_file': os.path.basename(options.jsoutput + '.metadata')
        }

    else:
        _metadata_template = '''
    }
    loadPackage(%s);\n''' % json.dumps(metadata)

    ret += '''%s
  })();\n''' % _metadata_template

    return ret
Example #3
def emscript(in_wasm, out_wasm, outfile_js, memfile):
    # Overview:
    #   * Run wasm-emscripten-finalize to extract metadata and modify the binary
    #     to use emscripten's wasm<->JS ABI
    #   * Use the metadata to generate the JS glue that goes with the wasm

    if settings.SINGLE_FILE:
        # placeholder strings for JS glue, to be replaced with subresource locations in do_binaryen
        settings.WASM_BINARY_FILE = '<<< WASM_BINARY_FILE >>>'
    else:
        # set file locations, so that JS glue can find what it needs
        settings.WASM_BINARY_FILE = js_manipulation.escape_for_js_string(
            os.path.basename(out_wasm))

    metadata = finalize_wasm(in_wasm, out_wasm, memfile)

    update_settings_glue(out_wasm, metadata)

    if not settings.WASM_BIGINT and metadata['emJsFuncs']:
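        # Presumed rationale: without WASM_BIGINT, i64 arguments are legalized into
        # pairs of i32s at the wasm boundary, so an EM_JS import's wasm signature can
        # carry more params than its C signature declares; the check below warns
        # when the counts differ.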
        module = webassembly.Module(in_wasm)
        types = module.get_types()
        import_map = {}
        for imp in module.get_imports():
            import_map[imp.field] = imp
        for em_js_func, raw in metadata.get('emJsFuncs', {}).items():
            c_sig = raw.split('<::>')[0].strip('()')
            if not c_sig or c_sig == 'void':
                c_sig = []
            else:
                c_sig = c_sig.split(',')
            if em_js_func in import_map:
                imp = import_map[em_js_func]
                assert (imp.kind == webassembly.ExternType.FUNC)
                signature = types[imp.type]
                if len(signature.params) != len(c_sig):
                    diagnostics.warning(
                        'em-js-i64',
                        'using 64-bit arguments in EM_JS function without WASM_BIGINT is not yet fully supported: `%s` (%s, %s)',
                        em_js_func, c_sig, signature.params)

    if settings.SIDE_MODULE:
        if metadata['asmConsts']:
            exit_with_error('EM_ASM is not supported in side modules')
        if metadata['emJsFuncs']:
            exit_with_error('EM_JS is not supported in side modules')
        logger.debug('emscript: skipping remaining js glue generation')
        return

    if DEBUG:
        logger.debug('emscript: js compiler glue')
        t = time.time()

    # memory and global initializers

    if settings.RELOCATABLE:
        dylink_sec = webassembly.parse_dylink_section(in_wasm)
        static_bump = align_memory(dylink_sec.mem_size)
        set_memory(static_bump)
        logger.debug('stack_base: %d, stack_max: %d, heap_base: %d',
                     settings.STACK_BASE, settings.STACK_MAX,
                     settings.HEAP_BASE)

        # When building relocatable output (e.g. MAIN_MODULE) the reported table
        # size does not include the reserved slot at zero for the null pointer.
        # So we need to offset the elements by 1.
        if settings.INITIAL_TABLE == -1:
            settings.INITIAL_TABLE = dylink_sec.table_size + 1

        if settings.ASYNCIFY:
            metadata['globalImports'] += [
                '__asyncify_state', '__asyncify_data'
            ]

    invoke_funcs = metadata['invokeFuncs']
    if invoke_funcs:
        settings.DEFAULT_LIBRARY_FUNCS_TO_INCLUDE += ['$getWasmTableEntry']

    glue, forwarded_data = compile_settings()
    if DEBUG:
        logger.debug('  emscript: glue took %s seconds' % (time.time() - t))
        t = time.time()

    forwarded_json = json.loads(forwarded_data)

    if forwarded_json['warnings']:
        diagnostics.warning('js-compiler',
                            'warnings in JS library compilation')

    pre, post = glue.split('// EMSCRIPTEN_END_FUNCS')

    if settings.ASSERTIONS:
        pre += "function checkIncomingModuleAPI() {\n"
        for sym in settings.ALL_INCOMING_MODULE_JS_API:
            if sym not in settings.INCOMING_MODULE_JS_API:
                pre += f"  ignoredModuleProp('{sym}');\n"
        pre += "}\n"

    exports = metadata['exports']

    if settings.ASYNCIFY:
        exports += [
            'asyncify_start_unwind', 'asyncify_stop_unwind',
            'asyncify_start_rewind', 'asyncify_stop_rewind'
        ]

    report_missing_symbols(forwarded_json['librarySymbols'])

    if not outfile_js:
        logger.debug('emscript: skipping remaining js glue generation')
        return

    if settings.MINIMAL_RUNTIME:
        # In MINIMAL_RUNTIME, atinit exists in the postamble part
        post = apply_static_code_hooks(forwarded_json, post)
    else:
        # In regular runtime, atinits etc. exist in the preamble part
        pre = apply_static_code_hooks(forwarded_json, pre)

    asm_consts = create_asm_consts(metadata)
    em_js_funcs = create_em_js(metadata)
    asm_const_pairs = ['%s: %s' % (key, value) for key, value in asm_consts]
    asm_const_map = 'var ASM_CONSTS = {\n  ' + ',\n  '.join(
        asm_const_pairs) + '\n};\n'
    pre = pre.replace('// === Body ===',
                      ('// === Body ===\n\n' + asm_const_map +
                       '\n'.join(em_js_funcs) + '\n'))

    with open(outfile_js, 'w', encoding='utf-8') as out:
        out.write(normalize_line_endings(pre))
        pre = None

        sending = create_sending(invoke_funcs, metadata)
        receiving = create_receiving(exports)

        if settings.MINIMAL_RUNTIME:
            if settings.DECLARE_ASM_MODULE_EXPORTS:
                post = compute_minimal_runtime_initializer_and_exports(
                    post, exports, receiving)
            receiving = ''

        module = create_module(sending, receiving, invoke_funcs, metadata)

        write_output_file(out, module)

        out.write(normalize_line_endings(post))
        module = None
Example #4
def main():
    # The output .data archive comes from the first positional argument;
    # option parsing below starts at sys.argv[2].
    data_target = sys.argv[1]
    data_files = []
    export_name = 'Module'
    leading = ''
    has_preloaded = False
    plugins = []
    jsoutput = None
    from_emcc = False
    force = True
    # If set to True, IndexedDB (IDBFS in library_idbfs.js) is used to locally
    # cache VFS XHR so that subsequent page loads can read the data from the
    # offline cache instead.
    use_preload_cache = False
    indexeddb_name = 'EM_PRELOAD_CACHE'
    # If set to True, the package metadata is stored separately from the js-output
    # file, which keeps the js-output file unchanged when the package contents change.
    # If set to False, the package metadata is stored inside the js-output file,
    # which means the js-output file changes on every invocation of this packager tool.
    separate_metadata = False
    lz4 = False
    use_preload_plugins = False
    support_node = True
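    # Hypothetical invocation (paths are placeholders):
    #   python file_packager.py game.data --preload assets/ --js-output=game.data.js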

    for arg in sys.argv[2:]:
        if arg == '--preload':
            has_preloaded = True
            leading = 'preload'
        elif arg == '--embed':
            leading = 'embed'
        elif arg == '--exclude':
            leading = 'exclude'
        elif arg == '--no-force':
            force = False
            leading = ''
        elif arg == '--use-preload-cache':
            use_preload_cache = True
            leading = ''
        elif arg.startswith('--indexedDB-name'):
            indexeddb_name = arg.split('=', 1)[1] if '=' in arg else None
            leading = ''
        elif arg == '--no-heap-copy':
            print(
                'ignoring legacy flag --no-heap-copy (that is the only mode supported now)'
            )
            leading = ''
        elif arg == '--separate-metadata':
            separate_metadata = True
            leading = ''
        elif arg == '--lz4':
            lz4 = True
            leading = ''
        elif arg == '--use-preload-plugins':
            use_preload_plugins = True
            leading = ''
        elif arg == '--no-node':
            support_node = False
            leading = ''
        elif arg.startswith('--js-output'):
            jsoutput = arg.split('=', 1)[1] if '=' in arg else None
            leading = ''
        elif arg.startswith('--export-name'):
            if '=' in arg:
                export_name = arg.split('=', 1)[1]
            leading = ''
        elif arg.startswith('--from-emcc'):
            from_emcc = True
            leading = ''
        elif arg.startswith('--plugin'):
            with open(arg.split('=', 1)[1]) as f:
                plugin = f.read()
            eval(plugin)  # should append itself to plugins
            leading = ''
        elif leading == 'preload' or leading == 'embed':
            mode = leading
            # Position of @ if we're doing 'src@dst'. '@@' is temporarily replaced
            # with '__' so the index matches the original string when literal @
            # characters were escaped.
            at_position = arg.replace('@@', '__').find('@')
            # '@@' in input string means there is an actual @ character, a single '@'
            # means the 'src@dst' notation.
            uses_at_notation = (at_position != -1)
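            # Illustrative inputs: 'src/a.txt@/data/a.txt' packs src/a.txt as
            # /data/a.txt, while 'weird@@name.txt' means the literal file
            # 'weird@name.txt' with no explicit destination.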

            if uses_at_notation:
                srcpath = arg[0:at_position].replace('@@',
                                                     '@')  # split around the @
                dstpath = arg[at_position + 1:].replace('@@', '@')
            else:
                # Use source path as destination path.
                srcpath = dstpath = arg.replace('@@', '@')
            if os.path.isfile(srcpath) or os.path.isdir(srcpath):
                data_files.append({
                    'srcpath': srcpath,
                    'dstpath': dstpath,
                    'mode': mode,
                    'explicit_dst_path': uses_at_notation
                })
            else:
                print('error: ' + arg + ' does not exist', file=sys.stderr)
                return 1
        elif leading == 'exclude':
            excluded_patterns.append(arg)
        else:
            print('Unknown parameter:', arg, file=sys.stderr)
            return 1

    if (not force) and not data_files:
        has_preloaded = False
    if not has_preloaded or jsoutput is None:
        assert not separate_metadata, (
            'cannot separate-metadata without both --preloaded files '
            'and a specified --js-output')

    if not from_emcc:
        print(
            'Remember to build the main file with  -s FORCE_FILESYSTEM=1  '
            'so that it includes support for loading this file package',
            file=sys.stderr)

    if jsoutput and os.path.abspath(jsoutput) == os.path.abspath(data_target):
        print('error: TARGET should not be the same as the value of --js-output',
              file=sys.stderr)
        return 1

    ret = ''
    # emcc will add this to the output itself, so it is only needed for
    # standalone calls
    if not from_emcc:
        ret = '''
  var Module = typeof %(EXPORT_NAME)s !== 'undefined' ? %(EXPORT_NAME)s : {};
  ''' % {
            "EXPORT_NAME": export_name
        }

    ret += '''
  if (!Module.expectedDataFileDownloads) {
    Module.expectedDataFileDownloads = 0;
  }
  Module.expectedDataFileDownloads++;
  (function() {
    // When running as a pthread, FS operations are proxied to the main thread, so we don't need to
    // fetch the .data bundle on the worker
    if (Module['ENVIRONMENT_IS_PTHREAD']) return;
    var loadPackage = function(metadata) {
  '''

    code = '''
      function assert(check, msg) {
        if (!check) throw msg + new Error().stack;
      }
  '''

    new_data_files = []
    for file_ in data_files:
        if not should_ignore(file_['srcpath']):
            if os.path.isdir(file_['srcpath']):
                add(file_['mode'], file_['srcpath'], file_['dstpath'])
            else:
                new_data_files.append(file_)
    data_files = [
        file_ for file_ in new_data_files
        if not os.path.isdir(file_['srcpath'])
    ]
    if len(data_files) == 0:
        print('Nothing to do!', file=sys.stderr)
        sys.exit(1)

    # Absolutize paths, and check that they make sense
    # os.getcwd() always returns the hard path with any symbolic links resolved,
    # even if we cd'd into a symbolic link.
    curr_abspath = os.path.abspath(os.getcwd())

    for file_ in data_files:
        if not file_['explicit_dst_path']:
            # This file was not defined with src@dst, so we inferred the destination
            # from the source. In that case, we require that the path be contained
            # within the current directory, since that directory becomes the root
            # that the generated code will see.
            path = file_['dstpath']
            # Use os.path.realpath to resolve any symbolic links to hard paths,
            # to match the structure in curr_abspath.
            abspath = os.path.realpath(os.path.abspath(path))
            if DEBUG:
                print(path, abspath, curr_abspath, file=sys.stderr)
            if not abspath.startswith(curr_abspath):
                print(
                    'Error: Embedding "%s" which is not contained within the current '
                    'directory "%s". This is invalid since the current directory '
                    'becomes the root that the generated code will see' %
                    (path, curr_abspath),
                    file=sys.stderr)
                sys.exit(1)
            file_['dstpath'] = abspath[len(curr_abspath) + 1:]
            if os.path.isabs(path):
                print(
                    'Warning: Embedding an absolute file/directory name "%s" to the '
                    'virtual filesystem. The file will be made available in the '
                    'relative path "%s". You can use the explicit syntax '
                    '--preload-file srcpath@dstpath to explicitly specify the target '
                    'location the absolute source path should be directed to.'
                    % (path, file_['dstpath']),
                    file=sys.stderr)

    for file_ in data_files:
        # name in the filesystem, native and emulated
        file_['dstpath'] = file_['dstpath'].replace(os.path.sep, '/')
        # If the user supplied a directory name as the destination but omitted
        # the destination filename, use the filename from the source file
        if file_['dstpath'].endswith('/'):
            file_['dstpath'] = file_['dstpath'] + os.path.basename(
                file_['srcpath'])
        # make destination path always relative to the root
        file_['dstpath'] = posixpath.normpath(
            os.path.join('/', file_['dstpath']))
        if DEBUG:
            print('Packaging file "%s" to VFS in path "%s".' %
                  (file_['srcpath'], file_['dstpath']),
                  file=sys.stderr)

    # Remove duplicates (can occur naively, for example preload dir/, preload dir/subdir/)
    seen = {}

    def was_seen(name):
        if seen.get(name):
            return True
        seen[name] = 1
        return False

    data_files = [
        file_ for file_ in data_files if not was_seen(file_['dstpath'])
    ]
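    # e.g. preloading both 'dir/' and 'dir/subdir/' lists dir/subdir/file.txt
    # twice; the second occurrence is dropped here because its dstpath was
    # already seen.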

    if AV_WORKAROUND:
        random.shuffle(data_files)

    # Apply plugins
    for file_ in data_files:
        for plugin in plugins:
            plugin(file_)

    metadata = {'files': []}

    # Set up folders
    partial_dirs = []
    for file_ in data_files:
        dirname = os.path.dirname(file_['dstpath'])
        dirname = dirname.lstrip(
            '/')  # absolute paths start with '/', remove that
        if dirname != '':
            parts = dirname.split('/')
            for i in range(len(parts)):
                partial = '/'.join(parts[:i + 1])
                if partial not in partial_dirs:
                    code += (
                        '''Module['FS_createPath'](%s, %s, true, true);\n''' %
                        (json.dumps('/' + '/'.join(parts[:i])),
                         json.dumps(parts[i])))
                    partial_dirs.append(partial)

    if has_preloaded:
        # Bundle all datafiles into one archive. Avoids doing lots of simultaneous
        # XHRs which has overhead.
        start = 0
        with open(data_target, 'wb') as data:
            for file_ in data_files:
                file_['data_start'] = start
                with open(file_['srcpath'], 'rb') as f:
                    curr = f.read()
                file_['data_end'] = start + len(curr)
                if AV_WORKAROUND:
                    curr += b'\x00'  # curr is bytes read from the file; append a bytes NUL
                start += len(curr)
                data.write(curr)

        # TODO: sha256sum on data_target
        if start > 256 * 1024 * 1024:
            print(
                'warning: file packager is creating an asset bundle of %d MB. '
                'this is very large, and browsers might have trouble loading it. '
                'see https://hacks.mozilla.org/2015/02/synchronous-execution-and-filesystem-access-in-emscripten/'
                % (start / (1024 * 1024)),
                file=sys.stderr)

        create_preloaded = '''
          Module['FS_createPreloadedFile'](this.name, null, byteArray, true, true, function() {
            Module['removeRunDependency']('fp ' + that.name);
          }, function() {
            if (that.audio) {
              Module['removeRunDependency']('fp ' + that.name); // workaround for chromium bug 124926 (still no audio with this, but at least we don't hang)
            } else {
              err('Preloading file ' + that.name + ' failed');
            }
          }, false, true); // canOwn this data in the filesystem, it is a slice into the heap that will never change
  '''
        create_data = '''
          Module['FS_createDataFile'](this.name, null, byteArray, true, true, true); // canOwn this data in the filesystem, it is a slice into the heap that will never change
          Module['removeRunDependency']('fp ' + that.name);
  '''

        if not lz4:
            # Data requests - for getting a block of data out of the big archive - have
            # a similar API to XHRs
            code += '''
          /** @constructor */
          function DataRequest(start, end, audio) {
            this.start = start;
            this.end = end;
            this.audio = audio;
          }
          DataRequest.prototype = {
            requests: {},
            open: function(mode, name) {
              this.name = name;
              this.requests[name] = this;
              Module['addRunDependency']('fp ' + this.name);
            },
            send: function() {},
            onload: function() {
              var byteArray = this.byteArray.subarray(this.start, this.end);
              this.finish(byteArray);
            },
            finish: function(byteArray) {
              var that = this;
      %s
              this.requests[this.name] = null;
            }
          };
      %s
        ''' % (create_preloaded if use_preload_plugins else create_data, '''
              var files = metadata['files'];
              for (var i = 0; i < files.length; ++i) {
                new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio'] || 0).open('GET', files[i]['filename']);
              }
      ''')

    counter = 0
    for file_ in data_files:
        filename = file_['dstpath']
        dirname = os.path.dirname(filename)
        basename = os.path.basename(filename)
        if file_['mode'] == 'embed':
            # Embed
            data = base64_encode(utils.read_binary(file_['srcpath']))
            code += '''var fileData%d = '%s';\n''' % (counter, data)
            code += (
                '''Module['FS_createDataFile']('%s', '%s', decodeBase64(fileData%d), true, true, false);\n'''
                % (dirname, basename, counter))
            counter += 1
        elif file_['mode'] == 'preload':
            # Preload
            counter += 1

            metadata_el = {
                'filename': file_['dstpath'],
                'start': file_['data_start'],
                'end': file_['data_end'],
            }
            if filename[-4:] in AUDIO_SUFFIXES:
                metadata_el['audio'] = 1

            metadata['files'].append(metadata_el)
        else:
            assert 0

    if has_preloaded:
        if not lz4:
            # Get the big archive and split it up
            use_data = '''
          // Reuse the bytearray from the XHR as the source for file reads.
          DataRequest.prototype.byteArray = byteArray;
    '''
            use_data += '''
            var files = metadata['files'];
            for (var i = 0; i < files.length; ++i) {
              DataRequest.prototype.requests[files[i].filename].onload();
            }
      '''
            use_data += (
                "          Module['removeRunDependency']('datafile_%s');\n" %
                js_manipulation.escape_for_js_string(data_target))

        else:
            # LZ4FS usage
            temp = data_target + '.orig'
            shutil.move(data_target, temp)
            meta = shared.run_js_tool(
                utils.path_from_root('tools/lz4-compress.js'), [
                    utils.path_from_root('third_party/mini-lz4.js'), temp,
                    data_target
                ],
                stdout=PIPE)
            os.unlink(temp)
            use_data = '''
            var compressedData = %s;
            compressedData['data'] = byteArray;
            assert(typeof Module['LZ4'] === 'object', 'LZ4 not present - was your app built with  -s LZ4=1  ?');
            Module['LZ4'].loadPackage({ 'metadata': metadata, 'compressedData': compressedData }, %s);
            Module['removeRunDependency']('datafile_%s');
      ''' % (meta, "true" if use_preload_plugins else "false",
             js_manipulation.escape_for_js_string(data_target))

        package_uuid = uuid.uuid4()
        package_name = data_target
        remote_package_size = os.path.getsize(package_name)
        remote_package_name = os.path.basename(package_name)
        ret += r'''
      var PACKAGE_PATH = '';
      if (typeof window === 'object') {
        PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.toString().substring(0, window.location.pathname.toString().lastIndexOf('/')) + '/');
      } else if (typeof process === 'undefined' && typeof location !== 'undefined') {
        // web worker
        PACKAGE_PATH = encodeURIComponent(location.pathname.toString().substring(0, location.pathname.toString().lastIndexOf('/')) + '/');
      }
      var PACKAGE_NAME = '%s';
      var REMOTE_PACKAGE_BASE = '%s';
      if (typeof Module['locateFilePackage'] === 'function' && !Module['locateFile']) {
        Module['locateFile'] = Module['locateFilePackage'];
        err('warning: you defined Module.locateFilePackage, that has been renamed to Module.locateFile (using your locateFilePackage for now)');
      }
      var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;
    ''' % (js_manipulation.escape_for_js_string(data_target),
           js_manipulation.escape_for_js_string(remote_package_name))
        metadata['remote_package_size'] = remote_package_size
        metadata['package_uuid'] = str(package_uuid)
        ret += '''
      var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
      var PACKAGE_UUID = metadata['package_uuid'];
    '''

        if use_preload_cache:
            code += r'''
        var indexedDB;
        if (typeof window === 'object') {
          indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
        } else if (typeof location !== 'undefined') {
          // worker
          indexedDB = self.indexedDB;
        } else {
          throw 'using IndexedDB to cache data can only be done on a web page or in a web worker';
        }
        var IDB_RO = "readonly";
        var IDB_RW = "readwrite";
        var DB_NAME = "''' + indexeddb_name + '''";
        var DB_VERSION = 1;
        var METADATA_STORE_NAME = 'METADATA';
        var PACKAGE_STORE_NAME = 'PACKAGES';
        function openDatabase(callback, errback) {
          try {
            var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
          } catch (e) {
            return errback(e);
          }
          openRequest.onupgradeneeded = function(event) {
            var db = event.target.result;

            if(db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
              db.deleteObjectStore(PACKAGE_STORE_NAME);
            }
            var packages = db.createObjectStore(PACKAGE_STORE_NAME);

            if(db.objectStoreNames.contains(METADATA_STORE_NAME)) {
              db.deleteObjectStore(METADATA_STORE_NAME);
            }
            var metadata = db.createObjectStore(METADATA_STORE_NAME);
          };
          openRequest.onsuccess = function(event) {
            var db = event.target.result;
            callback(db);
          };
          openRequest.onerror = function(error) {
            errback(error);
          };
        };

        // This is needed as chromium has a limit on per-entry files in IndexedDB
        // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
        // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
        // We set the chunk size to 64MB to stay well-below the limit
        var CHUNK_SIZE = 64 * 1024 * 1024;

        function cacheRemotePackage(
          db,
          packageName,
          packageData,
          packageMeta,
          callback,
          errback
        ) {
          var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
          var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
          var chunkSliceStart = 0;
          var nextChunkSliceStart = 0;
          var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
          var finishedChunks = 0;
          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            nextChunkSliceStart += CHUNK_SIZE;
            var putPackageRequest = packages.put(
              packageData.slice(chunkSliceStart, nextChunkSliceStart),
              'package/' + packageName + '/' + chunkId
            );
            chunkSliceStart = nextChunkSliceStart;
            putPackageRequest.onsuccess = function(event) {
              finishedChunks++;
              if (finishedChunks == chunkCount) {
                var transaction_metadata = db.transaction(
                  [METADATA_STORE_NAME],
                  IDB_RW
                );
                var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
                var putMetadataRequest = metadata.put(
                  {
                    'uuid': packageMeta.uuid,
                    'chunkCount': chunkCount
                  },
                  'metadata/' + packageName
                );
                putMetadataRequest.onsuccess = function(event) {
                  callback(packageData);
                };
                putMetadataRequest.onerror = function(error) {
                  errback(error);
                };
              }
            };
            putPackageRequest.onerror = function(error) {
              errback(error);
            };
          }
        }

        /* Check if there's a cached package, and if so whether it's the latest available */
        function checkCachedPackage(db, packageName, callback, errback) {
          var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
          var metadata = transaction.objectStore(METADATA_STORE_NAME);
          var getRequest = metadata.get('metadata/' + packageName);
          getRequest.onsuccess = function(event) {
            var result = event.target.result;
            if (!result) {
              return callback(false, null);
            } else {
              return callback(PACKAGE_UUID === result['uuid'], result);
            }
          };
          getRequest.onerror = function(error) {
            errback(error);
          };
        }

        function fetchCachedPackage(db, packageName, metadata, callback, errback) {
          var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
          var packages = transaction.objectStore(PACKAGE_STORE_NAME);

          var chunksDone = 0;
          var totalSize = 0;
          var chunkCount = metadata['chunkCount'];
          var chunks = new Array(chunkCount);

          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            var getRequest = packages.get('package/' + packageName + '/' + chunkId);
            getRequest.onsuccess = function(event) {
              // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
              if (chunkCount == 1) {
                callback(event.target.result);
              } else {
                chunksDone++;
                totalSize += event.target.result.byteLength;
                chunks.push(event.target.result);
                if (chunksDone == chunkCount) {
                  if (chunksDone == 1) {
                    callback(event.target.result);
                  } else {
                    var tempTyped = new Uint8Array(totalSize);
                    var byteOffset = 0;
                    for (var chunkId in chunks) {
                      var buffer = chunks[chunkId];
                      tempTyped.set(new Uint8Array(buffer), byteOffset);
                      byteOffset += buffer.byteLength;
                      buffer = undefined;
                    }
                    chunks = undefined;
                    callback(tempTyped.buffer);
                    tempTyped = undefined;
                  }
                }
              }
            };
            getRequest.onerror = function(error) {
              errback(error);
            };
          }
        }
      '''

        # add Node.js support code, if necessary
        node_support_code = ''
        if support_node:
            node_support_code = r'''
        if (typeof process === 'object' && typeof process.versions === 'object' && typeof process.versions.node === 'string') {
          require('fs').readFile(packageName, function(err, contents) {
            if (err) {
              errback(err);
            } else {
              callback(contents.buffer);
            }
          });
          return;
        }
      '''
        ret += r'''
      function fetchRemotePackage(packageName, packageSize, callback, errback) {
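        // Download the package over XHR, reporting aggregate progress across all
        // data file downloads through Module['setStatus'] when it is defined.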
        %(node_support_code)s
        var xhr = new XMLHttpRequest();
        xhr.open('GET', packageName, true);
        xhr.responseType = 'arraybuffer';
        xhr.onprogress = function(event) {
          var url = packageName;
          var size = packageSize;
          if (event.total) size = event.total;
          if (event.loaded) {
            if (!xhr.addedTotal) {
              xhr.addedTotal = true;
              if (!Module.dataFileDownloads) Module.dataFileDownloads = {};
              Module.dataFileDownloads[url] = {
                loaded: event.loaded,
                total: size
              };
            } else {
              Module.dataFileDownloads[url].loaded = event.loaded;
            }
            var total = 0;
            var loaded = 0;
            var num = 0;
            for (var download in Module.dataFileDownloads) {
              var data = Module.dataFileDownloads[download];
              total += data.total;
              loaded += data.loaded;
              num++;
            }
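            // Extrapolate the combined total from the downloads seen so far, so the
            // denominator also accounts for data files that have not started yet.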
            total = Math.ceil(total * Module.expectedDataFileDownloads/num);
            if (Module['setStatus']) Module['setStatus']('Downloading data... (' + loaded + '/' + total + ')');
          } else if (!Module.dataFileDownloads) {
            if (Module['setStatus']) Module['setStatus']('Downloading data...');
          }
        };
        xhr.onerror = function(event) {
          throw new Error("NetworkError for: " + packageName);
        };
        xhr.onload = function(event) {
          if (xhr.status == 200 || xhr.status == 304 || xhr.status == 206 || (xhr.status == 0 && xhr.response)) { // file URLs can return 0
            var packageData = xhr.response;
            callback(packageData);
          } else {
            throw new Error(xhr.statusText + " : " + xhr.responseURL);
          }
        };
        xhr.send(null);
      };

      function handleError(error) {
        console.error('package error:', error);
      };
    ''' % {
            'node_support_code': node_support_code
        }

        code += r'''
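      // Process the fetched package ArrayBuffer; the function body is supplied by
      // the packager via the `use_data` substitution.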
      function processPackageData(arrayBuffer) {
        assert(arrayBuffer, 'Loading data file failed.');
        assert(arrayBuffer instanceof ArrayBuffer, 'bad input to processPackageData');
        var byteArray = new Uint8Array(arrayBuffer);
        var curr;
        %s
      };
      Module['addRunDependency']('datafile_%s');
    ''' % (use_data, js_manipulation.escape_for_js_string(data_target))
        # Use the basename because, from the browser's point of view, the data file
        # needs to be found in the same directory as the HTML file.

        code += r'''
      if (!Module.preloadResults) Module.preloadResults = {};
    '''
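        # When use_preload_cache is set, look for a copy of the package in IndexedDB
        # whose UUID matches the current build; use it if found, otherwise fetch the
        # package remotely and cache it for next time.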

        if use_preload_cache:
            code += r'''
        function preloadFallback(error) {
          console.error(error);
          console.error('falling back to default preload behavior');
          fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
        };

        openDatabase(
          function(db) {
            checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
              function(useCached, metadata) {
                Module.preloadResults[PACKAGE_NAME] = {fromCache: useCached};
                if (useCached) {
                  fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
                } else {
                  fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
                    function(packageData) {
                      cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
                        function(error) {
                          console.error(error);
                          processPackageData(packageData);
                        });
                    }
                  , preloadFallback);
                }
              }
            , preloadFallback);
          }
        , preloadFallback);

        if (Module['setStatus']) Module['setStatus']('Downloading...');
      '''
        else:
            # Not using the preload cache, so we might as well start the XHR ASAP,
            # potentially before JS parsing of the main codebase, if that comes after us.
            # The only tricky bit is that the fetch is async and the time at which
            # runWithFS is called is also not known in advance, so we handle both orderings.
            ret += r'''
        var fetchedCallback = null;
        var fetched = Module['getPreloadedPackage'] ? Module['getPreloadedPackage'](REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE) : null;

        if (!fetched) fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, function(data) {
          if (fetchedCallback) {
            fetchedCallback(data);
            fetchedCallback = null;
          } else {
            fetched = data;
          }
        }, handleError);
      '''

            code += r'''
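        // The fetch was already started above; if it has completed by the time
        // runWithFS runs, process the data immediately, otherwise hand
        // processPackageData to the pending fetch via fetchedCallback.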
        Module.preloadResults[PACKAGE_NAME] = {fromCache: false};
        if (fetched) {
          processPackageData(fetched);
          fetched = null;
        } else {
          fetchedCallback = processPackageData;
        }
      '''

    ret += '''
    function runWithFS() {
  '''
    ret += code
    ret += '''
    }
    if (Module['calledRun']) {
      runWithFS();
    } else {
      if (!Module['preRun']) Module['preRun'] = [];
      Module['preRun'].push(runWithFS); // FS is not initialized yet, wait for it
    }
  '''
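    # When separate_metadata is set, the package's file metadata is not inlined into
    # this script; it is downloaded at runtime from '<jsoutput>.metadata' and passed
    # to loadPackage().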

    if separate_metadata:
        _metadata_template = '''
    Module['removeRunDependency']('%(metadata_file)s');
   }

   function runMetaWithFS() {
    Module['addRunDependency']('%(metadata_file)s');
    var REMOTE_METADATA_NAME = Module['locateFile'] ? Module['locateFile']('%(metadata_file)s', '') : '%(metadata_file)s';
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
     if (xhr.readyState === 4 && xhr.status === 200) {
       loadPackage(JSON.parse(xhr.responseText));
     }
    };
    xhr.open('GET', REMOTE_METADATA_NAME, true);
    xhr.overrideMimeType('application/json');
    xhr.send(null);
   }

   if (Module['calledRun']) {
    runMetaWithFS();
   } else {
    if (!Module['preRun']) Module['preRun'] = [];
    Module['preRun'].push(runMetaWithFS);
   }
  ''' % {
            'metadata_file': os.path.basename(jsoutput + '.metadata')
        }

    else:
        _metadata_template = '''
   }
   loadPackage(%s);
  ''' % json.dumps(metadata)

    ret += '''%s
  })();
  ''' % _metadata_template

    if force or data_files:
        if jsoutput is None:
            print(ret)
        else:
            # Overwrite the old jsoutput file (if it exists) only when its content
            # differs from the newly generated one; otherwise leave the file untouched,
            # preserving its old timestamp.
            if os.path.isfile(jsoutput):
                with open(jsoutput) as f:
                    old = f.read()
                if old != ret:
                    with open(jsoutput, 'w') as f:
                        f.write(ret)
            else:
                with open(jsoutput, 'w') as f:
                    f.write(ret)
            if separate_metadata:
                with open(jsoutput + '.metadata', 'w') as f:
                    json.dump(metadata, f, separators=(',', ':'))

    return 0