Example #1
  def test_zzz_box2d(self): # Called thus so it runs late in the alphabetical cycle... it is long
    src = open(path_from_root('tests', 'box2d', 'Benchmark.cpp'), 'r').read()

    def lib_builder(name, native, env_init):
      return self.get_library('box2d', [os.path.join('box2d.a')], configure=None, native=native, cache_name_extra=name, env_init=env_init)

    self.do_benchmark('box2d', src, 'frame averages', shared_args=['-I' + path_from_root('tests', 'box2d')], lib_builder=lib_builder)
Example #2
  def zzz_test_zzz_lzma(self):
    src = open(path_from_root('tests', 'lzma', 'benchmark.c'), 'r').read()

    def lib_builder(name, native, env_init):
      return self.get_library('lzma', [os.path.join('lzma.a')], configure=None, native=native, cache_name_extra=name, env_init=env_init)

    self.do_benchmark('lzma', src, 'ok.', shared_args=['-I' + path_from_root('tests', 'lzma')], lib_builder=lib_builder)
Example #3
  def test_zzz_zlib(self):
    src = open(path_from_root('tests', 'zlib', 'benchmark.c'), 'r').read()

    def lib_builder(name, native, env_init):
      return self.get_library('zlib', os.path.join('libz.a'), make_args=['libz.a'], native=native, cache_name_extra=name, env_init=env_init)

    self.do_benchmark('zlib', src, '''ok.''',
                      force_c=True, shared_args=['-I' + path_from_root('tests', 'zlib')], lib_builder=lib_builder)
Example #4
  def test_zzz_bullet(self): # Called thus so it runs late in the alphabetical cycle... it is long
    src = open(path_from_root('tests', 'bullet', 'Demos', 'Benchmarks', 'BenchmarkDemo.cpp'), 'r').read()
    src += open(path_from_root('tests', 'bullet', 'Demos', 'Benchmarks', 'main.cpp'), 'r').read()

    def lib_builder(name, native, env_init):
      return self.get_library('bullet', [os.path.join('src', '.libs', 'libBulletDynamics.a'),
                                         os.path.join('src', '.libs', 'libBulletCollision.a'),
                                         os.path.join('src', '.libs', 'libLinearMath.a')],
                              configure_args=['--disable-demos', '--disable-dependency-tracking'], native=native, cache_name_extra=name, env_init=env_init)

    self.do_benchmark('bullet', src, '\nok.\n',
                      shared_args=['-I' + path_from_root('tests', 'bullet', 'src'), '-I' + path_from_root('tests', 'bullet', 'Demos', 'Benchmarks')],
                      lib_builder=lib_builder)
Example #5
  def test_enet(self):
    # this is also a good test of raw usage of emconfigure and emmake
    shared.try_delete(self.in_dir('enet'))
    shutil.copytree(path_from_root('tests', 'enet'), self.in_dir('enet'))
    with chdir(self.in_dir('enet')):
      run_process([PYTHON, path_from_root('emconfigure'), './configure'])
      run_process([PYTHON, path_from_root('emmake'), 'make'])
      enet = [self.in_dir('enet', '.libs', 'libenet.a'), '-I' + path_from_root('tests', 'enet', 'include')]

    for harness in [
      CompiledServerHarness(os.path.join('sockets', 'test_enet_server.c'), enet, 49210)
    ]:
      with harness:
        self.btest(os.path.join('sockets', 'test_enet_client.c'), expected='0', args=enet + ['-DSOCKK=%d' % harness.listen_port])
Example #6
  def __enter__(self):
    # compile the server
    # NOTE empty filename support is a hack to support
    # the current test_enet
    if self.filename:
      proc = run_process([CLANG_CC, path_from_root('tests', self.filename), '-o', 'server', '-DSOCKK=%d' % self.target_port] + shared.get_clang_native_args() + self.args, env=shared.get_clang_native_env(), stdout=PIPE, stderr=PIPE)
      print('Socket server build: out:', proc.stdout or '', '/ err:', proc.stderr or '')
      process = Popen([os.path.abspath('server')])
      self.processes.append(process)

    # start the websocket proxy
    print('running websockify on %d, forward to tcp %d' % (self.listen_port, self.target_port), file=sys.stderr)
    wsp = websockify.WebSocketProxy(verbose=True, listen_port=self.listen_port, target_host="127.0.0.1", target_port=self.target_port, run_once=True)
    self.websockify = multiprocessing.Process(target=wsp.start_server)
    self.websockify.start()
    self.processes.append(self.websockify)
    # Make sure both the actual server and the websocket proxy are running
    for i in range(10):
      try:
        if self.do_server_check:
          server_sock = socket.create_connection(('localhost', self.target_port), timeout=1)
          server_sock.close()
        proxy_sock = socket.create_connection(('localhost', self.listen_port), timeout=1)
        proxy_sock.close()
        break
      except OSError:
        time.sleep(1)
    else:
      clean_processes(self.processes)
      raise Exception('[Websockify failed to start up in a timely manner]')

    print('[Websockify on process %s]' % str(self.processes[-2:]))
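The readiness loop above is a generic poll-until-connect pattern. A minimal standalone sketch of the same idea, using only the standard library (the helper name is hypothetical, not part of the harness):

import socket
import time


def wait_for_port(port, host='localhost', attempts=10, delay=1.0):
  # Poll until a TCP connection to (host, port) succeeds; give up after `attempts` tries.
  for _ in range(attempts):
    try:
      sock = socket.create_connection((host, port), timeout=1)
      sock.close()
      return True
    except OSError:
      time.sleep(delay)
  return False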
Example #7
  def test_sockets_select_server_closes_connection_rw(self):
    sockets_include = '-I' + path_from_root('tests', 'sockets')

    for harness in [
      WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DCLOSE_CLIENT_AFTER_ECHO'], 49200),
      CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DCLOSE_CLIENT_AFTER_ECHO'], 49201)
    ]:
      with harness:
        self.btest(os.path.join('sockets', 'test_sockets_select_server_closes_connection_client_rw.c'), expected='266', args=[sockets_include, '-DSOCKK=%d' % harness.listen_port])
Example #8
  def __enter__(self):
    # assuming this is only used for WebSocket tests at the moment, validate that
    # the ws module is installed
    child = run_process(NODE_JS + ['-e', 'require("ws");'], check=False)
    global node_ws_module_installed
    # Attempt to automatically install ws module for Node.js.
    if child.returncode != 0 and not node_ws_module_installed:
      node_ws_module_installed = True
      run_process([NPM, 'install', path_from_root('tests', 'sockets', 'ws')], cwd=os.path.dirname(EMCC))
      # Did installation succeed?
      child = run_process(NODE_JS + ['-e', 'require("ws");'], check=False)
    assert child.returncode == 0, 'ws module for Node.js not installed, and automatic installation failed! Please run \'npm install\' from %s' % shared.__rootpath__

    # compile the server
    proc = run_process([PYTHON, EMCC, path_from_root('tests', self.filename), '-o', 'server.js', '-DSOCKK=%d' % self.listen_port] + self.args)
    print('Socket server build: out:', proc.stdout or '', '/ err:', proc.stderr or '')

    process = Popen(NODE_JS + ['server.js'])
    self.processes.append(process)
Example #9
 def create_optimizer():
   shared.logging.debug('building native optimizer: ' + name)
   output = shared.Cache.get_path(name)
   shared.try_delete(output)
   for compiler in [shared.CLANG, 'g++', 'clang++']: # try our clang first, otherwise hope for a system compiler in the path
     shared.logging.debug('  using ' + compiler)
     try:
       out, err = subprocess.Popen([compiler,
                                    shared.path_from_root('tools', 'optimizer', 'parser.cpp'),
                                    shared.path_from_root('tools', 'optimizer', 'simple_ast.cpp'),
                                    shared.path_from_root('tools', 'optimizer', 'optimizer.cpp'),
                                    shared.path_from_root('tools', 'optimizer', 'optimizer-shared.cpp'),
                                    shared.path_from_root('tools', 'optimizer', 'optimizer-main.cpp'),
                                    '-O3', '-std=c++11', '-fno-exceptions', '-fno-rtti', '-o', output] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
       outs.append(out)
       errs.append(err)
     except OSError:
       if compiler == shared.CLANG: raise # otherwise, OSError is likely due to g++ or clang++ not being in the path
     if os.path.exists(output): return output
   raise NativeOptimizerCreationException()
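The loop above falls back through a list of compilers, treating OSError as "binary not on PATH". A standalone sketch of that probing pattern, under the assumption that a --version invocation is a sufficient availability check (helper name hypothetical):

import subprocess


def first_available_compiler(candidates=('clang++', 'g++')):
  # Return the first compiler that exists on PATH and runs, or None.
  for compiler in candidates:
    try:
      subprocess.run([compiler, '--version'], stdout=subprocess.DEVNULL,
                     stderr=subprocess.DEVNULL, check=True)
      return compiler
    except (OSError, subprocess.CalledProcessError):
      continue  # missing or broken; try the next candidate
  return None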
Example #10
  def test_sockets_echo_bigdata(self):
    sockets_include = '-I' + path_from_root('tests', 'sockets')

    # generate a large string literal to use as our message
    message = ''
    for i in range(256 * 256 * 2):
      message += chr(ord('a') + (i % 26))

    # re-write the client test with this literal (it's too big to pass via command line)
    input_filename = path_from_root('tests', 'sockets', 'test_sockets_echo_client.c')
    input = open(input_filename).read()
    output = input.replace('#define MESSAGE "pingtothepong"', '#define MESSAGE "%s"' % message)

    harnesses = [
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=0'], 49172), 0),
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=1'], 49173), 1)
    ]

    if not WINDOWS: # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
      harnesses += [(WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include], 49171), 0)]

    for harness, datagram in harnesses:
      with harness:
        self.btest(output, expected='0', args=[sockets_include, '-DSOCKK=%d' % harness.listen_port, '-DTEST_DGRAM=%d' % datagram], force_c=True)
Example #11
  def lua(self, benchmark, expected, output_parser=None, args_processor=None):
    shutil.copyfile(path_from_root('tests', 'lua', benchmark + '.lua'), benchmark + '.lua')

    def lib_builder(name, native, env_init):
      ret = self.get_library('lua_native' if native else 'lua', [os.path.join('src', 'lua'), os.path.join('src', 'liblua.a')], make=['make', 'generic'], configure=None, native=native, cache_name_extra=name, env_init=env_init)
      if native:
        return ret
      shutil.copyfile(ret[0], ret[0] + '.bc')
      ret[0] += '.bc'
      return ret

    self.do_benchmark('lua_' + benchmark, '', expected,
                      force_c=True, args=[benchmark + '.lua', DEFAULT_ARG],
                      emcc_args=['--embed-file', benchmark + '.lua', '-s', 'FORCE_FILESYSTEM=1'],
                      lib_builder=lib_builder, native_exec=os.path.join('building', 'lua_native', 'src', 'lua'),
                      output_parser=output_parser, args_processor=args_processor)
Example #12
 def fasta(self, name, double_rep, emcc_args=[]):
   src = open(path_from_root('tests', 'fasta.cpp'), 'r').read().replace('double', double_rep)
   src = src.replace('   const size_t n = ( argc > 1 ) ? atoi( argv[1] ) : 512;', '''
     int n;
     int arg = argc > 1 ? argv[1][0] - '0' : 3;
     switch(arg) {
       case 0: return 0; break;
       case 1: n = 19000000/20; break;
       case 2: n = 19000000/2; break;
       case 3: n = 19000000; break;
       case 4: n = 19000000*5; break;
       case 5: n = 19000000*10; break;
       default: printf("error: %d\\n", arg); return -1;
     }
   ''')
   assert 'switch(arg)' in src
   self.do_benchmark('fasta', src, '')
Example #13
    def test_sockets_async_echo(self):
        # Run with ./runner.py sockets.test_sockets_async_echo
        sockets_include = '-I' + path_from_root('tests', 'sockets')

        # Websockify-proxied servers can't run dgram tests
        harnesses = [
            (CompiledServerHarness(
                os.path.join('sockets', 'test_sockets_echo_server.c'),
                [sockets_include, '-DTEST_DGRAM=0', '-DTEST_ASYNC=1'],
                49167), 0),
            (CompiledServerHarness(
                os.path.join('sockets', 'test_sockets_echo_server.c'),
                [sockets_include, '-DTEST_DGRAM=1', '-DTEST_ASYNC=1'],
                49168), 1),
            # The following forces non-NULL addr and addlen parameters for the accept call
            (CompiledServerHarness(
                os.path.join('sockets', 'test_sockets_echo_server.c'), [
                    sockets_include, '-DTEST_DGRAM=0', '-DTEST_ACCEPT_ADDR=1',
                    '-DTEST_ASYNC=1'
                ], 49169), 0)
        ]

        if not WINDOWS:  # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
            harnesses += [(WebsockifyServerHarness(
                os.path.join('sockets', 'test_sockets_echo_server.c'),
                [sockets_include, '-DTEST_ASYNC=1'], 49166), 0)]

        for harness, datagram in harnesses:
            print('harness:', harness)
            with harness:
                self.btest(os.path.join('sockets',
                                        'test_sockets_echo_client.c'),
                           expected='0',
                           args=[
                               '-DSOCKK=%d' % harness.listen_port,
                               '-DTEST_DGRAM=%d' % datagram, '-DTEST_ASYNC=1',
                               sockets_include
                           ])

        # Deliberately attempt a connection on a port that will fail to test the error callback and getsockopt
        print('expect fail')
        self.btest(os.path.join('sockets', 'test_sockets_echo_client.c'),
                   expected='0',
                   args=['-DSOCKK=49169', '-DTEST_ASYNC=1', sockets_include])
Example #14
 def fasta(self, name, double_rep, emcc_args=[]):
     src = open(path_from_root('tests', 'fasta.cpp'),
                'r').read().replace('double', double_rep)
     src = src.replace(
         '   const size_t n = ( argc > 1 ) ? atoi( argv[1] ) : 512;', '''
   int n;
   int arg = argc > 1 ? argv[1][0] - '0' : 3;
   switch(arg) {
     case 0: return 0; break;
     case 1: n = 19000000/20; break;
     case 2: n = 19000000/2; break;
     case 3: n = 19000000; break;
     case 4: n = 19000000*5; break;
     case 5: n = 19000000*10; break;
     default: printf("error: %d\\n", arg); return -1;
   }
 ''')
     assert 'switch(arg)' in src
     self.do_benchmark('fasta', src, '')
Example #15
 def test_fannkuch(self):
   src = open(path_from_root('tests', 'fannkuch.cpp'), 'r').read().replace(
     'int n = argc > 1 ? atoi(argv[1]) : 0;',
     '''
       int n;
       int arg = argc > 1 ? argv[1][0] - '0' : 3;
       switch(arg) {
         case 0: return 0; break;
         case 1: n = 9; break;
         case 2: n = 10; break;
         case 3: n = 11; break;
         case 4: n = 11; break;
         case 5: n = 12; break;
         default: printf("error: %d\\n", arg); return -1;
       }
     '''
   )
   assert 'switch(arg)' in src
   self.do_benchmark('fannkuch', src, 'Pfannkuchen(')
Example #16
    def test_posix_proxy_sockets(self):
        # Build the websocket bridge server
        self.run_process(
            ['cmake',
             path_from_root('tools/websocket_to_posix_proxy')])
        self.run_process(['cmake', '--build', '.'])
        if os.name == 'nt':  # This is not quite exact, instead of "isWindows()" this should be "If CMake defaults to building with Visual Studio", but there is no good check for that, so assume Windows==VS.
            proxy_server = self.in_dir('Debug', 'websocket_to_posix_proxy.exe')
        else:
            proxy_server = self.in_dir('websocket_to_posix_proxy')

        with BackgroundServerProcess([proxy_server, '8080']):
            with PythonTcpEchoServerProcess('7777'):
                # Build and run the TCP echo client program with Emscripten
                self.btest_exit(test_file('websocket/tcp_echo_client.c'),
                                args=[
                                    '-lwebsocket', '-sPROXY_POSIX_SOCKETS',
                                    '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'
                                ])
Example #17
 def test_fannkuch(self):
   src = open(path_from_root('tests', 'fannkuch.cpp'), 'r').read().replace(
     'int n = argc > 1 ? atoi(argv[1]) : 0;',
     '''
       int n;
       int arg = argc > 1 ? argv[1][0] - '0' : 3;
       switch(arg) {
         case 0: return 0; break;
         case 1: n = 9; break;
         case 2: n = 10; break;
         case 3: n = 11; break;
         case 4: n = 11; break;
         case 5: n = 12; break;
         default: printf("error: %d\\n", arg); return -1;
       }
     '''
   )
   assert 'switch(arg)' in src
   self.do_benchmark('fannkuch', src, 'Pfannkuchen(')
Example #18
    def lua(self,
            benchmark,
            expected,
            output_parser=None,
            args_processor=None):
        self.emcc_args.remove('-Werror')
        shutil.copyfile(
            path_from_root('tests', 'third_party', 'lua', benchmark + '.lua'),
            benchmark + '.lua')

        def lib_builder(name, native, env_init):
            ret = self.get_library(os.path.join(
                'third_party', 'lua_native' if native else 'lua'), [
                    os.path.join('src', 'lua.o'),
                    os.path.join('src', 'liblua.a')
                ],
                                   make=['make', 'generic'],
                                   configure=None,
                                   native=native,
                                   cache_name_extra=name,
                                   env_init=env_init)
            if native:
                return ret
            shutil.copyfile(ret[0], ret[0] + '.bc')
            ret[0] += '.bc'
            return ret

        self.do_benchmark(
            'lua_' + benchmark,
            '',
            expected,
            force_c=True,
            args=[benchmark + '.lua', DEFAULT_ARG],
            emcc_args=[
                '--embed-file', benchmark + '.lua', '-s', 'FORCE_FILESYSTEM=1',
                '-s', 'MINIMAL_RUNTIME=0'
            ],  # not minimal because of files
            lib_builder=lib_builder,
            native_exec=os.path.join('building', 'third_party', 'lua_native',
                                     'src', 'lua'),
            output_parser=output_parser,
            args_processor=args_processor)
Example #19
    def test_sockets_select_server_closes_connection_rw(self):
        sockets_include = '-I' + path_from_root('tests', 'sockets')

        for harness in [
                WebsockifyServerHarness(
                    os.path.join('sockets', 'test_sockets_echo_server.c'),
                    [sockets_include, '-DCLOSE_CLIENT_AFTER_ECHO'], 49200),
                CompiledServerHarness(
                    os.path.join('sockets', 'test_sockets_echo_server.c'),
                    [sockets_include, '-DCLOSE_CLIENT_AFTER_ECHO'], 49201)
        ]:
            with harness:
                self.btest(os.path.join(
                    'sockets',
                    'test_sockets_select_server_closes_connection_client_rw.c'
                ),
                           expected='266',
                           args=[
                               sockets_include,
                               '-DSOCKK=%d' % harness.listen_port
                           ])
Example #20
    def __enter__(self):
        # assuming this is only used for WebSocket tests at the moment, validate that
        # the ws module is installed
        global npm_checked
        if not npm_checked:
            child = run_process(NODE_JS + ['-e', 'require("ws");'],
                                check=False)
            assert child.returncode == 0, '"ws" node module not found. You may need to run npm install.'
            npm_checked = True

        # compile the server
        proc = run_process([
            PYTHON, EMCC,
            path_from_root('tests', self.filename), '-o', 'server.js',
            '-DSOCKK=%d' % self.listen_port
        ] + self.args)
        print('Socket server build: out:', proc.stdout or '', '/ err:',
              proc.stderr or '')

        process = Popen(NODE_JS + ['server.js'])
        self.processes.append(process)
Example #21
def check_engine(engine):
    if type(engine) is list:
        engine_path = engine[0]
    else:
        engine_path = engine
    global WORKING_ENGINES
    if engine_path in WORKING_ENGINES:
        return WORKING_ENGINES[engine_path]
    try:
        logging.debug('Checking JS engine %s' % engine)
        output = run_js(shared.path_from_root('tests', 'hello_world.js'),
                        engine,
                        skip_check=True)
        if 'hello, world!' in output:
            WORKING_ENGINES[engine_path] = True
    except Exception as e:
        logging.info(
            'Checking JS engine %s failed. Check your config file. Details: %s'
            % (str(engine), str(e)))
        WORKING_ENGINES[engine_path] = False
    return WORKING_ENGINES[engine_path]
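check_engine memoizes one probe per engine path in a module-level dict, so repeated checks are free. A distilled sketch of that cache-or-probe pattern (names hypothetical):

_working = {}


def is_working(key, probe):
  # Run probe() at most once per key; remember and reuse the boolean outcome.
  if key not in _working:
    try:
      _working[key] = bool(probe())
    except Exception:
      _working[key] = False
  return _working[key]

# e.g. is_working('/usr/bin/node', lambda: True)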
Example #22
    def test_sockets_echo(self):
        sockets_include = '-I' + path_from_root('tests', 'sockets')

        # Note: in the WebsockifyServerHarness and CompiledServerHarness tests below, explicitly use consecutive server listen ports,
        # because server teardown might not occur deterministically (python dtor time) and is a bit racy.
        # WebsockifyServerHarness uses two port numbers, x and x-1, so increment it by two.
        # CompiledServerHarness only uses one. Start with 49160 & 49159 as the first server port addresses. If adding new tests,
        # increment the used port addresses below.

        # Websockify-proxied servers can't run dgram tests
        harnesses = [
            (CompiledServerHarness(
                os.path.join('sockets', 'test_sockets_echo_server.c'),
                [sockets_include, '-DTEST_DGRAM=0'], 49161), 0),
            (CompiledServerHarness(
                os.path.join('sockets', 'test_sockets_echo_server.c'),
                [sockets_include, '-DTEST_DGRAM=1'], 49162), 1),
            # The following forces non-NULL addr and addlen parameters for the accept call
            (CompiledServerHarness(
                os.path.join('sockets', 'test_sockets_echo_server.c'),
                [sockets_include, '-DTEST_DGRAM=0', '-DTEST_ACCEPT_ADDR=1'],
                49163), 0)
        ]

        if not WINDOWS:  # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
            harnesses += [(WebsockifyServerHarness(
                os.path.join('sockets', 'test_sockets_echo_server.c'),
                [sockets_include], 49160), 0)]

        for harness, datagram in harnesses:
            with harness:
                self.btest(os.path.join('sockets',
                                        'test_sockets_echo_client.c'),
                           expected='0',
                           args=[
                               '-DSOCKK=%d' % harness.listen_port,
                               '-DTEST_DGRAM=%d' % datagram, sockets_include
                           ])
Example #23
  def test_sockets_echo(self):
    sockets_include = '-I' + path_from_root('tests', 'sockets')

    # Note: in the WebsockifyServerHarness and CompiledServerHarness tests below, explicitly use consecutive server listen ports,
    # because server teardown might not occur deterministically (python dtor time) and is a bit racy.
    # WebsockifyServerHarness uses two port numbers, x and x-1, so increment it by two.
    # CompiledServerHarness only uses one. Start with 49160 & 49159 as the first server port addresses. If adding new tests,
    # increment the used port addresses below.

    # Websockify-proxied servers can't run dgram tests
    harnesses = [
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=0'], 49161), 0),
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=1'], 49162), 1),
      # The following forces non-NULL addr and addlen parameters for the accept call
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=0', '-DTEST_ACCEPT_ADDR=1'], 49163), 0)
    ]

    if not WINDOWS: # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
      harnesses += [(WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include], 49160), 0)]

    for harness, datagram in harnesses:
      with harness:
        self.btest(os.path.join('sockets', 'test_sockets_echo_client.c'), expected='0', args=['-DSOCKK=%d' % harness.listen_port, '-DTEST_DGRAM=%d' % datagram, sockets_include])
Example #24
  def test_sockets_async_echo(self):
    # Run with ./runner.py sockets.test_sockets_async_echo
    sockets_include = '-I' + path_from_root('tests', 'sockets')

    # Websockify-proxied servers can't run dgram tests
    harnesses = [
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=0', '-DTEST_ASYNC=1'], 49167), 0),
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=1', '-DTEST_ASYNC=1'], 49168), 1),
      # The following forces non-NULL addr and addlen parameters for the accept call
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=0', '-DTEST_ACCEPT_ADDR=1', '-DTEST_ASYNC=1'], 49169), 0)
    ]

    if not WINDOWS: # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
      harnesses += [(WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_ASYNC=1'], 49166), 0)]

    for harness, datagram in harnesses:
      print('harness:', harness)
      with harness:
        self.btest(os.path.join('sockets', 'test_sockets_echo_client.c'), expected='0', args=['-DSOCKK=%d' % harness.listen_port, '-DTEST_DGRAM=%d' % datagram, '-DTEST_ASYNC=1', sockets_include])

    # Deliberately attempt a connection on a port that will fail to test the error callback and getsockopt
    print('expect fail')
    self.btest(os.path.join('sockets', 'test_sockets_echo_client.c'), expected='0', args=['-DSOCKK=49169', '-DTEST_ASYNC=1', sockets_include])
Example #25
def PythonTcpEchoServerProcess(port):
  return BackgroundServerProcess([PYTHON, path_from_root('tests', 'websocket', 'tcp_echo_server.py'), port])
Example #26
  def test_webrtc(self): # XXX see src/settings.js, this is disabled pending investigation
    self.skipTest('WebRTC support is not up to date.')
    host_src = 'webrtc_host.c'
    peer_src = 'webrtc_peer.c'

    host_outfile = 'host.html'
    peer_outfile = 'peer.html'

    host_filepath = path_from_root('tests', 'sockets', host_src)
    temp_host_filepath = os.path.join(self.get_dir(), os.path.basename(host_src))
    with open(host_filepath) as f:
      host_src = f.read()
    with open(temp_host_filepath, 'w') as f:
      f.write(self.with_report_result(host_src))

    peer_filepath = path_from_root('tests', 'sockets', peer_src)
    temp_peer_filepath = os.path.join(self.get_dir(), os.path.basename(peer_src))
    with open(peer_filepath) as f:
      peer_src = f.read()
    with open(temp_peer_filepath, 'w') as f:
      f.write(self.with_report_result(peer_src))

    open(os.path.join(self.get_dir(), 'host_pre.js'), 'w').write('''
      var Module = {
        webrtc: {
          broker: 'http://localhost:8182',
          session: undefined,
          onpeer: function(peer, route) {
            window.open('http://localhost:8888/peer.html?' + route);
            // iframe = document.createElement("IFRAME");
            // iframe.setAttribute("src", "http://localhost:8888/peer.html?" + route);
            // iframe.style.display = "none";
            // document.body.appendChild(iframe);
            peer.listen();
          },
          onconnect: function(peer) {
          },
          ondisconnect: function(peer) {
          },
          onerror: function(error) {
            console.error(error);
          }
        },
        setStatus: function(text) {
          console.log('status: ' + text);
        }
      };
    ''')

    open(os.path.join(self.get_dir(), 'peer_pre.js'), 'w').write('''
      var Module = {
        webrtc: {
          broker: 'http://localhost:8182',
          session: window.location.toString().split('?')[1],
          onpeer: function(peer, route) {
            peer.connect(Module['webrtc']['session']);
          },
          onconnect: function(peer) {
          },
          ondisconnect: function(peer) {
            // Calling window.close() from this handler hangs my browser, so run it in the next turn
            setTimeout(window.close, 0);
          },
          onerror: function(error) {
            console.error(error);
          },
        },
        setStatus: function(text) {
          console.log('status: ' + text);
        }
      };
    ''')

    run_process([EMCC, temp_host_filepath, '-o', host_outfile] + ['-s', 'GL_TESTING=1', '--pre-js', 'host_pre.js', '-s', 'SOCKET_WEBRTC=1', '-s', 'SOCKET_DEBUG=1'])
    run_process([EMCC, temp_peer_filepath, '-o', peer_outfile] + ['-s', 'GL_TESTING=1', '--pre-js', 'peer_pre.js', '-s', 'SOCKET_WEBRTC=1', '-s', 'SOCKET_DEBUG=1'])

    # note: you may need to run this manually yourself, if npm is not in the path, or if you need a version that is not in the path
    run_process([NPM, 'install', path_from_root('tests', 'sockets', 'p2p')])
    broker = Popen(NODE_JS + [path_from_root('tests', 'sockets', 'p2p', 'broker', 'p2p-broker.js')])

    expected = '1'
    self.run_browser(host_outfile, '.', ['/report_result?' + e for e in expected])

    broker.kill()
Example #27
def do_wasm2c(infile):
  assert Settings.STANDALONE_WASM
  WASM2C = NODE_JS + [path_from_root('node_modules', 'wasm2c', 'wasm2c.js')]
  WASM2C_DIR = path_from_root('node_modules', 'wasm2c')
  c_file = unsuffixed(infile) + '.wasm.c'
  h_file = unsuffixed(infile) + '.wasm.h'
  cmd = WASM2C + [infile, '-o', c_file]
  check_call(cmd)
  total = '''\
/*
* This file was generated by emcc+wasm2c. To compile it, use something like
*
*   $CC FILE.c -O2 -lm -DWASM_RT_MAX_CALL_STACK_DEPTH=8000
*/
'''
  SEP = '\n/* ==================================== */\n'

  def bundle_file(total, filename):
    with open(filename) as f:
      total += '// ' + filename + '\n' + f.read() + SEP
    return total

  # hermeticize the C file, by bundling in the wasm2c/ includes
  headers = [
    (WASM2C_DIR, 'wasm-rt.h'),
    (WASM2C_DIR, 'wasm-rt-impl.h'),
    ('', h_file)
  ]
  for header in headers:
    total = bundle_file(total, os.path.join(header[0], header[1]))
  # add the wasm2c output
  with open(c_file) as read_c:
    c = read_c.read()
  total += c + SEP
  # add the wasm2c runtime
  total = bundle_file(total, os.path.join(WASM2C_DIR, 'wasm-rt-impl.c'))
  # add the support code
  support_files = ['base']
  if Settings.AUTODEBUG:
    support_files.append('autodebug')
  if Settings.EXPECT_MAIN:
    # TODO: add an option for direct OS access. For now, do that when building
    #       an executable with main, as opposed to a library
    support_files.append('os')
    support_files.append('main')
  else:
    support_files.append('os_sandboxed')
    support_files.append('reactor')
    # for a reactor, also append wasmbox_* API definitions
    with open(h_file, 'a') as f:
      f.write('''
// wasmbox_* API
// TODO: optional prefixing
extern void wasmbox_init(void);
''')
  for support_file in support_files:
    total = bundle_file(total, path_from_root('tools', 'wasm2c', support_file + '.c'))
  # remove #includes of the headers we bundled
  for header in headers:
    total = total.replace('#include "%s"\n' % header[1], '/* include of %s */\n' % header[1])
  # generate the necessary invokes
  invokes = []
  for sig in re.findall(r"\/\* import\: 'env' 'invoke_(\w+)' \*\/", total):
    all_func_types = get_func_types(total)

    def name(i):
      return 'a' + str(i)

    wabt_sig = sig[0] + 'i' + sig[1:]
    typed_args = [s_to_c(sig[i]) + ' ' + name(i) for i in range(1, len(sig))]
    full_typed_args = ['u32 fptr'] + typed_args
    types = [s_to_c(sig[i]) for i in range(1, len(sig))]
    args = [name(i) for i in range(1, len(sig))]
    c_func_type = s_to_c(sig[0]) + ' (*)(' + (', '.join(types) if types else 'void') + ')'
    if sig not in all_func_types:
      exit_with_error('could not find signature ' + sig + ' in function types ' + str(all_func_types))
    type_index = all_func_types[sig]

    invokes.append(r'''
IMPORT_IMPL(%(return_type)s, Z_envZ_invoke_%(sig)sZ_%(wabt_sig)s, (%(full_typed_args)s), {
  VERBOSE_LOG("invoke\n"); // waka
  u32 sp = Z_stackSaveZ_iv();
  if (next_setjmp >= MAX_SETJMP_STACK) {
    abort_with_message("too many nested setjmps");
  }
  u32 id = next_setjmp++;
  int result = setjmp(setjmp_stack[id]);
  %(declare_return)s
  if (result == 0) {
    %(receive)sCALL_INDIRECT(w2c___indirect_function_table, %(c_func_type)s, %(type_index)s, fptr %(args)s);
    /* if we got here, no longjmp or exception happened, we returned normally */
  } else {
    /* A longjmp or an exception took us here. */
    Z_stackRestoreZ_vi(sp);
    Z_setThrewZ_vii(1, 0);
  }
  next_setjmp--;
  %(return)s
});
''' % {
      'return_type': s_to_c(sig[0]) if sig[0] != 'v' else 'void',
      'sig': sig,
      'wabt_sig': wabt_sig,
      'full_typed_args': ', '.join(full_typed_args),
      'type_index': type_index,
      'c_func_type': c_func_type,
      'args': (', ' + ', '.join(args)) if args else '',
      'declare_return': (s_to_c(sig[0]) + ' returned_value = 0;') if sig[0] != 'v' else '',
      'receive': 'returned_value = ' if sig[0] != 'v' else '',
      'return': 'return returned_value;' if sig[0] != 'v' else ''
    })

  total += '\n'.join(invokes)
  # write out the final file
  with open(c_file, 'w') as out:
    out.write(total)
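The bundling step above hermeticizes the generated C by pasting local headers into one translation unit and then neutralizing their #include lines. A much-simplified sketch of that inlining idea (hypothetical helper; single pass, no recursion or include guards):

import os


def inline_local_includes(source, include_dirs):
  out = []
  for line in source.splitlines():
    if line.startswith('#include "'):
      name = line.split('"')[1]
      for d in include_dirs:
        path = os.path.join(d, name)
        if os.path.exists(path):
          out.append('/* inlined %s */' % name)
          with open(path) as f:
            out.append(f.read())
          break
      else:
        out.append(line)  # header not found locally; leave the include alone
    else:
      out.append(line)
  return '\n'.join(out)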
Example #28
def run():
  configure_path = shared.path_from_root('emconfigure')

  exit(subprocess.call([shared.PYTHON, configure_path] + sys.argv[1:]))
Example #29
 def test_base64(self):
     src = open(path_from_root('tests', 'base64.cpp'), 'r').read()
     self.do_benchmark('base64', src, 'decode')
Example #30
 def test_skinning(self):
     src = open(path_from_root('tests', 'skinning_test_no_simd.cpp'),
                'r').read()
     self.do_benchmark('skinning', src, 'blah=0.000000')
Example #31
  def test_nodejs_sockets_echo(self):
    # This test checks that sockets work when the client code is run in Node.js
    # Run with ./runner.py sockets.test_nodejs_sockets_echo
    if NODE_JS not in JS_ENGINES:
      self.skipTest('node is not present')

    sockets_include = '-I' + path_from_root('tests', 'sockets')

    harnesses = [
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=0'], 59162), 0),
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=1'], 59164), 1)
    ]

    if not WINDOWS: # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
      harnesses += [(WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include], 59160), 0)]

    # Basic test of node client against both a Websockified and compiled echo server.
    for harness, datagram in harnesses:
      with harness:
        run_process([EMCC, path_from_root('tests', 'sockets', 'test_sockets_echo_client.c'), '-o', 'client.js', '-DSOCKK=%d' % harness.listen_port, '-DTEST_DGRAM=%d' % datagram], stdout=PIPE, stderr=PIPE)

        out = run_js('client.js', full_output=True)
        self.assertContained('do_msg_read: read 14 bytes', out)

    if not WINDOWS: # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
      # Test against a Websockified server with compile time configured WebSocket subprotocol. We use a Websockified
      # server because as long as the subprotocol list contains binary it will configure itself to accept binary
      # This test also checks that the connect url contains the correct subprotocols.
      print("\nTesting compile time WebSocket configuration.\n")
      for harness in [
        WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include], 59166)
      ]:
        with harness:
          run_process([EMCC, path_from_root('tests', 'sockets', 'test_sockets_echo_client.c'), '-o', 'client.js', '-s', 'SOCKET_DEBUG=1', '-s', 'WEBSOCKET_SUBPROTOCOL="base64, binary"', '-DSOCKK=59166'], stdout=PIPE, stderr=PIPE)

          out = run_js('client.js', full_output=True)
          self.assertContained('do_msg_read: read 14 bytes', out)
          self.assertContained(['connect: ws://127.0.0.1:59166, base64,binary', 'connect: ws://127.0.0.1:59166/, base64,binary'], out)

      # Test against a Websockified server with runtime WebSocket configuration. We specify both url and subprotocol.
      # In this test we have *deliberately* used the wrong port '-DSOCKK=12345' to configure the echo_client.c, so
      # the connection would fail without us specifying a valid WebSocket URL in the configuration.
      print("\nTesting runtime WebSocket configuration.\n")
      for harness in [
        WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include], 59168)
      ]:
        with harness:
          open(os.path.join(self.get_dir(), 'websocket_pre.js'), 'w').write('''
          var Module = {
            websocket: {
              url: 'ws://localhost:59168/testA/testB',
              subprotocol: 'text, base64, binary',
            }
          };
          ''')

          run_process([EMCC, path_from_root('tests', 'sockets', 'test_sockets_echo_client.c'), '-o', 'client.js', '--pre-js', 'websocket_pre.js', '-s', 'SOCKET_DEBUG=1', '-DSOCKK=12345'], stdout=PIPE, stderr=PIPE)

          out = run_js('client.js', full_output=True)
          self.assertContained('do_msg_read: read 14 bytes', out)
          self.assertContained('connect: ws://localhost:59168/testA/testB, text,base64,binary', out)
Example #32
      def create_optimizer_cmake():
        shared.logging.debug('building native optimizer via CMake: ' + name)
        output = shared.Cache.get_path(name)
        shared.try_delete(output)

        if NATIVE_OPTIMIZER == '1':
          cmake_build_type = 'RelWithDebInfo'
        elif NATIVE_OPTIMIZER == '2':
          cmake_build_type = 'Release'
        elif NATIVE_OPTIMIZER == 'g':
          cmake_build_type = 'Debug'

        build_path = shared.Cache.get_path('optimizer_build_' + cmake_build_type)
        shared.try_delete(os.path.join(build_path, 'CMakeCache.txt'))

        log_output = None if DEBUG else subprocess.PIPE
        if not os.path.exists(build_path):
          os.mkdir(build_path)

        if WINDOWS:
          # Poor man's check for whether or not we should attempt 64 bit build
          if os.environ.get('ProgramFiles(x86)'):
            cmake_generators = [
              'Visual Studio 15 2017 Win64',
              'Visual Studio 15 2017',
              'Visual Studio 14 2015 Win64',
              'Visual Studio 14 2015',
              'Visual Studio 12 Win64', # The year component is omitted for compatibility with older CMake.
              'Visual Studio 12',
              'Visual Studio 11 Win64',
              'Visual Studio 11',
              'MinGW Makefiles',
              'Unix Makefiles',
            ]
          else:
            cmake_generators = [
              'Visual Studio 15 2017',
              'Visual Studio 14 2015',
              'Visual Studio 12',
              'Visual Studio 11',
              'MinGW Makefiles',
              'Unix Makefiles',
            ]
        else:
          cmake_generators = ['Unix Makefiles']

        for cmake_generator in cmake_generators:
          # Delete CMakeCache.txt so that we can switch to a new CMake generator.
          shared.try_delete(os.path.join(build_path, 'CMakeCache.txt'))
          proc = subprocess.Popen(['cmake', '-G', cmake_generator, '-DCMAKE_BUILD_TYPE='+cmake_build_type, shared.path_from_root('tools', 'optimizer')], cwd=build_path, stdin=log_output, stdout=log_output, stderr=log_output)
          proc.communicate()
          if proc.returncode == 0:
            make = ['cmake', '--build', build_path]
            if 'Visual Studio' in cmake_generator:
              make += ['--config', cmake_build_type, '--', '/nologo', '/verbosity:minimal']

            proc = subprocess.Popen(make, cwd=build_path, stdin=log_output, stdout=log_output, stderr=log_output)
            proc.communicate()
            if proc.returncode == 0:
              if WINDOWS and 'Visual Studio' in cmake_generator:
                shutil.copyfile(os.path.join(build_path, cmake_build_type, 'optimizer.exe'), output)
              else:
                shutil.copyfile(os.path.join(build_path, 'optimizer'), output)
              return output

        raise NativeOptimizerCreationException()
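create_optimizer_cmake tries CMake generators in priority order and clears CMakeCache.txt between attempts, since a stale cache pins the previous generator. A bare-bones sketch of that fallback loop (hypothetical helper; assumes cmake is on PATH):

import os
import subprocess


def first_working_generator(generators, source_dir, build_dir):
  # Configure with each generator in turn; return the first that succeeds.
  for gen in generators:
    cache = os.path.join(build_dir, 'CMakeCache.txt')
    if os.path.exists(cache):
      os.remove(cache)  # a stale cache would lock in the previous generator
    proc = subprocess.run(['cmake', '-G', gen, source_dir], cwd=build_dir,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if proc.returncode == 0:
      return gen
  return None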
Example #33
def inspect_headers(headers, cpp_opts):
  code = ['#include <stdio.h>', '#include <stddef.h>']
  for header in headers:
    code.append('#include "' + header['name'] + '"')

  code.append('int main() {')
  c_descent('structs', code)
  for header in headers:
    for name, struct in header['structs'].items():
      gen_inspect_code([name], struct, code)

  c_ascent(code)
  c_descent('defines', code)
  for header in headers:
    for name, type_ in header['defines'].items():
      # Add the necessary python type, if missing.
      if '%' not in type_:
        if type_[-1] in ('d', 'i', 'u'):
          # integer
          type_ = 'i%' + type_
        elif type_[-1] in ('f', 'F', 'e', 'E', 'g', 'G'):
          # float
          type_ = 'f%' + type_
        elif type_[-1] in ('x', 'X', 'a', 'A', 'c', 's'):
          # hexadecimal or string
          type_ = 's%' + type_

      c_set(name, type_, name, code)

  code.append('return 0;')
  code.append('}')

  # Write the source code to a temporary file.
  src_file = tempfile.mkstemp('.c')
  show('Generating C code... ' + src_file[1])
  os.write(src_file[0], shared.asbytes('\n'.join(code)))

  js_file = tempfile.mkstemp('.js')

  # Close all unneeded FDs.
  os.close(src_file[0])
  os.close(js_file[0])

  # Remove dangerous env modifications
  env = os.environ.copy()
  env['EMCC_FORCE_STDLIBS'] = 'libcompiler_rt'
  env['EMCC_ONLY_FORCED_STDLIBS'] = '1'

  info = []
  # Compile the program.
  show('Compiling generated code...')
  # -Oz optimizes enough to avoid warnings on code size/num locals
  cmd = [shared.EMCC] + cpp_opts + ['-o', js_file[1], src_file[1],
                                    '-O0',
                                    '-Werror',
                                    '-Wno-format',
                                    '-I', shared.path_from_root(),
                                    '-s', 'BOOTSTRAPPING_STRUCT_INFO=1',
                                    '-s', 'WARN_ON_UNDEFINED_SYMBOLS=0',
                                    '-s', 'STRICT=1',
                                    # Use SINGLE_FILE=1 so there is only a single
                                    # file to cleanup.
                                    '-s', 'SINGLE_FILE=1']

  # Default behavior for emcc is to warn for binaryen version check mismatches
  # so we should try to match that behavior.
  cmd += ['-Wno-error=version-check']

  # TODO(sbc): Remove this one we remove the test_em_config_env_var test
  cmd += ['-Wno-deprecated']

  if shared.Settings.LTO:
    cmd += ['-flto=' + shared.Settings.LTO]

  show(shared.shlex_join(cmd))
  try:
    subprocess.check_call(cmd, env=env)
  except subprocess.CalledProcessError as e:
    sys.stderr.write('FAIL: Compilation failed!: %s\n' % e.cmd)
    sys.exit(1)

  # Run the compiled program.
  show('Calling generated program... ' + js_file[1])
  info = shared.run_js_tool(js_file[1], stdout=shared.PIPE).splitlines()

  # Remove all temporary files.
  os.unlink(src_file[1])

  if os.path.exists(js_file[1]):
    os.unlink(js_file[1])

  # Parse the output of the program into a dict.
  return parse_c_output(info)
Example #34
        run_process([LLVM_COMPILER] + params + triple +
                    [filename + '.clean.bc', '-o', filename + '.s'],
                    check=False)
        print('s => o')
        run_process(['as', filename + '.s', '-o', filename + '.o'],
                    check=False)
        if os.path.exists(filename + '.o'):
            break
    if os.path.exists(filename + '.o'):
        break

if not os.path.exists(filename + '.o'):
    print('tools/nativize_llvm.py: Failed to convert "' + filename + '" to "' +
          filename + '.o"!',
          file=sys.stderr)
    sys.exit(1)

print('o => runnable')
args = [
    'g++',
    path_from_root('system', 'lib', 'debugging.cpp'), filename + '.o', '-o',
    filename + '.run'
] + ['-l' + lib for lib in libs]
supports_no_pie = 'no-pie' in run_process(['g++', '-dumpspecs'],
                                          stdout=PIPE).stdout
if supports_no_pie:
    args.append('-no-pie')
run_process(args)

sys.exit(0)
Example #35
    shared.try_delete(filename)
    try:
        shared.run_process(
            [COMP, '-m32', opts, fullname, '-o', filename + '1'] +
            CSMITH_CFLAGS + ['-w'])  # + shared.get_cflags()
    except CalledProcessError:
        print('Failed to compile natively using clang')
        notes['invalid'] += 1
        continue

    shared.run_process([
        COMP, '-m32', opts, '-emit-llvm', '-c', fullname, '-o', filename +
        '.bc'
    ] + CSMITH_CFLAGS + shared.get_cflags() + ['-w'])
    shared.run_process(
        [shared.path_from_root('tools', 'nativize_llvm.py'), filename + '.bc'],
        stderr=PIPE)
    shutil.move(filename + '.bc.run', filename + '2')
    shared.run_process([COMP, fullname, '-o', filename + '3'] + CSMITH_CFLAGS +
                       ['-w'])
    print('3) Run natively')
    try:
        correct1 = shared.jsrun.timeout_run(
            Popen([filename + '1'], stdout=PIPE, stderr=PIPE), 3)
        if 'Segmentation fault' in correct1 or len(correct1) < 10:
            raise Exception('segfault')
        correct2 = shared.jsrun.timeout_run(
            Popen([filename + '2'], stdout=PIPE, stderr=PIPE), 3)
        if 'Segmentation fault' in correct2 or len(correct2) < 10:
            raise Exception('segfault')
        correct3 = shared.jsrun.timeout_run(
Example #36
data_files = [file_ for file_ in data_files if not was_seen(file_['dstpath'])]

if AV_WORKAROUND:
    random.shuffle(data_files)

# Apply plugins
for file_ in data_files:
    for plugin in plugins:
        plugin(file_)

metadata = {'files': []}

# Crunch files
if crunch:
    shutil.copyfile(shared.path_from_root('tools', 'crunch-worker.js'),
                    'crunch-worker.js')
    ret += '''
    var decrunchWorker = new Worker('crunch-worker.js');
    var decrunchCallbacks = [];
    decrunchWorker.onmessage = function(msg) {
      decrunchCallbacks[msg.data.callbackID](msg.data.data);
      console.log('decrunched ' + msg.data.filename + ' in ' + msg.data.time + ' ms, ' + msg.data.data.length + ' bytes');
      decrunchCallbacks[msg.data.callbackID] = null;
    };
    function requestDecrunch(filename, data, callback) {
      decrunchWorker.postMessage({
        filename: filename,
        data: new Uint8Array(data),
        callbackID: decrunchCallbacks.length
      });
Example #37
import os
import shutil
import sys
import tempfile
from subprocess import Popen

__rootpath__ = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(__rootpath__)

from tools.shared import PYTHON, WINDOWS, CLANG_CXX, EMCC, PIPE, V8_ENGINE
from tools.shared import path_from_root, run_process, test_file
import clang_native

temp_dir = tempfile.mkdtemp()

# System info
system_info = Popen([PYTHON, path_from_root('emrun'), '--system_info'],
                    stdout=PIPE,
                    stderr=PIPE).communicate()

# Native info
native_info = Popen(['clang', '-v'], stdout=PIPE, stderr=PIPE).communicate()

# Emscripten info
emscripten_info = Popen([EMCC, '-v'], stdout=PIPE, stderr=PIPE).communicate()


def run_benchmark(benchmark_file, results_file, build_args):
    # Run native build
    out_file = os.path.join(temp_dir, 'benchmark_sse_native')
    if WINDOWS:
        out_file += '.exe'
Example #38
                 stdout=open(fullname, 'w'))
  print('1) Generate source... %.2f K' % (len(open(fullname).read()) / 1024.))

  tried += 1

  print('2) Compile natively')
  shared.try_delete(filename)
  try:
    shared.check_execute([COMP, '-m32', opts, fullname, '-o', filename + '1'] + CSMITH_CFLAGS + ['-w']) #  + shared.EMSDK_OPTS
  except CalledProcessError as e:
    print('Failed to compile natively using clang')
    notes['invalid'] += 1
    continue

  shared.check_execute([COMP, '-m32', opts, '-emit-llvm', '-c', fullname, '-o', filename + '.bc'] + CSMITH_CFLAGS + shared.EMSDK_OPTS + ['-w'])
  shared.check_execute([shared.path_from_root('tools', 'nativize_llvm.py'), filename + '.bc'], stderr=PIPE)
  shutil.move(filename + '.bc.run', filename + '2')
  shared.check_execute([COMP, fullname, '-o', filename + '3'] + CSMITH_CFLAGS + ['-w'])
  print('3) Run natively')
  try:
    correct1 = shared.jsrun.timeout_run(Popen([filename + '1'], stdout=PIPE, stderr=PIPE), 3)
    if 'Segmentation fault' in correct1 or len(correct1) < 10: raise Exception('segfault')
    correct2 = shared.jsrun.timeout_run(Popen([filename + '2'], stdout=PIPE, stderr=PIPE), 3)
    if 'Segmentation fault' in correct2 or len(correct2) < 10: raise Exception('segfault')
    correct3 = shared.jsrun.timeout_run(Popen([filename + '3'], stdout=PIPE, stderr=PIPE), 3)
    if 'Segmentation fault' in correct3 or len(correct3) < 10: raise Exception('segfault')
    if correct1 != correct3: raise Exception('clang opts change result')
  except Exception as e:
    print('Failed or infinite looping in native, skipping', e)
    notes['invalid'] += 1
    continue
Example #39
  def test_nodejs_sockets_echo(self):
    # This test checks that sockets work when the client code is run in Node.js
    # Run with ./runner.py sockets.test_nodejs_sockets_echo
    if NODE_JS not in JS_ENGINES:
      self.skipTest('node is not present')

    sockets_include = '-I' + path_from_root('tests', 'sockets')

    harnesses = [
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=0'], 59162), 0),
      (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include, '-DTEST_DGRAM=1'], 59164), 1)
    ]

    if not WINDOWS: # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
      harnesses += [(WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include], 59160), 0)]

    # Basic test of node client against both a Websockified and compiled echo server.
    for harness, datagram in harnesses:
      with harness:
        run_process([PYTHON, EMCC, path_from_root('tests', 'sockets', 'test_sockets_echo_client.c'), '-o', 'client.js', '-DSOCKK=%d' % harness.listen_port, '-DTEST_DGRAM=%d' % datagram], stdout=PIPE, stderr=PIPE)

        out = run_js('client.js', engine=NODE_JS, full_output=True)
        self.assertContained('do_msg_read: read 14 bytes', out)

    if not WINDOWS: # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
      # Test against a Websockified server with compile time configured WebSocket subprotocol. We use a Websockified
      # server because as long as the subprotocol list contains binary it will configure itself to accept binary
      # This test also checks that the connect url contains the correct subprotocols.
      print("\nTesting compile time WebSocket configuration.\n")
      for harness in [
        WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include], 59166)
      ]:
        with harness:
          run_process([PYTHON, EMCC, path_from_root('tests', 'sockets', 'test_sockets_echo_client.c'), '-o', 'client.js', '-s', 'SOCKET_DEBUG=1', '-s', 'WEBSOCKET_SUBPROTOCOL="base64, binary"', '-DSOCKK=59166'], stdout=PIPE, stderr=PIPE)

          out = run_js('client.js', engine=NODE_JS, full_output=True)
          self.assertContained('do_msg_read: read 14 bytes', out)
          self.assertContained(['connect: ws://127.0.0.1:59166, base64,binary', 'connect: ws://127.0.0.1:59166/, base64,binary'], out)

      # Test against a Websockified server with runtime WebSocket configuration. We specify both url and subprotocol.
      # In this test we have *deliberately* used the wrong port '-DSOCKK=12345' to configure the echo_client.c, so
      # the connection would fail without us specifying a valid WebSocket URL in the configuration.
      print("\nTesting runtime WebSocket configuration.\n")
      for harness in [
        WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'), [sockets_include], 59168)
      ]:
        with harness:
          open(os.path.join(self.get_dir(), 'websocket_pre.js'), 'w').write('''
          var Module = {
            websocket: {
              url: 'ws://localhost:59168/testA/testB',
              subprotocol: 'text, base64, binary',
            }
          };
          ''')

          run_process([PYTHON, EMCC, path_from_root('tests', 'sockets', 'test_sockets_echo_client.c'), '-o', 'client.js', '--pre-js', 'websocket_pre.js', '-s', 'SOCKET_DEBUG=1', '-DSOCKK=12345'], stdout=PIPE, stderr=PIPE)

          out = run_js('client.js', engine=NODE_JS, full_output=True)
          self.assertContained('do_msg_read: read 14 bytes', out)
          self.assertContained('connect: ws://localhost:59168/testA/testB, text,base64,binary', out)
Example #40
        DataRequest.prototype.byteArray = byteArray;
  '''
    use_data += '''
          var files = metadata.files;
          for (var i = 0; i < files.length; ++i) {
            DataRequest.prototype.requests[files[i].filename].onload();
          }
    '''
    use_data += ("          Module['removeRunDependency']('datafile_%s');\n"
                 % shared.JS.escape_for_js_string(data_target))

  else:
    # LZ4FS usage
    temp = data_target + '.orig'
    shutil.move(data_target, temp)
    meta = run_js(shared.path_from_root('tools', 'lz4-compress.js'),
                  shared.NODE_JS,
                  [shared.path_from_root('src', 'mini-lz4.js'),
                   temp, data_target], stdout=PIPE)
    os.unlink(temp)
    use_data = '''
          var compressedData = %s;
          compressedData.data = byteArray;
          assert(typeof LZ4 === 'object', 'LZ4 not present - was your app built with  -s LZ4=1  ?');
          LZ4.loadPackage({ 'metadata': metadata, 'compressedData': compressedData });
          Module['removeRunDependency']('datafile_%s');
    ''' % (meta, shared.JS.escape_for_js_string(data_target))

  package_uuid = uuid.uuid4()
  package_name = data_target
  statinfo = os.stat(package_name)
Example #41
def main(args):
    global QUIET

    default_json = shared.path_from_root('src', 'struct_info.json')
    parser = argparse.ArgumentParser(
        description='Generate JSON infos for structs.')
    parser.add_argument(
        'headers',
        nargs='*',
        help=
        'A header (.h) file or a JSON file with a list of structs and their fields (defaults to src/struct_info.json)',
        default=[default_json])
    parser.add_argument('-q',
                        dest='quiet',
                        action='store_true',
                        default=False,
                        help='Don\'t output anything besides error messages.')
    parser.add_argument(
        '-f',
        dest='list_fields',
        action='store_true',
        default=False,
        help='Output a list of structs and fields for the given headers.')
    parser.add_argument('-c',
                        dest='pretty_print',
                        action='store_false',
                        default=True,
                        help="Compress JSON output (don't pretty print)")
    parser.add_argument(
        '-o',
        dest='output',
        metavar='path',
        default=None,
        help=
        'Path to the JSON file that will be written. If omitted, the generated data will be printed to stdout.'
    )
    parser.add_argument('-I',
                        dest='includes',
                        metavar='dir',
                        action='append',
                        default=[],
                        help='Add directory to include search path')
    parser.add_argument('-D',
                        dest='defines',
                        metavar='define',
                        action='append',
                        default=[],
                        help='Pass a define to the preprocessor')
    parser.add_argument('-U',
                        dest='undefines',
                        metavar='undefine',
                        action='append',
                        default=[],
                        help='Pass an undefine to the preprocessor')
    args = parser.parse_args(args)

    QUIET = args.quiet

    # Avoid parsing problems due to gcc-specific syntax.
    cpp_opts = ['-D_GNU_SOURCE']

    # Add the user options to the list as well.
    for path in args.includes:
        cpp_opts.append('-I' + path)

    for arg in args.defines:
        cpp_opts.append('-D' + arg)

    for arg in args.undefines:
        cpp_opts.append('-U' + arg)

    if args.list_fields:
        # Just parse the given headers and output the result.
        data = []
        for path in args.headers:
            if path[-5:] == '.json':
                show('WARN: Skipping "' + path +
                     '" because it\'s already a JSON file!')
            else:
                data.append(parse_header(path, cpp_opts))

        output_json(data, not args.pretty_print, args.output)
        return 0

    # Look for structs in all passed headers.
    header_files = []
    structs = {}
    defines = {}

    for header in args.headers:
        if header[-5:] == '.json':
            # This is a JSON file, parse it.
            parse_json(header, header_files, structs, defines)
        else:
            # If the passed file isn't a JSON file, assume it's a header.
            header_files.append(header)
            data = parse_header(header, cpp_opts)
            structs.update(data['structs'])
            defines.update(data['defines'])

    # Inspect all collected structs.
    struct_info = inspect_code(header_files, cpp_opts, structs, defines)
    output_json(struct_info, not args.pretty_print, args.output)
    return 0
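
A minimal invocation sketch for the entry point above (illustrative only; main, parse_header, parse_json, inspect_code and output_json are assumed to be defined in the same script):

if __name__ == '__main__':
    import sys
    # List structs/fields found in a header (the -f flow above):
    #   gen_struct_info.py -f -I include my_header.h
    # Or run the full inspection against the default src/struct_info.json:
    #   gen_struct_info.py -o out.json
    sys.exit(main(sys.argv[1:]))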
Example #42
0
 def test_native_function_pointers(self):
   def output_parser(output):
     return float(re.search(r'Total time: ([\d\.]+)', output).group(1))
   self.do_benchmark('native_functions', open(path_from_root('tests', 'benchmark_ffis.cpp')).read(), 'Total time:', output_parser=output_parser, shared_args=['-DBENCHMARK_FUNCTION_POINTER=1', '-DBUILD_FOR_SHELL', '-I' + path_from_root('tests')])
Example #43
0
 def test_foreign_functions(self):
   def output_parser(output):
     return float(re.search(r'Total time: ([\d\.]+)', output).group(1))
   self.do_benchmark('foreign_functions', open(path_from_root('tests', 'benchmark_ffis.cpp')).read(), 'Total time:', output_parser=output_parser, emcc_args=['--js-library', path_from_root('tests/benchmark_ffis.js')], shared_args=['-DBENCHMARK_FOREIGN_FUNCTION=1', '-DBUILD_FOR_SHELL', '-I' + path_from_root('tests')])
Example #44
0
 def test_memset_16mb(self):
   def output_parser(output):
     return float(re.search(r'Total time: ([\d\.]+)', output).group(1))
   self.do_benchmark('memset_16mb', open(path_from_root('tests', 'benchmark_memset.cpp')).read(), 'Total time:', output_parser=output_parser, shared_args=['-DMIN_COPY=1048576', '-DBUILD_FOR_SHELL', '-I' + path_from_root('tests')])
Example #45
0
 def test_havlak(self):
   src = open(path_from_root('tests', 'havlak.cpp'), 'r').read()
   self.do_benchmark('havlak', src, 'Found', shared_args=['-std=c++11'])
Example #46
0
 def test_matrix_multiply(self):
   def output_parser(output):
     return float(re.search(r'Total elapsed: ([\d\.]+)', output).group(1))
   self.do_benchmark('matrix_multiply', open(path_from_root('tests', 'matrix_multiply.cpp')).read(), 'Total elapsed:', output_parser=output_parser, shared_args=['-I' + path_from_root('tests')])
Example #47
0
def run_on_js(filename, passes, js_engine, source_map=False, extra_info=None, just_split=False, just_concat=False):
  with ToolchainProfiler.profile_block('js_optimizer.split_markers'):
    if not isinstance(passes, list):
      passes = [passes]

    js = open(filename).read()
    if os.linesep != '\n':
      js = js.replace(os.linesep, '\n') # we assume \n in the splitting code

    # Find suffix
    suffix_marker = '// EMSCRIPTEN_GENERATED_FUNCTIONS'
    suffix_start = js.find(suffix_marker)
    suffix = ''
    if suffix_start >= 0:
      suffix_end = js.find('\n', suffix_start)
      suffix = js[suffix_start:suffix_end] + '\n'
      # if there is metadata, we will run only on the generated functions. If there isn't, we will run on everything.

    # Find markers
    start_funcs = js.find(start_funcs_marker)
    end_funcs = js.rfind(end_funcs_marker)

    if start_funcs < 0 or end_funcs < start_funcs or not suffix:
      shared.exit_with_error('Invalid input file. Did not contain appropriate markers. (start_funcs: %s, end_funcs: %s, suffix_start: %s)' % (start_funcs, end_funcs, suffix_start))

    minify_globals = 'minifyNames' in passes and 'asm' in passes
    if minify_globals:
      passes = [p if p != 'minifyNames' else 'minifyLocals' for p in passes]
      start_asm = js.find(start_asm_marker)
      end_asm = js.rfind(end_asm_marker)
      assert (start_asm >= 0) == (end_asm >= 0)

    closure = 'closure' in passes
    if closure:
      passes = [p for p in passes if p != 'closure'] # we will do it manually

    cleanup = 'cleanup' in passes
    if cleanup:
      passes = [p for p in passes if p != 'cleanup'] # we will do it manually

  if not minify_globals:
    with ToolchainProfiler.profile_block('js_optimizer.no_minify_globals'):
      pre = js[:start_funcs + len(start_funcs_marker)]
      post = js[end_funcs + len(end_funcs_marker):]
      js = js[start_funcs + len(start_funcs_marker):end_funcs]
      if 'asm' not in passes: # can have Module[..] and inlining prevention code, push those to post
        class Finals(object):
          buf = []
        def process(line):
          if len(line) and (line.startswith(('Module[', 'if (globalScope)')) or line.endswith('["X"]=1;')):
            Finals.buf.append(line)
            return False
          return True
        js = '\n'.join(filter(process, js.split('\n')))
        post = '\n'.join(Finals.buf) + '\n' + post
      post = end_funcs_marker + post
  else:
    with ToolchainProfiler.profile_block('js_optimizer.minify_globals'):
      # We need to split out the asm shell as well, for minification
      pre = js[:start_asm + len(start_asm_marker)]
      post = js[end_asm:]
      asm_shell = js[start_asm + len(start_asm_marker):start_funcs + len(start_funcs_marker)] + '''
EMSCRIPTEN_FUNCS();
''' + js[end_funcs + len(end_funcs_marker):end_asm + len(end_asm_marker)]
      js = js[start_funcs + len(start_funcs_marker):end_funcs]

      # we assume there is a maximum of one new name per line
      minifier = Minifier(js, js_engine)
      def check_symbol_mapping(p):
        if p.startswith('symbolMap='):
          minifier.symbols_file = p.split('=', 1)[1]
          return False
        if p == 'profilingFuncs':
          minifier.profiling_funcs = True
          return False
        return True
      passes = list(filter(check_symbol_mapping, passes))
      asm_shell_pre, asm_shell_post = minifier.minify_shell(asm_shell, 'minifyWhitespace' in passes, source_map).split('EMSCRIPTEN_FUNCS();')
      # Restore a comment for Closure Compiler
      asm_open_bracket = asm_shell_pre.find('(')
      asm_shell_pre = asm_shell_pre[:asm_open_bracket+1] + '/** @suppress {uselessCode} */' + asm_shell_pre[asm_open_bracket+1:]
      asm_shell_post = asm_shell_post.replace('});', '})')
      pre += asm_shell_pre + '\n' + start_funcs_marker
      post = end_funcs_marker + asm_shell_post + post
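      # (The shell was minified around an EMSCRIPTEN_FUNCS(); placeholder:
      # asm_shell_pre ends where the minified function bodies will later be
      # written back, and asm_shell_post resumes immediately after them.)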

      minify_info = minifier.serialize()

      if extra_info:
        for key, value in extra_info.items():
          assert key not in minify_info or value == minify_info[key], [key, value, minify_info[key]]
          minify_info[key] = value

      # if DEBUG: print('minify info:', minify_info, file=sys.stderr)

  with ToolchainProfiler.profile_block('js_optimizer.remove_suffix_and_split'):
    # remove suffix if no longer needed
    if suffix and 'last' in passes:
      suffix_start = post.find(suffix_marker)
      suffix_end = post.find('\n', suffix_start)
      post = post[:suffix_start] + post[suffix_end:]

    total_size = len(js)
    funcs = split_funcs(js, just_split)
    js = None

  with ToolchainProfiler.profile_block('js_optimizer.split_to_chunks'):
    # if we are making source maps, we want our debug numbering to start from the
    # top of the file, so avoid breaking the JS into chunks
    cores = 1 if source_map else shared.Building.get_num_cores()

    if not just_split:
      intended_num_chunks = int(round(cores * NUM_CHUNKS_PER_CORE))
      chunk_size = min(MAX_CHUNK_SIZE, max(MIN_CHUNK_SIZE, total_size / intended_num_chunks))
      chunks = shared.chunkify(funcs, chunk_size)
    else:
      # keep same chunks as before
      chunks = [f[1] for f in funcs]

    chunks = [chunk for chunk in chunks if len(chunk)]
    if DEBUG and len(chunks): print('chunkification: num funcs:', len(funcs), 'actual num chunks:', len(chunks), 'chunk size range:', max(map(len, chunks)), '-', min(map(len, chunks)), file=sys.stderr)
    funcs = None

    if len(chunks):
      serialized_extra_info = suffix_marker + '\n'
      if minify_globals:
        serialized_extra_info += '// EXTRA_INFO:' + json.dumps(minify_info)
      elif extra_info:
        serialized_extra_info += '// EXTRA_INFO:' + json.dumps(extra_info)
      with ToolchainProfiler.profile_block('js_optimizer.write_chunks'):
        def write_chunk(chunk, i):
          temp_file = temp_files.get('.jsfunc_%d.js' % i).name
          with open(temp_file, 'w') as f:
            f.write(chunk)
            f.write(serialized_extra_info)
          return temp_file
        filenames = [write_chunk(chunks[i], i) for i in range(len(chunks))]
    else:
      filenames = []

  with ToolchainProfiler.profile_block('run_optimizer'):
    if len(filenames):
      if not use_native(passes, source_map):
        commands = [js_engine +
            [JS_OPTIMIZER, filename, 'noPrintMetadata'] +
            (['--debug'] if source_map else []) + passes for filename in filenames]
      else:
        # use the native optimizer
        shared.logging.debug('js optimizer using native')
        assert not source_map # XXX need to use js optimizer
        commands = [[get_native_optimizer(), filename] + passes for filename in filenames]
      #print [' '.join(command) for command in commands]

      cores = min(cores, len(filenames))
      if len(chunks) > 1 and cores >= 2:
        # We can parallelize
        if DEBUG: print('splitting up js optimization into %d chunks, using %d cores  (total: %.2f MB)' % (len(chunks), cores, total_size/(1024*1024.)), file=sys.stderr)
        with ToolchainProfiler.profile_block('optimizer_pool'):
          pool = shared.Building.get_multiprocessing_pool()
          filenames = pool.map(run_on_chunk, commands, chunksize=1)
      else:
        # We can't parallelize, but still break into chunks to avoid uglify/node memory issues
        if len(chunks) > 1 and DEBUG: print('splitting up js optimization into %d chunks' % (len(chunks)), file=sys.stderr)
        filenames = [run_on_chunk(command) for command in commands]
    else:
      filenames = []

    for filename in filenames: temp_files.note(filename)

  with ToolchainProfiler.profile_block('split_closure_cleanup'):
    wasm_pthreads_memory_growth = shared.Settings.WASM and shared.Settings.USE_PTHREADS and shared.Settings.ALLOW_MEMORY_GROWTH
    if closure or cleanup or wasm_pthreads_memory_growth:
      # run on the shell code, everything but what we js-optimize
      start_asm = '// EMSCRIPTEN_START_ASM\n'
      end_asm = '// EMSCRIPTEN_END_ASM\n'
      cl_sep = 'wakaUnknownBefore(); var asm=wakaUnknownAfter(wakaGlobal,wakaEnv,wakaBuffer)\n'

      with temp_files.get_file('.cl.js') as cle:
        pre_1, pre_2 = pre.split(start_asm)
        post_1, post_2 = post.split(end_asm)
        with open(cle, 'w') as f:
          f.write(pre_1)
          f.write(cl_sep)
          f.write(post_2)
        cld = cle
        if wasm_pthreads_memory_growth:
          if DEBUG: print('supporting wasm memory growth with pthreads', file=sys.stderr)
          cld = run_on_chunk(js_engine + [JS_OPTIMIZER, cld, 'growableHeap'])
          with open(cld, 'r') as f:
            src = f.read()
          with open(cld, 'w') as f:
            with open(shared.path_from_root('src', 'growableHeap.js')) as g:
              f.write(g.read() + '\n')
            f.write(src)
            f.write(suffix_marker)
        if closure:
          if DEBUG: print('running closure on shell code', file=sys.stderr)
          cld = shared.Building.closure_compiler(cld, pretty='minifyWhitespace' not in passes)
          temp_files.note(cld)
        elif cleanup:
          if DEBUG: print('running cleanup on shell code', file=sys.stderr)
          acorn_passes = ['JSDCE']
          if 'minifyWhitespace' in passes:
            acorn_passes.append('minifyWhitespace')
          cld = shared.Building.acorn_optimizer(cld, acorn_passes)
          temp_files.note(cld)
        coutput = open(cld).read()

      coutput = coutput.replace('wakaUnknownBefore();', start_asm)
      after = 'wakaUnknownAfter'
      start = coutput.find(after)
      end = coutput.find(')', start)
      # If the closure comment to suppress useless code is present, we need to look one
      # brace past it, as the first is in there. Otherwise, the first brace is the
      # start of the function body (what we want).
      USELESS_CODE_COMMENT = '/** @suppress {uselessCode} */ '
      USELESS_CODE_COMMENT_BODY = 'uselessCode'
      brace = pre_2.find('{') + 1
      has_useless_code_comment = False
      if pre_2[brace:brace + len(USELESS_CODE_COMMENT_BODY)] == USELESS_CODE_COMMENT_BODY:
        brace = pre_2.find('{', brace) + 1
        has_useless_code_comment = True
      pre = coutput[:start] + '(' + (USELESS_CODE_COMMENT if has_useless_code_comment else '') + 'function(global,env,buffer) {\n' + pre_2[brace:]
      post = post_1 + end_asm + coutput[end+1:]

  with ToolchainProfiler.profile_block('write_pre'):
    filename += '.jo.js'
    f = open(filename, 'w')
    f.write(pre)
    pre = None

  with ToolchainProfiler.profile_block('sort_or_concat'):
    if not just_concat:
      # sort functions by size, to make diffing easier and to improve aot times
      funcses = []
      for out_file in filenames:
        funcses.append(split_funcs(open(out_file).read(), False))
      funcs = [item for sublist in funcses for item in sublist]
      funcses = None
      if not os.environ.get('EMCC_NO_OPT_SORT'):
        funcs.sort(key=lambda x: (len(x[1]), x[0]), reverse=True)

      if 'last' in passes and len(funcs):
        count = funcs[0][1].count('\n')
        if count > 3000:
          print('warning: Output contains some very large functions (%s lines in %s), consider building source files with -Os or -Oz, and/or trying OUTLINING_LIMIT to break them up (see settings.js; note that the parameter there affects AST nodes, while we measure lines here, so the two may not match up)' % (count, funcs[0][0]), file=sys.stderr)

      for func in funcs:
        f.write(func[1])
      funcs = None
    else:
      # just concat the outputs
      for out_file in filenames:
        f.write(open(out_file).read())

  with ToolchainProfiler.profile_block('write_post'):
    f.write('\n')
    f.write(post)
    # No need to write suffix: if there was one, it is inside post which exists when suffix is there
    f.write('\n')
    f.close()

  return filename
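
For reference, a simplified sketch of the size-based chunking that shared.chunkify performs for run_on_js above (illustrative only, not the real tools/shared.py implementation; funcs is the list of (name, code) pairs produced by split_funcs):

def chunkify_sketch(funcs, chunk_size):
    # Greedily pack function bodies into chunks of at most chunk_size
    # characters; a single oversized function still gets its own chunk.
    chunks = []
    curr = ''
    for _name, code in funcs:
        if curr and len(curr) + len(code) > chunk_size:
            chunks.append(curr)
            curr = ''
        curr += code
    if curr:
        chunks.append(curr)
    return chunks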
Example #48
0
 def test_zzz_java_nbody(self): # tests xmlvm compiled java, including bitcasts of doubles, i64 math, etc.
   args = [path_from_root('tests', 'nbody-java', x) for x in os.listdir(path_from_root('tests', 'nbody-java')) if x.endswith('.c')] + \
          ['-I' + path_from_root('tests', 'nbody-java')]
   self.do_benchmark('nbody_java', '', '''Time(s)''',
                     force_c=True, emcc_args=args + ['-s', 'PRECISE_I64_MATH=1', '--llvm-lto', '2'], native_args=args + ['-lgc', '-std=c99', '-target', 'x86_64-pc-linux-gnu', '-lm'])
Example #49
0
        def create_optimizer_cmake():
            shared.logging.debug('building native optimizer via CMake: ' +
                                 name)
            output = shared.Cache.get_path(name)
            shared.try_delete(output)

            if NATIVE_OPTIMIZER == '1':
                cmake_build_type = 'RelWithDebInfo'
            elif NATIVE_OPTIMIZER == '2':
                cmake_build_type = 'Release'
            elif NATIVE_OPTIMIZER == 'g':
                cmake_build_type = 'Debug'

            build_path = shared.Cache.get_path('optimizer_build_' +
                                               cmake_build_type)
            shared.try_delete(os.path.join(build_path, 'CMakeCache.txt'))

            log_output = None if DEBUG else subprocess.PIPE
            if not os.path.exists(build_path):
                os.mkdir(build_path)

            if WINDOWS:
                # Poor man's check for whether or not we should attempt 64 bit build
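                # ('ProgramFiles(x86)' is only defined on 64-bit Windows, so
                # its presence implies a 64-bit host.)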
                if os.environ.get('ProgramFiles(x86)'):
                    cmake_generators = [
                        'Visual Studio 15 2017 Win64',
                        'Visual Studio 15 2017',
                        'Visual Studio 14 2015 Win64',
                        'Visual Studio 14 2015',
                        'Visual Studio 12 Win64',  # The year component is omitted for compatibility with older CMake.
                        'Visual Studio 12',
                        'Visual Studio 11 Win64',
                        'Visual Studio 11',
                        'MinGW Makefiles',
                        'Unix Makefiles',
                    ]
                else:
                    cmake_generators = [
                        'Visual Studio 15 2017',
                        'Visual Studio 14 2015',
                        'Visual Studio 12',
                        'Visual Studio 11',
                        'MinGW Makefiles',
                        'Unix Makefiles',
                    ]
            else:
                cmake_generators = ['Unix Makefiles']

            for cmake_generator in cmake_generators:
                # Delete CMakeCache.txt so that we can switch to a new CMake generator.
                shared.try_delete(os.path.join(build_path, 'CMakeCache.txt'))
                proc = subprocess.Popen([
                    'cmake', '-G', cmake_generator,
                    '-DCMAKE_BUILD_TYPE=' + cmake_build_type,
                    shared.path_from_root('tools', 'optimizer')
                ],
                                        cwd=build_path,
                                        stdin=log_output,
                                        stdout=log_output,
                                        stderr=log_output)
                proc.communicate()
                if proc.returncode == 0:
                    make = ['cmake', '--build', build_path]
                    if 'Visual Studio' in cmake_generator:
                        make += [
                            '--config', cmake_build_type, '--', '/nologo',
                            '/verbosity:minimal'
                        ]

                    proc = subprocess.Popen(make,
                                            cwd=build_path,
                                            stdin=log_output,
                                            stdout=log_output,
                                            stderr=log_output)
                    proc.communicate()
                    if proc.returncode == 0:
                        if WINDOWS and 'Visual Studio' in cmake_generator:
                            shutil.copyfile(
                                os.path.join(build_path, cmake_build_type,
                                             'optimizer.exe'), output)
                        else:
                            shutil.copyfile(
                                os.path.join(build_path, 'optimizer'), output)
                        return output

            assert False, 'failed to build the native optimizer with any available CMake generator'
Example #50
0
 def test_websocket_send(self):
   with NodeJsWebSocketEchoServerProcess():
     self.btest(path_from_root('tests', 'websocket', 'test_websocket_send.c'), expected='101', args=['-lwebsocket', '-s', 'NO_EXIT_RUNTIME=1', '-s', 'WEBSOCKET_DEBUG=1'])
Example #51
0
            def create_optimizer_cmake():
                shared.logging.debug('building native optimizer via CMake: ' +
                                     name)
                output = shared.Cache.get_path(name)
                shared.try_delete(output)

                if NATIVE_OPTIMIZER == '1':
                    cmake_build_type = 'RelWithDebInfo'
                elif NATIVE_OPTIMIZER == '2':
                    cmake_build_type = 'Release'
                elif NATIVE_OPTIMIZER == 'g':
                    cmake_build_type = 'Debug'

                build_path = shared.Cache.get_path('optimizer_build_' +
                                                   cmake_build_type)
                shared.try_delete(os.path.join(build_path, 'CMakeCache.txt'))

                log_output = None if DEBUG else subprocess.PIPE
                if not os.path.exists(build_path):
                    os.mkdir(build_path)

                if WINDOWS:
                    # Poor man's check for whether or not we should attempt 64 bit build
                    if os.environ.get('ProgramFiles(x86)'):
                        cmake_generators = [
                            'Visual Studio 12 Win64', 'Visual Studio 12',
                            'Visual Studio 11 Win64', 'Visual Studio 11',
                            'MinGW Makefiles', 'Unix Makefiles'
                        ]
                    else:
                        cmake_generators = [
                            'Visual Studio 12', 'Visual Studio 11',
                            'MinGW Makefiles', 'Unix Makefiles'
                        ]
                else:
                    cmake_generators = ['Unix Makefiles']

                for cmake_generator in cmake_generators:
                    proc = subprocess.Popen([
                        'cmake', '-G', cmake_generator,
                        '-DCMAKE_BUILD_TYPE=' + cmake_build_type,
                        shared.path_from_root('tools', 'optimizer')
                    ],
                                            cwd=build_path,
                                            stdin=log_output,
                                            stdout=log_output,
                                            stderr=log_output)
                    proc.communicate()
                    make_env = os.environ.copy()
                    if proc.returncode == 0:
                        if 'Visual Studio' in cmake_generator:
                            ret = find_msbuild(
                                os.path.join(build_path,
                                             'asmjs_optimizer.sln'), make_env)
                            make = [
                                ret[0], '/t:Build',
                                '/p:Configuration=' + cmake_build_type,
                                '/nologo', '/verbosity:minimal',
                                'asmjs_optimizer.sln'
                            ]
                            make_env = ret[1]
                        elif 'MinGW' in cmake_generator:
                            make = ['mingw32-make']
                        else:
                            make = ['make']

                        proc = subprocess.Popen(make,
                                                cwd=build_path,
                                                stdin=log_output,
                                                stdout=log_output,
                                                stderr=log_output,
                                                env=make_env)
                        proc.communicate()
                        if proc.returncode == 0:
                            if WINDOWS and 'Visual Studio' in cmake_generator:
                                shutil.copyfile(
                                    os.path.join(build_path, cmake_build_type,
                                                 'optimizer.exe'), output)
                            else:
                                shutil.copyfile(
                                    os.path.join(build_path, 'optimizer'),
                                    output)
                            return output
                        else:
                            shared.try_delete(
                                os.path.join(build_path, 'CMakeCache.txt'))
                            # Proceed to next iteration of the loop to try next possible CMake generator.

                raise NativeOptimizerCreationException()
Example #52
0
 def test_skinning(self):
   src = open(path_from_root('tests', 'skinning_test_no_simd.cpp'), 'r').read()
   self.do_benchmark('skinning', src, 'blah=0.000000')
Example #53
0
def main():
    data_files = []
    export_name = 'Module'
    leading = ''
    has_preloaded = False
    plugins = []
    jsoutput = None
    from_emcc = False
    force = True
    # If set to True, IndexedDB (IDBFS in library_idbfs.js) is used to locally
    # cache VFS XHR so that subsequent page loads can read the data from the
    # offline cache instead.
    use_preload_cache = False
    indexeddb_name = 'EM_PRELOAD_CACHE'
    # If set to True, the blob received from XHR is moved to the Emscripten HEAP,
    # optimizing for mmap() performance (if ALLOW_MEMORY_GROWTH=0).
    # If set to False, the XHR blob is kept intact, and fread()s etc. are
    # performed directly from that data. This optimizes for minimal memory
    # usage and fread() performance.
    heap_copy = True
    # If set to True, the package metadata is stored separately from the js-output
    # file, which makes the js-output file immutable to package content changes.
    # If set to False, the package metadata is stored inside the js-output file,
    # which makes the js-output file change on each invocation of this packager tool.
    separate_metadata = False
    lz4 = False
    use_preload_plugins = False

    for arg in sys.argv[2:]:
        if arg == '--preload':
            has_preloaded = True
            leading = 'preload'
        elif arg == '--embed':
            leading = 'embed'
        elif arg == '--exclude':
            leading = 'exclude'
        elif arg == '--no-force':
            force = False
            leading = ''
        elif arg == '--use-preload-cache':
            use_preload_cache = True
            leading = ''
        elif arg.startswith('--indexedDB-name'):
            indexeddb_name = arg.split('=', 1)[1] if '=' in arg else None
            leading = ''
        elif arg == '--no-heap-copy':
            heap_copy = False
            leading = ''
        elif arg == '--separate-metadata':
            separate_metadata = True
            leading = ''
        elif arg == '--lz4':
            lz4 = True
            leading = ''
        elif arg == '--use-preload-plugins':
            use_preload_plugins = True
            leading = ''
        elif arg.startswith('--js-output'):
            jsoutput = arg.split('=', 1)[1] if '=' in arg else None
            leading = ''
        elif arg.startswith('--export-name'):
            if '=' in arg:
                export_name = arg.split('=', 1)[1]
            leading = ''
        elif arg.startswith('--from-emcc'):
            from_emcc = True
            leading = ''
        elif arg.startswith('--plugin'):
            plugin = open(arg.split('=', 1)[1], 'r').read()
            eval(plugin)  # should append itself to plugins
            leading = ''
        elif leading == 'preload' or leading == 'embed':
            mode = leading
            # Find the position of the @ separator in 'src@dst'. Replacing '@@'
            # with '__' first keeps indices aligned with the original string
            # while hiding escaped @ characters.
            at_position = arg.replace('@@', '__').find('@')
            # '@@' in input string means there is an actual @ character, a single '@'
            # means the 'src@dst' notation.
            uses_at_notation = (at_position != -1)

            if uses_at_notation:
                srcpath = arg[0:at_position].replace('@@',
                                                     '@')  # split around the @
                dstpath = arg[at_position + 1:].replace('@@', '@')
            else:
                # Use source path as destination path.
                srcpath = dstpath = arg.replace('@@', '@')
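            # e.g. 'assets@/data' maps local ./assets to VFS /data, while
            # 'file@@name.txt' denotes the literal on-disk file 'file@name.txt'.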
            if os.path.isfile(srcpath) or os.path.isdir(srcpath):
                data_files.append({
                    'srcpath': srcpath,
                    'dstpath': dstpath,
                    'mode': mode,
                    'explicit_dst_path': uses_at_notation
                })
            else:
                print('Warning: ' + arg + ' does not exist, ignoring.',
                      file=sys.stderr)
        elif leading == 'exclude':
            excluded_patterns.append(arg)
        else:
            print('Unknown parameter:', arg, file=sys.stderr)
            sys.exit(1)

    if (not force) and not data_files:
        has_preloaded = False
    if not has_preloaded or jsoutput is None:
        assert not separate_metadata, (
            'cannot separate-metadata without both --preloaded files '
            'and a specified --js-output')

    if not from_emcc:
        print(
            'Remember to build the main file with  -s FORCE_FILESYSTEM=1  '
            'so that it includes support for loading this file package',
            file=sys.stderr)

    ret = ''
    # emcc.py will add this to the output itself, so it is only needed for
    # standalone calls
    if not from_emcc:
        ret = '''
  var Module = typeof %(EXPORT_NAME)s !== 'undefined' ? %(EXPORT_NAME)s : {};
  ''' % {
            "EXPORT_NAME": export_name
        }

    ret += '''
  if (!Module.expectedDataFileDownloads) {
    Module.expectedDataFileDownloads = 0;
    Module.finishedDataFileDownloads = 0;
  }
  Module.expectedDataFileDownloads++;
  (function() {
   var loadPackage = function(metadata) {
  '''

    code = '''
      function assert(check, msg) {
        if (!check) throw msg + new Error().stack;
      }
  '''

    new_data_files = []
    for file_ in data_files:
        if not should_ignore(file_['srcpath']):
            if os.path.isdir(file_['srcpath']):
                add(file_['mode'], file_['srcpath'], file_['dstpath'])
            else:
                new_data_files.append(file_)
    data_files = [
        file_ for file_ in new_data_files
        if not os.path.isdir(file_['srcpath'])
    ]
    if len(data_files) == 0:
        print('Nothing to do!', file=sys.stderr)
        sys.exit(1)

    # Absolutize paths, and check that they make sense
    # os.getcwd() always returns the hard path with any symbolic links resolved,
    # even if we cd'd into a symbolic link.
    curr_abspath = os.path.abspath(os.getcwd())

    for file_ in data_files:
        if not file_['explicit_dst_path']:
            # This file was not defined with src@dst, so we inferred the destination
            # from the source. In that case, we require that the destination not be
            # under the current location
            path = file_['dstpath']
            # Use os.path.realpath to resolve any symbolic links to hard paths,
            # to match the structure in curr_abspath.
            abspath = os.path.realpath(os.path.abspath(path))
            if DEBUG:
                print(path, abspath, curr_abspath, file=sys.stderr)
            if not abspath.startswith(curr_abspath):
                print(
                    'Error: Embedding "%s" which is below the current directory '
                    '"%s". This is invalid since the current directory becomes the '
                    'root that the generated code will see' %
                    (path, curr_abspath),
                    file=sys.stderr)
                sys.exit(1)
            file_['dstpath'] = abspath[len(curr_abspath) + 1:]
            if os.path.isabs(path):
                print(
                    'Warning: Embedding an absolute file/directory name "%s" to the '
                    'virtual filesystem. The file will be made available in the '
                    'relative path "%s". You can use the explicit syntax '
                    '--preload-file srcpath@dstpath to explicitly specify the target '
                    'location the absolute source path should be directed to.'
                    % (path, file_['dstpath']),
                    file=sys.stderr)

    for file_ in data_files:
        # name in the filesystem, native and emulated
        file_['dstpath'] = file_['dstpath'].replace(os.path.sep, '/')
        # If the user has submitted a directory name as the destination but
        # omitted the destination filename, use the filename from the source file
        if file_['dstpath'].endswith('/'):
            file_['dstpath'] = file_['dstpath'] + os.path.basename(
                file_['srcpath'])
        # make destination path always relative to the root
        file_['dstpath'] = posixpath.normpath(
            os.path.join('/', file_['dstpath']))
        if DEBUG:
            print('Packaging file "%s" to VFS in path "%s".' %
                  (file_['srcpath'], file_['dstpath']),
                  file=sys.stderr)

    # Remove duplicates (can occur naively, for example preload dir/, preload dir/subdir/)
    seen = {}

    def was_seen(name):
        if seen.get(name):
            return True
        seen[name] = 1
        return False

    data_files = [
        file_ for file_ in data_files if not was_seen(file_['dstpath'])
    ]

    if AV_WORKAROUND:
        random.shuffle(data_files)

    # Apply plugins
    for file_ in data_files:
        for plugin in plugins:
            plugin(file_)

    metadata = {'files': []}

    # Set up folders
    partial_dirs = []
    for file_ in data_files:
        dirname = os.path.dirname(file_['dstpath'])
        dirname = dirname.lstrip(
            '/')  # absolute paths start with '/', remove that
        if dirname != '':
            parts = dirname.split('/')
            for i in range(len(parts)):
                partial = '/'.join(parts[:i + 1])
                if partial not in partial_dirs:
                    code += (
                        '''Module['FS_createPath']('/%s', '%s', true, true);\n'''
                        % ('/'.join(parts[:i]), parts[i]))
                    partial_dirs.append(partial)
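    # Illustration: a file destined for '/a/b/c.txt' emits
    #   Module['FS_createPath']('/', 'a', true, true);
    #   Module['FS_createPath']('/a', 'b', true, true);
    # and records 'a' and 'a/b' in partial_dirs, so later files under the
    # same directories don't emit duplicate calls.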

    if has_preloaded:
        # Bundle all datafiles into one archive. Avoids doing lots of simultaneous
        # XHRs which has overhead.
        data = open(data_target, 'wb')
        start = 0
        for file_ in data_files:
            file_['data_start'] = start
            curr = open(file_['srcpath'], 'rb').read()
            file_['data_end'] = start + len(curr)
            if AV_WORKAROUND:
                curr += b'\x00'
            start += len(curr)
            data.write(curr)
        data.close()
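        # Each packaged file now occupies [data_start, data_end) within the
        # bundle; those offsets are recorded in the metadata emitted below.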
        # TODO: sha256sum on data_target
        if start > 256 * 1024 * 1024:
            print(
                'warning: file packager is creating an asset bundle of %d MB. '
                'this is very large, and browsers might have trouble loading it. '
                'see https://hacks.mozilla.org/2015/02/synchronous-execution-and-filesystem-access-in-emscripten/'
                % (start / (1024 * 1024)),
                file=sys.stderr)

        create_preloaded = '''
          Module['FS_createPreloadedFile'](this.name, null, byteArray, true, true, function() {
            Module['removeRunDependency']('fp ' + that.name);
          }, function() {
            if (that.audio) {
              Module['removeRunDependency']('fp ' + that.name); // workaround for chromium bug 124926 (still no audio with this, but at least we don't hang)
            } else {
              err('Preloading file ' + that.name + ' failed');
            }
          }, false, true); // canOwn this data in the filesystem, it is a slice into the heap that will never change
  '''
        create_data = '''
          Module['FS_createDataFile'](this.name, null, byteArray, true, true, true); // canOwn this data in the filesystem, it is a slice into the heap that will never change
          Module['removeRunDependency']('fp ' + that.name);
  '''

        # Data requests - for getting a block of data out of the big archive - have
        # a similar API to XHRs
        code += '''
      function DataRequest(start, end, audio) {
        this.start = start;
        this.end = end;
        this.audio = audio;
      }
      DataRequest.prototype = {
        requests: {},
        open: function(mode, name) {
          this.name = name;
          this.requests[name] = this;
          Module['addRunDependency']('fp ' + this.name);
        },
        send: function() {},
        onload: function() {
          var byteArray = this.byteArray.subarray(this.start, this.end);
          this.finish(byteArray);
        },
        finish: function(byteArray) {
          var that = this;
  %s
          this.requests[this.name] = null;
        }
      };
  %s
    ''' % (create_preloaded if use_preload_plugins else create_data, '''
          var files = metadata['files'];
          for (var i = 0; i < files.length; ++i) {
            new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio']).open('GET', files[i]['filename']);
          }
  ''' if not lz4 else '')
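        # The generated loader drives DataRequest objects through an XHR-like
        # open()/send() API; onload() fires once the shared byteArray exists,
        # and finish() hands each file its [start, end) slice of it.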

    counter = 0
    for file_ in data_files:
        filename = file_['dstpath']
        dirname = os.path.dirname(filename)
        basename = os.path.basename(filename)
        if file_['mode'] == 'embed':
            # Embed
            data = list(bytearray(open(file_['srcpath'], 'rb').read()))
            code += '''var fileData%d = [];\n''' % counter
            if data:
                parts = []
                chunk_size = 10240
                start = 0
                while start < len(data):
                    parts.append(
                        '''fileData%d.push.apply(fileData%d, %s);\n''' %
                        (counter, counter, str(
                            data[start:start + chunk_size])))
                    start += chunk_size
                code += ''.join(parts)
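                # (Chunking matters because push.apply() is subject to an
                # engine-dependent argument-count limit; 10240 elements per
                # call stays safely below it.)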
            code += (
                '''Module['FS_createDataFile']('%s', '%s', fileData%d, true, true, false);\n'''
                % (dirname, basename, counter))
            counter += 1
        elif file_['mode'] == 'preload':
            # Preload
            counter += 1
            metadata['files'].append({
                'filename':
                file_['dstpath'],
                'start':
                file_['data_start'],
                'end':
                file_['data_end'],
                'audio':
                1 if filename[-4:] in AUDIO_SUFFIXES else 0,
            })
        else:
            assert 0

    if has_preloaded:
        if not lz4:
            # Get the big archive and split it up
            if heap_copy:
                use_data = '''
          // copy the entire loaded file into a spot in the heap. Files will refer to slices in that. They cannot be freed though
          // (we may be allocating before malloc is ready, during startup).
          var ptr = Module['getMemory'](byteArray.length);
          Module['HEAPU8'].set(byteArray, ptr);
          DataRequest.prototype.byteArray = Module['HEAPU8'].subarray(ptr, ptr+byteArray.length);
    '''
            else:
                use_data = '''
          // Reuse the bytearray from the XHR as the source for file reads.
          DataRequest.prototype.byteArray = byteArray;
    '''
            use_data += '''
            var files = metadata['files'];
            for (var i = 0; i < files.length; ++i) {
              DataRequest.prototype.requests[files[i].filename].onload();
            }
      '''
            use_data += (
                "          Module['removeRunDependency']('datafile_%s');\n" %
                shared.JS.escape_for_js_string(data_target))

        else:
            # LZ4FS usage
            temp = data_target + '.orig'
            shutil.move(data_target, temp)
            meta = run_js(shared.path_from_root('tools', 'lz4-compress.js'),
                          shared.NODE_JS, [
                              shared.path_from_root('src', 'mini-lz4.js'),
                              temp, data_target
                          ],
                          stdout=PIPE)
            os.unlink(temp)
            use_data = '''
            var compressedData = %s;
            compressedData['data'] = byteArray;
            assert(typeof LZ4 === 'object', 'LZ4 not present - was your app built with  -s LZ4=1  ?');
            LZ4.loadPackage({ 'metadata': metadata, 'compressedData': compressedData });
            Module['removeRunDependency']('datafile_%s');
      ''' % (meta, shared.JS.escape_for_js_string(data_target))

        package_uuid = uuid.uuid4()
        package_name = data_target
        remote_package_size = os.path.getsize(package_name)
        remote_package_name = os.path.basename(package_name)
        ret += r'''
      var PACKAGE_PATH;
      if (typeof window === 'object') {
        PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.toString().substring(0, window.location.pathname.toString().lastIndexOf('/')) + '/');
      } else if (typeof location !== 'undefined') {
        // worker
        PACKAGE_PATH = encodeURIComponent(location.pathname.toString().substring(0, location.pathname.toString().lastIndexOf('/')) + '/');
      } else {
        throw 'using preloaded data can only be done on a web page or in a web worker';
      }
      var PACKAGE_NAME = '%s';
      var REMOTE_PACKAGE_BASE = '%s';
      if (typeof Module['locateFilePackage'] === 'function' && !Module['locateFile']) {
        Module['locateFile'] = Module['locateFilePackage'];
        err('warning: you defined Module.locateFilePackage, that has been renamed to Module.locateFile (using your locateFilePackage for now)');
      }
      var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;
    ''' % (shared.JS.escape_for_js_string(data_target),
           shared.JS.escape_for_js_string(remote_package_name))
        metadata['remote_package_size'] = remote_package_size
        metadata['package_uuid'] = str(package_uuid)
        ret += '''
      var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
      var PACKAGE_UUID = metadata['package_uuid'];
    '''

        if use_preload_cache:
            code += r'''
        var indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
        var IDB_RO = "readonly";
        var IDB_RW = "readwrite";
        var DB_NAME = "''' + indexeddb_name + '''";
        var DB_VERSION = 1;
        var METADATA_STORE_NAME = 'METADATA';
        var PACKAGE_STORE_NAME = 'PACKAGES';
        function openDatabase(callback, errback) {
          try {
            var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
          } catch (e) {
            return errback(e);
          }
          openRequest.onupgradeneeded = function(event) {
            var db = event.target.result;

            if(db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
              db.deleteObjectStore(PACKAGE_STORE_NAME);
            }
            var packages = db.createObjectStore(PACKAGE_STORE_NAME);

            if(db.objectStoreNames.contains(METADATA_STORE_NAME)) {
              db.deleteObjectStore(METADATA_STORE_NAME);
            }
            var metadata = db.createObjectStore(METADATA_STORE_NAME);
          };
          openRequest.onsuccess = function(event) {
            var db = event.target.result;
            callback(db);
          };
          openRequest.onerror = function(error) {
            errback(error);
          };
        };

        // This is needed as chromium has a per-entry size limit in IndexedDB
        // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
        // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
        // We set the chunk size to 64MB to stay well below the limit
        var CHUNK_SIZE = 64 * 1024 * 1024;

        function cacheRemotePackage(
          db,
          packageName,
          packageData,
          packageMeta,
          callback,
          errback
        ) {
          var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
          var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
          var chunkSliceStart = 0;
          var nextChunkSliceStart = 0;
          var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
          var finishedChunks = 0;
          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            nextChunkSliceStart += CHUNK_SIZE;
            var putPackageRequest = packages.put(
              packageData.slice(chunkSliceStart, nextChunkSliceStart),
              'package/' + packageName + '/' + chunkId
            );
            chunkSliceStart = nextChunkSliceStart;
            putPackageRequest.onsuccess = function(event) {
              finishedChunks++;
              if (finishedChunks == chunkCount) {
                var transaction_metadata = db.transaction(
                  [METADATA_STORE_NAME],
                  IDB_RW
                );
                var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
                var putMetadataRequest = metadata.put(
                  {
                    'uuid': packageMeta.uuid,
                    'chunkCount': chunkCount
                  },
                  'metadata/' + packageName
                );
                putMetadataRequest.onsuccess = function(event) {
                  callback(packageData);
                };
                putMetadataRequest.onerror = function(error) {
                  errback(error);
                };
              }
            };
            putPackageRequest.onerror = function(error) {
              errback(error);
            };
          }
        }

        /* Check if there's a cached package, and if so whether it's the latest available */
        function checkCachedPackage(db, packageName, callback, errback) {
          var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
          var metadata = transaction.objectStore(METADATA_STORE_NAME);
          var getRequest = metadata.get('metadata/' + packageName);
          getRequest.onsuccess = function(event) {
            var result = event.target.result;
            if (!result) {
              return callback(false, null);
            } else {
              return callback(PACKAGE_UUID === result['uuid'], result);
            }
          };
          getRequest.onerror = function(error) {
            errback(error);
          };
        }

        function fetchCachedPackage(db, packageName, metadata, callback, errback) {
          var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
          var packages = transaction.objectStore(PACKAGE_STORE_NAME);

          var chunksDone = 0;
          var totalSize = 0;
          var chunkCount = metadata['chunkCount'];
          var chunks = new Array(chunkCount);

          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            var getRequest = packages.get('package/' + packageName + '/' + chunkId);
            getRequest.onsuccess = function(event) {
              // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
              if (chunkCount == 1) {
                callback(event.target.result);
              } else {
                chunksDone++;
                totalSize += event.target.result.byteLength;
                chunks.push(event.target.result);
                if (chunksDone == chunkCount) {
                  if (chunksDone == 1) {
                    callback(event.target.result);
                  } else {
                    var tempTyped = new Uint8Array(totalSize);
                    var byteOffset = 0;
                    for (var chunkId in chunks) {
                      var buffer = chunks[chunkId];
                      tempTyped.set(new Uint8Array(buffer), byteOffset);
                      byteOffset += buffer.byteLength;
                      buffer = undefined;
                    }
                    chunks = undefined;
                    callback(tempTyped.buffer);
                    tempTyped = undefined;
                  }
                }
              }
            };
            getRequest.onerror = function(error) {
              errback(error);
            };
          }
        }
      '''

        ret += r'''
      function fetchRemotePackage(packageName, packageSize, callback, errback) {
        var xhr = new XMLHttpRequest();
        xhr.open('GET', packageName, true);
        xhr.responseType = 'arraybuffer';
        xhr.onprogress = function(event) {
          var url = packageName;
          var size = packageSize;
          if (event.total) size = event.total;
          if (event.loaded) {
            if (!xhr.addedTotal) {
              xhr.addedTotal = true;
              if (!Module.dataFileDownloads) Module.dataFileDownloads = {};
              Module.dataFileDownloads[url] = {
                loaded: event.loaded,
                total: size
              };
            } else {
              Module.dataFileDownloads[url].loaded = event.loaded;
            }
            var total = 0;
            var loaded = 0;
            var num = 0;
            for (var download in Module.dataFileDownloads) {
            var data = Module.dataFileDownloads[download];
              total += data.total;
              loaded += data.loaded;
              num++;
            }
            total = Math.ceil(total * Module.expectedDataFileDownloads/num);
            if (Module['setStatus']) Module['setStatus']('Downloading data... (' + loaded + '/' + total + ')');
          } else if (!Module.dataFileDownloads) {
            if (Module['setStatus']) Module['setStatus']('Downloading data...');
          }
        };
        xhr.onerror = function(event) {
          throw new Error("NetworkError for: " + packageName);
        }
        xhr.onload = function(event) {
          if (xhr.status == 200 || xhr.status == 304 || xhr.status == 206 || (xhr.status == 0 && xhr.response)) { // file URLs can return 0
            var packageData = xhr.response;
            callback(packageData);
          } else {
            throw new Error(xhr.statusText + " : " + xhr.responseURL);
          }
        };
        xhr.send(null);
      };

      function handleError(error) {
        console.error('package error:', error);
      };
    '''

        code += r'''
      function processPackageData(arrayBuffer) {
        Module.finishedDataFileDownloads++;
        assert(arrayBuffer, 'Loading data file failed.');
        assert(arrayBuffer instanceof ArrayBuffer, 'bad input to processPackageData');
        var byteArray = new Uint8Array(arrayBuffer);
        var curr;
        %s
      };
      Module['addRunDependency']('datafile_%s');
    ''' % (use_data, shared.JS.escape_for_js_string(data_target))
        # use basename because from the browser's point of view,
        # we need to find the datafile in the same dir as the html file

        code += r'''
      if (!Module.preloadResults) Module.preloadResults = {};
    '''

        if use_preload_cache:
            code += r'''
        function preloadFallback(error) {
          console.error(error);
          console.error('falling back to default preload behavior');
          fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
        };

        openDatabase(
          function(db) {
            checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
              function(useCached, metadata) {
                Module.preloadResults[PACKAGE_NAME] = {fromCache: useCached};
                if (useCached) {
                  fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
                } else {
                  fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
                    function(packageData) {
                      cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
                        function(error) {
                          console.error(error);
                          processPackageData(packageData);
                        });
                    }
                  , preloadFallback);
                }
              }
            , preloadFallback);
          }
        , preloadFallback);

        if (Module['setStatus']) Module['setStatus']('Downloading...');
      '''
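            # Every IndexedDB failure path (open, metadata check, cached read)
            # funnels into preloadFallback, which degrades to a plain XHR fetch;
            # a failure while writing the cache still processes the
            # already-downloaded package data.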
        else:
            # Not using the preload cache, so we might as well start the XHR ASAP,
            # potentially even before the main codebase is parsed (if it comes
            # after us). The only tricky bit: both the fetch completion and the
            # call to runWithFS are async, so we handle both orderings.
            ret += r'''
        var fetchedCallback = null;
        var fetched = Module['getPreloadedPackage'] ? Module['getPreloadedPackage'](REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE) : null;

        if (!fetched) fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, function(data) {
          if (fetchedCallback) {
            fetchedCallback(data);
            fetchedCallback = null;
          } else {
            fetched = data;
          }
        }, handleError);
      '''

            code += r'''
        Module.preloadResults[PACKAGE_NAME] = {fromCache: false};
        if (fetched) {
          processPackageData(fetched);
          fetched = null;
        } else {
          fetchedCallback = processPackageData;
        }
      '''
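            # The XHR/runWithFS race is symmetric: whichever side finishes
            # second consumes the data, either via fetchedCallback (runWithFS
            # got there first) or via the stashed `fetched` buffer (the XHR
            # got there first).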

    ret += '''
    function runWithFS() {
  '''
    ret += code
    ret += '''
    }
    if (Module['calledRun']) {
      runWithFS();
    } else {
      if (!Module['preRun']) Module['preRun'] = [];
      Module["preRun"].push(runWithFS); // FS is not initialized yet, wait for it
    }
  '''

    if separate_metadata:
        _metadata_template = '''
    Module['removeRunDependency']('%(metadata_file)s');
   }

   function runMetaWithFS() {
    Module['addRunDependency']('%(metadata_file)s');
    var REMOTE_METADATA_NAME = Module['locateFile'] ? Module['locateFile']('%(metadata_file)s', '') : '%(metadata_file)s';
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
     if (xhr.readyState === 4 && xhr.status === 200) {
       loadPackage(JSON.parse(xhr.responseText));
     }
    }
    xhr.open('GET', REMOTE_METADATA_NAME, true);
    xhr.overrideMimeType('application/json');
    xhr.send(null);
   }

   if (Module['calledRun']) {
    runMetaWithFS();
   } else {
    if (!Module['preRun']) Module['preRun'] = [];
    Module["preRun"].push(runMetaWithFS);
   }
  ''' % {
            'metadata_file': os.path.basename(jsoutput + '.metadata')
        }

    else:
        _metadata_template = '''
   }
   loadPackage(%s);
  ''' % json.dumps(metadata)

    ret += '''%s
  })();
  ''' % _metadata_template

    if force or len(data_files):
        if jsoutput is None:
            print(ret)
        else:
            # Overwrite the old jsoutput file (if exists) only when its content
            # differs from the current generated one, otherwise leave the file
            # untouched preserving its old timestamp
            if os.path.isfile(jsoutput):
                f = open(jsoutput, 'r+')
                old = f.read()
                if old != ret:
                    f.seek(0)
                    f.write(ret)
                    f.truncate()
            else:
                f = open(jsoutput, 'w')
                f.write(ret)
            f.close()
            if separate_metadata:
                f = open(jsoutput + '.metadata', 'w')
                json.dump(metadata, f, separators=(',', ':'))
                f.close()

    return 0
Example #54
0
 def test_havlak(self):
   src = open(path_from_root('tests', 'havlak.cpp'), 'r').read()
   self.do_benchmark('havlak', src, 'Found', shared_args=['-std=c++11'])
Example #55
0
def main(args):
    global QUIET

    default_json_files = [
        shared.path_from_root('src', 'struct_info.json'),
        shared.path_from_root('src', 'struct_info_internal.json')
    ]
    parser = argparse.ArgumentParser(
        description='Generate JSON infos for structs.')
    parser.add_argument(
        'json',
        nargs='*',
        help=
        'JSON file with a list of structs and their fields (defaults to src/struct_info.json)',
        default=default_json_files)
    parser.add_argument('-q',
                        dest='quiet',
                        action='store_true',
                        default=False,
                        help='Don\'t output anything besides error messages.')
    parser.add_argument(
        '-o',
        dest='output',
        metavar='path',
        default=None,
        help='Path to the JSON file that will be written. If omitted, the generated data will be printed to stdout.')
    parser.add_argument('-I',
                        dest='includes',
                        metavar='dir',
                        action='append',
                        default=[],
                        help='Add directory to include search path')
    parser.add_argument('-D',
                        dest='defines',
                        metavar='define',
                        action='append',
                        default=[],
                        help='Pass a define to the preprocessor')
    parser.add_argument('-U',
                        dest='undefines',
                        metavar='undefine',
                        action='append',
                        default=[],
                        help='Pass an undefine to the preprocessor')
    args = parser.parse_args(args)

    QUIET = args.quiet

    # Avoid parsing problems due to gcc-specific syntax.
    cflags = ['-D_GNU_SOURCE']

    # Add the user options to the list as well.
    for path in args.includes:
        cflags.append('-I' + path)

    for arg in args.defines:
        cflags.append('-D' + arg)

    for arg in args.undefines:
        cflags.append('-U' + arg)

    internal_cflags = [
        '-I' + shared.path_from_root('system', 'lib', 'libc', 'musl', 'src',
                                     'internal'),
    ]

    # Look for structs in all passed headers.
    info = {'defines': {}, 'structs': {}}

    for f in args.json:
        # This is a JSON file, parse it.
        header_files = parse_json(f)
        # Inspect all collected structs.
        if 'internal' in f:
            use_cflags = cflags + internal_cflags
        else:
            use_cflags = cflags
        info_fragment = inspect_code(header_files, use_cflags)
        merge_info(info, info_fragment)

    output_json(info, args.output)
    return 0
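As a condensed illustration of how the -I/-D/-U options above fold into the preprocessor flags, here is a self-contained sketch with made-up flag values:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-I', dest='includes', metavar='dir', action='append', default=[])
parser.add_argument('-D', dest='defines', metavar='define', action='append', default=[])
parser.add_argument('-U', dest='undefines', metavar='undefine', action='append', default=[])
args = parser.parse_args(['-I', 'include', '-D', 'FOO=1', '-U', 'BAR'])

cflags = ['-D_GNU_SOURCE']  # same baseline flag as gen_struct_info
cflags += ['-I' + path for path in args.includes]
cflags += ['-D' + define for define in args.defines]
cflags += ['-U' + undefine for undefine in args.undefines]
assert cflags == ['-D_GNU_SOURCE', '-Iinclude', '-DFOO=1', '-UBAR']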
Example #56
0
  def test_base64(self):
    src = open(path_from_root('tests', 'base64.cpp'), 'r').read()
    self.do_benchmark('base64', src, 'decode')
Example #57
0
        use_data += '''
          var files = metadata.files;
          for (var i = 0; i < files.length; ++i) {
            DataRequest.prototype.requests[files[i].filename].onload();
          }
    '''
        use_data += (
            "          Module['removeRunDependency']('datafile_%s');\n" %
            shared.JS.escape_for_js_string(data_target))

    else:
        # LZ4FS usage
        temp = data_target + '.orig'
        shutil.move(data_target, temp)
        meta = run_js(
            shared.path_from_root('tools', 'lz4-compress.js'),
            shared.NODE_JS,
            [shared.path_from_root('src', 'mini-lz4.js'), temp, data_target],
            stdout=PIPE)
        os.unlink(temp)
        use_data = '''
          var compressedData = %s;
          compressedData.data = byteArray;
          assert(typeof LZ4 === 'object', 'LZ4 not present - was your app built with  -s LZ4=1  ?');
          LZ4.loadPackage({ 'metadata': metadata, 'compressedData': compressedData });
          Module['removeRunDependency']('datafile_%s');
    ''' % (meta, shared.JS.escape_for_js_string(data_target))

    package_uuid = uuid.uuid4()
    package_name = data_target
    remote_package_size = os.path.getsize(package_name)
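The LZ4 branch above follows a move/compress/replace pattern: the original package is set aside, an external compressor (tools/lz4-compress.js) writes the compressed package under the original name, and the uncompressed copy is deleted. A self-contained sketch of that pattern, substituting Python's built-in zlib for the external LZ4 step:

import os
import shutil
import zlib

def compress_in_place(data_target):
    temp = data_target + '.orig'
    shutil.move(data_target, temp)  # set the uncompressed package aside
    with open(temp, 'rb') as f:
        compressed = zlib.compress(f.read())  # stand-in for lz4-compress.js
    with open(data_target, 'wb') as f:
        f.write(compressed)  # compressed package takes the original's name
    os.unlink(temp)  # drop the uncompressed original
    return os.path.getsize(data_target)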
Example #58
0
  def test_life(self):
    src = open(path_from_root('tests', 'life.c'), 'r').read()
    self.do_benchmark('life', src, '''--------------------------------''', shared_args=['-std=c99'], force_c=True)
Example #59
0
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License.  Both these licenses can be
# found in the LICENSE file.
"""WebIDL binder

https://emscripten.org/docs/porting/connecting_cpp_and_javascript/WebIDL-Binder.html
"""

import os
import sys

sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from tools import shared, utils

sys.path.append(shared.path_from_root('third_party'))
sys.path.append(shared.path_from_root('third_party', 'ply'))

import WebIDL

# CHECKS='FAST' will skip most argument type checks in the wrapper methods for
#   performance (~3x faster than default).
# CHECKS='ALL' will do extensive argument type checking (~5x slower than default).
#   This will catch invalid numbers, invalid pointers, invalid strings, etc.
# Anything else defaults to legacy mode for backward compatibility.
CHECKS = os.environ.get('IDL_CHECKS', 'DEFAULT')
# IDL_VERBOSE=1 will print debug info in render_function
DEBUG = os.environ.get('IDL_VERBOSE') == '1'

if DEBUG:
    print("Debug print ON, CHECKS=%s" % CHECKS)
Example #60
0
  def test_linpack(self):
    def output_parser(output):
      mflops = re.search(r'Unrolled Double  Precision ([\d\.]+) Mflops', output).group(1)
      return 100.0 / float(mflops)
    self.do_benchmark('linpack_double', open(path_from_root('tests', 'linpack2.c')).read(), '''Unrolled Double  Precision''', force_c=True, output_parser=output_parser)
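The output_parser hook used by these benchmark tests converts raw program output into one numeric score; inverting MFLOPS into 100.0/mflops suggests the harness treats lower, time-like scores as better. A self-contained check of that arithmetic with a made-up output line:

import re

def parse_linpack_score(output):
    # Same regex as above; the double space matches the benchmark's formatting.
    mflops = re.search(r'Unrolled Double  Precision ([\d\.]+) Mflops', output).group(1)
    return 100.0 / float(mflops)

# 250 Mflops -> score 0.4: doubling the Mflops halves the score.
assert parse_linpack_score('Unrolled Double  Precision 250.0 Mflops') == 0.4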