Example No. 1
def run_tha_test(isolated_hash, storage, cache, leak_temp_dir, result_json,
                 root_dir, hard_timeout, grace_period, extra_args):
    """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occurred.
    root_dir: path to the directory to use to create the temporary directory. If
              not specified, a random temporary directory is created.
    hard_timeout: kills the process if it runs for more than this many
                  seconds.
    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.

  Returns:
    Process exit code that should be used.
  """
    # run_isolated exit code. Depends on whether result_json is used.
    result = map_and_run(isolated_hash, storage, cache, leak_temp_dir,
                         root_dir, hard_timeout, grace_period, extra_args)
    logging.info('Result:\n%s', tools.format_json(result, dense=True))
    if result_json:
        # We've seen tests delete the 'work' directory when quitting, causing
        # an exception here. Try to recreate the directory if necessary.
        work_dir = os.path.dirname(result_json)
        if not fs.isdir(work_dir):
            fs.mkdir(work_dir)
        tools.write_json(result_json, result, dense=True)
        # Only return 1 if there was an internal error.
        return int(bool(result['internal_failure']))

    # Marshall into old-style inline output.
    if result['outputs_ref']:
        data = {
            'hash': result['outputs_ref']['isolated'],
            'namespace': result['outputs_ref']['namespace'],
            'storage': result['outputs_ref']['isolatedserver'],
        }
        sys.stdout.flush()
        print('[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
              tools.format_json(data, dense=True))
        sys.stdout.flush()
    return result['exit_code'] or int(bool(result['internal_failure']))
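
A note on the exit-code convention shared by these examples: when result_json is given, the run_isolated process itself exits 0 (1 only on internal failure) and the task's real exit code lives in the JSON file it writes. Below is a minimal, hypothetical sketch of a caller reading that file back; the field names follow the result dict above, while the helper name and path are illustrative.

import json

def read_run_isolated_result(result_json_path):
  """Returns (task_exit_code, internal_failure_message) from a result file."""
  with open(result_json_path) as f:
    result = json.load(f)
  # With result_json in use, the process exit code only signals internal
  # errors, so the task's own exit code has to be read from the file.
  return result.get('exit_code'), result.get('internal_failure')

# Hypothetical usage:
# exit_code, failure = read_run_isolated_result('work/result.json')
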
Example No. 2
def run_tha_test(data, result_json):
  """Runs an executable and records execution metadata.

  If isolated_hash is specified, downloads the dependencies in the cache,
  hardlinks them into a temporary directory and runs the command specified in
  the .isolated.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
  - data: TaskData instance.
  - result_json: File path to dump result metadata into. If set, the process
    exit code is always 0 unless an internal error occurred.

  Returns:
    Process exit code that should be used.
  """
  if result_json:
    # Write a json output file right away in case we get killed.
    result = {
      'exit_code': None,
      'had_hard_timeout': False,
      'internal_failure': 'Was terminated before completion',
      'outputs_ref': None,
      'version': 5,
    }
    tools.write_json(result_json, result, dense=True)

  # run_isolated exit code. Depends on whether result_json is used.
  result = map_and_run(data, True)
  logging.info('Result:\n%s', tools.format_json(result, dense=True))

  if result_json:
    # We've seen tests delete the 'work' directory when quitting, causing an
    # exception here. Try to recreate the directory if necessary.
    file_path.ensure_tree(os.path.dirname(result_json))
    tools.write_json(result_json, result, dense=True)
    # Only return 1 if there was an internal error.
    return int(bool(result['internal_failure']))

  # Marshall into old-style inline output.
  if result['outputs_ref']:
    data = {
      'hash': result['outputs_ref']['isolated'],
      'namespace': result['outputs_ref']['namespace'],
      'storage': result['outputs_ref']['isolatedserver'],
    }
    sys.stdout.flush()
    print(
        '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
        tools.format_json(data, dense=True))
    sys.stdout.flush()
  return result['exit_code'] or int(bool(result['internal_failure']))
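
The '[run_isolated_out_hack]...[/run_isolated_out_hack]' block printed above is the old-style inline output channel. The following is a small, hypothetical sketch of how captured stdout could be parsed back into the 'hash'/'namespace'/'storage' dict; the marker format and keys come from the print calls above, while the function name is illustrative.

import json
import re

_OUT_HACK_RE = re.compile(
    r'\[run_isolated_out_hack\](.*?)\[/run_isolated_out_hack\]', re.DOTALL)

def extract_outputs_ref(stdout_text):
  """Returns the dict printed between the out_hack markers, or None."""
  match = _OUT_HACK_RE.search(stdout_text)
  if not match:
    return None
  # The payload is dense JSON with 'hash', 'namespace' and 'storage' keys.
  return json.loads(match.group(1))
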
Example No. 3
def run_tha_test(isolated_hash, storage, cache, leak_temp_dir, result_json, root_dir, extra_args):
    """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occurred.
    root_dir: path to the directory to use to create the temporary directory. If
              not specified, a random temporary directory is created.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.

  Returns:
    Process exit code that should be used.
  """
    # run_isolated exit code. Depends on whether result_json is used.
    result = map_and_run(isolated_hash, storage, cache, leak_temp_dir, root_dir, extra_args)
    logging.info("Result:\n%s", tools.format_json(result, dense=True))
    if result_json:
        # We've seen tests delete the 'work' directory when quitting, causing
        # an exception here. Try to recreate the directory if necessary.
        work_dir = os.path.dirname(result_json)
        if not os.path.isdir(work_dir):
            os.mkdir(work_dir)
        tools.write_json(result_json, result, dense=True)
        # Only return 1 if there was an internal error.
        return int(bool(result["internal_failure"]))

    # Marshall into old-style inline output.
    if result["outputs_ref"]:
        data = {
            "hash": result["outputs_ref"]["isolated"],
            "namespace": result["outputs_ref"]["namespace"],
            "storage": result["outputs_ref"]["isolatedserver"],
        }
        sys.stdout.flush()
        print("[run_isolated_out_hack]%s[/run_isolated_out_hack]" % tools.format_json(data, dense=True))
    return result["exit_code"] or int(bool(result["internal_failure"]))
Example No. 4
def run_tha_test(command, isolated_hash, storage, isolate_cache, outputs,
                 init_named_caches, leak_temp_dir, result_json, root_dir,
                 hard_timeout, grace_period, bot_file, install_packages_fn,
                 use_symlinks):
    """Runs an executable and records execution metadata.

  Either command or isolated_hash must be specified.

  If isolated_hash is specified, downloads the dependencies in the cache,
  hardlinks them into a temporary directory and runs the command specified in
  the .isolated.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    command: a list of strings; the command to run OR optional arguments to add
             to the command stated in the .isolated file if a command was
             specified.
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
                   The command specified in the .isolated is executed.
                   Mutually exclusive with command argument.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    isolate_cache: an isolateserver.LocalCache to keep from retrieving the
                   same objects constantly by caching the objects retrieved.
                   Can be on-disk or in-memory.
    init_named_caches: a function (run_dir) => context manager that creates
                      symlinks for named caches in |run_dir|.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occurred.
    root_dir: path to the directory to use to create the temporary directory. If
              not specified, a random temporary directory is created.
    hard_timeout: kills the process if it runs for more than this many
                  seconds.
    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
    install_packages_fn: context manager dir => CipdInfo, see
      install_client_and_packages.
    use_symlinks: create tree with symlinks instead of hardlinks.

  Returns:
    Process exit code that should be used.
  """
    if result_json:
        # Write a json output file right away in case we get killed.
        result = {
            'exit_code': None,
            'had_hard_timeout': False,
            'internal_failure': 'Was terminated before completion',
            'outputs_ref': None,
            'version': 5,
        }
        tools.write_json(result_json, result, dense=True)

    # run_isolated exit code. Depends on whether result_json is used.
    result = map_and_run(command, isolated_hash, storage, isolate_cache,
                         outputs, init_named_caches, leak_temp_dir, root_dir,
                         hard_timeout, grace_period, bot_file,
                         install_packages_fn, use_symlinks, True)
    logging.info('Result:\n%s', tools.format_json(result, dense=True))

    if result_json:
        # We've seen tests delete the 'work' directory when quitting, causing
        # an exception here. Try to recreate the directory if necessary.
        file_path.ensure_tree(os.path.dirname(result_json))
        tools.write_json(result_json, result, dense=True)
        # Only return 1 if there was an internal error.
        return int(bool(result['internal_failure']))

    # Marshall into old-style inline output.
    if result['outputs_ref']:
        data = {
            'hash': result['outputs_ref']['isolated'],
            'namespace': result['outputs_ref']['namespace'],
            'storage': result['outputs_ref']['isolatedserver'],
        }
        sys.stdout.flush()
        print('[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
              tools.format_json(data, dense=True))
        sys.stdout.flush()
    return result['exit_code'] or int(bool(result['internal_failure']))
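
Examples No. 2, 4 and 9 write a placeholder result file before doing any work so that a hard kill still leaves well-formed metadata behind. Here is a stripped-down, hypothetical sketch of that pattern using the standard library in place of tools.write_json; the field names and version value mirror the dict above.

import json

def write_placeholder_result(result_json):
  # Written up front: if the process is killed mid-run, the caller still
  # finds a parseable result file flagged as an internal failure.
  placeholder = {
    'exit_code': None,
    'had_hard_timeout': False,
    'internal_failure': 'Was terminated before completion',
    'outputs_ref': None,
    'version': 5,
  }
  with open(result_json, 'w') as f:
    # separators=(',', ':') approximates tools.write_json(..., dense=True).
    json.dump(placeholder, f, separators=(',', ':'))
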
Example No. 5
def run_tha_test(isolated_hash, storage, cache, leak_temp_dir, extra_args):
  """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.
  """
  tmp_root = os.path.dirname(cache.cache_dir) if cache.cache_dir else None
  run_dir = make_temp_dir(u'run_tha_test', tmp_root)
  out_dir = unicode(make_temp_dir(u'isolated_out', tmp_root))
  result = 0
  try:
    try:
      bundle = isolateserver.fetch_isolated(
          isolated_hash=isolated_hash,
          storage=storage,
          cache=cache,
          outdir=run_dir,
          require_command=True)
    except isolated_format.IsolatedError:
      on_error.report(None)
      return 1

    change_tree_read_only(run_dir, bundle.read_only)
    cwd = os.path.normpath(os.path.join(run_dir, bundle.relative_cwd))
    command = bundle.command + extra_args

    file_path.ensure_command_has_abs_path(command, cwd)
    command = process_command(command, out_dir)
    logging.info('Running %s, cwd=%s' % (command, cwd))

    # TODO(csharp): This should be specified somewhere else.
    # TODO(vadimsh): Pass it via 'env_vars' in manifest.
    # Add a rotating log file if one doesn't already exist.
    env = os.environ.copy()
    if MAIN_DIR:
      env.setdefault('RUN_TEST_CASES_LOG_FILE',
          os.path.join(MAIN_DIR, RUN_TEST_CASES_LOG))
    sys.stdout.flush()
    with tools.Profiler('RunTest'):
      try:
        with subprocess42.Popen_with_handler(command, cwd=cwd, env=env) as p:
          p.communicate()
          result = p.returncode
      except OSError:
        on_error.report('Failed to run %s; cwd=%s' % (command, cwd))
        result = 1
    logging.info(
        'Command finished with exit code %d (%s)',
        result, hex(0xffffffff & result))
  finally:
    try:
      if leak_temp_dir:
        logging.warning('Deliberately leaking %s for later examination',
                        run_dir)
      else:
        try:
          if not file_path.rmtree(run_dir):
            print >> sys.stderr, (
                'Failed to delete the temporary directory, forcibly failing\n'
                'the task because of it. No zombie process can outlive a\n'
                'successful task run and still be marked as successful.\n'
                'Fix your stuff.')
            result = result or 1
        except OSError as exc:
          logging.error('Leaking run_dir %s: %s', run_dir, exc)
          result = 1

      # HACK(vadimsh): On Windows, the rmtree(run_dir) call above has
      # a synchronization effect: it finishes only when all task child processes
      # terminate (since a running process locks *.exe file). Examine out_dir
      # only after that call completes (since child processes may
      # write to out_dir too and we need to wait for them to finish).

      # Upload out_dir and generate a .isolated file out of this directory.
      # It is only done if files were written in the directory.
      if os.path.isdir(out_dir) and os.listdir(out_dir):
        with tools.Profiler('ArchiveOutput'):
          try:
            results = isolateserver.archive_files_to_storage(
                storage, [out_dir], None)
          except isolateserver.Aborted:
            # This happens when a SIGTERM signal was received while uploading
            # data. There are two causes:
            # - The task was too slow and was about to be killed anyway due to
            #   exceeding the hard timeout.
            # - The amount of data uploaded back was very large and took too
            #   much time to archive.
            #
            # There are three options to handle this:
            # - Ignore the upload failure as a silent failure. This can be
            #   detected client side by the fact no result file exists.
            # - Return as if the task failed. This is not factually correct.
            # - Return an internal failure. Sadly, it's impossible at this level
            #   at the moment.
            #
            # For now, silently drop the upload.
            #
            # In any case, the process only has a very short grace period so it
            # needs to exit right away.
            sys.stderr.write('Received SIGTERM while uploading')
            results = None

        if results:
          # TODO(maruel): Implement side-channel to publish this information.
          output_data = {
            'hash': results[0][0],
            'namespace': storage.namespace,
            'storage': storage.location,
          }
          sys.stdout.flush()
          print(
              '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
              tools.format_json(output_data, dense=True))

    finally:
      try:
        if os.path.isdir(out_dir) and not file_path.rmtree(out_dir):
          logging.error('Had difficulties removing out_dir %s', out_dir)
          result = result or 1
      except OSError as exc:
        # Only report on non-Windows or on Windows when the process had
        # succeeded. Due to the way file sharing works on Windows, it's sadly
        # expected that file deletion may fail when a test failed.
        logging.error('Failed to remove out_dir %s: %s', out_dir, exc)
        if sys.platform != 'win32' or not result:
          on_error.report(None)
        result = 1

  return result
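
The hard_timeout and grace_period arguments describe a 'terminate, then kill' policy: SIGTERM at the hard timeout, SIGKILL once the grace period expires. The examples delegate this to subprocess42; the following is a rough, hypothetical sketch of the same policy with the plain subprocess module (Python 3 timeout support assumed).

import subprocess

def run_with_hard_timeout(command, hard_timeout, grace_period, **popen_kwargs):
  """Returns the exit code, enforcing hard_timeout and grace_period as above."""
  proc = subprocess.Popen(command, **popen_kwargs)
  try:
    return proc.wait(timeout=hard_timeout)
  except subprocess.TimeoutExpired:
    proc.terminate()  # SIGTERM: give the task its grace period to clean up.
    try:
      return proc.wait(timeout=grace_period)
    except subprocess.TimeoutExpired:
      proc.kill()     # SIGKILL: the grace period is over.
      return proc.wait()
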
Example No. 6
def run_tha_test(isolated_hash, storage, cache, leak_temp_dir, extra_args):
  """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.
  """
  run_dir = make_temp_dir(u'run_tha_test', cache.cache_dir)
  out_dir = unicode(make_temp_dir(u'isolated_out', cache.cache_dir))
  result = 0
  try:
    try:
      bundle = isolateserver.fetch_isolated(
          isolated_hash=isolated_hash,
          storage=storage,
          cache=cache,
          outdir=run_dir,
          require_command=True)
    except isolated_format.IsolatedError:
      on_error.report(None)
      return 1

    change_tree_read_only(run_dir, bundle.read_only)
    cwd = os.path.normpath(os.path.join(run_dir, bundle.relative_cwd))
    command = bundle.command + extra_args

    file_path.ensure_command_has_abs_path(command, cwd)
    command = process_command(command, out_dir)
    logging.info('Running %s, cwd=%s' % (command, cwd))

    # TODO(csharp): This should be specified somewhere else.
    # TODO(vadimsh): Pass it via 'env_vars' in manifest.
    # Add a rotating log file if one doesn't already exist.
    env = os.environ.copy()
    if MAIN_DIR:
      env.setdefault('RUN_TEST_CASES_LOG_FILE',
          os.path.join(MAIN_DIR, RUN_TEST_CASES_LOG))
    sys.stdout.flush()
    with tools.Profiler('RunTest'):
      try:
        with subprocess42.Popen_with_handler(command, cwd=cwd, env=env) as p:
          p.communicate()
          result = p.returncode
      except OSError:
        on_error.report('Failed to run %s; cwd=%s' % (command, cwd))
        result = 1
    logging.info(
        'Command finished with exit code %d (%s)',
        result, hex(0xffffffff & result))
  finally:
    try:
      if leak_temp_dir:
        logging.warning('Deliberately leaking %s for later examination',
                        run_dir)
      else:
        try:
          if not file_path.rmtree(run_dir):
            print >> sys.stderr, (
                'Failed to delete the temporary directory, forcibly failing\n'
                'the task because of it. No zombie process can outlive a\n'
                'successful task run and still be marked as successful.\n'
                'Fix your stuff.')
            result = result or 1
        except OSError:
          logging.warning('Leaking %s', run_dir)
          result = 1

      # HACK(vadimsh): On Windows, the rmtree(run_dir) call above has
      # a synchronization effect: it finishes only when all task child processes
      # terminate (since a running process locks *.exe file). Examine out_dir
      # only after that call completes (since child processes may
      # write to out_dir too and we need to wait for them to finish).

      # Upload out_dir and generate a .isolated file out of this directory.
      # It is only done if files were written in the directory.
      if os.path.isdir(out_dir) and os.listdir(out_dir):
        with tools.Profiler('ArchiveOutput'):
          results = isolateserver.archive_files_to_storage(
              storage, [out_dir], None)
        # TODO(maruel): Implement side-channel to publish this information.
        output_data = {
          'hash': results[0][0],
          'namespace': storage.namespace,
          'storage': storage.location,
        }
        sys.stdout.flush()
        print(
            '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
            tools.format_json(output_data, dense=True))

    finally:
      try:
        if os.path.isdir(out_dir) and not file_path.rmtree(out_dir):
          result = result or 1
      except OSError:
        # The error was already printed out. Report it but that's it. Only
        # report on non-Windows or on Windows when the process had succeeded.
        # Due to the way file sharing works on Windows, it's sadly expected that
        # file deletion may fail when a test failed.
        if sys.platform != 'win32' or not result:
          on_error.report(None)
        result = 1

  return result
Example No. 7
def run_tha_test(isolated_hash, storage, cache, extra_args):
  """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.
  """
  run_dir = make_temp_dir('run_tha_test', cache.cache_dir)
  out_dir = unicode(make_temp_dir('isolated_out', cache.cache_dir))
  result = 0
  try:
    try:
      settings = isolateserver.fetch_isolated(
          isolated_hash=isolated_hash,
          storage=storage,
          cache=cache,
          outdir=run_dir,
          require_command=True)
    except isolateserver.ConfigError:
      on_error.report(None)
      return 1

    change_tree_read_only(run_dir, settings.read_only)
    cwd = os.path.normpath(os.path.join(run_dir, settings.relative_cwd))
    command = settings.command + extra_args

    # subprocess.call doesn't consider 'cwd' when searching for executable.
    # Yet isolate can specify command relative to 'cwd'. Convert it to absolute
    # path if necessary.
    if not os.path.isabs(command[0]):
      command[0] = os.path.abspath(os.path.join(cwd, command[0]))
    command = process_command(command, out_dir)
    logging.info('Running %s, cwd=%s' % (command, cwd))

    # TODO(csharp): This should be specified somewhere else.
    # TODO(vadimsh): Pass it via 'env_vars' in manifest.
    # Add a rotating log file if one doesn't already exist.
    env = os.environ.copy()
    if MAIN_DIR:
      env.setdefault('RUN_TEST_CASES_LOG_FILE',
          os.path.join(MAIN_DIR, RUN_TEST_CASES_LOG))
    try:
      sys.stdout.flush()
      with tools.Profiler('RunTest'):
        result = subprocess.call(command, cwd=cwd, env=env)
        logging.info(
            'Command finished with exit code %d (%s)',
            result, hex(0xffffffff & result))
    except OSError:
      on_error.report('Failed to run %s; cwd=%s' % (command, cwd))
      result = 1

  finally:
    try:
      try:
        if not rmtree(run_dir):
          print >> sys.stderr, (
              'Failed to delete the temporary directory, forcibly failing the\n'
              'task because of it. No zombie process can outlive a successful\n'
              'task run and still be marked as successful. Fix your stuff.')
          result = result or 1
      except OSError:
        logging.warning('Leaking %s', run_dir)
        result = 1

      # HACK(vadimsh): On Windows, the rmtree(run_dir) call above has
      # a synchronization effect: it finishes only when all task child processes
      # terminate (since a running process locks *.exe file). Examine out_dir
      # only after that call completes (since child processes may
      # write to out_dir too and we need to wait for them to finish).

      # Upload out_dir and generate a .isolated file out of this directory.
      # It is only done if files were written in the directory.
      if os.listdir(out_dir):
        with tools.Profiler('ArchiveOutput'):
          results = isolateserver.archive_files_to_storage(
              storage, [out_dir], None)
        # TODO(maruel): Implement side-channel to publish this information.
        output_data = {
          'hash': results[0][0],
          'namespace': storage.namespace,
          'storage': storage.location,
        }
        sys.stdout.flush()
        print(
            '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
            tools.format_json(output_data, dense=True))

    finally:
      try:
        if os.path.isdir(out_dir) and not rmtree(out_dir):
          result = result or 1
      except OSError:
        # The error was already printed out. Report it but that's it.
        on_error.report(None)
        result = 1

  return result
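
Several examples log hex(0xffffffff & result). On POSIX, subprocess.call returns a negative value when the child dies from a signal, and masking with 0xffffffff renders it as the unsigned 32-bit value, which is easier to compare against Windows-style exit codes. A tiny worked example with illustrative values:

# A child killed by SIGSEGV is reported as -11 by subprocess.call on POSIX.
result = -11
print(hex(0xffffffff & result))  # 0xfffffff5
# A normal exit code passes through the mask unchanged.
print(hex(0xffffffff & 1))       # 0x1
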
Example No. 8
def run_tha_test(isolated_hash, storage, cache, extra_args):
    """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.
  """
    run_dir = make_temp_dir("run_tha_test", cache.cache_dir)
    out_dir = unicode(make_temp_dir("isolated_out", cache.cache_dir))
    result = 0
    try:
        try:
            settings = isolateserver.fetch_isolated(
                isolated_hash=isolated_hash, storage=storage, cache=cache, outdir=run_dir, require_command=True
            )
        except isolateserver.ConfigError as e:
            tools.report_error(e)
            return 1

        change_tree_read_only(run_dir, settings.read_only)
        cwd = os.path.normpath(os.path.join(run_dir, settings.relative_cwd))
        command = settings.command + extra_args

        # subprocess.call doesn't consider 'cwd' when searching for executable.
        # Yet isolate can specify command relative to 'cwd'. Convert it to absolute
        # path if necessary.
        if not os.path.isabs(command[0]):
            command[0] = os.path.abspath(os.path.join(cwd, command[0]))
        command = process_command(command, out_dir)
        logging.info("Running %s, cwd=%s" % (command, cwd))

        # TODO(csharp): This should be specified somewhere else.
        # TODO(vadimsh): Pass it via 'env_vars' in manifest.
        # Add a rotating log file if one doesn't already exist.
        env = os.environ.copy()
        if MAIN_DIR:
            env.setdefault("RUN_TEST_CASES_LOG_FILE", os.path.join(MAIN_DIR, RUN_TEST_CASES_LOG))
        try:
            sys.stdout.flush()
            with tools.Profiler("RunTest"):
                result = subprocess.call(command, cwd=cwd, env=env)
                logging.info("Command finished with exit code %d (%s)", result, hex(0xFFFFFFFF & result))
        except OSError as e:
            tools.report_error("Failed to run %s; cwd=%s: %s" % (command, cwd, e))
            result = 1

    finally:
        try:
            try:
                rmtree(run_dir)
            except OSError:
                logging.warning("Leaking %s", run_dir)
                # Swallow the exception so it doesn't generate an infrastructure error.
                #
                # It usually happens on Windows when a child process is not properly
                # terminated, usually because of a test case starting child processes
                # that time out. This causes files to be locked and it becomes
                # impossible to delete them.
                #
                # Only report an infrastructure error if the test didn't fail. This is
                # because a swarming bot will likely not reboot. This situation will
                # cause accumulation of temporary hardlink trees.
                if not result:
                    raise

            # HACK(vadimsh): On Windows, the rmtree(run_dir) call above has
            # a synchronization effect: it finishes only when all task child processes
            # terminate (since a running process locks *.exe file). Examine out_dir
            # only after that call completes (since child processes may
            # write to out_dir too and we need to wait for them to finish).

            # Upload out_dir and generate a .isolated file out of this directory.
            # It is only done if files were written in the directory.
            if os.listdir(out_dir):
                with tools.Profiler("ArchiveOutput"):
                    results = isolateserver.archive_files_to_storage(storage, [out_dir], None)
                # TODO(maruel): Implement side-channel to publish this information.
                output_data = {"hash": results[0][0], "namespace": storage.namespace, "storage": storage.location}
                sys.stdout.flush()
                print("[run_isolated_out_hack]%s[/run_isolated_out_hack]" % tools.format_json(output_data, dense=True))

        finally:
            rmtree(out_dir)

    return result
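
The cleanup comments in Example No. 8 describe a deliberate policy: when rmtree(run_dir) fails (usually because leaked child processes keep files locked on Windows), swallow the error if the test already failed, but surface it as an infrastructure error if the test passed. Below is a condensed, hypothetical sketch of just that decision, using shutil.rmtree instead of the project's rmtree helper.

import logging
import shutil

def cleanup_run_dir(run_dir, test_exit_code):
  try:
    shutil.rmtree(run_dir)
  except OSError:
    logging.warning('Leaking %s', run_dir)
    # Locked files usually mean the test left child processes behind. Only
    # re-raise (and so report an infrastructure error) when the test itself
    # passed; otherwise the test failure is the more useful signal.
    if not test_exit_code:
      raise
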
Example No. 9
def run_tha_test(
    isolated_hash, storage, cache, leak_temp_dir, result_json, root_dir,
    hard_timeout, grace_period, extra_args):
  """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occurred.
    root_dir: path to the directory to use to create the temporary directory. If
              not specified, a random temporary directory is created.
    hard_timeout: kills the process if it runs for more than this many
                  seconds.
    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.

  Returns:
    Process exit code that should be used.
  """
  if result_json:
    # Write a json output file right away in case we get killed.
    result = {
      'exit_code': None,
      'had_hard_timeout': False,
      'internal_failure': 'Was terminated before completion',
      'outputs_ref': None,
      'version': 2,
    }
    tools.write_json(result_json, result, dense=True)

  # run_isolated exit code. Depends on whether result_json is used.
  result = map_and_run(
      isolated_hash, storage, cache, leak_temp_dir, root_dir, hard_timeout,
      grace_period, extra_args)
  logging.info('Result:\n%s', tools.format_json(result, dense=True))
  if result_json:
    # We've seen tests delete the 'work' directory when quitting, causing an
    # exception here. Try to recreate the directory if necessary.
    work_dir = os.path.dirname(result_json)
    if not fs.isdir(work_dir):
      fs.mkdir(work_dir)
    tools.write_json(result_json, result, dense=True)
    # Only return 1 if there was an internal error.
    return int(bool(result['internal_failure']))

  # Marshall into old-style inline output.
  if result['outputs_ref']:
    data = {
      'hash': result['outputs_ref']['isolated'],
      'namespace': result['outputs_ref']['namespace'],
      'storage': result['outputs_ref']['isolatedserver'],
    }
    sys.stdout.flush()
    print(
        '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
        tools.format_json(data, dense=True))
    sys.stdout.flush()
  return result['exit_code'] or int(bool(result['internal_failure']))
Example No. 10
def run_tha_test(isolated_hash, storage, cache, extra_args):
  """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.
  """
  run_dir = make_temp_dir('run_tha_test', cache.cache_dir)
  out_dir = unicode(tempfile.mkdtemp(prefix='run_tha_test'))
  result = 0
  try:
    try:
      settings = isolateserver.fetch_isolated(
          isolated_hash=isolated_hash,
          storage=storage,
          cache=cache,
          outdir=run_dir,
          require_command=True)
    except isolateserver.ConfigError as e:
      tools.report_error(e)
      result = 1
      return result

    change_tree_read_only(run_dir, settings.read_only)
    cwd = os.path.normpath(os.path.join(run_dir, settings.relative_cwd))
    command = settings.command + extra_args

    # subprocess.call doesn't consider 'cwd' when searching for executable.
    # Yet isolate can specify command relative to 'cwd'. Convert it to absolute
    # path if necessary.
    if not os.path.isabs(command[0]):
      command[0] = os.path.abspath(os.path.join(cwd, command[0]))
    command = process_command(command, out_dir)
    logging.info('Running %s, cwd=%s' % (command, cwd))

    # TODO(csharp): This should be specified somewhere else.
    # TODO(vadimsh): Pass it via 'env_vars' in manifest.
    # Add a rotating log file if one doesn't already exist.
    env = os.environ.copy()
    if MAIN_DIR:
      env.setdefault('RUN_TEST_CASES_LOG_FILE',
          os.path.join(MAIN_DIR, RUN_TEST_CASES_LOG))
    try:
      with tools.Profiler('RunTest'):
        result = subprocess.call(command, cwd=cwd, env=env)
    except OSError as e:
      tools.report_error('Failed to run %s; cwd=%s: %s' % (command, cwd, e))
      result = 1

    # Upload out_dir and generate a .isolated file out of this directory. It is
    # only done if files were written in the directory.
    if os.listdir(out_dir):
      with tools.Profiler('ArchiveOutput'):
        results = isolateserver.archive_files_to_storage(
            storage, [out_dir], None)
      # TODO(maruel): Implement side-channel to publish this information.
      output_data = {
        'hash': results[0][0],
        'namespace': storage.namespace,
        'storage': storage.location,
      }
      sys.stdout.flush()
      sys.stderr.flush()
      print(
          '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
          tools.format_json(output_data, dense=True))

  finally:
    try:
      rmtree(out_dir)
    finally:
      try:
        rmtree(run_dir)
      except OSError:
        logging.warning('Leaking %s', run_dir)
        # Swallow the exception so it doesn't generate an infrastructure error.
        #
        # It usually happens on Windows when a child process is not properly
        # terminated, usually because of a test case starting child processes
        # that time out. This causes files to be locked and it becomes
        # impossible to delete them.
        #
        # Only report an infrastructure error if the test didn't fail. This is
        # because a swarming bot will likely not reboot. This situation will
        # cause accumulation of temporary hardlink trees.
        if not result:
          raise
  return result
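
make_temp_dir(prefix, root) itself is not shown on this page; Example No. 10 falls back to tempfile.mkdtemp directly, which suggests a thin wrapper along these lines (a guess under that assumption, not the real implementation):

import tempfile

def make_temp_dir(prefix, root_dir=None):
  # With root_dir=None, tempfile picks the platform default temp location,
  # matching the "a random temporary directory is created" behaviour
  # described in the docstrings above.
  return tempfile.mkdtemp(prefix=prefix, dir=root_dir)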