Code example #1
File: isolate.py Project: qlb7707/webrtc_src
def chromium_save_isolated(isolated, data, path_variables, algo):
    """Writes one or many .isolated files.

  This slightly increases the cold cache cost but greatly reduces the warm cache
  cost by splitting low-churn files off the master .isolated file. It also
  reduces overall isolateserver memcache consumption.
  """
    slaves = []

    def extract_into_included_isolated(prefix):
        new_slave = {"algo": data["algo"], "files": {}, "version": data["version"]}
        for f in data["files"].keys():
            if f.startswith(prefix):
                new_slave["files"][f] = data["files"].pop(f)
        if new_slave["files"]:
            slaves.append(new_slave)

    # Split test/data/ in its own .isolated file.
    extract_into_included_isolated(os.path.join("test", "data", ""))

    # Split everything out of PRODUCT_DIR in its own .isolated file.
    if path_variables.get("PRODUCT_DIR"):
        extract_into_included_isolated(path_variables["PRODUCT_DIR"])

    files = []
    for index, f in enumerate(slaves):
        slavepath = isolated[: -len(".isolated")] + ".%d.isolated" % index
        tools.write_json(slavepath, f, True)
        data.setdefault("includes", []).append(isolated_format.hash_file(slavepath, algo))
        files.append(os.path.basename(slavepath))

    files.extend(isolated_format.save_isolated(isolated, data))
    return files
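
Every luci-py example on this page funnels through tools.write_json(path, data, dense). As a minimal sketch of what such a helper plausibly does (a hypothetical reconstruction for orientation, not the project's actual implementation):

import json

def write_json(filepath, data, dense):
    # Hypothetical stand-in for luci-py's tools.write_json.
    # dense=True favors compact output (no whitespace) for large payloads;
    # dense=False favors a stable, human-readable layout.
    kwargs = (
        {'separators': (',', ':')} if dense
        else {'indent': 2, 'sort_keys': True})
    with open(filepath, 'w') as f:
        json.dump(data, f, **kwargs)

The dense flag matters in chromium_save_isolated because it emits many small child .isolated files, where compact output keeps isolateserver memcache consumption down.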
Code example #2
def write_details(logname, outfile, root_dir, blacklist, results):
  """Writes an .test_cases file with all the information about each test
  case.
  """
  api = trace_inputs.get_api()
  logs = dict(
      (i.pop('trace'), i) for i in api.parse_log(logname, blacklist, None))
  results_processed = {}
  exception = None
  for items in results:
    item = items[-1]
    assert item['valid']
    # Load the results.
    log_dict = logs[item['tracename']]
    if log_dict.get('exception'):
      exception = exception or log_dict['exception']
      continue
    trace_result = log_dict['results']
    if root_dir:
      trace_result = trace_result.strip_root(root_dir)
    results_processed[item['test_case']] = {
      'trace': trace_result.flatten(),
      'duration': item['duration'],
      'output': item['output'],
      'returncode': item['returncode'],
    }

  # Make it dense if there are more than 20 results.
  tools.write_json(
      outfile,
      results_processed,
      len(results_processed) > 20)
  if exception:
    raise exception[0], exception[1], exception[2]
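
The trailing raise exception[0], exception[1], exception[2] is Python 2's three-argument raise: the first failure is stashed while the remaining results are processed, then re-raised with its original traceback. A sketch of the same deferred re-raise pattern that also works on Python 3, using the six compatibility helper (the risky_operation name is illustrative):

import sys
import six  # provides six.reraise() on both Python 2 and 3

def risky_operation():
    raise ValueError('boom')  # stand-in for per-item work that may fail

exc_info = None
try:
    risky_operation()
except Exception:
    exc_info = sys.exc_info()  # capture (type, value, traceback)

# ... finish processing the remaining items first ...
if exc_info:
    six.reraise(*exc_info)  # surface the first failure, traceback intact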
Code example #3
def collect(
    url, task_name, shards, timeout, decorate,
    print_status_updates, task_summary_json, task_output_dir):
  """Retrieves results of a Swarming task."""
  # Grab task keys for each shard. Order is important; it is used to figure
  # out the shard index based on the key.
  # TODO(vadimsh): Simplify this once server support is added.
  task_keys = []
  for index in xrange(shards):
    shard_task_name = get_shard_task_name(task_name, shards, index)
    logging.info('Collecting %s', shard_task_name)
    shard_task_keys = get_task_keys(url, shard_task_name)
    if not shard_task_keys:
      raise Failure('No task keys to get results with: %s' % shard_task_name)
    if len(shard_task_keys) != 1:
      raise Failure('Expecting only one shard for a task: %s' % shard_task_name)
    task_keys.append(shard_task_keys[0])

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_name, len(task_keys))

  seen_shards = set()
  exit_codes = []

  try:
    for index, output in yield_results(
        url, task_keys, timeout, None, print_status_updates, output_collector):
      seen_shards.add(index)

      # Grab first non-zero exit code as an overall shard exit code.
      shard_exit_code = 0
      for code in map(int, (output['exit_codes'] or '1').split(',')):
        if code:
          shard_exit_code = code
          break
      exit_codes.append(shard_exit_code)

      if decorate:
        print decorate_shard_output(index, output, shard_exit_code)
      else:
        print(
            '%s/%s: %s' % (
                output['machine_id'],
                output['machine_tag'],
                output['exit_codes']))
        print(''.join('  %s\n' % l for l in output['output'].splitlines()))
  finally:
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if len(seen_shards) != len(task_keys):
    missing_shards = [x for x in range(len(task_keys)) if x not in seen_shards]
    print >> sys.stderr, ('Results from some shards are missing: %s' %
        ', '.join(map(str, missing_shards)))
    return 1

  return int(bool(any(exit_codes)))
Code example #4
def run_tha_test(isolated_hash, storage, cache, leak_temp_dir, result_json,
                 root_dir, hard_timeout, grace_period, extra_args):
    """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occurred.
    root_dir: directory in which to create the temporary directory. If not
              specified, a random temporary directory is created.
    hard_timeout: kills the process if it lasts more than this amount of
                  seconds.
    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.

  Returns:
    Process exit code that should be used.
  """
    # run_isolated exit code. Depends on whether result_json is used.
    result = map_and_run(isolated_hash, storage, cache, leak_temp_dir,
                         root_dir, hard_timeout, grace_period, extra_args)
    logging.info('Result:\n%s', tools.format_json(result, dense=True))
    if result_json:
        # We've found tests to delete 'work' when quitting, causing an exception
        # here. Try to recreate the directory if necessary.
        work_dir = os.path.dirname(result_json)
        if not fs.isdir(work_dir):
            fs.mkdir(work_dir)
        tools.write_json(result_json, result, dense=True)
        # Only return 1 if there was an internal error.
        return int(bool(result['internal_failure']))

    # Marshall into old-style inline output.
    if result['outputs_ref']:
        data = {
            'hash': result['outputs_ref']['isolated'],
            'namespace': result['outputs_ref']['namespace'],
            'storage': result['outputs_ref']['isolatedserver'],
        }
        sys.stdout.flush()
        print('[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
              tools.format_json(data, dense=True))
        sys.stdout.flush()
    return result['exit_code'] or int(bool(result['internal_failure']))
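
Since result_json is written with tools.write_json, it is ordinary JSON on disk; a wrapper script can recover the shard outcome in a few lines (this reader is an illustration, not part of luci-py):

import json

with open('result.json') as f:  # the path that was passed as result_json
    result = json.load(f)

# Mirrors run_tha_test's fallback for the inline-output mode above.
exit_code = result['exit_code'] or int(bool(result['internal_failure']))
print('shard exit code: %s' % exit_code)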
Code example #5
File: swarming.py Project: misscache/luci-py
def collect(
    swarming, task_name, task_ids, timeout, decorate, print_status_updates,
    task_summary_json, task_output_dir):
  """Retrieves results of a Swarming task."""
  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_name, len(task_ids))

  seen_shards = set()
  exit_code = 0
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming, task_ids, timeout, None, print_status_updates,
        output_collector):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = 1
      if metadata.get('exit_codes'):
        shard_exit_code = metadata['exit_codes'][0]
      if shard_exit_code:
        exit_code = shard_exit_code
      if metadata.get('durations'):
        total_duration += metadata['durations'][0]

      if decorate:
        print(decorate_shard_output(swarming, index, metadata))
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        if metadata.get('exit_codes'):
          exit_code = metadata['exit_codes'][0]
        else:
          exit_code = 'N/A'
        print('%s: %s %s' %
            (metadata.get('bot_id') or 'N/A', metadata['id'], exit_code))
        for output in metadata['outputs']:
          if not output:
            continue
          output = output.rstrip()
          if output:
            print(''.join('  %s\n' % l for l in output.splitlines()))
  finally:
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    print >> sys.stderr, ('Results from some shards are missing: %s' %
        ', '.join(map(str, missing_shards)))
    return 1

  return exit_code
Code example #6
def save_response(ticker, url):
    """
    Request data from API and save response 
    """

    response = request.urlopen(url)
    response_data = response.read()
    write_json(f"poloniex/input/{ticker}.json", json.loads(response_data))
    print(f"Downloaded: {ticker}")
Code example #7
def save_isolated(isolated, data):
    """Writes one or multiple .isolated files.

  Note: this reference implementation does not create child .isolated files, so it
  always returns an empty list.
  """
    # Make sure the data is valid .isolated data by 'reloading' it.
    algo = SUPPORTED_ALGOS[data['algo']]
    load_isolated(json.dumps(data), algo)
    tools.write_json(isolated, data, True)
    return []  # no child .isolated files, per the docstring
Code example #8
def run_tha_test(data, result_json):
  """Runs an executable and records execution metadata.

  If isolated_hash is specified, downloads the dependencies in the cache,
  hardlinks them into a temporary directory and runs the command specified in
  the .isolated.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
  - data: TaskData instance.
  - result_json: File path to dump result metadata into. If set, the process
    exit code is always 0 unless an internal error occurred.

  Returns:
    Process exit code that should be used.
  """
  if result_json:
    # Write a json output file right away in case we get killed.
    result = {
      'exit_code': None,
      'had_hard_timeout': False,
      'internal_failure': 'Was terminated before completion',
      'outputs_ref': None,
      'version': 5,
    }
    tools.write_json(result_json, result, dense=True)

  # run_isolated exit code. Depends on whether result_json is used.
  result = map_and_run(data, True)
  logging.info('Result:\n%s', tools.format_json(result, dense=True))

  if result_json:
    # We've found tests to delete 'work' when quitting, causing an exception
    # here. Try to recreate the directory if necessary.
    file_path.ensure_tree(os.path.dirname(result_json))
    tools.write_json(result_json, result, dense=True)
    # Only return 1 if there was an internal error.
    return int(bool(result['internal_failure']))

  # Marshall into old-style inline output.
  if result['outputs_ref']:
    data = {
      'hash': result['outputs_ref']['isolated'],
      'namespace': result['outputs_ref']['namespace'],
      'storage': result['outputs_ref']['isolatedserver'],
    }
    sys.stdout.flush()
    print(
        '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
        tools.format_json(data, dense=True))
    sys.stdout.flush()
  return result['exit_code'] or int(bool(result['internal_failure']))
Code example #9
File: tickers.py Project: momor666/Ella
def get_all_tickers():
    """
    Fetch tickers from Poloniex API and save to JSON file
    """

    response = urlopen('https://poloniex.com/public?command=returnTicker')
    response_data = response.read()
    tickers = [k for k, _ in json.loads(response_data).items() if k[:3] == 'BTC']
    tickers.sort()
    write_json('poloniex/data/tickers.json', tickers)
    return tickers
Code example #10
def CMDtrigger(parser, args):
  """Triggers a Swarming task.

  Accepts either the hash (sha1) of a .isolated file already uploaded or the
  path to an .isolated file to archive, packages it if needed and sends a
  Swarming manifest file to the Swarming server.

  If an .isolated file is specified instead of a hash, it is first archived.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
  add_trigger_options(parser)
  add_sharding_options(parser)
  args, isolated_cmd_args = extract_isolated_command_extra_args(args)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Dump details about the triggered task(s) to this file as json')
  options, args = parser.parse_args(args)
  process_trigger_options(parser, options, args)

  auth.ensure_logged_in(options.swarming)
  if file_path.is_url(options.isolate_server):
    auth.ensure_logged_in(options.isolate_server)
  try:
    tasks, task_name = trigger(
        swarming=options.swarming,
        isolate_server=options.isolate_server or options.indir,
        namespace=options.namespace,
        file_hash_or_isolated=args[0],
        task_name=options.task_name,
        extra_args=isolated_cmd_args,
        shards=options.shards,
        dimensions=options.dimensions,
        env=dict(options.env),
        deadline=options.deadline,
        verbose=options.verbose,
        profile=options.profile,
        priority=options.priority)
    if tasks:
      if task_name != options.task_name:
        print('Triggered task: %s' % task_name)
      if options.dump_json:
        data = {
          'base_task_name': task_name,
          'tasks': tasks,
        }
        tools.write_json(options.dump_json, data, True)
    return int(not tasks)
  except Failure:
    on_error.report(None)
    return 1
Code example #11
File: gekko.py Project: sammychurchill/Ella
    def update_results(self, neurons, final_value):
        """
        Write results to JSON file 
        """

        print(f"\n{self.ticker} "
              f"\nNeurons: {neurons} "
              f"\nNew best: {final_value} "
              f"\nOld best: {self.current_best} ")
        self.current_best = final_value
        data = {'neurons': neurons, 'results': final_value}
        write_json(self.output_file, data)
Code example #12
File: swarming.py Project: zanxi/bitpop
 def finalize(self):
     """Writes summary.json, shutdowns underlying Storage."""
     with self._lock:
         # Write an array of shard results with None for missing shards.
         summary = {
             "task_name": self.task_name,
             "shards": [self._per_shard_results.get(i) for i in xrange(self.shard_count)],
         }
         tools.write_json(os.path.join(self.task_output_dir, "summary.json"), summary, False)
         if self._storage:
             self._storage.close()
             self._storage = None
Code example #13
 def finalize(self):
     """Assembles and returns task summary JSON, shutdowns underlying Storage."""
     with self._lock:
         # Write an array of shard results with None for missing shards.
         summary = {"shards": [self._per_shard_results.get(i) for i in xrange(self.shard_count)]}
         # Write summary.json to task_output_dir as well.
         if self.task_output_dir:
             tools.write_json(os.path.join(self.task_output_dir, "summary.json"), summary, False)
         if self._storage:
             self._storage.close()
             self._storage = None
         return summary
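
Because missing shards are recorded as None placeholders, downstream tooling can consume the summary directly. A hedged illustration of reading the summary.json written above (the directory name is assumed):

import json
import os

task_output_dir = 'task_output'  # whatever was passed to the collector
with open(os.path.join(task_output_dir, 'summary.json')) as f:
    summary = json.load(f)

# Shards that never reported appear as None entries.
missing = [i for i, shard in enumerate(summary['shards']) if shard is None]
if missing:
    print('missing shards: %s' % ', '.join(map(str, missing)))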
Code example #14
File: swarming.py Project: danfengzi/soui_gyp
def CMDtrigger(parser, args):
    """Triggers a Swarming task.

  Accepts either the hash (sha1) of a .isolated file already uploaded or the
  path to an .isolated file to archive, packages it if needed and sends a
  Swarming manifest file to the Swarming server.

  If an .isolated file is specified instead of a hash, it is first archived.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
    add_trigger_options(parser)
    add_sharding_options(parser)
    args, isolated_cmd_args = extract_isolated_command_extra_args(args)
    parser.add_option(
        '--dump-json',
        metavar='FILE',
        help='Dump details about the triggered task(s) to this file as json')
    options, args = parser.parse_args(args)
    process_trigger_options(parser, options, args)

    auth.ensure_logged_in(options.swarming)
    if file_path.is_url(options.isolate_server):
        auth.ensure_logged_in(options.isolate_server)
    try:
        tasks, task_name = trigger(swarming=options.swarming,
                                   isolate_server=options.isolate_server
                                   or options.indir,
                                   namespace=options.namespace,
                                   file_hash_or_isolated=args[0],
                                   task_name=options.task_name,
                                   extra_args=isolated_cmd_args,
                                   shards=options.shards,
                                   dimensions=options.dimensions,
                                   env=dict(options.env),
                                   deadline=options.deadline,
                                   verbose=options.verbose,
                                   profile=options.profile,
                                   priority=options.priority)
        if tasks:
            if task_name != options.task_name:
                print('Triggered task: %s' % task_name)
            if options.dump_json:
                data = {
                    'base_task_name': task_name,
                    'tasks': tasks,
                }
                tools.write_json(options.dump_json, data, True)
        return int(not tasks)
    except Failure:
        on_error.report(None)
        return 1
Code example #15
def collect(swarming, task_name, task_ids, timeout, decorate, print_status_updates, task_summary_json, task_output_dir):
    """Retrieves results of a Swarming task.

  Returns:
    process exit code that should be returned to the user.
  """
    # Collect summary JSON and output files (if task_output_dir is not None).
    output_collector = TaskOutputCollector(task_output_dir, task_name, len(task_ids))

    seen_shards = set()
    exit_code = None
    total_duration = 0
    try:
        for index, metadata in yield_results(swarming, task_ids, timeout, None, print_status_updates, output_collector):
            seen_shards.add(index)

            # Default to failure if there was no process that even started.
            shard_exit_code = metadata.get("exit_code")
            if shard_exit_code:
                # It's encoded as a string, so bool('0') is True.
                shard_exit_code = int(shard_exit_code)
            if shard_exit_code or exit_code is None:
                exit_code = shard_exit_code
            total_duration += metadata.get("duration", 0)

            if decorate:
                print(decorate_shard_output(swarming, index, metadata))
                if len(seen_shards) < len(task_ids):
                    print("")
            else:
                print("%s: %s %s" % (metadata.get("bot_id", "N/A"), metadata["task_id"], shard_exit_code))
                if metadata["output"]:
                    output = metadata["output"].rstrip()
                    if output:
                        print("".join("  %s\n" % l for l in output.splitlines()))
    finally:
        summary = output_collector.finalize()
        if task_summary_json:
            # TODO(maruel): Make this optional.
            for i in summary["shards"]:
                if i:
                    convert_to_old_format(i)
            tools.write_json(task_summary_json, summary, False)

    if decorate and total_duration:
        print ("Total duration: %.1fs" % total_duration)

    if len(seen_shards) != len(task_ids):
        missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
        print >> sys.stderr, ("Results from some shards are missing: %s" % ", ".join(map(str, missing_shards)))
        return 1

    return exit_code if exit_code is not None else 1
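
The bool('0') comment above flags the pitfall that motivates the int() conversion: the API returns exit codes as strings, and any non-empty string is truthy. A two-line demonstration:

assert bool('0') is True   # non-empty string: truthy, even for a clean exit
assert int('0') == 0       # convert first, then test, as the code above does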
Code example #16
def run_tha_test(isolated_hash, storage, cache, leak_temp_dir, result_json, root_dir, extra_args):
    """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occurred.
    root_dir: directory in which to create the temporary directory. If not
              specified, a random temporary directory is created.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.

  Returns:
    Process exit code that should be used.
  """
    # run_isolated exit code. Depends on whether result_json is used.
    result = map_and_run(isolated_hash, storage, cache, leak_temp_dir, root_dir, extra_args)
    logging.info("Result:\n%s", tools.format_json(result, dense=True))
    if result_json:
        # We've found tests to delete 'work' when quitting, causing an exception
        # here. Try to recreate the directory if necessary.
        work_dir = os.path.dirname(result_json)
        if not os.path.isdir(work_dir):
            os.mkdir(work_dir)
        tools.write_json(result_json, result, dense=True)
        # Only return 1 if there was an internal error.
        return int(bool(result["internal_failure"]))

    # Marshall into old-style inline output.
    if result["outputs_ref"]:
        data = {
            "hash": result["outputs_ref"]["isolated"],
            "namespace": result["outputs_ref"]["namespace"],
            "storage": result["outputs_ref"]["isolatedserver"],
        }
        sys.stdout.flush()
        print("[run_isolated_out_hack]%s[/run_isolated_out_hack]" % tools.format_json(data, dense=True))
    return result["exit_code"] or int(bool(result["internal_failure"]))
Code example #17
File: tickers.py Project: zhangarejiu/Ella
def get_all_tickers():
    """
    Fetch tickers from Poloniex API and save to JSON file
    """

    response = urlopen('https://poloniex.com/public?command=returnTicker')
    response_data = response.read()
    tickers = [
        k for k, _ in json.loads(response_data).items() if k[:3] == 'BTC'
    ]
    tickers.sort()
    write_json('poloniex/data/tickers.json', tickers)
    return tickers
Code example #18
def save_isolated(isolated, data):
  """Writes one or multiple .isolated files.

  Note: this reference implementation does not create child .isolated files, so it
  always returns an empty list.

  Returns the list of child isolated files that are included by |isolated|.
  """
  # Make sure the data is valid .isolated data by 'reloading' it.
  algo = SUPPORTED_ALGOS[data['algo']]
  load_isolated(json.dumps(data), algo)
  tools.write_json(isolated, data, True)
  return []
Code example #19
 def save_files(self):
     """Saves self.saved_state and creates a .isolated file."""
     logging.debug('Dumping to %s' % self.isolated_filepath)
     self.saved_state.child_isolated_files = chromium_save_isolated(
         self.isolated_filepath, self.saved_state.to_isolated(),
         self.saved_state.path_variables, self.saved_state.algo)
     total_bytes = sum(
         i.get('s', 0) for i in self.saved_state.files.itervalues())
     if total_bytes:
         # TODO(maruel): Stats are missing the .isolated files.
         logging.debug('Total size: %d bytes' % total_bytes)
     saved_state_file = isolatedfile_to_state(self.isolated_filepath)
     logging.debug('Dumping to %s' % saved_state_file)
     tools.write_json(saved_state_file, self.saved_state.flatten(), True)
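
The isolatedfile_to_state helper referenced here presumably derives the companion .state path from the .isolated path; a plausible one-line reconstruction (an assumption, not verified against the project):

def isolatedfile_to_state(filename):
    # 'foo.isolated' -> 'foo.isolated.state'
    return filename + '.state'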
Code example #20
File: isolate.py Project: qlb7707/webrtc_src
 def save_files(self):
     """Saves self.saved_state and creates a .isolated file."""
     logging.debug("Dumping to %s" % self.isolated_filepath)
     self.saved_state.child_isolated_files = chromium_save_isolated(
         self.isolated_filepath,
         self.saved_state.to_isolated(),
         self.saved_state.path_variables,
         self.saved_state.algo,
     )
     total_bytes = sum(i.get("s", 0) for i in self.saved_state.files.itervalues())
     if total_bytes:
         # TODO(maruel): Stats are missing the .isolated files.
         logging.debug("Total size: %d bytes" % total_bytes)
     saved_state_file = isolatedfile_to_state(self.isolated_filepath)
     logging.debug("Dumping to %s" % saved_state_file)
     tools.write_json(saved_state_file, self.saved_state.flatten(), True)
Code example #21
File: swarming.py Project: mellowdistrict/luci-py
def CMDtrigger(parser, args):
  """Triggers a Swarming task.

  Accepts either the hash (sha1) of a .isolated file already uploaded or the
  path to an .isolated file to archive.

  If an .isolated file is specified instead of a hash, it is first archived.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
  add_trigger_options(parser)
  add_sharding_options(parser)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Dump details about the triggered task(s) to this file as json')
  options, args = parser.parse_args(args)
  task_request = process_trigger_options(parser, options, args)
  try:
    tasks = trigger_task_shards(
        options.swarming, task_request, options.shards)
    if tasks:
      print('Triggered task: %s' % options.task_name)
      tasks_sorted = sorted(
          tasks.itervalues(), key=lambda x: x['shard_index'])
      if options.dump_json:
        data = {
          'base_task_name': options.task_name,
          'tasks': tasks,
          'request': task_request_to_raw_request(task_request),
        }
        tools.write_json(unicode(options.dump_json), data, True)
        print('To collect results, use:')
        print('  swarming.py collect -S %s --json %s' %
            (options.swarming, options.dump_json))
      else:
        print('To collect results, use:')
        print('  swarming.py collect -S %s %s' %
            (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
      print('Or visit:')
      for t in tasks_sorted:
        print('  ' + t['view_url'])
    return int(not tasks)
  except Failure:
    on_error.report(None)
    return 1
Code example #22
    def save_latest(self,
                    directory,
                    model_and_loss,
                    stats_dict,
                    store_as_best=False,
                    suffix=''):
        # -----------------------------------------------------------------------------------------
        # Make sure directory exists
        # -----------------------------------------------------------------------------------------
        tools.ensure_dir(directory)

        # -----------------------------------------------------------------------------------------
        # Save
        # -----------------------------------------------------------------------------------------
        save_dict = dict(stats_dict)
        save_dict[self._model_key] = model_and_loss.state_dict()

        if not suffix:
            suffix = self._latest_postfix

        latest_checkpoint_filename = os.path.join(
            directory, self._prefix + suffix + self._extension)

        latest_statistics_filename = os.path.join(
            directory, self._prefix + suffix + ".json")

        torch.save(save_dict, latest_checkpoint_filename)
        tools.write_json(data_dict=stats_dict,
                         filename=latest_statistics_filename)

        # -----------------------------------------------------------------------------------------
        # Possibly store as best
        # -----------------------------------------------------------------------------------------
        if store_as_best:
            best_checkpoint_filename = os.path.join(
                directory, self._prefix + self._best_postfix + self._extension)

            best_statistics_filename = os.path.join(
                directory, self._prefix + self._best_postfix + ".json")

            logging.info("Saved checkpoint as best model..")
            shutil.copyfile(latest_checkpoint_filename,
                            best_checkpoint_filename)
            shutil.copyfile(latest_statistics_filename,
                            best_statistics_filename)
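
Note that this checkpointing example calls tools.write_json(data_dict=..., filename=...), a keyword-argument signature from yet another codebase. A plausible sketch of that variant (hypothetical reconstruction):

import json

def write_json(data_dict, filename):
    # Keyword-friendly variant matching the call above.
    with open(filename, 'w') as f:
        json.dump(data_dict, f, indent=4, sort_keys=True)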
Code example #23
File: tickers.py Project: momor666/Ella
def get_tickers():
    """
    Fetch tickers from Poloniex API and save to JSON file
    """

    tickers = [
        'BTC_BELA',
        'BTC_DASH',
        'BTC_DOGE',
        'BTC_ETH',
        'BTC_LBC',
        'BTC_MAID',
        'BTC_XEM',
        'BTC_XMR',
    ]
    tickers.sort()
    write_json('poloniex/data/tickers.json', tickers)
    return tickers
Code example #24
File: tickers.py Project: zhangarejiu/Ella
def get_tickers():
    """
    Fetch tickers from Poloniex API and save to JSON file
    """

    tickers = [
        'BTC_BELA',
        'BTC_DASH',
        'BTC_DOGE',
        'BTC_ETH',
        'BTC_LBC',
        'BTC_MAID',
        'BTC_XEM',
        'BTC_XMR',
    ]
    tickers.sort()
    write_json('poloniex/data/tickers.json', tickers)
    return tickers
Code example #25
 def finalize(self):
     """Writes summary.json, shutdowns underlying Storage."""
     with self._lock:
         # Write an array of shard results with None for missing shards.
         summary = {
             'task_name':
             self.task_name,
             'shards': [
                 self._per_shard_results.get(i)
                 for i in xrange(self.shard_count)
             ],
         }
         tools.write_json(
             os.path.join(self.task_output_dir, 'summary.json'), summary,
             False)
         if self._storage:
             self._storage.close()
             self._storage = None
Code example #26
def archive_isolated_triggers(isolate_server, tree_isolated, tests):
    """Creates and archives all the .isolated files for the tests at once.

  Archiving them in one batch is faster than archiving each file individually.
  Also the .isolated files can be reused across OSes, reducing the amount of
  I/O.

  Returns:
    list of (test, sha1) tuples.
  """
    logging.info('archive_isolated_triggers(%s, %s)', tree_isolated, tests)
    tempdir = tempfile.mkdtemp(prefix=u'run_swarming_tests_on_swarming_')
    try:
        isolateds = []
        for test in tests:
            test_name = os.path.basename(test)
            # Creates a manual .isolated file. See
            # https://github.com/luci/luci-py/blob/master/appengine/isolate/doc/Design.md#isolated-file-format
            isolated = {
                'algo': 'sha-1',
                'command': ['python', test],
                'includes': [tree_isolated],
                'read_only': 0,
                'version': '1.4',
            }
            v = os.path.join(tempdir, test_name + '.isolated')
            tools.write_json(v, isolated, True)
            isolateds.append(v)
        cmd = [
            'isolateserver.py',
            'archive',
            '--isolate-server',
            isolate_server,
        ] + isolateds
        if logging.getLogger().isEnabledFor(logging.INFO):
            cmd.append('--verbose')
        items = [i.split() for i in check_output(cmd).splitlines()]
        assert len(items) == len(tests)
        assert all(
            items[i][1].endswith(os.path.basename(tests[i]) + '.isolated')
            for i in xrange(len(tests)))
        return zip(tests, [i[0] for i in items])
    finally:
        file_path.rmtree(tempdir)
Code example #27
def archive_isolated_triggers(isolate_server, tree_isolated, tests):
  """Creates and archives all the .isolated files for the tests at once.

  Archiving them in one batch is faster than archiving each file individually.
  Also the .isolated files can be reused across OSes, reducing the amount of
  I/O.

  Returns:
    list of (test, sha1) tuples.
  """
  logging.info('archive_isolated_triggers(%s, %s)', tree_isolated, tests)
  tempdir = tempfile.mkdtemp(prefix=u'run_swarming_tests_on_swarming_')
  try:
    isolateds = []
    for test in tests:
      test_name = os.path.basename(test)
      # Creates a manual .isolated file. See
      # https://code.google.com/p/swarming/wiki/IsolatedDesign for more details.
      isolated = {
        'algo': 'sha-1',
        'command': ['python', test],
        'includes': [tree_isolated],
        'read_only': 0,
        'version': '1.4',
      }
      v = os.path.join(tempdir, test_name + '.isolated')
      tools.write_json(v, isolated, True)
      isolateds.append(v)
    cmd = [
        'isolateserver.py', 'archive', '--isolate-server', isolate_server,
    ] + isolateds
    if logging.getLogger().isEnabledFor(logging.INFO):
      cmd.append('--verbose')
    items = [i.split() for i in check_output(cmd).splitlines()]
    assert len(items) == len(tests)
    assert all(
        items[i][1].endswith(os.path.basename(tests[i]) + '.isolated')
        for i in xrange(len(tests)))
    return zip(tests, [i[0] for i in items])
  finally:
    shutil.rmtree(tempdir)
Code example #28
def CMDtrigger(parser, args):
    """Triggers a Swarming task.

  Accepts either the hash (sha1) of a .isolated file already uploaded or the
  path to an .isolated file to archive.

  If an .isolated file is specified instead of a hash, it is first archived.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
    add_trigger_options(parser)
    add_sharding_options(parser)
    parser.add_option(
        "--dump-json", metavar="FILE", help="Dump details about the triggered task(s) to this file as json"
    )
    options, args = parser.parse_args(args)
    task_request = process_trigger_options(parser, options, args)
    try:
        tasks = trigger_task_shards(options.swarming, task_request, options.shards)
        if tasks:
            print ("Triggered task: %s" % options.task_name)
            tasks_sorted = sorted(tasks.itervalues(), key=lambda x: x["shard_index"])
            if options.dump_json:
                data = {"base_task_name": options.task_name, "tasks": tasks}
                tools.write_json(options.dump_json, data, True)
                print ("To collect results, use:")
                print ("  swarming.py collect -S %s --json %s" % (options.swarming, options.dump_json))
            else:
                print ("To collect results, use:")
                print (
                    "  swarming.py collect -S %s %s" % (options.swarming, " ".join(t["task_id"] for t in tasks_sorted))
                )
            print ("Or visit:")
            for t in tasks_sorted:
                print ("  " + t["view_url"])
        return int(not tasks)
    except Failure:
        on_error.report(None)
        return 1
Code example #29
File: validate.py Project: kimsoohwan/TACO_2018
def save_result_annotation(result_annotation, image_files, output_dir,
                           target_file):
    """
    :param result_annotation:
    :param image_files:
    :param output_dir:
    :param target_file:

    """

    json_data = list()
    result_file = os.path.join(output_dir, target_file)
    for idx, annotation in enumerate(result_annotation):

        tmp = dict()
        tmp['annotations'] = annotation
        tmp['class'] = 'image'
        tmp['filename'] = image_files[idx]

        json_data.append(tmp)

    tools.write_json(result_file, json_data)
Code example #30
def chromium_save_isolated(isolated, data, path_variables, algo):
    """Writes one or many .isolated files.

  This slightly increases the cold cache cost but greatly reduces the warm cache
  cost by splitting low-churn files off the master .isolated file. It also
  reduces overall isolateserver memcache consumption.
  """
    slaves = []

    def extract_into_included_isolated(prefix):
        new_slave = {
            'algo': data['algo'],
            'files': {},
            'version': data['version'],
        }
        for f in data['files'].keys():
            if f.startswith(prefix):
                new_slave['files'][f] = data['files'].pop(f)
        if new_slave['files']:
            slaves.append(new_slave)

    # Split test/data/ in its own .isolated file.
    extract_into_included_isolated(os.path.join('test', 'data', ''))

    # Split everything out of PRODUCT_DIR in its own .isolated file.
    if path_variables.get('PRODUCT_DIR'):
        extract_into_included_isolated(path_variables['PRODUCT_DIR'])

    files = []
    for index, f in enumerate(slaves):
        slavepath = isolated[:-len('.isolated')] + '.%d.isolated' % index
        tools.write_json(slavepath, f, True)
        data.setdefault('includes',
                        []).append(isolated_format.hash_file(slavepath, algo))
        files.append(os.path.basename(slavepath))

    files.extend(isolated_format.save_isolated(isolated, data))
    return files
Code example #31
def chromium_save_isolated(isolated, data, path_variables, algo):
  """Writes one or many .isolated files.

  This slightly increases the cold cache cost but greatly reduces the warm cache
  cost by splitting low-churn files off the master .isolated file. It also
  reduces overall isolateserver memcache consumption.
  """
  slaves = []

  def extract_into_included_isolated(prefix):
    new_slave = {
      'algo': data['algo'],
      'files': {},
      'version': data['version'],
    }
    for f in data['files'].keys():
      if f.startswith(prefix):
        new_slave['files'][f] = data['files'].pop(f)
    if new_slave['files']:
      slaves.append(new_slave)

  # Split test/data/ in its own .isolated file.
  extract_into_included_isolated(os.path.join('test', 'data', ''))

  # Split everything out of PRODUCT_DIR in its own .isolated file.
  if path_variables.get('PRODUCT_DIR'):
    extract_into_included_isolated(path_variables['PRODUCT_DIR'])

  files = []
  for index, f in enumerate(slaves):
    slavepath = isolated[:-len('.isolated')] + '.%d.isolated' % index
    tools.write_json(slavepath, f, True)
    data.setdefault('includes', []).append(
        isolated_format.hash_file(slavepath, algo))
    files.append(os.path.basename(slavepath))

  files.extend(isolated_format.save_isolated(isolated, data))
  return files
Code example #32
File: isolate.py Project: qlb7707/webrtc_src
def CMDbatcharchive(parser, args):
    """Archives multiple isolated trees at once.

  Using a single command instead of multiple sequential invocations makes it
  possible to cut redundant work when isolated trees share common files (e.g.
  file hashes are checked only once, their presence on the server is checked
  only once, and so on).

  Takes a list of paths to *.isolated.gen.json files that describe what trees to
  isolate. Format of files is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
  """
    isolateserver.add_isolate_server_options(parser)
    isolateserver.add_archive_options(parser)
    auth.add_auth_options(parser)
    parser.add_option(
        "--dump-json", metavar="FILE", help="Write isolated hashes of archived trees to this file as JSON"
    )
    options, args = parser.parse_args(args)
    auth.process_auth_options(parser, options)
    isolateserver.process_isolate_server_options(parser, options, True)

    # Validate all incoming options, prepare what needs to be archived as a list
    # of tuples (archival options, working directory).
    work_units = []
    for gen_json_path in args:
        # Validate JSON format of a *.isolated.gen.json file.
        data = tools.read_json(gen_json_path)
        if data.get("version") != ISOLATED_GEN_JSON_VERSION:
            parser.error("Invalid version in %s" % gen_json_path)
        cwd = data.get("dir")
        if not isinstance(cwd, unicode) or not os.path.isdir(cwd):
            parser.error("Invalid dir in %s" % gen_json_path)
        args = data.get("args")
        if not isinstance(args, list) or not all(isinstance(x, unicode) for x in args):
            parser.error("Invalid args in %s" % gen_json_path)
        # Convert command line (embedded in JSON) to Options object.
        work_units.append((parse_archive_command_line(args, cwd), cwd))

    # Perform the archival, all at once.
    isolated_hashes = isolate_and_archive(work_units, options.isolate_server, options.namespace)

    # TODO(vadimsh): isolate_and_archive returns None on upload failure, there's
    # no way currently to figure out which *.isolated files from a batch were
    # successfully uploaded, so consider them all failed (and emit empty dict
    # as JSON result).
    if options.dump_json:
        tools.write_json(options.dump_json, isolated_hashes or {}, False)

    if isolated_hashes is None:
        return EXIT_CODE_UPLOAD_ERROR

    # isolated_hashes[x] is None if 'x.isolate' contains an error.
    if not all(isolated_hashes.itervalues()):
        return EXIT_CODE_ISOLATE_ERROR

    return 0
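
For concreteness, a *.isolated.gen.json file in the format described by the docstring could be produced like this (the directory path and archive arguments are invented for illustration):

import json

gen = {
    'version': 1,                         # must match ISOLATED_GEN_JSON_VERSION
    'dir': '/absolute/path/to/checkout',  # all other paths are relative to this
    'args': [                             # one 'archive' command line
        '--isolate', 'foo.isolate',
        '--isolated', 'foo.isolated',
    ],
}
with open('foo.isolated.gen.json', 'w') as f:
    json.dump(gen, f, indent=2)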
Code example #33
def run_tha_test(
    isolated_hash, storage, cache, leak_temp_dir, result_json, root_dir,
    hard_timeout, grace_period, extra_args):
  """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occurred.
    root_dir: directory in which to create the temporary directory. If not
              specified, a random temporary directory is created.
    hard_timeout: kills the process if it lasts more than this amount of
                  seconds.
    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.

  Returns:
    Process exit code that should be used.
  """
  if result_json:
    # Write a json output file right away in case we get killed.
    result = {
      'exit_code': None,
      'had_hard_timeout': False,
      'internal_failure': 'Was terminated before completion',
      'outputs_ref': None,
      'version': 2,
    }
    tools.write_json(result_json, result, dense=True)

  # run_isolated exit code. Depends on whether result_json is used.
  result = map_and_run(
      isolated_hash, storage, cache, leak_temp_dir, root_dir, hard_timeout,
      grace_period, extra_args)
  logging.info('Result:\n%s', tools.format_json(result, dense=True))
  if result_json:
    # We've found tests to delete 'work' when quitting, causing an exception
    # here. Try to recreate the directory if necessary.
    work_dir = os.path.dirname(result_json)
    if not fs.isdir(work_dir):
      fs.mkdir(work_dir)
    tools.write_json(result_json, result, dense=True)
    # Only return 1 if there was an internal error.
    return int(bool(result['internal_failure']))

  # Marshall into old-style inline output.
  if result['outputs_ref']:
    data = {
      'hash': result['outputs_ref']['isolated'],
      'namespace': result['outputs_ref']['namespace'],
      'storage': result['outputs_ref']['isolatedserver'],
    }
    sys.stdout.flush()
    print(
        '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
        tools.format_json(data, dense=True))
    sys.stdout.flush()
  return result['exit_code'] or int(bool(result['internal_failure']))
Code example #34
  def test_load_stale_isolated(self):
    isolate_file = os.path.join(
        ROOT_DIR, 'tests', 'isolate', 'touch_root.isolate')

    # Data to be loaded in the .isolated file. Do not create a .state file.
    input_data = {
      'command': ['python'],
      'files': {
        'foo': {
          "m": 416,
          "h": "invalid",
          "s": 538,
          "t": 1335146921,
        },
        os.path.join('tests', 'isolate', 'touch_root.py'): {
          "m": 488,
          "h": "invalid",
          "s": 538,
          "t": 1335146921,
        },
      },
    }
    options = self._get_option(isolate_file)
    tools.write_json(options.isolated, input_data, False)

    # A CompleteState object contains two parts:
    # - Result instance stored in complete_state.isolated, corresponding to the
    #   .isolated file, is what is read by run_test_from_archive.py.
    # - SavedState instance stored in complete_state.saved_state,
    #   corresponding to the .state file, which simply aids the developer
    #   when re-running the same command multiple times and contains
    #   discardable information.
    complete_state = isolate.load_complete_state(options, self.cwd, None, False)
    actual_isolated = complete_state.saved_state.to_isolated()
    actual_saved_state = complete_state.saved_state.flatten()

    expected_isolated = {
      'algo': 'sha-1',
      'command': ['python', 'touch_root.py'],
      'files': {
        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
          'm': 488,
          'h': hash_file('tests', 'isolate', 'touch_root.py'),
          's': _size('tests', 'isolate', 'touch_root.py'),
        },
        u'isolate.py': {
          'm': 488,
          'h': hash_file('isolate.py'),
          's': _size('isolate.py'),
        },
      },
      'relative_cwd': os.path.join(u'tests', 'isolate'),
      'version': isolate.isolateserver.ISOLATED_FILE_VERSION,
    }
    self._cleanup_isolated(expected_isolated)
    self.assertEqual(expected_isolated, actual_isolated)

    expected_saved_state = {
      'OS': sys.platform,
      'algo': 'sha-1',
      'child_isolated_files': [],
      'command': ['python', 'touch_root.py'],
      'config_variables': {
        'OS': 'linux',
        'chromeos': options.config_variables['chromeos'],
      },
      'extra_variables': {
        'foo': 'bar',
      },
      'files': {
        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
          'm': 488,
          'h': hash_file('tests', 'isolate', 'touch_root.py'),
          's': _size('tests', 'isolate', 'touch_root.py'),
        },
        u'isolate.py': {
          'm': 488,
          'h': hash_file('isolate.py'),
          's': _size('isolate.py'),
        },
      },
      'isolate_file': file_path.safe_relpath(
          file_path.get_native_path_case(isolate_file),
          os.path.dirname(options.isolated)),
      'path_variables': {},
      'relative_cwd': os.path.join(u'tests', 'isolate'),
      'root_dir': file_path.get_native_path_case(ROOT_DIR),
      'version': isolate.SavedState.EXPECTED_VERSION,
    }
    self._cleanup_isolated(expected_saved_state)
    self._cleanup_saved_state(actual_saved_state)
    self.assertEqual(expected_saved_state, actual_saved_state)
Code example #35
# save net
torch.save(net, paths.model_serialized_dir + model_name + '.pt')

# Create params dictionary to be saved with all hyperparameters
params = dict(vocab_size=vocab_size,
              num_epochs=num_epochs,
              lr=learning_rate,
              num_sequences=num_sequences,
              length_per_seq=length_per_seq)

params.update(tokenizer_kwargs)
params.update(dataset_split_kwargs)
params['training_loss'] = training_loss
params['validation_loss'] = validation_loss
write_json(params, paths.model_serialized_dir + model_name + '.json')

# Plot training and validation loss
epoch = np.arange(len(training_loss))
plt.figure()
plt.plot(epoch, training_loss, 'r', label='Training loss')
plt.plot(epoch, validation_loss, 'b', label='Validation loss')
plt.legend()
plt.xlabel('Epoch')
plt.ylabel('NLL')
plt.show()


#%%
# Compare target y with output from forward pass from first sample of test_set
# TO DO: Implement a SCORE for all test data
Code example #36
def CMDbatcharchive(parser, args):
    """Archives multiple isolated trees at once.

  Using a single command instead of multiple sequential invocations makes it
  possible to cut redundant work when isolated trees share common files (e.g.
  file hashes are checked only once, their presence on the server is checked
  only once, and so on).

  Takes a list of paths to *.isolated.gen.json files that describe what trees to
  isolate. Format of files is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
  """
    isolateserver.add_isolate_server_options(parser)
    isolateserver.add_archive_options(parser)
    auth.add_auth_options(parser)
    parser.add_option(
        '--dump-json',
        metavar='FILE',
        help='Write isolated hashes of archived trees to this file as JSON')
    options, args = parser.parse_args(args)
    auth.process_auth_options(parser, options)
    isolateserver.process_isolate_server_options(parser, options, True, True)

    # Validate all incoming options, prepare what needs to be archived as a list
    # of tuples (archival options, working directory).
    work_units = []
    for gen_json_path in args:
        # Validate JSON format of a *.isolated.gen.json file.
        try:
            data = tools.read_json(gen_json_path)
        except IOError as e:
            parser.error('Failed to open %s: %s' % (gen_json_path, e))
        if data.get('version') != ISOLATED_GEN_JSON_VERSION:
            parser.error('Invalid version in %s' % gen_json_path)
        cwd = data.get('dir')
        if not isinstance(cwd, unicode) or not fs.isdir(cwd):
            parser.error('Invalid dir in %s' % gen_json_path)
        args = data.get('args')
        if (not isinstance(args, list)
                or not all(isinstance(x, unicode) for x in args)):
            parser.error('Invalid args in %s' % gen_json_path)
        # Convert command line (embedded in JSON) to Options object.
        work_units.append((parse_archive_command_line(args, cwd), cwd))

    # Perform the archival, all at once.
    isolated_hashes = isolate_and_archive(work_units, options.isolate_server,
                                          options.namespace)

    # TODO(vadimsh): isolate_and_archive returns None on upload failure, there's
    # no way currently to figure out which *.isolated files from a batch were
    # successfully uploaded, so consider them all failed (and emit empty dict
    # as JSON result).
    if options.dump_json:
        tools.write_json(options.dump_json, isolated_hashes or {}, False)

    if isolated_hashes is None:
        return EXIT_CODE_UPLOAD_ERROR

    # isolated_hashes[x] is None if 'x.isolate' contains an error.
    if not all(isolated_hashes.itervalues()):
        return EXIT_CODE_ISOLATE_ERROR

    return 0
Code example #37
File: swarming.py Project: danfengzi/soui_gyp
def collect(url, task_name, shards, timeout, decorate, print_status_updates,
            task_summary_json, task_output_dir):
    """Retrieves results of a Swarming task."""
    # Grab task keys for each shard. Order is important; it is used to figure
    # out the shard index based on the key.
    # TODO(vadimsh): Simplify this once server support is added.
    task_keys = []
    for index in xrange(shards):
        shard_task_name = get_shard_task_name(task_name, shards, index)
        logging.info('Collecting %s', shard_task_name)
        shard_task_keys = get_task_keys(url, shard_task_name)
        if not shard_task_keys:
            raise Failure('No task keys to get results with: %s' %
                          shard_task_name)
        if len(shard_task_keys) != 1:
            raise Failure('Expecting only one shard for a task: %s' %
                          shard_task_name)
        task_keys.append(shard_task_keys[0])

    # Collect summary JSON and output files (if task_output_dir is not None).
    output_collector = TaskOutputCollector(task_output_dir, task_name,
                                           len(task_keys))

    seen_shards = set()
    exit_codes = []

    try:
        for index, output in yield_results(url, task_keys, timeout, None,
                                           print_status_updates,
                                           output_collector):
            seen_shards.add(index)

            # Grab first non-zero exit code as an overall shard exit code.
            shard_exit_code = 0
            for code in map(int, (output['exit_codes'] or '1').split(',')):
                if code:
                    shard_exit_code = code
                    break
            exit_codes.append(shard_exit_code)

            if decorate:
                print decorate_shard_output(index, output, shard_exit_code)
            else:
                print('%s/%s: %s' %
                      (output['machine_id'], output['machine_tag'],
                       output['exit_codes']))
                print(''.join('  %s\n' % l
                              for l in output['output'].splitlines()))
    finally:
        summary = output_collector.finalize()
        if task_summary_json:
            tools.write_json(task_summary_json, summary, False)

    if len(seen_shards) != len(task_keys):
        missing_shards = [
            x for x in range(len(task_keys)) if x not in seen_shards
        ]
        print >> sys.stderr, ('Results from some shards are missing: %s' %
                              ', '.join(map(str, missing_shards)))
        return 1

    return int(bool(any(exit_codes)))
Code example #38
def CMDquery(parser, args):
    """Returns raw JSON information via an URL endpoint. Use 'query-list' to
  gather the list of API methods from the server.

  Examples:
    Listing all bots:
      swarming.py query -S https://server-url bots/list

    Listing last 10 tasks on a specific bot named 'swarm1':
      swarming.py query -S https://server-url --limit 10 bot/swarm1/tasks
  """
    CHUNK_SIZE = 250

    parser.add_option(
        "-L",
        "--limit",
        type="int",
        default=200,
        help="Limit to enforce on limitless items (like number of tasks); " "default=%default",
    )
    parser.add_option("--json", help="Path to JSON output file (otherwise prints to stdout)")
    parser.add_option("--progress", action="store_true", help="Prints a dot at each request to show progress")
    options, args = parser.parse_args(args)
    if len(args) != 1:
        parser.error("Must specify only method name and optionally query args properly " "escaped.")
    base_url = options.swarming + "/_ah/api/swarming/v1/" + args[0]
    url = base_url
    if options.limit:
        # Sanity check; adjust if this does not work out.
        merge_char = "&" if "?" in url else "?"
        url += "%slimit=%d" % (merge_char, min(CHUNK_SIZE, options.limit))
    data = net.url_read_json(url)
    if data is None:
        # TODO(maruel): Do basic diagnostic.
        print >> sys.stderr, "Failed to access %s" % url
        return 1

    # Some endpoints support cursors. If the response contains a 'cursor'
    # item, keep fetching pages automatically.
    while data.get("cursor") and (not options.limit or len(data["items"]) < options.limit):
        merge_char = "&" if "?" in base_url else "?"
        url = base_url + "%scursor=%s" % (merge_char, urllib.quote(data["cursor"]))
        if options.limit:
            url += "&limit=%d" % min(CHUNK_SIZE, options.limit - len(data["items"]))
        if options.progress:
            sys.stdout.write(".")
            sys.stdout.flush()
        new = net.url_read_json(url)
        if new is None:
            if options.progress:
                print("")
            print >> sys.stderr, "Failed to access %s" % options.swarming
            return 1
        data["items"].extend(new["items"])
        data["cursor"] = new.get("cursor")

    if options.progress:
        print("")
    if options.limit and len(data.get("items", [])) > options.limit:
        data["items"] = data["items"][: options.limit]
    data.pop("cursor", None)

    if options.json:
        tools.write_json(options.json, data, True)
    else:
        try:
            tools.write_json(sys.stdout, data, False)
            sys.stdout.write("\n")
        except IOError:
            pass
    return 0
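
The cursor handling in CMDquery generalizes to any endpoint that returns a dict with an 'items' list and an optional 'cursor': keep re-requesting with a cursor= parameter until the cursor disappears or the limit is reached. A simplified sketch of the pattern, where fetch_json stands in for net.url_read_json (an assumption for illustration; error handling and cursor quoting are reduced to comments):

def read_all_items(base_url, fetch_json, limit=None, chunk_size=250):
    # fetch_json(url) -> dict with an 'items' list and an optional 'cursor',
    # or None on failure. Real code should urllib-quote the cursor value.
    merge_char = '&' if '?' in base_url else '?'
    url = base_url
    if limit:
        url += '%slimit=%d' % (merge_char, min(chunk_size, limit))
    data = fetch_json(url)
    if data is None:
        return None
    items = list(data.get('items') or [])
    while data.get('cursor') and (not limit or len(items) < limit):
        url = base_url + '%scursor=%s' % (merge_char, data['cursor'])
        if limit:
            url += '&limit=%d' % min(chunk_size, limit - len(items))
        data = fetch_json(url)
        if data is None:
            return None
        items.extend(data.get('items') or [])
    return items[:limit] if limit else items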
Code example #39
File: run_isolated.py  Project: xincun/yxbase
def run_tha_test(command, isolated_hash, storage, isolate_cache, outputs,
                 init_named_caches, leak_temp_dir, result_json, root_dir,
                 hard_timeout, grace_period, bot_file, install_packages_fn,
                 use_symlinks):
    """Runs an executable and records execution metadata.

  Either command or isolated_hash must be specified.

  If isolated_hash is specified, downloads the dependencies in the cache,
  hardlinks them into a temporary directory and runs the command specified in
  the .isolated.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    command: a list of string; the command to run OR optional arguments to add
             to the command stated in the .isolated file if a command was
             specified.
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
                   The command specified in the .isolated is executed.
                   Mutually exclusive with command argument.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    isolate_cache: an isolateserver.LocalCache to keep from retrieving the
                   same objects constantly by caching the objects retrieved.
                   Can be on-disk or in-memory.
    init_named_caches: a function (run_dir) => context manager that creates
                      symlinks for named caches in |run_dir|.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occurred.
    root_dir: path to the directory to use to create the temporary directory. If
              not specified, a random temporary directory is created.
    hard_timeout: kills the process if it lasts more than this amount of
                  seconds.
    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
    install_packages_fn: context manager dir => CipdInfo, see
      install_client_and_packages.
    use_symlinks: create tree with symlinks instead of hardlinks.

  Returns:
    Process exit code that should be used.
  """
    if result_json:
        # Write a json output file right away in case we get killed.
        result = {
            'exit_code': None,
            'had_hard_timeout': False,
            'internal_failure': 'Was terminated before completion',
            'outputs_ref': None,
            'version': 5,
        }
        tools.write_json(result_json, result, dense=True)

    # run_isolated exit code. Depends on if result_json is used or not.
    result = map_and_run(command, isolated_hash, storage, isolate_cache,
                         outputs, init_named_caches, leak_temp_dir, root_dir,
                         hard_timeout, grace_period, bot_file,
                         install_packages_fn, use_symlinks, True)
    logging.info('Result:\n%s', tools.format_json(result, dense=True))

    if result_json:
        # Some tests have been observed deleting 'work' when quitting, causing
        # an exception here. Recreate the directory if necessary.
        file_path.ensure_tree(os.path.dirname(result_json))
        tools.write_json(result_json, result, dense=True)
        # Only return 1 if there was an internal error.
        return int(bool(result['internal_failure']))

    # Marshal into old-style inline output.
    if result['outputs_ref']:
        data = {
            'hash': result['outputs_ref']['isolated'],
            'namespace': result['outputs_ref']['namespace'],
            'storage': result['outputs_ref']['isolatedserver'],
        }
        sys.stdout.flush()
        print('[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
              tools.format_json(data, dense=True))
        sys.stdout.flush()
    return result['exit_code'] or int(bool(result['internal_failure']))
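
Note the write-early, overwrite-late pattern around result_json: a pessimistic placeholder ('Was terminated before completion') is written before the run so that even a hard kill leaves valid metadata on disk, and the real result overwrites it once map_and_run returns. A stripped-down sketch of the same idea, using json.dump in place of tools.write_json:

import json

def run_with_result_file(result_json, run_fn):
    # Write a pessimistic placeholder first, in case we get killed mid-run.
    placeholder = {
        'exit_code': None,
        'internal_failure': 'Was terminated before completion',
    }
    with open(result_json, 'w') as f:
        json.dump(placeholder, f)
    result = run_fn()
    # Overwrite with the real outcome once the run actually finished.
    with open(result_json, 'w') as f:
        json.dump(result, f)
    return int(bool(result.get('internal_failure')))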
Code example #40
  def test_load_stale_isolated(self):
    isolate_file = os.path.join(
        ROOT_DIR, 'tests', 'isolate', 'touch_root.isolate')

    # Data to be loaded in the .isolated file. Do not create a .state file.
    input_data = {
      'command': ['python'],
      'files': {
        'foo': {
          'm': 416,
          'h': 'invalid',
          's': 538,
          't': 1335146921,
        },
        os.path.join('tests', 'isolate', 'touch_root.py'): {
          'm': 488,
          'h': 'invalid',
          's': 538,
          't': 1335146921,
        },
      },
    }
    options = self._get_option(isolate_file)
    tools.write_json(options.isolated, input_data, False)

    # A CompleteState object contains two parts:
    # - The Result instance stored in complete_state.isolated, corresponding to
    #   the .isolated file, is what run_test_from_archive.py reads.
    # - The SavedState instance stored in complete_state.saved_state,
    #   corresponding to the .state file, simply aids the developer when
    #   re-running the same command multiple times and contains discardable
    #   information.
    complete_state = isolate.load_complete_state(options, self.cwd, None, False)
    actual_isolated = complete_state.saved_state.to_isolated()
    actual_saved_state = complete_state.saved_state.flatten()

    expected_isolated = {
      'algo': 'sha-1',
      'command': ['python', 'touch_root.py'],
      'files': {
        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
          'm': 488,
          'h': hash_file('tests', 'isolate', 'touch_root.py'),
          's': _size('tests', 'isolate', 'touch_root.py'),
        },
        u'isolate.py': {
          'm': 488,
          'h': hash_file('isolate.py'),
          's': _size('isolate.py'),
        },
      },
      'relative_cwd': os.path.join(u'tests', 'isolate'),
      'version': isolate.isolateserver.ISOLATED_FILE_VERSION,
    }
    self._cleanup_isolated(expected_isolated)
    self.assertEqual(expected_isolated, actual_isolated)

    expected_saved_state = {
      'OS': sys.platform,
      'algo': 'sha-1',
      'child_isolated_files': [],
      'command': ['python', 'touch_root.py'],
      'config_variables': {
        'OS': 'linux',
        'chromeos': options.config_variables['chromeos'],
      },
      'extra_variables': {
        'foo': 'bar',
      },
      'files': {
        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
          'm': 488,
          'h': hash_file('tests', 'isolate', 'touch_root.py'),
          's': _size('tests', 'isolate', 'touch_root.py'),
        },
        u'isolate.py': {
          'm': 488,
          'h': hash_file('isolate.py'),
          's': _size('isolate.py'),
        },
      },
      'isolate_file': file_path.safe_relpath(
          file_path.get_native_path_case(isolate_file),
          os.path.dirname(options.isolated)),
      'path_variables': {},
      'relative_cwd': os.path.join(u'tests', 'isolate'),
      'root_dir': file_path.get_native_path_case(ROOT_DIR),
      'version': isolate.SavedState.EXPECTED_VERSION,
    }
    self._cleanup_isolated(expected_saved_state)
    self._cleanup_saved_state(actual_saved_state)
    self.assertEqual(expected_saved_state, actual_saved_state)
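
For readers decoding the short keys in these expected dicts: 'h' is the file's content hash (sha-1 here, per the 'algo' entry), 's' its size in bytes, 'm' its POSIX mode (488 == 0o750), and 't' a modification timestamp. A hedged sketch of how such an entry could be assembled (a hypothetical helper; the real logic lives in isolated_format/isolateserver):

import hashlib
import os
import stat

def isolated_entry(path):
    # Mirrors the 'h'/'s'/'m' keys asserted in the test above.
    with open(path, 'rb') as f:
        digest = hashlib.sha1(f.read()).hexdigest()
    st = os.stat(path)
    return {'h': digest, 's': st.st_size, 'm': stat.S_IMODE(st.st_mode)}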
Code example #41
  def test_load_with_includes_with_commands(self):
    # This one is messy.
    isolate1 = {
      'conditions': [
        ['OS=="linux"', {
          'variables': {
            'command': [
              'foo', 'bar',
            ],
            'isolate_dependency_tracked': [
              'file_linux',
            ],
          },
        }, {
          'variables': {
            'isolate_dependency_tracked': [
              'file_non_linux',
            ],
          },
        }],
        ['OS=="win"', {
          'variables': {
            'command': [
              'foo', 'bar',
            ],
          },
        }],
      ],
    }
    tools.write_json(
        os.path.join(self.tempdir, 'isolate1.isolate'), isolate1, True)
    isolate2 = {
      'conditions': [
        ['OS=="linux" or OS=="mac"', {
          'variables': {
            'command': [
              'zoo',
            ],
            'isolate_dependency_tracked': [
              'other/file',
            ],
          },
        }],
      ],
    }
    tools.write_json(
        os.path.join(self.tempdir, 'isolate2.isolate'), isolate2, True)
    isolate3 = {
      'includes': ['isolate1.isolate', 'isolate2.isolate'],
      'conditions': [
        ['OS=="mac"', {
          'variables': {
            'command': [
              'yo', 'dawg',
            ],
            'isolate_dependency_tracked': [
              'file_mac',
            ],
          },
        }],
      ],
    }

    actual = isolate_format.load_isolate_as_config(self.tempdir, isolate3, None)
    expected = {
      ('linux',): {
        # Last included takes precedence.
        'command': ['zoo'],
        'isolate_dependency_tracked': ['file_linux', 'other/file'],
      },
      ('mac',): {
        # Command in isolate3 takes precedence.
        'command': ['yo', 'dawg'],
        'isolate_dependency_tracked': [
          'file_mac',
          'file_non_linux',
          'other/file',
        ],
      },
      ('win',): {
        'command': ['foo', 'bar'],
        'isolate_dependency_tracked': ['file_non_linux'],
      },
    }
    self.assertEqual(expected, actual.flatten())
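
The expected dict encodes two merge rules worth making explicit: isolate_dependency_tracked lists are unioned (and kept sorted) across includes, while 'command' is replaced wholesale, with later includes and the including file taking precedence. A toy sketch of those two rules (not isolate_format's actual merge code):

def merge_configs(*configs):
    # Later configs win for 'command'; tracked deps are unioned and sorted.
    merged = {}
    deps = set()
    for cfg in configs:
        if 'command' in cfg:
            merged['command'] = cfg['command']
        deps.update(cfg.get('isolate_dependency_tracked', []))
    merged['isolate_dependency_tracked'] = sorted(deps)
    return merged

# isolate2 is included after isolate1, so its command wins on linux.
assert merge_configs(
    {'command': ['foo', 'bar'], 'isolate_dependency_tracked': ['file_linux']},
    {'command': ['zoo'], 'isolate_dependency_tracked': ['other/file']},
) == {
    'command': ['zoo'],
    'isolate_dependency_tracked': ['file_linux', 'other/file'],
}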
Code example #42
File: swarming.py  Project: mellowdistrict/luci-py
def CMDquery(parser, args):
  """Returns raw JSON information via an URL endpoint. Use 'query-list' to
  gather the list of API methods from the server.

  Examples:
    Listing all bots:
      swarming.py query -S server-url.com bots/list

    Listing last 10 tasks on a specific bot named 'swarm1':
      swarming.py query -S server-url.com --limit 10 bot/swarm1/tasks

    Listing last 10 tasks with tags os:Ubuntu-12.04 and pool:Chrome. Note that
    quoting is important!:
      swarming.py query -S server-url.com --limit 10 \\
          'tasks/list?tags=os:Ubuntu-12.04&tags=pool:Chrome'
  """
  CHUNK_SIZE = 250

  parser.add_option(
      '-L', '--limit', type='int', default=200,
      help='Limit to enforce on limitless items (like number of tasks); '
           'default=%default')
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  parser.add_option(
      '--progress', action='store_true',
      help='Prints a dot at each request to show progress')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error(
        'Must specify only method name and optionally query args properly '
        'escaped.')
  base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
  url = base_url
  if options.limit:
    # Sanity check; adjust if this does not work out.
    merge_char = '&' if '?' in url else '?'
    url += '%slimit=%d' % (merge_char, min(CHUNK_SIZE, options.limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    print >> sys.stderr, 'Failed to access %s' % url
    return 1

  # Some endpoints support cursors. If the response contains a 'cursor'
  # item, keep fetching pages automatically.
  while (
      data.get('cursor') and
      (not options.limit or len(data['items']) < options.limit)):
    merge_char = '&' if '?' in base_url else '?'
    url = base_url + '%scursor=%s' % (merge_char, urllib.quote(data['cursor']))
    if options.limit:
      url += '&limit=%d' % min(CHUNK_SIZE, options.limit - len(data['items']))
    if options.progress:
      sys.stdout.write('.')
      sys.stdout.flush()
    new = net.url_read_json(url)
    if new is None:
      if options.progress:
        print('')
      print >> sys.stderr, 'Failed to access %s' % options.swarming
      return 1
    data['items'].extend(new.get('items', []))
    data['cursor'] = new.get('cursor')

  if options.progress:
    print('')
  if options.limit and len(data.get('items', [])) > options.limit:
    data['items'] = data['items'][:options.limit]
  data.pop('cursor', None)

  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    tools.write_json(options.json, data, True)
  else:
    try:
      tools.write_json(sys.stdout, data, False)
      sys.stdout.write('\n')
    except IOError:
      pass
  return 0
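
Compared to the variant in example #38, this version hardens the same function in two small ways: new.get('items', []) tolerates a response page without an 'items' key, and the --json output path is normalized with unicode(os.path.abspath(...)) before writing.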
Code example #43
  def test_load_with_includes(self):
    included_isolate = {
      'variables': {
        'isolate_dependency_tracked': [
          'file_common',
        ],
      },
      'conditions': [
        ['OS=="linux"', {
          'variables': {
            'isolate_dependency_tracked': [
              'file_linux',
            ],
            'read_only': 1,
          },
        }, {
          'variables': {
            'isolate_dependency_tracked': [
              'file_non_linux',
            ],
            'read_only': 0,
          },
        }],
      ],
    }
    tools.write_json(
        os.path.join(self.tempdir, 'included.isolate'), included_isolate, True)
    values = {
      'includes': ['included.isolate'],
      'variables': {
        'isolate_dependency_tracked': [
          'file_less_common',
        ],
      },
      'conditions': [
        ['OS=="mac"', {
          'variables': {
            'isolate_dependency_tracked': [
              'file_mac',
            ],
            'read_only': 2,
          },
        }],
      ],
    }
    actual = isolate_format.load_isolate_as_config(self.tempdir, values, None)

    expected = {
      ('linux',): {
        'isolate_dependency_tracked': [
          'file_common',
          'file_less_common',
          'file_linux',
        ],
        'read_only': 1,
      },
      ('mac',): {
        'isolate_dependency_tracked': [
          'file_common',
          'file_less_common',
          'file_mac',
          'file_non_linux',
        ],
        'read_only': 2,
      },
      ('win',): {
        'isolate_dependency_tracked': [
          'file_common',
          'file_less_common',
          'file_non_linux',
        ],
        'read_only': 0,
      },
    }
    self.assertEqual(expected, actual.flatten())
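
As with example #41, list variables are unioned and sorted, while scalar variables such as read_only appear to follow command-style precedence: mac ends up with 2 because the including file overrides the include's else-branch value of 0, whereas linux and win keep 1 and 0 straight from included.isolate.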