Exemplo n.º 1
0
def upload_hash_content_to_blobstore(generate_upload_url, data, hash_key, content):
    """Pushes the given content directly into the blobstore.

    A fresh upload url is requested from the server first, then the content is
    POSTed to it as multipart form data, retrying the whole sequence on 50x.

    Arguments:
      generate_upload_url: The url to get the new upload url from.
      data: extra POST data.
      hash_key: sha1 of the uncompressed version of content.
      content: The contents to upload. Must fit in memory for now.
    """
    logging.debug("Generating url to directly upload file to blobstore")
    assert isinstance(hash_key, str), hash_key
    assert isinstance(content, str), (hash_key, content)
    # TODO(maruel): Support large files. This would require streaming support.
    content_type, body = encode_multipart_formdata(data, [("content", hash_key, content)])
    max_attempts = run_isolated.URL_OPEN_MAX_ATTEMPTS
    for attempt_idx in xrange(max_attempts):
        # This request retries HTTP 50x internally.
        url_response = run_isolated.url_open(generate_upload_url, data=data)
        if not url_response:
            raise run_isolated.MappingError("Unable to connect to server %s" % generate_upload_url)
        upload_url = url_response.read()

        # Uploading "consumes" the url, so a 50x here must not be retried;
        # the outer loop regenerates a fresh upload url instead.
        upload_response = run_isolated.url_open(upload_url, data=body, content_type=content_type, retry_50x=False)
        if upload_response:
            return upload_response.read()
        if attempt_idx < max_attempts - 1:
            run_isolated.HttpService.sleep_before_retry(attempt_idx, None)
    raise run_isolated.MappingError("Unable to connect to server %s" % generate_upload_url)
Exemplo n.º 2
0
def upload_hash_content_to_blobstore(generate_upload_url, data, hash_key,
                                     content):
    """Sends the given content straight to the blobstore.

    Asks the server for a one-shot upload url, then POSTs the content to it as
    multipart form data, repeating the whole sequence on upload failure.

    Arguments:
      generate_upload_url: The url to get the new upload url from.
      data: extra POST data.
      hash_key: sha1 of the uncompressed version of content.
      content: The contents to upload. Must fit in memory for now.
    """
    logging.debug('Generating url to directly upload file to blobstore')
    assert isinstance(hash_key, str), hash_key
    assert isinstance(content, str), (hash_key, content)
    # TODO(maruel): Support large files. This would require streaming support.
    content_type, body = encode_multipart_formdata(
        data, [('content', hash_key, content)])
    for _ in range(run_isolated.MAX_URL_OPEN_ATTEMPTS):
        # This request retries HTTP 50x internally.
        generated = run_isolated.url_open(generate_upload_url, data=data)
        if not generated:
            raise run_isolated.MappingError('Unable to connect to server %s' %
                                            generate_upload_url)
        upload_url = generated.read()

        # The upload url is single-use ("consumed" by the upload), so never
        # retry a 50x here; the loop regenerates a fresh url instead.
        upload_result = run_isolated.url_open(upload_url,
                                              data=body,
                                              content_type=content_type,
                                              retry_50x=False)
        if upload_result:
            return upload_result.read()
    raise run_isolated.MappingError('Unable to connect to server %s' %
                                    generate_upload_url)
Exemplo n.º 3
0
def ProcessManifest(file_sha1, test_name, shards, test_filter, options):
  """Process the manifest file and send off the swarm test request.

  Returns 0 on success, 1 when zipping fails or the swarm request is rejected.
  """
  manifest = Manifest(file_sha1, test_name, shards, test_filter, options)

  # Zip up relevent files
  print "Zipping up files..."
  if not manifest.zip():
    return 1

  # Send test requests off to swarm.
  print 'Sending test requests to swarm'
  test_url = options.swarm_url.rstrip('/') + '/test'
  manifest_text = manifest.to_json()
  result = run_isolated.url_open(test_url, data={'request': manifest_text})
  if not result:
    print >> sys.stderr, 'Failed to send test for %s\n%s' % (
        test_name, test_url)
    return 1
  try:
    # The server is expected to answer with valid JSON; anything else means
    # the request was not accepted.
    json.load(result)
  except (ValueError, TypeError) as e:
    print >> sys.stderr, 'Failed to send test for %s' % test_name
    print >> sys.stderr, 'Manifest: %s' % manifest_text
    print >> sys.stderr, str(e)
    return 1
  return 0
Exemplo n.º 4
0
def ProcessManifest(file_sha1, test_name, shards, test_filter, options):
    """Process the manifest file and send off the swarm test request.

    Returns 0 on success, 1 when the manifest is invalid, zipping fails, or
    the swarm request is rejected.
    """
    try:
        manifest = Manifest(file_sha1, test_name, shards, test_filter, options)
    except ValueError as e:
        print >> sys.stderr, 'Unable to process %s: %s' % (test_name, e)
        return 1

    # Zip up relevent files
    print "Zipping up files..."
    if not manifest.zip():
        return 1

    # Send test requests off to swarm.
    print('Sending test requests to swarm.')
    print('Server: %s' % options.swarm_url)
    print('Job name: %s' % test_name)
    test_url = options.swarm_url.rstrip('/') + '/test'
    manifest_text = manifest.to_json()
    result = run_isolated.url_open(test_url, data={'request': manifest_text})
    if not result:
        print >> sys.stderr, 'Failed to send test for %s\n%s' % (test_name,
                                                                 test_url)
        return 1
    try:
        # The server is expected to answer with valid JSON; anything else
        # means the request was not accepted.
        json.load(result)
    except (ValueError, TypeError) as e:
        print >> sys.stderr, 'Failed to send test for %s' % test_name
        print >> sys.stderr, 'Manifest: %s' % manifest_text
        print >> sys.stderr, str(e)
        return 1
    return 0
Exemplo n.º 5
0
def retrieve_results(base_url, test_key, timeout, should_stop):
    """Polls the swarm server until results for a single test_key show up.

    Returns the decoded result dict once it has output, or {} on timeout or
    when should_stop is set.
    """
    assert isinstance(timeout, float)
    result_url = '%s/get_result?%s' % (base_url,
                                       urllib.urlencode([('r', test_key)]))
    start = now()
    while True:
        if timeout and (now() - start) >= timeout:
            logging.warning('retrieve_results(%s) timed out', base_url)
            return {}
        # Do retries ourselve.
        poll_response = run_isolated.url_open(result_url,
                                              retry_404=False,
                                              retry_50x=False)
        if poll_response is not None:
            try:
                data = json.load(poll_response) or {}
            except (ValueError, TypeError):
                logging.warning(
                    'Received corrupted data for test_key %s. Retrying.',
                    test_key)
            else:
                if data['output']:
                    return data
        else:
            # Aggressively poll for results. Do not use retry_404 so
            # should_stop is polled more often.
            wait_for = min(5, timeout - (now() - start)) if timeout else 5
            if wait_for > 0:
                run_isolated.HttpService.sleep_before_retry(1, wait_for)
        if should_stop.get():
            return {}
Exemplo n.º 6
0
def get_test_keys(swarm_base_url, test_name, timeout):
  """Returns the Swarm test key for each shards of test_name.

  Polls the server up to URL_OPEN_MAX_ATTEMPTS times, sleeping between
  attempts, and raises Failure when the server is unreachable or no matching
  test is ever found.

  Arguments:
    swarm_base_url: base url of the swarm server.
    test_name: name of the test whose shard keys are requested.
    timeout: per-request timeout, in seconds.
  """
  assert isinstance(timeout, float)
  key_data = urllib.urlencode([('name', test_name)])
  url = '%s/get_matching_test_cases?%s' % (swarm_base_url, key_data)

  for i in range(run_isolated.URL_OPEN_MAX_ATTEMPTS):
    response = run_isolated.url_open(url, retry_404=True, timeout=timeout)
    if response is None:
      raise Failure(
          'Error: Unable to find any tests with the name, %s, on swarm server'
          % test_name)

    result = response.read()
    # TODO(maruel): Compare exact string.
    if 'No matching' in result:
      # Lazy % args: only formatted when the record is actually emitted.
      logging.warning(
          'Unable to find any tests with the name, %s, on swarm server',
          test_name)
      # Bug fix: i is 0-based, so the last attempt is URL_OPEN_MAX_ATTEMPTS - 1.
      # The previous `i != URL_OPEN_MAX_ATTEMPTS` was always true and slept
      # pointlessly after the final attempt too.
      if i != run_isolated.URL_OPEN_MAX_ATTEMPTS - 1:
        run_isolated.HttpService.sleep_before_retry(i, None)
      continue
    return json.loads(result)

  raise Failure(
      'Error: Unable to find any tests with the name, %s, on swarm server'
      % test_name)
Exemplo n.º 7
0
def url_open(url, **kwargs):
    """Thin wrapper over run_isolated.url_open that raises instead of
    returning a falsy response."""
    result = run_isolated.url_open(url, **kwargs)
    if result:
        return result
    # No response at all: treat the server as down rather than limping along.
    raise run_isolated.MappingError("Unable to connect to server %s" % url)
Exemplo n.º 8
0
def get_test_keys(swarm_base_url, test_name, timeout):
    """Returns the Swarm test key for each shards of test_name.

    Polls the server up to URL_OPEN_MAX_ATTEMPTS times, sleeping between
    attempts, and raises Failure when the server is unreachable or no matching
    test is ever found.

    Arguments:
      swarm_base_url: base url of the swarm server.
      test_name: name of the test whose shard keys are requested.
      timeout: per-request timeout, in seconds.
    """
    assert isinstance(timeout, float)
    key_data = urllib.urlencode([('name', test_name)])
    url = '%s/get_matching_test_cases?%s' % (swarm_base_url, key_data)

    for i in range(run_isolated.URL_OPEN_MAX_ATTEMPTS):
        response = run_isolated.url_open(url, retry_404=True, timeout=timeout)
        if response is None:
            raise Failure(
                'Error: Unable to find any tests with the name, %s, on swarm server'
                % test_name)

        result = response.read()
        # TODO(maruel): Compare exact string.
        if 'No matching' in result:
            # Lazy % args: only formatted when the record is actually emitted.
            logging.warning(
                'Unable to find any tests with the name, %s, on swarm server',
                test_name)
            # Bug fix: i is 0-based, so the last attempt is
            # URL_OPEN_MAX_ATTEMPTS - 1. The previous
            # `i != URL_OPEN_MAX_ATTEMPTS` was always true and slept
            # pointlessly after the final attempt too.
            if i != run_isolated.URL_OPEN_MAX_ATTEMPTS - 1:
                run_isolated.HttpService.sleep_before_retry(i, None)
            continue
        return json.loads(result)

    raise Failure(
        'Error: Unable to find any tests with the name, %s, on swarm server' %
        test_name)
Exemplo n.º 9
0
def retrieve_results(base_url, test_key, timeout, should_stop):
  """Polls the swarm server for the result of a single test_key.

  Returns the decoded result dict once it has output, or {} on timeout or
  when should_stop is set.
  """
  assert isinstance(timeout, float)
  result_url = '%s/get_result?%s' % (
      base_url, urllib.urlencode([('r', test_key)]))
  start = now()
  while True:
    if timeout and (now() - start) >= timeout:
      logging.warning('retrieve_results(%s) timed out', base_url)
      return {}
    # Do retries ourselve.
    response = run_isolated.url_open(
        result_url, retry_404=False, retry_50x=False)
    if response is not None:
      try:
        data = json.load(response) or {}
      except (ValueError, TypeError):
        logging.warning(
            'Received corrupted data for test_key %s. Retrying.', test_key)
      else:
        if data['output']:
          return data
    else:
      # Aggressively poll for results. Do not use retry_404 so
      # should_stop is polled more often.
      wait_for = min(5, timeout - (now() - start)) if timeout else 5
      if wait_for > 0:
        run_isolated.HttpService.sleep_before_retry(1, wait_for)
    if should_stop.get():
      return {}
Exemplo n.º 10
0
def url_open(url, **kwargs):
    """Wraps run_isolated.url_open, converting a missing response into a
    MappingError so callers never see None."""
    result = run_isolated.url_open(url, **kwargs)
    if not result:
        # The server gave us nothing; assume it is down and fail loudly.
        raise run_isolated.MappingError('Unable to connect to server %s' % url)
    return result
  def zip_and_upload(self):
    """Zips up all the files necessary to run a shard and uploads to Swarming
    master.

    Returns True on success (including when the zip is already on the server),
    False when the presence check or the upload fails.
    """
    assert not self._zip_file_hash
    start_time = time.time()

    # Build the archive entirely in memory; the file set is assumed to fit.
    zip_memory_file = StringIO.StringIO()
    zip_file = zipfile.ZipFile(zip_memory_file, 'w')

    for source, relpath in self._files.iteritems():
      zip_file.write(source, relpath)

    zip_file.close()
    print 'Zipping completed, time elapsed: %f' % (time.time() - start_time)

    zip_memory_file.flush()
    zip_contents = zip_memory_file.getvalue()
    zip_memory_file.close()

    self._zip_file_hash = hashlib.sha1(zip_contents).hexdigest()

    # Ask the server whether it already has a blob with this hash.
    response = run_isolated.url_open(
        self._data_server_has + '?token=%s' % self._token(),
        data=self._zip_file_hash,
        content_type='application/octet-stream')
    if response is None:
      print >> sys.stderr, (
          'Unable to query server for zip file presence, aborting.')
      return False

    # The server answers with a single byte: chr(1) means "already stored".
    if response.read(1) == chr(1):
      print 'Zip file already on server, no need to reupload.'
      return True

    print 'Zip file not on server, starting uploading.'

    url = '%s%s?priority=0&token=%s' % (
        self._data_server_storage, self._zip_file_hash, self._token())
    response = run_isolated.url_open(
        url, data=zip_contents, content_type='application/octet-stream')
    if response is None:
      print >> sys.stderr, 'Failed to upload the zip file: %s' % url
      return False

    return True
 def _token(self):
   """Returns the upload token, fetching and caching it on first use."""
   if self._token_cache:
     return self._token_cache
   result = run_isolated.url_open(self._data_server_get_token)
   if not result:
     # TODO(maruel): Implement authentication.
     raise Failure('Failed to get token, need authentication')
   # Quote immediately so building urls with the token stays simple.
   self._token_cache = urllib.quote(result.read())
   return self._token_cache
Exemplo n.º 13
0
 def token(self):
   """Returns the server token, fetched lazily and cached for reuse."""
   if self._token:
     return self._token
   result = run_isolated.url_open(self.base_url + '/content/get_token')
   if not result:
     # TODO(maruel): Implement authentication.
     raise Failure('Failed to get token, need authentication')
   # Quote immediately so building urls with the token stays simple.
   self._token = urllib.quote(result.read())
   return self._token
Exemplo n.º 14
0
    def zip(self):
        """Zip up all the files necessary to run a shard.

        Returns True on success (including when the zip is already on the
        server), False when the presence check or the upload fails.
        """
        start_time = time.time()

        # Build the archive entirely in memory; only two small scripts go in.
        zip_memory_file = StringIO.StringIO()
        zip_file = zipfile.ZipFile(zip_memory_file, 'w')

        zip_file.write(RUN_TEST_PATH, RUN_TEST_NAME)
        zip_file.write(CLEANUP_SCRIPT_PATH, CLEANUP_SCRIPT_NAME)

        zip_file.close()
        print 'Zipping completed, time elapsed: %f' % (time.time() -
                                                       start_time)

        zip_memory_file.flush()
        zip_contents = zip_memory_file.getvalue()
        zip_memory_file.close()

        self.zip_file_hash = hashlib.sha1(zip_contents).hexdigest()

        # Ask the server whether it already has a blob with this hash.
        response = run_isolated.url_open(
            self.data_server_has + '?token=%s' % self.token(),
            data=self.zip_file_hash,
            content_type='application/octet-stream')
        if response is None:
            print >> sys.stderr, (
                'Unable to query server for zip file presence, aborting.')
            return False

        # The server answers with a single byte: chr(1) means already stored.
        if response.read(1) == chr(1):
            print 'Zip file already on server, no need to reupload.'
            return True

        print 'Zip file not on server, starting uploading.'

        url = '%s%s?priority=0&token=%s' % (self.data_server_storage,
                                            self.zip_file_hash, self.token())
        response = run_isolated.url_open(
            url, data=zip_contents, content_type='application/octet-stream')
        if response is None:
            print >> sys.stderr, 'Failed to upload the zip file: %s' % url
            return False

        return True
Exemplo n.º 15
0
  def zip(self):
    """Zip up all the files necessary to run a shard.

    Returns True on success (including when the zip is already on the server),
    False when the presence check or the upload fails.
    """
    start_time = time.time()

    # Build the archive entirely in memory; only two small scripts go in.
    zip_memory_file = StringIO.StringIO()
    zip_file = zipfile.ZipFile(zip_memory_file, 'w')

    zip_file.write(RUN_TEST_PATH, RUN_TEST_NAME)
    zip_file.write(CLEANUP_SCRIPT_PATH, CLEANUP_SCRIPT_NAME)

    zip_file.close()
    print 'Zipping completed, time elapsed: %f' % (time.time() - start_time)

    zip_memory_file.flush()
    zip_contents = zip_memory_file.getvalue()
    zip_memory_file.close()

    self.zip_file_hash = hashlib.sha1(zip_contents).hexdigest()

    # Ask the server whether it already has a blob with this hash.
    response = run_isolated.url_open(
        self.data_server_has + '?token=%s' % self.token(),
        data=self.zip_file_hash,
        content_type='application/octet-stream')
    if response is None:
      print >> sys.stderr, (
          'Unable to query server for zip file presence, aborting.')
      return False

    # The server answers with a single byte: chr(1) means already stored.
    if response.read(1) == chr(1):
      print 'Zip file already on server, no need to reupload.'
      return True

    print 'Zip file not on server, starting uploading.'

    url = '%s%s?priority=0&token=%s' % (
        self.data_server_storage, self.zip_file_hash, self.token())
    response = run_isolated.url_open(
        url, data=zip_contents, content_type='application/octet-stream')
    if response is None:
      print >> sys.stderr, 'Failed to upload the zip file: %s' % url
      return False

    return True
Exemplo n.º 16
0
def get_test_keys(swarm_base_url, test_name):
  """Fetches the swarm test keys registered under test_name.

  Returns an empty list when the server is unreachable or no test matches.
  """
  query = urllib.urlencode([('name', test_name)])
  test_keys_url = '%s/get_matching_test_cases?%s' % (swarm_base_url, query)
  result = run_isolated.url_open(test_keys_url)
  if result is None:
    return []

  result_str = result.read()
  if 'No matching' not in result_str:
    # TODO(csharp): return in a proper format (like json)
    return result_str.split()

  print ('Error: Unable to find any tests with the name, %s, on swarm server'
         % test_name)
  return []
Exemplo n.º 17
0
def get_test_keys(swarm_base_url, test_name):
    """Looks up the swarm test keys matching test_name.

    Returns a (possibly empty) list of key strings.
    """
    query = urllib.urlencode([('name', test_name)])
    test_keys_url = '%s/get_matching_test_cases?%s' % (swarm_base_url, query)
    result = run_isolated.url_open(test_keys_url)
    if result is None:
        return []

    result_str = result.read()
    if 'No matching' not in result_str:
        # TODO(csharp): return in a proper format (like json)
        return result_str.split()

    print(
        'Error: Unable to find any tests with the name, %s, on swarm server'
        % test_name)
    return []
Exemplo n.º 18
0
def swarm_get_results(swarm_base_url, test_keys, wait):
    """Retrieves the given swarm test results from the swarm server and print it
  to stdout.

    Arguments:
      swarm_base_url: base url of the swarm server.
      test_keys: iterable of test keys to fetch results for.
      wait: when True, keep polling each key until its output is available.

    Returns the list of result dicts that had non-empty 'output'.
    """
    import time  # Local import: only needed for the retry backoff below.

    outputs = []
    for test in test_keys:
        result_url = '%s/get_result?r=%s' % (swarm_base_url, test)
        while True:
            result = run_isolated.url_open(result_url)
            if result is None:
                # Bug fix: the old bare `continue` here busy-looped with zero
                # backoff, hammering the server forever even when wait was
                # False. Respect wait, and back off briefly before retrying.
                if not wait:
                    break
                time.sleep(0.5)
                continue
            data = json.load(result)
            if data['output']:
                outputs.append(data)
                break
            if not wait:
                break
    return outputs
Exemplo n.º 19
0
def swarm_get_results(swarm_base_url, test_keys, wait):
  """Retrieves the given swarm test results from the swarm server and print it
  to stdout.

  Returns the list of result dicts that had non-empty 'output'.
  """
  outputs = []
  for test_key in test_keys:
    result_url = '%s/get_result?r=%s' % (swarm_base_url, test_key)
    while True:
      result = run_isolated.url_open(result_url)
      if result is None:
        # NOTE(review): this retries immediately with no backoff and ignores
        # `wait`; an unreachable server makes this loop spin forever.
        continue
      data = json.load(result)
      if data['output']:
        outputs.append(data)
        break
      if not wait:
        break
  return outputs
def process_manifest(
    file_sha1, test_name, shards, test_filter, os_image, working_dir,
    data_server, swarm_url, verbose, profile, priority):
  """Process the manifest file and send off the swarm test request.

  Returns 0 on success, 1 when the manifest is invalid, zipping/uploading
  fails, or the swarm request is rejected.
  """
  try:
    manifest = Manifest(
        file_sha1, test_name, shards, test_filter, os_image, working_dir,
        data_server, verbose, profile, priority)
  except ValueError as e:
    print >> sys.stderr, 'Unable to process %s: %s' % (test_name, e)
    return 1

  chromium_setup(manifest)

  # Zip up relevent files
  print "Zipping up files..."
  if not manifest.zip_and_upload():
    return 1

  # Send test requests off to swarm.
  print('Sending test requests to swarm.')
  print('Server: %s' % swarm_url)
  print('Job name: %s' % test_name)
  test_url = swarm_url.rstrip('/') + '/test'
  manifest_text = manifest.to_json()
  result = run_isolated.url_open(test_url, data={'request': manifest_text})
  if not result:
    print >> sys.stderr, 'Failed to send test for %s\n%s' % (
        test_name, test_url)
    return 1
  try:
    # The server is expected to answer with valid JSON; anything else means
    # the request was not accepted.
    json.load(result)
  except (ValueError, TypeError) as e:
    print >> sys.stderr, 'Failed to send test for %s' % test_name
    print >> sys.stderr, 'Manifest: %s' % manifest_text
    print >> sys.stderr, str(e)
    return 1
  return 0
 def call(self, mode, sleep_duration, **kwargs):
   """Hits the test server's /<mode>/<sleep_duration> endpoint, capping the
   underlying url_open at two attempts."""
   kwargs['max_attempts'] = 2
   target = self.server.url + '/%s/%f' % (mode, sleep_duration)
   return run_isolated.url_open(target, **kwargs)