Example #1
def get_build_urls_list(bucket_path, reverse=True):
  """Returns a sorted list of build urls from a bucket path."""
  if not bucket_path:
    return []

  base_url = os.path.dirname(bucket_path)
  if environment.is_running_on_app_engine():
    build_urls = list(storage.list_blobs(base_url))
  else:
    keys_directory = environment.get_value('BUILD_URLS_DIR')
    keys_filename = '%s.list' % utils.string_hash(bucket_path)
    keys_file_path = os.path.join(keys_directory, keys_filename)

    # For a single task, the locally cached keys file should be re-used.
    # Otherwise, we waste a lot of network bandwidth repeatedly fetching the
    # same set of urls (especially for regression and progression testing).
    if not os.path.exists(keys_file_path):
      # Get url list by reading the GCS bucket.
      with open(keys_file_path, 'w') as f:
        for path in storage.list_blobs(base_url):
          f.write(path + '\n')

    content = utils.read_data_from_file(keys_file_path, eval_data=False)
    if not content:
      return []

    build_urls = content.splitlines()

  return _sort_build_urls_by_revision(build_urls, bucket_path, reverse)
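
A hedged usage sketch: the bucket path below is illustrative only (real paths come from job definitions such as RELEASE_BUILD_BUCKET_PATH), and the sort order is inferred from the reverse=True default.

build_urls = get_build_urls_list(
    'gs://example-builds/linux-release/custom-([0-9]+).zip')
if build_urls:
  # With reverse=True, the newest revision presumably sorts first.
  latest_build_url = build_urls[0]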
Example #2
def get_stacktrace(testcase, stack_attribute='crash_stacktrace'):
  """Returns the stacktrace for a test case.

  This may require a blobstore read.
  """
  result = getattr(testcase, stack_attribute)
  if not result or not result.startswith(data_types.BLOBSTORE_STACK_PREFIX):
    return result

  # On App Engine we can't write to a local file, so use blobs.read_key instead.
  if environment.is_running_on_app_engine():
    key = result[len(data_types.BLOBSTORE_STACK_PREFIX):]
    return unicode(blobs.read_key(key), errors='replace')

  key = result[len(data_types.BLOBSTORE_STACK_PREFIX):]
  tmpdir = environment.get_value('BOT_TMPDIR')
  tmp_stacktrace_file = os.path.join(tmpdir, 'stacktrace.tmp')
  blobs.read_blob_to_disk(key, tmp_stacktrace_file)

  try:
    with open(tmp_stacktrace_file) as handle:
      result = handle.read()
  except Exception:
    logs.log_error(
        'Unable to read stacktrace for testcase %d.' % testcase.key.id())
    result = ''

  shell.remove_file(tmp_stacktrace_file)
  return result
Example #3
def _check_commits(testcase, bisect_type, old_commit, new_commit):
    """Check old and new commit validity."""
    if old_commit != new_commit or build_manager.is_custom_binary():
        return old_commit, new_commit

    # Something went wrong during bisection if the same commit was chosen for
    # both the start and the end of the range.
    # Ask the bisection infrastructure to re-bisect.
    if environment.is_running_on_app_engine():
        bucket_path = data_handler.get_value_from_job_definition(
            testcase.job_type, 'RELEASE_BUILD_BUCKET_PATH')
    else:
        bucket_path = build_manager.get_primary_bucket_path()
    revision_list = build_manager.get_revisions_list(bucket_path)

    last_tested_revision = testcase.get_metadata('last_tested_crash_revision')
    known_crash_revision = last_tested_revision or testcase.crash_revision

    if bisect_type == 'fixed':
        # Narrowest range: last crashing revision up to the latest build.
        return _get_commits(
            str(known_crash_revision) + ':' + str(revision_list[-1]),
            testcase.job_type)

    if bisect_type == 'regressed':
        # Narrowest range: first build to the first crashing revision.
        return _get_commits(
            str(revision_list[0]) + ':' + str(testcase.crash_revision),
            testcase.job_type)

    raise ValueError('Invalid bisection type: ' + bisect_type)
Example #4
def read_blob_to_disk(blob_key, local_file):
    """Copy data stored in the blobstore to a local file."""
    assert not environment.is_running_on_app_engine()

    directory = os.path.dirname(local_file)
    if not os.path.exists(directory):
        os.makedirs(directory)

    gcs_path = get_gcs_path(blob_key)
    return storage.copy_file_from(gcs_path, local_file)
Example #5
    def decorator(func):
        """Decorates the given function."""
        if environment.is_running_on_app_engine():
            # multiprocessing doesn't work on App Engine.
            return func

        @functools.wraps(func)
        def _wrapper(*args, **kwargs):
            """Wrapper."""
            # FIXME: Weird exceptions during imports, possibly related to our
            # module reloading. Needs further investigation; use this as a
            # temporary fix.
            import multiprocessing.pool
            import threading

            # Fix for Python < 2.7.2.
            if not hasattr(threading.current_thread(), '_children'):
                # pylint: disable=protected-access
                threading.current_thread(
                )._children = weakref.WeakKeyDictionary()

            global THREAD_POOL
            if THREAD_POOL is None:
                THREAD_POOL = multiprocessing.pool.ThreadPool(processes=3)

            try:
                async_result = THREAD_POOL.apply_async(func,
                                                       args=args,
                                                       kwds=kwargs)
                return async_result.get(timeout=duration)
            except multiprocessing.TimeoutError:
                # Sleep for a couple of minutes to allow metrics to flush.
                time.sleep(120)

                # If we don't exit here, threads will pile up and lead to
                # out-of-memory. It is safe to just exit at this point.
                logs.log_fatal_and_exit((
                    'Exception occurred in function {0}: args: {1}, kwargs: {2}'
                    ' exception: {3}').format(func, args, kwargs,
                                              sys.exc_info()[1]))

        return _wrapper
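
Only the inner decorator is shown above; duration and THREAD_POOL come from the enclosing scope. A minimal sketch of what that enclosing factory might look like, assuming a hypothetical name timeout and module-level setup (not the project's actual code):

import functools
import multiprocessing.pool

THREAD_POOL = None  # Shared, lazily created pool, as referenced above.


def timeout(duration):
    """Hypothetical factory: returns a decorator that runs the wrapped
    function in a thread pool and raises if it exceeds `duration` seconds."""

    def decorator(func):
        @functools.wraps(func)
        def _wrapper(*args, **kwargs):
            global THREAD_POOL
            if THREAD_POOL is None:
                THREAD_POOL = multiprocessing.pool.ThreadPool(processes=3)
            async_result = THREAD_POOL.apply_async(func, args=args, kwds=kwargs)
            return async_result.get(timeout=duration)

        return _wrapper

    return decorator


# Example usage of the hypothetical factory:
@timeout(30)
def flush_metrics():
    pass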
Example #6
  def _chunk_size(self):
    if environment.is_running_on_app_engine():
      # To match App Engine URLFetch's request size limit.
      return 10 * 1024 * 1024  # 10 MiB.

    return None
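
For context, a hedged illustration of how such a chunk size might be handed to the google-cloud-storage client; the helper below is hypothetical, as the actual call site is not part of this example.

from google.cloud import storage as gcs


def upload_with_chunking(bucket_name, blob_name, local_path, chunk_size=None):
  # Hypothetical helper: chunk_size=None lets the client choose its default,
  # while the 10 MiB value above keeps each request within URLFetch's limit.
  client = gcs.Client()
  blob = client.bucket(bucket_name).blob(blob_name, chunk_size=chunk_size)
  blob.upload_from_filename(local_path)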
Example #7
File: ndb.py  Project: zzdxxd/clusterfuzz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NDB importer."""

from system import environment
if (not environment.is_running_on_app_engine()
        and not environment.get_value('PY_UNITTESTS')):
    # Override the default google-cloud-datastore retry parameters to include
    # INTERNAL errors.
    # See https://github.com/googleapis/google-cloud-python/issues/6119.
    from google.cloud.datastore_v1.gapic import datastore_client_config
    datastore_config = datastore_client_config.config['interfaces'][
        'google.datastore.v1.Datastore']
    retry_codes = datastore_config['retry_codes']['idempotent']
    if 'INTERNAL' not in retry_codes:
        retry_codes.append('INTERNAL')

    import ndb_patcher
    ndb_patcher.patch_ndb()

from google.appengine.ext.ndb import *  # pylint:disable=wildcard-import
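
A hedged usage sketch: because of the wildcard import above, this module re-exports the ndb API, so model definitions look like plain ndb code. The import path below is an assumption about the project layout.

import ndb  # Hypothetical import path; adjust to the project's package layout.


class Example(ndb.Model):
  # Standard google.appengine.ext.ndb properties, re-exported by the wildcard
  # import above.
  name = ndb.StringProperty()
  created = ndb.DateTimeProperty(auto_now_add=True)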