Example #1
 def testTimeoutAndDelays(self):
   """Test retry scenario in which the RetryPolicy timeout is exceeded."""
   retry_policy = retry.RetryPolicy(timeout=.6, min_delay=.05, max_delay=.2, check_result=lambda res, err: True)
   retry.CallWithRetryAsync(retry_policy, self._AsyncFunc, callback=self._OnCompleted)
   self.wait()
   self.assertLogMatches('Retrying.*Retrying.*Retrying',
                         'Expected at least 3 retries in the log')
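These tests call helper methods defined elsewhere in the test class. A minimal sketch of what _AsyncFunc and _OnCompleted might look like, assuming a tornado AsyncTestCase (only the names come from the examples; the bodies are assumptions):

from tornado.testing import AsyncTestCase

class _RetryTestHelpers(AsyncTestCase):
  """Hypothetical helpers assumed by the retry examples (a sketch only)."""

  def _AsyncFunc(self, callback):
    # Simulate an asynchronous call: invoke the callback from the IOLoop
    # with a (result, error) pair, the shape that check_result expects.
    self.io_loop.add_callback(lambda: callback('result', None))

  def _OnCompleted(self, result, error):
    # Final completion callback: stop the IOLoop so self.wait() returns.
    self.stop()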
Example #2
 def testMaxTries(self):
   """Test retry scenario in which the RetryPolicy max_tries is exceeded."""
   retry_policy = retry.RetryPolicy(max_tries=10, check_result=lambda res, err: True)
   retry.CallWithRetryAsync(retry_policy, self._AsyncFunc, callback=self._OnCompleted)
   self.wait()
   self.assertLogMatches('Retrying.*Retrying.*Retrying.*Retrying.*Retrying.*Retrying.*Retrying.*Retrying.*Retrying',
                         'Expected 9 retries in the log')
Example #3
 def testWithStackContext2(self):
   """Ensure Retry doesn't interfere with asynchronous function that throws immediately."""
   try:
     with stack_context.ExceptionStackContext(self._OnError):
       retry.CallWithRetryAsync(retry.RetryPolicy(), self._AsyncFuncRaisesError,
                                callback=self._OnCompleted)
     self.assert_(False, 'Expected exception to be raised')
   except:
     self.wait()
Example #4
  def testRetryWithException2(self):
    """Retry on exceptions raised by async function after stack transfer."""
    def CallAfterStackTransfer(dict, callback):
      func = functools.partial(self._AsyncFuncRaisesErrorOnce, dict, callback)
      self.io_loop.add_callback(func)

    retry_policy = retry.RetryPolicy(max_tries=3, check_exception=lambda typ, val, tb: True)
    retry.CallWithRetryAsync(retry_policy, CallAfterStackTransfer, dict(), callback=self.stop)
    self.wait()
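_AsyncFuncRaisesErrorOnce is not shown in the excerpt; a hedged sketch of a raise-once helper that matches how it is called above (the dict() argument is used as mutable state; the body is an assumption):

from tornado.testing import AsyncTestCase

class _RaisesOnceHelper(AsyncTestCase):
  """Sketch of a raise-once helper consistent with the call sites above."""

  def _AsyncFuncRaisesErrorOnce(self, state, callback):
    # 'state' is the dict() threaded through CallWithRetryAsync; it records
    # whether the simulated failure has already happened.
    if not state.get('raised'):
      state['raised'] = True
      raise Exception('_AsyncFuncRaisesErrorOnce: failing on first attempt')
    # Subsequent attempts complete normally so the retry loop can finish.
    callback()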
Example #5
  def testRetryPolicyApi(self):
    """Test RetryPolicy __init__ API."""
    self.assertRaises(OverflowError, functools.partial(retry.RetryPolicy, timeout=1234123412341234))

    retry.RetryPolicy(timeout=timedelta(milliseconds=500))
    self.assertEqual(retry.RetryPolicy(timeout=10).timeout.total_seconds(), 10)
    self.assertEqual(retry.RetryPolicy(timeout=-1.5).timeout.total_seconds(), -1.5)

    retry.RetryPolicy(min_delay=timedelta(days=500))
    self.assertEqual(retry.RetryPolicy(min_delay=10).min_delay.total_seconds(), 10)
    self.assertEqual(retry.RetryPolicy(min_delay=-1.5).min_delay.total_seconds(), -1.5)

    retry.RetryPolicy(max_delay=timedelta(hours=500))
    self.assertEqual(retry.RetryPolicy(max_delay=10).max_delay.total_seconds(), 10)
    self.assertEqual(retry.RetryPolicy(max_delay=-1.5).max_delay.total_seconds(), -1.5)
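The assertions above suggest that numeric timeout and delay arguments are interpreted as seconds and stored as timedelta values. Under that reading, the two policies below (a sketch, not part of the original tests) describe the same timing behavior:

from datetime import timedelta
from viewfinder.backend.base import retry

# Assuming numeric arguments are seconds, as the total_seconds() assertions
# above imply, these two policies are equivalent.
policy_a = retry.RetryPolicy(timeout=10, min_delay=0.5, max_delay=5)
policy_b = retry.RetryPolicy(timeout=timedelta(seconds=10),
                             min_delay=timedelta(milliseconds=500),
                             max_delay=timedelta(seconds=5))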
Example #6
 def testWithStackContext3(self):
   """Ensure Retry doesn't interfere with asynchronous callback that throws."""
   try:
     with stack_context.ExceptionStackContext(self._OnError):
       retry.CallWithRetryAsync(retry.RetryPolicy(check_exception=lambda typ, val, tb: True), self._AsyncFunc,
                                callback=self._OnCompletedRaisesError)
     self.wait()
     self.assert_(False, 'Expected exception to be raised')
   except Exception as e:
     self.assert_('_OnCompletedRaisesError' in e.message, e)
Example #7
  def testWithStackContext1(self):
    """Ensure Retry preserves StackContext."""
    self.__in_context = False

    @contextlib.contextmanager
    def _MyContext():
      try:
        self.__in_context = True
        yield
      finally:
        self.__in_context = False

    def _OnCompletedCheckContext(result, error):
      self.assertTrue(self.__in_context)
      self.stop()

    with stack_context.StackContext(_MyContext):
      retry_policy = retry.RetryPolicy(max_tries=2, check_result=lambda res, err: err)
      retry.CallWithRetryAsync(retry_policy, self._AsyncFuncFailOnce, callback=_OnCompletedCheckContext)
    self.wait()
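_AsyncFuncFailOnce is also defined outside the excerpt. Given check_result=lambda res, err: err, it presumably reports an error through the (result, error) callback on the first call and succeeds afterwards; a sketch under that assumption:

from tornado.testing import AsyncTestCase

class _FailOnceHelper(AsyncTestCase):
  """Sketch of a fail-once helper consistent with the call sites above."""

  def setUp(self):
    super(_FailOnceHelper, self).setUp()
    self._failed_once = False

  def _AsyncFuncFailOnce(self, callback):
    # The first call passes an error so check_result triggers one retry;
    # later calls pass (result, None) and the retry loop completes.
    error = None if self._failed_once else Exception('failing first attempt')
    self._failed_once = True
    self.io_loop.add_callback(lambda: callback('result', error))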
Example #8
"""

__author__ = '[email protected] (Marc Berhault)'

import logging
import os
import tempfile

from tornado import gen
from viewfinder.backend.base import retry
from viewfinder.backend.storage import file_object_store, s3_object_store

# Retry policy for uploading files to S3 (merge logs and registry).
kS3UploadRetryPolicy = retry.RetryPolicy(max_tries=5, timeout=300,
                                         min_delay=1, max_delay=30,
                                         check_exception=retry.RetryPolicy.AlwaysRetryOnException)
class LocalLogMerge(object):
  """Class used to build a single merged log file locally."""

  def __init__(self, logs_store, id_list, s3_base):
    self._logs_store = logs_store
    self._s3_filename = os.path.join(s3_base, *id_list)
    fd, self._working_filename = tempfile.mkstemp(suffix='.' + '.'.join(id_list))
    self._output = os.fdopen(fd, 'w')
    self._buffer = []
    self._needs_separator = False

  @gen.engine
  def FetchExistingFromS3(self, callback):
    """If S3 already has a file for this day/instance, fetch it and write its contents to the
Example #9
 def testWithBarrier(self):
   """Ensure Retry doesn't interfere with barriers."""
   retry_policy = retry.RetryPolicy(max_tries=2, check_result=lambda res, err: err)
   with util.MonoBarrier(self._OnCompleted) as b:
     retry.CallWithRetryAsync(retry_policy, self._AsyncFuncFailOnce, callback=b.Callback())
   self.wait()
Example #10
 def CallWithRetry():
   retry_policy = retry.RetryPolicy(max_tries=3, check_exception=lambda typ, val, tb: True)
   retry.CallWithRetryAsync(retry_policy, self._AsyncFuncRaisesErrorOnce, dict(), callback=self.stop)
Example #11
class Version(object):
    """The version base class. Provides a guarantee that rank order is
    correct (that careless code updating doesn't confuse the ordering of
    versions).

    When authoring a version migrator, do not assume that the version
    is correct. There are cases where it can "get behind". So, for
    example, while the version on the row = 5, the actual row data
    corresponds to version 8. However, the reverse *cannot* happen;
    if the version = 8, then the actual row data cannot correspond
    to version 5. So if a row version shows the row is at the latest
    version, then there is no need to migrate.
    """
    _DELETED_MIGRATOR_COUNT = 16
    """If older migrators are deleted, update this."""

    _version_classes = set()
    _rank_ordering = []
    _mutate_items = True
    _allow_s3_queries = True

    _migrate_retry_policy = retry.RetryPolicy(
        max_tries=5, min_delay=1, check_exception=lambda type, value, tb: True)
    """Retry migration of an item up to 5 times before giving up."""
    def __init__(self):
        self.rank = len(
            Version._rank_ordering) + 1 + Version._DELETED_MIGRATOR_COUNT
        if Version._rank_ordering:
            assert self.rank > Version._rank_ordering[-1], \
                'rank is out of order (! %d > %d)' % (self.rank, Version._rank_ordering[-1])
        assert self.__class__ not in Version._version_classes, \
            'class %s has already been added to version set' % self.__class__.__name__
        Version._rank_ordering.append(self.rank)
        Version._version_classes.add(self.__class__)

    @classmethod
    def SetMutateItems(cls, mutate):
        """Set to affect mutations on the database. If False, the planned
    modifications to each item are verbosely logged but not persisted.
    """
        Version._mutate_items = mutate

    @classmethod
    def SetAllowS3Queries(cls, allow):
        """Allow S3 queries. If False, upgrades that involve querying S3 will
    skip it, but may perform other work.
    eg: CreateMD5Hashes and FillFileSizes both use S3 queries as a fallback
    when the desired fields are not found the Photo.client_data.
    """
        Version._allow_s3_queries = allow

    @classmethod
    def GetCurrentVersion(cls):
        """Returns the maximum version. New objects have item._version set
    to this value.
    """
        if Version._rank_ordering:
            return Version._rank_ordering[-1]
        else:
            return 0

    @classmethod
    def MaybeMigrate(cls, client, original_item, versions, callback):
        """Migrates the data in one table row ('item') by advancing
    'item's version via successive data migrations. If 'item' does not
    have a version yet, all data migrations are applied. If item's
    version is current, does nothing. Return the migrated object if
    mutations are enabled, or the original object if not. Take care
    if the migration changes the primary key; the caller might fetch
    an object using one primary key, but get back an object with a
    different migrated primary key!
    """
        def _Migrate(start_rank, mutate_item):
            last_rank = 0
            for version in versions:
                if version.rank < start_rank:
                    last_rank = version.rank
                    continue
                assert version.rank > last_rank, \
                    'tags listed out of order (! %d > %d)' % (version.rank, last_rank)
                last_rank = version.rank
                item_version = mutate_item._version or 0
                if item_version < version.rank:
                    logging.debug('upgrading item from %s to version %s' %
                                  (type(mutate_item)._table.name,
                                   version.__class__.__name__))
                    mutate_item._version = version.rank

                    # If Transform fails, retry several times before giving up.
                    transform_callback = partial(_Migrate, last_rank + 1)
                    retry.CallWithRetryAsync(Version._migrate_retry_policy,
                                             version.Transform,
                                             client,
                                             mutate_item,
                                             callback=transform_callback)
                    return

            callback(mutate_item if Version._mutate_items else original_item)

        _Migrate(
            0,
            original_item if Version._mutate_items else original_item._Clone())

    def _LogUpdate(self, item):
        """Log the changes to the object."""
        mods = [
            '%s => %r' % (n, getattr(item, n)) for n in item.GetColNames()
            if item._IsModified(n)
        ]
        if mods:
            logging.info(
                '%s (%r): %s' %
                (type(item)._table.name, item.GetKey(), ', '.join(mods)))

    def Transform(self, client, item, callback):
        """Implement in each subclass to effect the required data migration.
    'callback' should be invoked on completion with the update object.
    If no async processing is required, it should be invoked directly.
    """
        raise NotImplementedError()
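As the Transform docstring describes, a concrete migrator subclasses Version and invokes the callback with the updated item. A hypothetical example (the column names are invented for illustration; persisting the change is omitted):

class FillDisplayName(Version):
  """Hypothetical migrator (sketch only): copy 'name' into an empty
  'display_name' column."""

  def Transform(self, client, item, callback):
    # MaybeMigrate has already advanced item._version to this migrator's
    # rank; only the data change itself happens here.
    if getattr(item, 'display_name', None) is None:
      item.display_name = getattr(item, 'name', None)
    self._LogUpdate(item)
    # Hand the updated item back to the migration driver.
    callback(item)

Instantiating the subclass registers it: __init__ assigns the next rank and appends it to Version._rank_ordering, so the instance can appear in the 'versions' sequence passed to MaybeMigrate.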