def test_run_process_more_than_1000_entities(self):
        """Tests we can process more entities than the old limit of 1k."""
        counter = counters.PerfCounter(
            'test-run-process-more-than-1000-entities-counter',
            'counter for testing increment by QueryMapper')
        db.put([Model() for _ in xrange(1001)])
        # Also pass custom args to QueryMapper ctor.
        num_processed = utils.QueryMapper(Model.all(),
                                          batch_size=50,
                                          counter=counter,
                                          report_every=0).run(process,
                                                              1,
                                                              string='foo')
        last_written = Model.all().order('-create_date').get()

        self.assertEqual(1001, counter.value)
        self.assertEqual(1001, num_processed)
        self.assertEqual(1, last_written.number)
        self.assertEqual('foo', last_written.string)
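
For context, the test assumes a trivial entity class and a process callback that QueryMapper invokes once per entity; a minimal sketch of what they might look like (both definitions are assumptions inferred from the test's assertions, not the project's actual code):

from google.appengine.ext import db


class Model(db.Model):
    """Minimal entity; fields are inferred from the test's assertions."""
    create_date = db.DateTimeProperty(auto_now_add=True)
    number = db.IntegerProperty()
    string = db.StringProperty()


def process(entity, number, string=None):
    """Per-entity callback; extra args are the custom args passed to run()."""
    entity.number = number
    entity.string = string
    entity.put()
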
Example #2
from google.appengine.ext import db

MODULE_NAME = 'Full Text Search'

DEPRECATED = config.ConfigProperty(
    'gcb_can_index_automatically',
    bool,
    safe_dom.Text(
        'This property has been deprecated; it is retained so that we '
        'will not generate no-such-variable error messages for existing '
        'installations that have this property set.'),
    default_value=False,
    label='Automatically index search',
    deprecated=True)
SEARCH_QUERIES_MADE = counters.PerfCounter(
    'gcb-search-queries-made',
    'The number of student queries made to the search module.')
SEARCH_RESULTS_RETURNED = counters.PerfCounter(
    'gcb-search-results-returned',
    'The number of search results returned across all student queries.')
SEARCH_FAILURES = counters.PerfCounter(
    'gcb-search-failures',
    'The number of search failure messages returned across all student '
    'queries.')

INDEX_NAME = 'gcb_search_index_loc_%s'
RESULTS_LIMIT = 10
GCB_SEARCH_FOLDER_NAME = os.path.normpath('/modules/search/')

MAX_RETRIES = 5
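
These counters are bumped as the search module serves student queries; a hedged sketch of typical usage (the handler and index API below are illustrative, not the module's actual code):

def handle_search_query(index, query):
    SEARCH_QUERIES_MADE.inc()
    try:
        results = index.search(query)  # hypothetical search call
    except Exception:
        SEARCH_FAILURES.inc()
        raise
    results = results[:RESULTS_LIMIT]
    # Assumes PerfCounter.inc() accepts an increment size; if it does not,
    # loop and call inc() once per result instead.
    SEARCH_RESULTS_RETURNED.inc(increment=len(results))
    return results
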
Example #3
from models import entities
from models import entity_transforms
from models import student_work
from models import transforms
from models import utils
import models.review
from modules.dashboard import dashboard
from modules.review import dashboard as review_dashboard
from modules.review import domain
from modules.review import peer
from modules.review import stats
from google.appengine.ext import db

# In-process increment-only performance counters.
COUNTER_ADD_REVIEWER_BAD_SUMMARY_KEY = counters.PerfCounter(
    'gcb-pr-add-reviewer-bad-summary-key',
    'number of times add_reviewer() failed due to a bad review summary key')
COUNTER_ADD_REVIEWER_SET_ASSIGNER_KIND_HUMAN = counters.PerfCounter(
    'gcb-pr-add-reviewer-set-assigner-kind-human',
    ("number of times add_reviewer() changed an existing step's assigner_kind "
     'to ASSIGNER_KIND_HUMAN'))
COUNTER_ADD_REVIEWER_CREATE_REVIEW_STEP = counters.PerfCounter(
    'gcb-pr-add-reviewer-create-review-step',
    'number of times add_reviewer() created a new review step')
COUNTER_ADD_REVIEWER_EXPIRED_STEP_REASSIGNED = counters.PerfCounter(
    'gcb-pr-add-reviewer-expired-step-reassigned',
    'number of times add_reviewer() reassigned an expired step')
COUNTER_ADD_REVIEWER_FAILED = counters.PerfCounter(
    'gcb-pr-add-reviewer-failed',
    'number of times add_reviewer() had a fatal error')
COUNTER_ADD_REVIEWER_REMOVED_STEP_UNREMOVED = counters.PerfCounter(
    'gcb-pr-add-reviewer-removed-step-unremoved',
    'number of times add_reviewer() unremoved a removed review step')
Example #4
from models import custom_modules
from models import models
from models import review as models_review
from models import roles
from models import student_work
from models import transforms
from modules.assessments import assessments
from modules.courses import unit_outline
from modules.review import domain
from tools import verify

from google.appengine.ext import db


COURSE_EVENTS_RECEIVED = counters.PerfCounter(
    'gcb-course-events-received',
    'The number of activity/assessment events received by the server.')

COURSE_EVENTS_RECORDED = counters.PerfCounter(
    'gcb-course-events-recorded',
    'The number of activity/assessment events recorded in the datastore.')

UNIT_PAGE_TYPE = 'unit'
ACTIVITY_PAGE_TYPE = 'activity'
ASSESSMENT_PAGE_TYPE = 'assessment'
ASSESSMENT_CONFIRMATION_PAGE_TYPE = 'test_confirmation'

TAGS_THAT_TRIGGER_BLOCK_COMPLETION = ['attempt-activity']
TAGS_THAT_TRIGGER_COMPONENT_COMPLETION = ['tag-assessment']
TAGS_THAT_TRIGGER_HTML_COMPLETION = ['attempt-lesson']
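
The tags above gate progress tracking; a hedged sketch of how an event's source might be dispatched to a completion update (the function and tracker method names are assumptions, not the module's actual code):

def _maybe_mark_completion(event_source, progress_tracker, student, event):
    # Hypothetical dispatch keyed on the tag constants above; the tracker
    # methods are illustrative names.
    if event_source in TAGS_THAT_TRIGGER_BLOCK_COMPLETION:
        progress_tracker.put_block_completed(student, event)
    elif event_source in TAGS_THAT_TRIGGER_COMPONENT_COMPLETION:
        progress_tracker.put_component_completed(student, event)
    elif event_source in TAGS_THAT_TRIGGER_HTML_COMPLETION:
        progress_tracker.put_html_completed(student, event)
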
Example #5
models/student_work.py. Entities declared here should not be used by external
clients.
"""

__author__ = [
    '[email protected] (John Cox)',
]

from models import counters
from models import models
from models import student_work
from modules.review import domain
from google.appengine.ext import db

COUNTER_INCREMENT_COUNT_COUNT_AGGREGATE_EXCEEDED_MAX = counters.PerfCounter(
    'gcb-pr-increment-count-count-aggregate-exceeded-max',
    ('number of times increment_count() failed because the new aggregate of '
     'the counts would have exceeded domain.MAX_UNREMOVED_REVIEW_STEPS'))


class ReviewSummary(student_work.BaseEntity):
    """Object that tracks the aggregate state of reviews for a submission."""

    # UTC last modification timestamp.
    change_date = db.DateTimeProperty(auto_now=True, required=True)
    # UTC create date.
    create_date = db.DateTimeProperty(auto_now_add=True, required=True)

    # Strong counters. Callers should never manipulate these directly. Instead,
    # use decrement|increment_count.
    # Count of ReviewStep entities for this submission currently in state
    # STATE_ASSIGNED.
    assigned_count = db.IntegerProperty(default=0, required=True)

Example #6

    @classmethod
    def build(cls, name, label, desc, max_size_bytes, ttl_sec, dao_class):
        """Build the family of classes for a process-scoped Entity cache.

        Args:
          name: Name under which the cache is registered.  This should be in
              the lower_case_and_underscores naming style.
          label: Label for the course-level setting enabling/disabling
              process-level caching for this entity type.
          desc: Description to add to the course-level setting
              enabling/disabling process-level caching for this entity type.
          max_size_bytes: Largest size the cache may take on.  If adding
              an item to the cache would make it exceed this size, items
              are LRU'd out until the item fits.
          ttl_sec: Number of seconds after which cached entries are
              considered stale and a (lazy) refresh is performed.
          dao_class: The DAO class in the Entity/DTO/DAO scheme common to
              Course Builder data access.  Used both for itself and for its
              references to the matching DTO and Entity classes.
        Returns:
          A CacheFactoryEntry containing the constellation of objects that
          interoperate to form a cache.
        """

        if name in cls._CACHES:
            return cls._CACHES[name]

        config_property = config.ConfigProperty(
            'gcb_can_use_%s_in_process_cache' % name,
            bool,
            desc,
            label=label,
            default_value=True)

        class EntityCache(caching.ProcessScopedSingleton):
            """This class holds in-process global cache of objects."""
            @classmethod
            def get_cache_len(cls):
                # pylint: disable=protected-access
                return len(cls.instance()._cache.items.keys())

            @classmethod
            def get_cache_size(cls):
                # pylint: disable=protected-access
                return cls.instance()._cache.total_size

            def __init__(self):
                self._cache = caching.LRUCache(max_size_bytes=max_size_bytes)
                self._cache.get_entry_size = self._get_entry_size

            def _get_entry_size(self, key, value):
                if not value:
                    return 0
                return sys.getsizeof(key) + sys.getsizeof(value)

            @property
            def cache(self):
                return self._cache

        class CacheEntry(caching.AbstractCacheEntry):
            """Cache entry containing an entity."""
            def __init__(self, entity):
                self.entity = entity
                self.created_on = datetime.datetime.utcnow()

            def getsizeof(self):
                return (dao_class.ENTITY.getsizeof(self.entity) +
                        sys.getsizeof(self.created_on))

            def has_expired(self):
                age = (datetime.datetime.utcnow() -
                       self.created_on).total_seconds()
                return age > ttl_sec

            def is_up_to_date(self, key, update):
                if update and self.entity:
                    return update.updated_on == self.entity.updated_on
                return not update and not self.entity

            def updated_on(self):
                if self.entity:
                    return self.entity.updated_on
                return None

            @classmethod
            def externalize(cls, key, entry):
                entity = entry.entity
                if not entity:
                    return None
                return dao_class.DTO(entity.key().id_or_name(),
                                     transforms.loads(entity.data))

            @classmethod
            def internalize(cls, key, entity):
                return cls(entity)

        class CacheConnection(caching.AbstractCacheConnection):

            PERSISTENT_ENTITY = dao_class.ENTITY
            CACHE_ENTRY = CacheEntry

            @classmethod
            def init_counters(cls):
                caching.AbstractCacheConnection.init_counters()

            @classmethod
            def is_enabled(cls):
                return config_property.value

            def __init__(self, namespace):
                caching.AbstractCacheConnection.__init__(self, namespace)
                self.cache = EntityCache.instance().cache

            def get_updates_when_empty(self):
                """Load in all ResourceBundles when cache is empty."""
                q = self.PERSISTENT_ENTITY.all()
                for entity in caching.iter_all(q):
                    self.put(entity.key().name(), entity)
                    self.CACHE_UPDATE_COUNT.inc()

                # we don't have any updates to apply; all items are new
                return {}

        class ConnectionManager(caching.RequestScopedSingleton):
            """Class that provides access to in-process Entity cache.

            This class only supports get() and does not intercept
            put() or delete() and is unaware of changes to
            Entities made in this very process.  When
            entites change, the changes will be picked up
            when new instance of this class is created. If you are
            watching perfomance counters, you will see EVICT and
            EXPIRE being incremented, but not DELETE or PUT.
            """
            def __init__(self):
                # Keep a separate CacheConnection for each namespace that
                # makes a get() request.
                self._conns = {}

            def _conn(self, ns):
                connected = self._conns.get(ns)
                if not connected:
                    logging.debug(
                        'CONNECTING a CacheConnection for namespace "%s"', ns)
                    connected = CacheConnection.new_connection(ns)
                    self._conns[ns] = connected
                return connected

            @classmethod
            def _ns(cls, app_context):
                if app_context:
                    return app_context.get_namespace_name()
                return namespace_manager.get_namespace()

            def _get(self, key, namespace):
                found, stream = self._conn(namespace).get(key)
                if found and stream:
                    return stream
                with utils.Namespace(namespace):
                    entity = dao_class.ENTITY_KEY_TYPE.get_entity_by_key(
                        dao_class.ENTITY, str(key))
                if entity:
                    self._conn(namespace).put(key, entity)
                    return dao_class.DTO(entity.key().id_or_name(),
                                         transforms.loads(entity.data))
                self._conn(namespace).CACHE_NOT_FOUND.inc()
                self._conn(namespace).put(key, None)
                return None

            def _get_multi(self, keys, namespace):
                return [self._get(key, namespace) for key in keys]

            @classmethod
            def get(cls, key, app_context=None):
                # pylint: disable=protected-access
                return cls.instance()._get(key, cls._ns(app_context))

            @classmethod
            def get_multi(cls, keys, app_context=None):
                # pylint: disable=protected-access
                return cls.instance()._get_multi(keys, cls._ns(app_context))

        cache_len = counters.PerfCounter(
            'gcb-models-%sCacheConnection-cache-len' %
            dao_class.ENTITY.__name__, 'Total number of items in the cache')
        cache_len.poll_value = EntityCache.get_cache_len

        cache_size = counters.PerfCounter(
            'gcb-models-%sCacheConnection-cache-bytes' %
            dao_class.ENTITY.__name__, 'Total number of bytes in the cache.')
        cache_size.poll_value = EntityCache.get_cache_size

        CacheConnection.init_counters()

        entry = CacheFactoryEntry(EntityCache, CacheEntry, CacheConnection,
                                  ConnectionManager, config_property,
                                  cache_len, cache_size)
        cls._CACHES[name] = entry
        return entry
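
A hedged sketch of how a caller might use the factory (the factory's class name, the DAO class, the key, and the attribute names on CacheFactoryEntry are all assumptions):

# Build (or fetch) the cache family for a hypothetical DAO.
entry = ResourceCacheFactory.build(
    'resource_bundle', 'Cache resource bundles',
    'Enable process-level caching of resource bundles.',
    max_size_bytes=8 * 1024 * 1024, ttl_sec=300,
    dao_class=ResourceBundleDAO)

# Reads go through the request-scoped ConnectionManager; misses fall through
# to the datastore and are cached (negative results included) for later gets.
dto = entry.connection_manager.get('course:en_US', app_context=app_context)
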
Example #7

# Mail API errors treated as fatal rather than recoverable.  The head of this
# definition was truncated in the snippet; the name is an assumption, and only
# the surviving entry is shown.
_APP_ENGINE_MAIL_FATAL_ERRORS = frozenset([
    mail_errors.InvalidSenderError,
])
_ENQUEUED_BUFFER_MULTIPLIER = 1.5
_KEY_DELIMITER = ':'
_MAX_ENQUEUED_HOURS = 3
_MAX_RETRY_DAYS = 3
# Number of times past which recoverable failure of send_mail() calls becomes
# hard failure. Used as a brake on runaway queues. Should be larger than the
# expected cap on the number of retries imposed by taskqueue.
_RECOVERABLE_FAILURE_CAP = 20
_SECONDS_PER_HOUR = 60 * 60
_SECONDS_PER_DAY = 24 * _SECONDS_PER_HOUR
_USECS_PER_SECOND = 10**6
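
The _RECOVERABLE_FAILURE_CAP above acts as a circuit breaker; a hedged sketch of the kind of check it implies (the notification field and helper are assumptions, not the module's actual code):

def _check_failure_cap(notification):
    # Hypothetical: assumes the notification entity tracks how many times
    # send_mail() has failed recoverably for it.
    if notification.recoverable_failure_count >= _RECOVERABLE_FAILURE_CAP:
        # Promote the recoverable failure to a hard one so a runaway queue
        # stops retrying past the expected taskqueue retry cap.
        raise RuntimeError(
            'Recoverable failure cap (%d) exceeded; failing permanently' %
            _RECOVERABLE_FAILURE_CAP)
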

COUNTER_RETENTION_POLICY_RUN = counters.PerfCounter(
    'gcb-notifications-retention-policy-run',
    'number of times a retention policy was run')
COUNTER_SEND_ASYNC_FAILED_BAD_ARGUMENTS = counters.PerfCounter(
    'gcb-notifications-send-async-failed-bad-arguments',
    'number of times send_async failed because arguments were bad')
COUNTER_SEND_ASYNC_FAILED_DATASTORE_ERROR = counters.PerfCounter(
    'gcb-notifications-send-async-failed-datastore-error',
    'number of times send_async failed because of datastore error')
COUNTER_SEND_ASYNC_START = counters.PerfCounter(
    'gcb-notifications-send-async-called',
    'number of times send_async has been called')
COUNTER_SEND_ASYNC_SUCCESS = counters.PerfCounter(
    'gcb-notifications-send-async-success',
    'number of times send_async succeeded')
COUNTER_SEND_MAIL_TASK_FAILED = counters.PerfCounter(
    'gcb-notifications-send-mail-task-failed',