Example #1
    def test_cache_memoize_large_files_compressed(self):
        """Testing cache_memoize with large files with compression"""
        cache_key = 'abc123'

        data, pickled_data = self._build_test_chunk_data(num_chunks=2)

        def cache_func():
            return data

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize(cache_key, cache_func, large_data=True,
                               compress_large_data=True)
        self.assertTrue(cache_func.spy.called)

        cache_key_0 = make_cache_key('%s-0' % cache_key)

        self.assertTrue(make_cache_key(cache_key) in cache)
        self.assertTrue(cache_key_0 in cache)
        self.assertFalse(make_cache_key('%s-1' % cache_key) in cache)
        self.assertFalse(make_cache_key('%s-2' % cache_key) in cache)

        # Verify the contents of the stored data.
        stored_data = cache.get(cache_key_0)[0]
        self.assertEqual(stored_data, zlib.compress(pickled_data))

        # Try fetching the data we stored.
        cache_func.spy.reset_calls()
        result = cache_memoize(cache_key, cache_func, large_data=True,
                               compress_large_data=True)
        self.assertEqual(result, data)
        self.assertFalse(cache_func.spy.called)
Example #2
    def test_cache_memoize_large_files_load_uncompressed(self):
        """Testing cache_memoize with large files without compression and
        loading data
        """
        cache_key = 'abc123'

        # This takes into account the size of the pickle data, and will
        # get us to exactly 2 chunks of data in cache.
        data, pickled_data = self._build_test_chunk_data(num_chunks=2)

        cache.set(make_cache_key(cache_key), '2')
        cache.set(make_cache_key('%s-0' % cache_key),
                  [pickled_data[:CACHE_CHUNK_SIZE]])
        cache.set(make_cache_key('%s-1' % cache_key),
                  [pickled_data[CACHE_CHUNK_SIZE:]])

        def cache_func():
            return ''

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize(cache_key, cache_func, large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)
        self.assertFalse(cache_func.spy.called)
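The tests in this listing rely on a _build_test_chunk_data() helper that is not shown. The sketch below is a hypothetical reconstruction, assuming the helper builds a string whose pickled form fills exactly num_chunks cache chunks and using the same pickle and CACHE_CHUNK_SIZE names the tests already import; the 8-byte pickle overhead subtracted here mirrors the CACHE_CHUNK_SIZE * 2 - 8 expression in Example #3 and is an assumption.

    def _build_test_chunk_data(self, num_chunks):
        """Build test data whose pickled form spans num_chunks cache chunks.

        Hypothetical sketch: the pickle overhead (8 bytes) is assumed and
        may differ depending on the pickle protocol in use.
        """
        data = 'x' * (CACHE_CHUNK_SIZE * num_chunks - 8)
        pickled_data = pickle.dumps(data)

        # Sanity-check that the pickled payload lands exactly on a chunk
        # boundary, as the chunk-count assertions in these tests expect.
        self.assertEqual(len(pickled_data), CACHE_CHUNK_SIZE * num_chunks)

        return data, pickled_data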
Example #3
    def test_cache_memoize_large_files(self):
        """Testing cache_memoize with large files"""
        cacheKey = "abc123"

        # This takes into account the size of the pickle data, and will
        # get us to exactly 2 chunks of data in cache.
        data = 'x' * (CACHE_CHUNK_SIZE * 2 - 8)

        def cacheFunc(cacheCalled=[]):
            self.assertTrue(not cacheCalled)
            cacheCalled.append(True)
            return data

        result = cache_memoize(cacheKey, cacheFunc, large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)

        self.assertTrue(make_cache_key(cacheKey) in cache)
        self.assertTrue(make_cache_key('%s-0' % cacheKey) in cache)
        self.assertTrue(make_cache_key('%s-1' % cacheKey) in cache)
        self.assertFalse(make_cache_key('%s-2' % cacheKey) in cache)

        result = cache_memoize(cacheKey, cacheFunc, large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)
Example #4
    def test_cache_memoize_large_files_missing_chunk(self):
        """Testing cache_memoize with loading large files with missing chunks
        """
        cache_key = 'abc123'

        # This takes into account the size of the pickle data, and will
        # get us to exactly 2 chunks of data in cache.
        data, pickled_data = self._build_test_chunk_data(num_chunks=2)

        cache.set(make_cache_key(cache_key), '2')
        cache.set(make_cache_key('%s-0' % cache_key),
                  [pickled_data[:CACHE_CHUNK_SIZE]])

        def cache_func():
            return data

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize(cache_key,
                               cache_func,
                               large_data=True,
                               compress_large_data=False)
        self.assertEqual(len(result), len(data))
        self.assertEqual(result, data)
        self.assertTrue(cache_func.spy.called)
Example #5
    def test_cache_memoize_large_files(self):
        """Testing cache_memoize with large files"""
        cacheKey = "abc123"

        # This takes into account the size of the pickle data, and will
        # get us to exactly 2 chunks of data in cache.
        data = 'x' * (CACHE_CHUNK_SIZE * 2 - 8)

        def cacheFunc(cacheCalled=[]):
            self.assertTrue(not cacheCalled)
            cacheCalled.append(True)
            return data

        result = cache_memoize(cacheKey,
                               cacheFunc,
                               large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)

        self.assertTrue(make_cache_key(cacheKey) in cache)
        self.assertTrue(make_cache_key('%s-0' % cacheKey) in cache)
        self.assertTrue(make_cache_key('%s-1' % cacheKey) in cache)
        self.assertFalse(make_cache_key('%s-2' % cacheKey) in cache)

        result = cache_memoize(cacheKey,
                               cacheFunc,
                               large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)
Example #6
    def get_commits(self, start=None):
        """Returns a list of commits.

        This is paginated via the 'start' parameter. Any exceptions are
        expected to be handled by the caller.
        """
        hosting_service = self.hosting_service

        cache_key = make_cache_key('repository-commits:%s:%s' % (self.pk, start))
        if hosting_service:
            commits_callable = lambda: hosting_service.get_commits(self, start)
        else:
            commits_callable = lambda: self.get_scmtool().get_commits(start)

        # We cache both the entire list for 'start', as well as each individual
        # commit. This allows us to reduce API load when people are looking at
        # the "new review request" page more frequently than they're pushing
        # code, and will usually save 1 API request when they go to actually
        # create a new review request.
        commits = cache_memoize(cache_key, commits_callable)

        for commit in commits:
            cache.set(self.get_commit_cache_key(commit.id),
                      commit, self.COMMITS_CACHE_PERIOD)

        return commits
Example #7
    def get_commits(self, start=None):
        """Returns a list of commits.

        This is paginated via the 'start' parameter. Any exceptions are
        expected to be handled by the caller.
        """
        hosting_service = self.hosting_service

        cache_key = make_cache_key('repository-commits:%s:%s' %
                                   (self.pk, start))
        if hosting_service:
            commits_callable = lambda: hosting_service.get_commits(self, start)
        else:
            commits_callable = lambda: self.get_scmtool().get_commits(start)

        # We cache both the entire list for 'start', as well as each individual
        # commit. This allows us to reduce API load when people are looking at
        # the "new review request" page more frequently than they're pushing
        # code, and will usually save 1 API request when they go to actually
        # create a new review request.
        commits = cache_memoize(cache_key, commits_callable)

        for commit in commits:
            cache.set(self.get_commit_cache_key(commit.id), commit,
                      self.COMMITS_CACHE_PERIOD)

        return commits
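Both versions above also cache each commit individually under a key built by get_commit_cache_key(), which is not included in this listing. A plausible sketch, assuming the key simply combines the repository's primary key with the commit ID (the exact format is an assumption):

    def get_commit_cache_key(self, commit_id):
        """Return the cache key for a single cached commit.

        Hypothetical sketch; the real key format may differ.
        """
        return make_cache_key('repository-commit:%s:%s' % (self.pk, commit_id))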
Example #8
    def __init__(self, key):
        self.key = key

        self.pkg_resources = None

        self._extension_classes = {}
        self._extension_instances = {}
        self._load_errors = {}

        # State synchronization
        self._sync_key = make_cache_key('extensionmgr:%s:gen' % key)
        self._last_sync_gen = None
        self._load_lock = threading.Lock()
        self._block_sync_gen = False

        self.dynamic_urls = DynamicURLResolver()

        # Extension middleware instances, ordered by dependencies.
        self.middleware = []

        # Wrap the INSTALLED_APPS and TEMPLATE_CONTEXT_PROCESSORS settings
        # to allow for ref-counted add/remove operations.
        self._installed_apps_setting = SettingListWrapper(
            'INSTALLED_APPS',
            'installed app')
        self._context_processors_setting = SettingListWrapper(
            'TEMPLATE_CONTEXT_PROCESSORS',
            'context processor')

        _extension_managers.append(self)
Example #9
    def get_file_exists(self,
                        path,
                        revision,
                        base_commit_id=None,
                        request=None):
        """Returns whether or not a file exists in the repository.

        If the repository is backed by a hosting service, this will go
        through that. Otherwise, it will attempt to directly access the
        repository.

        The result of this call will be cached, making future lookups
        of this path and revision on this repository faster.
        """
        key = self._make_file_exists_cache_key(path, revision, base_commit_id)

        if cache.get(make_cache_key(key)) == '1':
            return True

        exists = self._get_file_exists_uncached(path, revision, base_commit_id,
                                                request)

        if exists:
            cache_memoize(key, lambda: '1')

        return exists
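The key passed to cache_memoize() here comes from _make_file_exists_cache_key(), which is not shown. A minimal sketch, assuming it only needs to produce a stable per-repository string and that normalization is left to make_cache_key(), as in the caller above:

    def _make_file_exists_cache_key(self, path, revision, base_commit_id):
        """Return the cache key recording that a file revision exists.

        Hypothetical sketch; the real key format may differ.
        """
        return ('file-exists:%s:%s:%s:%s'
                % (self.pk, path, revision, base_commit_id or ''))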
Example #10
    def _get_file_exists_uncached(self, path, revision, base_commit_id, request):
        """Internal function for checking that a file exists.

        This is called by get_file_exists if the file isn't already in the
        cache.

        This function is smart enough to check if the file exists in cache,
        and will use that for the result instead of making a separate call.
        """
        # First we check to see if we've fetched the file before. If so,
        # it's in there and we can just return that we have it.
        file_cache_key = make_cache_key(self._make_file_cache_key(path, revision, base_commit_id))

        if file_cache_key in cache:
            exists = True
        else:
            # We didn't have that in the cache, so check from the repository.
            checking_file_exists.send(
                sender=self, path=path, revision=revision, base_commit_id=base_commit_id, request=request
            )

            hosting_service = self.hosting_service

            if hosting_service:
                exists = hosting_service.get_file_exists(self, path, revision, base_commit_id=base_commit_id)
            else:
                exists = self.get_scmtool().file_exists(path, revision)

            checked_file_exists.send(
                sender=self, path=path, revision=revision, base_commit_id=base_commit_id, request=request, exists=exists
            )

        return exists
Example #11
    def get_branches(self):
        """Return a list of all branches on the repository.

        This will fetch a list of all known branches for use in the API and
        New Review Request page.

        Returns:
            list of reviewboard.scmtools.core.Branch:
            The list of branches in the repository. One (and only one) will
            be marked as the default branch.

        Raises:
            reviewboard.hostingsvcs.errors.HostingServiceError:
                The hosting service backing the repository encountered an
                error.

            reviewboard.scmtools.errors.SCMError:
                The repository tool encountered an error.

            NotImplementedError:
                Branch retrieval is not available for this type of repository.
        """
        hosting_service = self.hosting_service

        cache_key = make_cache_key('repository-branches:%s' % self.pk)

        if hosting_service:
            branches_callable = lambda: hosting_service.get_branches(self)
        else:
            branches_callable = self.get_scmtool().get_branches

        return cache_memoize(cache_key, branches_callable,
                             self.BRANCHES_CACHE_PERIOD)
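A short usage sketch for this method, assuming a Repository instance named repository and that each returned Branch exposes a boolean default attribute, as the docstring above implies:

    branches = repository.get_branches()

    # Exactly one branch is marked as the default, per the docstring.
    default_branch = next(branch for branch in branches
                          if branch.default)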
Example #12
    def __init__(self, key):
        self.key = key

        self.pkg_resources = None

        self._extension_classes = {}
        self._extension_instances = {}
        self._load_errors = {}

        # State synchronization
        self._sync_key = make_cache_key('extensionmgr:%s:gen' % key)
        self._last_sync_gen = None

        self.dynamic_urls = DynamicURLResolver()

        # Extension middleware instances, ordered by dependencies.
        self.middleware = []

        # Wrap the INSTALLED_APPS and TEMPLATE_CONTEXT_PROCESSORS settings
        # to allow for ref-counted add/remove operations.
        self._installed_apps_setting = SettingListWrapper(
            'INSTALLED_APPS', 'installed app')
        self._context_processors_setting = SettingListWrapper(
            'TEMPLATE_CONTEXT_PROCESSORS', 'context processor')

        _extension_managers.append(self)
Example #13
    def test_cache_memoize_large_files_uncompressed_off_by_one(self):
        """Testing cache_memoize with large files without compression and
        one byte larger than an even chunk size."""
        cache_key = 'abc123'

        # This takes into account the size of the pickle data, and will
        # get us to just barely 3 chunks of data in cache.
        data = self._build_test_chunk_data(num_chunks=2)[0] + 'x'
        pickled_data = pickle.dumps(data, protocol=0)

        def cache_func():
            return data

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize(cache_key,
                               cache_func,
                               large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)
        self.assertTrue(cache_func.spy.called)

        cache_key_0 = make_cache_key('%s-0' % cache_key)
        cache_key_1 = make_cache_key('%s-1' % cache_key)
        cache_key_2 = make_cache_key('%s-2' % cache_key)

        self.assertTrue(make_cache_key(cache_key) in cache)
        self.assertTrue(cache_key_0 in cache)
        self.assertTrue(cache_key_1 in cache)
        self.assertTrue(cache_key_2 in cache)
        self.assertFalse(make_cache_key('%s-3' % cache_key) in cache)

        # Verify the contents of the stored data.
        stored_data = b''.join(
            cache.get(cache_key_0) + cache.get(cache_key_1) +
            cache.get(cache_key_2))
        self.assertEqual(stored_data, pickled_data)

        # Try fetching the data we stored.
        cache_func.spy.reset_calls()

        result = cache_memoize(cache_key,
                               cache_func,
                               large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)
        self.assertFalse(cache_func.spy.called)
Example #14
    def test_get_all_consent_with_no_user_data(self):
        """Testing DatabaseConsentTracker.get_all_consent with user without any
        consent data
        """
        self.assertEqual(self.tracker.get_all_consent(self.user), {})

        self.assertEqual(
            cache.get(make_cache_key('privacy-consent:%s' % self.user.pk)),
            {})
Example #15
    def test_cache_memoize_large_files_uncompressed_off_by_one(self):
        """Testing cache_memoize with large files without compression and
        one byte larger than an even chunk size."""
        cache_key = 'abc123'

        # This takes into account the size of the pickle data, and will
        # get us to just barely 3 chunks of data in cache.
        data = self._build_test_chunk_data(num_chunks=2)[0] + 'x'
        pickled_data = pickle.dumps(data)

        def cache_func():
            return data

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize(cache_key, cache_func, large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)
        self.assertTrue(cache_func.spy.called)

        cache_key_0 = make_cache_key('%s-0' % cache_key)
        cache_key_1 = make_cache_key('%s-1' % cache_key)
        cache_key_2 = make_cache_key('%s-2' % cache_key)

        self.assertTrue(make_cache_key(cache_key) in cache)
        self.assertTrue(cache_key_0 in cache)
        self.assertTrue(cache_key_1 in cache)
        self.assertTrue(cache_key_2 in cache)
        self.assertFalse(make_cache_key('%s-3' % cache_key) in cache)

        # Verify the contents of the stored data.
        stored_data = b''.join(cache.get(cache_key_0) +
                               cache.get(cache_key_1) +
                               cache.get(cache_key_2))
        self.assertEqual(stored_data, pickled_data)

        # Try fetching the data we stored.
        cache_func.spy.reset_calls()

        result = cache_memoize(cache_key, cache_func, large_data=True,
                               compress_large_data=False)
        self.assertEqual(result, data)
        self.assertFalse(cache_func.spy.called)
Example #16
def delete_cached_user_team_ids(user):
    """Delete the user's cached I Done This team IDs.

    Args:
        user (django.contrib.auth.models.User):
            The user whose cached team IDs should be deleted.
    """
    logging.debug('IDoneThis: Deleting cached team IDs for user "%s"',
                  user.username)
    cache.delete(make_cache_key(_make_user_team_ids_cache_key(user)))
Example #17
def delete_cached_user_team_ids(user):
    """Delete the user's cached I Done This team IDs.

    Args:
        user (django.contrib.auth.models.User):
            The user whose cached team IDs should be deleted.
    """
    logging.debug('IDoneThis: Deleting cached team IDs for user "%s"',
                  user.username)
    cache.delete(make_cache_key(_make_user_team_ids_cache_key(user)))
Example #18
    def _get_file_exists_uncached(self, path, revision, base_commit_id,
                                  request):
        """Internal function for checking that a file exists.

        This is called by get_file_exists if the file isn't already in the
        cache.

        This function is smart enough to check if the file exists in cache,
        and will use that for the result instead of making a separate call.
        """
        # First we check to see if we've fetched the file before. If so,
        # it's in there and we can just return that we have it.
        file_cache_key = make_cache_key(
            self._make_file_cache_key(path, revision, base_commit_id))

        if file_cache_key in cache:
            exists = True
        else:
            # We didn't have that in the cache, so check from the repository.
            checking_file_exists.send(sender=self,
                                      path=path,
                                      revision=revision,
                                      base_commit_id=base_commit_id,
                                      request=request)

            hosting_service = self.hosting_service

            if hosting_service:
                exists = hosting_service.get_file_exists(
                    self,
                    path,
                    revision,
                    base_commit_id=base_commit_id)
            else:
                tool = self.get_scmtool()
                argspec = inspect.getargspec(tool.file_exists)

                if argspec.keywords is None:
                    warnings.warn('SCMTool.file_exists() must take keyword '
                                  'arguments, signature for %s is deprecated.'
                                  % tool.name,
                                  RemovedInReviewBoard40Warning)
                    exists = tool.file_exists(path, revision)
                else:
                    exists = tool.file_exists(path, revision,
                                              base_commit_id=base_commit_id)

            checked_file_exists.send(sender=self,
                                     path=path,
                                     revision=revision,
                                     base_commit_id=base_commit_id,
                                     request=request,
                                     exists=exists)

        return exists
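The inspect.getargspec() check above emits a deprecation warning for SCMTool subclasses whose file_exists() takes no keyword arguments. A minimal sketch of a signature that avoids the warning, using a hypothetical MyTool subclass and assuming the base class accepts the same keyword arguments:

    class MyTool(SCMTool):
        """Hypothetical SCMTool subclass."""

        def file_exists(self, path, revision=HEAD, **kwargs):
            # Accepting **kwargs makes inspect.getargspec(file_exists).keywords
            # non-None, so the deprecated code path above is skipped and
            # base_commit_id is passed through as a keyword argument.
            return super(MyTool, self).file_exists(path, revision, **kwargs)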
Example #19
    def _get_file_exists_uncached(self, path, revision, base_commit_id,
                                  request):
        """Internal function for checking that a file exists.

        This is called by get_file_exists if the file isn't already in the
        cache.

        This function is smart enough to check if the file exists in cache,
        and will use that for the result instead of making a separate call.
        """
        # First we check to see if we've fetched the file before. If so,
        # it's in there and we can just return that we have it.
        file_cache_key = make_cache_key(
            self._make_file_cache_key(path, revision, base_commit_id))

        if file_cache_key in cache:
            exists = True
        else:
            # We didn't have that in the cache, so check from the repository.
            checking_file_exists.send(sender=self,
                                      path=path,
                                      revision=revision,
                                      base_commit_id=base_commit_id,
                                      request=request)

            hosting_service = self.hosting_service

            if hosting_service:
                exists = hosting_service.get_file_exists(
                    self,
                    path,
                    revision,
                    base_commit_id=base_commit_id)
            else:
                tool = self.get_scmtool()
                argspec = inspect.getargspec(tool.file_exists)

                if argspec.keywords is None:
                    warnings.warn('SCMTool.file_exists() must take keyword '
                                  'arguments, signature for %s is deprecated.'
                                  % tool.name,
                                  RemovedInReviewBoard40Warning)
                    exists = tool.file_exists(path, revision)
                else:
                    exists = tool.file_exists(path, revision,
                                              base_commit_id=base_commit_id)

            checked_file_exists.send(sender=self,
                                     path=path,
                                     revision=revision,
                                     base_commit_id=base_commit_id,
                                     request=request,
                                     exists=exists)

        return exists
Example #20
    def get_branches(self):
        """Returns a list of branches."""
        hosting_service = self.hosting_service

        cache_key = make_cache_key("repository-branches:%s" % self.pk)
        if hosting_service:
            branches_callable = lambda: hosting_service.get_branches(self)
        else:
            branches_callable = self.get_scmtool().get_branches

        return cache_memoize(cache_key, branches_callable, self.BRANCHES_CACHE_PERIOD)
Example #21
    def get_branches(self):
        """Returns a list of branches."""
        hosting_service = self.hosting_service

        cache_key = make_cache_key('repository-branches:%s' % self.pk)
        if hosting_service:
            branches_callable = lambda: hosting_service.get_branches(self)
        else:
            branches_callable = self.get_scmtool().get_branches

        return cache_memoize(cache_key, branches_callable,
                             self.BRANCHES_CACHE_PERIOD)
Example #22
    def _get_file_exists_uncached(self, path, revision, base_commit_id,
                                  request):
        """Check for file existence, bypassing cache.

        This is called internally by :py:meth:`get_file_exists` if the file
        isn't already in the cache.

        This function is smart enough to check if the file exists in cache,
        and will use that for the result instead of making a separate call.

        This will send the
        :py:data:`~reviewboard.scmtools.signals.checking_file_exists` signal
        before beginning a file fetch from the repository, and the
        :py:data:`~reviewboard.scmtools.signals.checked_file_exists` signal
        after.
        """
        # First we check to see if we've fetched the file before. If so,
        # it's in there and we can just return that we have it.
        file_cache_key = make_cache_key(
            self._make_file_cache_key(path, revision, base_commit_id))

        if file_cache_key in cache:
            exists = True
        else:
            # We didn't have that in the cache, so check from the repository.
            checking_file_exists.send(sender=self,
                                      path=path,
                                      revision=revision,
                                      base_commit_id=base_commit_id,
                                      request=request)

            hosting_service = self.hosting_service

            if hosting_service:
                exists = hosting_service.get_file_exists(
                    self,
                    path,
                    revision,
                    base_commit_id=base_commit_id)
            else:
                tool = self.get_scmtool()
                exists = tool.file_exists(path, revision,
                                          base_commit_id=base_commit_id)

            checked_file_exists.send(sender=self,
                                     path=path,
                                     revision=revision,
                                     base_commit_id=base_commit_id,
                                     request=request,
                                     exists=exists)

        return exists
Example #23
    def test_cache_memoize_large_files_load_compressed(self):
        """Testing cache_memoize with large files with compression and
        loading cached data
        """
        cache_key = 'abc123'

        data, pickled_data = self._build_test_chunk_data(num_chunks=2)
        stored_data = zlib.compress(pickled_data)
        self.assertTrue(len(stored_data) < CACHE_CHUNK_SIZE)

        cache.set(make_cache_key(cache_key), '1')
        cache.set(make_cache_key('%s-0' % cache_key), [stored_data])

        def cache_func():
            return ''

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize(cache_key, cache_func, large_data=True,
                               compress_large_data=True)
        self.assertEqual(result, data)
        self.assertFalse(cache_func.spy.called)
Example #24
    def record_consent_data_list(self, user, consent_data_list):
        """Record a list of all consent data made by a user.

        Args:
            user (django.contrib.auth.models.User):
                The user to record the consent data for.

            consent_data_list (list of
                               djblets.privacy.consent.base.ConsentData):
                A list of consent data to record.
        """
        self.store_recorded_consent_data_list(user, consent_data_list)
        cache.delete(make_cache_key(self._get_user_cache_key(user)))
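A short usage sketch, assuming ConsentData is constructed as in the tests elsewhere in this listing, and that tracker and user are an existing DatabaseConsentTracker and User:

    tracker.record_consent_data_list(user, [
        ConsentData(requirement_id='test-requirement-1', granted=True),
        ConsentData(requirement_id='test-requirement-2', granted=False),
    ])

    # The per-user cache entry is deleted above, so the next call to
    # get_all_consent() rebuilds it from the stored consent data.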
Example #25
    def test_cache_memoize_large_files_compressed(self):
        """Testing cache_memoize with large files with compression"""
        cache_key = 'abc123'

        data, pickled_data = self._build_test_chunk_data(num_chunks=2)

        def cache_func():
            return data

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize(cache_key,
                               cache_func,
                               large_data=True,
                               compress_large_data=True)
        self.assertTrue(cache_func.spy.called)

        cache_key_0 = make_cache_key('%s-0' % cache_key)

        self.assertTrue(make_cache_key(cache_key) in cache)
        self.assertTrue(cache_key_0 in cache)
        self.assertFalse(make_cache_key('%s-1' % cache_key) in cache)
        self.assertFalse(make_cache_key('%s-2' % cache_key) in cache)

        # Verify the contents of the stored data.
        stored_data = cache.get(cache_key_0)[0]
        self.assertEqual(stored_data, zlib.compress(pickled_data))

        # Try fetching the data we stored.
        cache_func.spy.reset_calls()
        result = cache_memoize(cache_key,
                               cache_func,
                               large_data=True,
                               compress_large_data=True)
        self.assertEqual(result, data)
        self.assertFalse(cache_func.spy.called)
Example #26
    def test_cache_memoize_large_files_load_compressed(self):
        """Testing cache_memoize with large files with compression and
        loading cached data
        """
        cache_key = 'abc123'

        data, pickled_data = self._build_test_chunk_data(num_chunks=2)
        stored_data = zlib.compress(pickled_data)
        self.assertTrue(len(stored_data) < CACHE_CHUNK_SIZE)

        cache.set(make_cache_key(cache_key), '1')
        cache.set(make_cache_key('%s-0' % cache_key), [stored_data])

        def cache_func():
            return ''

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize(cache_key,
                               cache_func,
                               large_data=True,
                               compress_large_data=True)
        self.assertEqual(result, data)
        self.assertFalse(cache_func.spy.called)
Example #27
    def test_record_consent_data_list_clears_cache(self):
        """Testing DatabaseConsentTracker.record_consent_data_list clears cache
        """
        consent_data_1 = ConsentData(requirement_id='test-requirement-1',
                                     granted=True)
        consent_data_2 = ConsentData(requirement_id='test-requirement-2',
                                     granted=False)

        cache_key = make_cache_key('privacy-consent:%s' % self.user.pk)
        cache.add(cache_key, ['test-requirement-1'])

        self.assertEqual(cache.get(cache_key), ['test-requirement-1'])

        self.tracker.record_consent_data_list(self.user,
                                              [consent_data_1, consent_data_2])

        self.assertIsNone(cache.get(cache_key))
Example #28
    def _get_file_exists_uncached(self, path, revision, base_commit_id,
                                  request):
        """Internal function for checking that a file exists.

        This is called by get_file_exists if the file isn't already in the
        cache.

        This function is smart enough to check if the file exists in cache,
        and will use that for the result instead of making a separate call.
        """
        # First we check to see if we've fetched the file before. If so,
        # it's in there and we can just return that we have it.
        file_cache_key = make_cache_key(
            self._make_file_cache_key(path, revision, base_commit_id))

        if file_cache_key in cache:
            exists = True
        else:
            # We didn't have that in the cache, so check from the repository.
            checking_file_exists.send(sender=self,
                                      path=path,
                                      revision=revision,
                                      base_commit_id=base_commit_id,
                                      request=request)

            hosting_service = self.hosting_service

            if hosting_service:
                exists = hosting_service.get_file_exists(
                    self,
                    path,
                    revision,
                    base_commit_id=base_commit_id)
            else:
                tool = self.get_scmtool()
                exists = tool.file_exists(path, revision,
                                          base_commit_id=base_commit_id)

            checked_file_exists.send(sender=self,
                                     path=path,
                                     revision=revision,
                                     base_commit_id=base_commit_id,
                                     request=request,
                                     exists=exists)

        return exists
Example #29
    def changeset_is_pending(self, commit_id):
        """Returns whether the associated changeset is pending commit.

        For repositories that support it, this will return whether the
        associated changeset is pending commit. This requires server-side
        knowledge of the change.
        """
        cache_key = make_cache_key(
            'commit-id-is-pending-%d-%s' % (self.pk, commit_id))

        cached_values = cache.get(cache_key)
        if cached_values:
            return cached_values

        is_pending = False

        scmtool = self.repository.get_scmtool()
        if (scmtool.supports_pending_changesets and
            commit_id is not None):
            changeset = scmtool.get_changeset(commit_id, allow_empty=True)

            if changeset:
                is_pending = changeset.pending

                new_commit_id = six.text_type(changeset.changenum)

                if commit_id != new_commit_id:
                    self.commit_id = new_commit_id
                    self.save(update_fields=['commit_id'])
                    commit_id = new_commit_id

                    draft = self.get_draft()
                    if draft:
                        draft.commit_id = new_commit_id
                        draft.save(update_fields=['commit_id'])

                # If the changeset is pending, we cache for only one minute to
                # speed things up a little bit when navigating through
                # different pages. If the changeset is no longer pending, cache
                # for the full default time.
                if is_pending:
                    cache.set(cache_key, (is_pending, commit_id), 60)
                else:
                    cache.set(cache_key, (is_pending, commit_id))

        return is_pending, commit_id
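Note that, despite the docstring's wording, this method returns an (is_pending, commit_id) tuple. A minimal usage sketch, with review_request standing in for the object that defines the method and a hypothetical commit ID:

    is_pending, commit_id = review_request.changeset_is_pending('1234')

    # Pending changesets are cached for only 60 seconds; once the changeset
    # is committed, the result is cached for the backend's default timeout.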
Example #30
    def changeset_is_pending(self, commit_id):
        """Returns whether the associated changeset is pending commit.

        For repositories that support it, this will return whether the
        associated changeset is pending commit. This requires server-side
        knowledge of the change.
        """
        cache_key = make_cache_key(
            'commit-id-is-pending-%d-%s' % (self.pk, commit_id))

        cached_values = cache.get(cache_key)
        if cached_values:
            return cached_values

        is_pending = False

        scmtool = self.repository.get_scmtool()
        if (scmtool.supports_pending_changesets and
            commit_id is not None):
            changeset = scmtool.get_changeset(commit_id, allow_empty=True)

            if changeset:
                is_pending = changeset.pending

                new_commit_id = six.text_type(changeset.changenum)

                if commit_id != new_commit_id:
                    self.commit_id = new_commit_id
                    self.save(update_fields=['commit_id'])
                    commit_id = new_commit_id

                    draft = self.get_draft()
                    if draft:
                        draft.commit_id = new_commit_id
                        draft.save(update_fields=['commit_id'])

                # If the changeset is pending, we cache for only one minute to
                # speed things up a little bit when navigating through
                # different pages. If the changeset is no longer pending, cache
                # for the full default time.
                if is_pending:
                    cache.set(cache_key, (is_pending, commit_id), 60)
                else:
                    cache.set(cache_key, (is_pending, commit_id))

        return is_pending, commit_id
Example #31
    def test_get_all_consent(self):
        """Testing DatabaseConsentTracker.get_all_consent"""
        # Populate some data we can fetch.
        self.test_record_consent_data_list()

        self.assertEqual(
            self.tracker.get_all_consent(self.user),
            {
                'test-requirement-1': Consent.GRANTED,
                'test-requirement-2': Consent.DENIED,
            })

        self.assertEqual(
            cache.get(make_cache_key('privacy-consent:%s' % self.user.pk)),
            {
                'test-requirement-1': Consent.GRANTED,
                'test-requirement-2': Consent.DENIED,
            })
Example #32
    def get_file_exists(self, path, revision, base_commit_id=None, request=None):
        """Returns whether or not a file exists in the repository.

        If the repository is backed by a hosting service, this will go
        through that. Otherwise, it will attempt to directly access the
        repository.

        The result of this call will be cached, making future lookups
        of this path and revision on this repository faster.
        """
        key = self._make_file_exists_cache_key(path, revision, base_commit_id)

        if cache.get(make_cache_key(key)) == "1":
            return True

        exists = self._get_file_exists_uncached(path, revision, base_commit_id, request)

        if exists:
            cache_memoize(key, lambda: "1")

        return exists
Example #33
    def get_file_exists(self,
                        path,
                        revision,
                        base_commit_id=None,
                        request=None):
        """Returns whether or not a file exists in the repository.

        If the repository is backed by a hosting service, this will go
        through that. Otherwise, it will attempt to directly access the
        repository.

        The result of this call will be cached, making future lookups
        of this path and revision on this repository faster.
        """
        if not isinstance(path, six.text_type):
            raise TypeError('"path" must be a Unicode string, not %s' %
                            type(path))

        if not isinstance(revision, six.text_type):
            raise TypeError('"revision" must be a Unicode string, not %s' %
                            type(revision))

        if (base_commit_id is not None
                and not isinstance(base_commit_id, six.text_type)):
            raise TypeError('"base_commit_id" must be a Unicode string, '
                            'not %s' % type(base_commit_id))

        key = self._make_file_exists_cache_key(path, revision, base_commit_id)

        if cache.get(make_cache_key(key)) == '1':
            return True

        exists = self._get_file_exists_uncached(path, revision, base_commit_id,
                                                request)

        if exists:
            cache_memoize(key, lambda: '1')

        return exists
Example #34
    def __init__(self, cache_key, normalize_cache_key=True):
        """Initialize the synchronizer.

        Args:
            cache_key (unicode):
                The base cache key used for all synchronization. This will be
                normalized by
                :py:func:`~djblets.cache.backends.make_cache_key`.

            normalize_cache_key (bool, optional):
                Whether to normalize the cache key. Normalizing it will
                ensure it can fit within the key length constraints, and
                reduces chances of colliding with keys from other services.
                This is enabled by default.
        """
        if normalize_cache_key:
            cache_key = make_cache_key(cache_key)

        self.cache_key = cache_key
        self.sync_gen = None

        self._fetch_or_create_sync_gen()
Example #35
    def __init__(self, cache_key, normalize_cache_key=True):
        """Initialize the synchronizer.

        Args:
            cache_key (unicode):
                The base cache key used for all synchronization. This will be
                normalized by
                :py:func:`~djblets.cache.backends.make_cache_key`.

            normalize_cache_key (bool, optional):
                Whether to normalize the cache key. Normalizing it will
                ensure it can fit within the key length constraints, and
                reduces chances of colliding with keys from other services.
                This is enabled by default.
        """
        if normalize_cache_key:
            cache_key = make_cache_key(cache_key)

        self.cache_key = cache_key
        self.sync_gen = None

        self._fetch_or_create_sync_gen()
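The _fetch_or_create_sync_gen() call above is not included in this listing. A hypothetical sketch, assuming it relies on cache.add() (which only stores the key if it does not already exist) to establish an initial generation number:

    def _fetch_or_create_sync_gen(self):
        """Fetch the current sync generation, creating it if missing.

        Hypothetical sketch: cache.add() is atomic on most backends, so
        processes racing to create the key still agree on one value.
        """
        cache.add(self.cache_key, 1)
        self.sync_gen = cache.get(self.cache_key)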
Example #36
    def get_file_exists(self,
                        path,
                        revision,
                        base_commit_id=None,
                        request=None,
                        context=None):
        """Return whether or not a file exists in the repository.

        If the repository is backed by a hosting service, this will go
        through that. Otherwise, it will attempt to directly access the
        repository.

        The result of this call will be cached, making future lookups
        of this path and revision on this repository faster.

        This will send the
        :py:data:`~reviewboard.scmtools.signals.checking_file_exists` signal
        before beginning a file fetch from the repository (if not cached), and
        the :py:data:`~reviewboard.scmtools.signals.checked_file_exists` signal
        after.

        Args:
            path (unicode):
                The path to the file in the repository.

            revision (unicode):
                The revision of the file to check.

            base_commit_id (unicode, optional):
                The ID of the commit containing the revision of the file
                to check. This is required for some types of repositories
                where the revision of a file and the ID of a commit differ.

                Deprecated:
                    4.0.5:
                    Callers should provide this in ``context`` instead.

            request (django.http.HttpRequest, optional):
                The current HTTP request from the client. This is used for
                logging purposes.

                Deprecated:
                    4.0.5:
                    Callers should provide this in ``context`` instead.

            context (reviewboard.scmtools.core.FileLookupContext, optional):
                Extra context used to help look up this file.

                This contains information about the HTTP request, requesting
                user, and parsed diff information, which may be useful as
                part of the repository lookup process.

                Version Added:
                    4.0.5

        Returns:
            bool:
            ``True`` if the file exists in the repository. ``False`` if it
            does not.

        Raises:
            TypeError:
                One or more of the provided arguments is an invalid type.
                Details are contained in the error message.
        """
        if not isinstance(path, str):
            raise TypeError('"path" must be a Unicode string, not %s' %
                            type(path))

        if not isinstance(revision, str):
            raise TypeError('"revision" must be a Unicode string, not %s' %
                            type(revision))

        if context is None:
            # If an explicit context isn't provided, create one. In a future
            # version, this will be required.
            context = FileLookupContext(request=request,
                                        base_commit_id=base_commit_id)

        key = self._make_file_exists_cache_key(
            path=path,
            revision=revision,
            base_commit_id=context.base_commit_id)

        if cache.get(make_cache_key(key)) == '1':
            return True

        exists = self._get_file_exists_uncached(path=path,
                                                revision=revision,
                                                context=context)

        if exists:
            cache_memoize(key, lambda: '1')

        return exists
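A short usage sketch for the 4.0.5+ code path above, building the context explicitly instead of passing the deprecated base_commit_id and request arguments (repository, request, and the path, revision, and commit values are hypothetical):

    context = FileLookupContext(request=request,
                                base_commit_id='abc1234')

    if repository.get_file_exists(path='src/main.py',
                                  revision='42',
                                  context=context):
        # A positive result is memoized as '1', so repeated lookups of this
        # path/revision pair never hit the repository again.
        pass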
Example #37
    def test_cache_memoize_iter_compressed(self):
        """Testing cache_memoize_iter with compression"""
        cache_key = 'abc123'
        data_yielded = []

        # This takes into account the size of the pickle data, and will
        # get us to exactly 2 chunks of data in cache, each.
        data1, pickled_data_1 = self._build_test_chunk_data(num_chunks=2)
        data2, pickled_data_2 = self._build_test_chunk_data(num_chunks=2)

        def cache_func():
            data_yielded.append('data1')
            yield data1

            data_yielded.append('data2')
            yield data2

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize_iter(cache_key, cache_func,
                                    compress_large_data=True)
        self.assertTrue(inspect.isgenerator(result))
        self.assertEqual(data_yielded, [])

        self.assertEqual(next(result), data1)
        self.assertEqual(data_yielded, ['data1'])

        self.assertEqual(next(result), data2)
        self.assertEqual(data_yielded, ['data1', 'data2'])

        with self.assertRaises(StopIteration):
            next(result)

        self.assertTrue(cache_func.spy.called)

        cache_key_main = make_cache_key(cache_key)
        cache_key_0 = make_cache_key('%s-0' % cache_key)

        self.assertTrue(cache_key_main in cache)
        self.assertTrue(cache_key_0 in cache)
        self.assertFalse(make_cache_key('%s-1' % cache_key) in cache)

        # Verify the contents of the stored data.
        self.assertEqual(cache.get(cache_key_main), '1')
        self.assertEqual(cache.get(cache_key_0)[0],
                         zlib.compress(pickled_data_1 + pickled_data_2))

        # Try fetching the data we stored.
        cache_func.spy.reset_calls()
        data_yielded = []

        result = cache_memoize_iter(cache_key, cache_func,
                                    compress_large_data=True)
        self.assertTrue(inspect.isgenerator(result))
        self.assertEqual(next(result), data1)
        self.assertEqual(next(result), data2)

        with self.assertRaises(StopIteration):
            next(result)

        self.assertEqual(data_yielded, [])
        self.assertFalse(cache_func.spy.called)
Example #38
    def test_cache_memoize_iter_uncompressed(self):
        """Testing cache_memoize_iter without compression"""
        cache_key = 'abc123'
        data_yielded = []

        data1, pickled_data_1 = self._build_test_chunk_data(num_chunks=2)
        data2, pickled_data_2 = self._build_test_chunk_data(num_chunks=2)

        def cache_func():
            data_yielded.append('data1')
            yield data1

            data_yielded.append('data2')
            yield data2

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize_iter(cache_key, cache_func,
                                    compress_large_data=False)
        self.assertTrue(inspect.isgenerator(result))
        self.assertEqual(data_yielded, [])

        self.assertEqual(next(result), data1)
        self.assertEqual(data_yielded, ['data1'])

        self.assertEqual(next(result), data2)
        self.assertEqual(data_yielded, ['data1', 'data2'])

        with self.assertRaises(StopIteration):
            next(result)

        self.assertTrue(cache_func.spy.called)

        cache_key_main = make_cache_key(cache_key)
        cache_key_0 = make_cache_key('%s-0' % cache_key)
        cache_key_1 = make_cache_key('%s-1' % cache_key)
        cache_key_2 = make_cache_key('%s-2' % cache_key)
        cache_key_3 = make_cache_key('%s-3' % cache_key)

        self.assertTrue(cache_key_main in cache)
        self.assertTrue(cache_key_0 in cache)
        self.assertTrue(cache_key_1 in cache)
        self.assertTrue(cache_key_2 in cache)
        self.assertTrue(cache_key_3 in cache)
        self.assertFalse(make_cache_key('%s-4' % cache_key) in cache)

        # Verify the contents of the stored data.
        stored_data = b''.join(cache.get(cache_key_0) +
                               cache.get(cache_key_1) +
                               cache.get(cache_key_2) +
                               cache.get(cache_key_3))
        self.assertEqual(cache.get(cache_key_main), '4')
        self.assertEqual(stored_data, pickled_data_1 + pickled_data_2)

        # Try fetching the data we stored.
        cache_func.spy.reset_calls()
        data_yielded = []

        result = cache_memoize_iter(cache_key, cache_func,
                                    compress_large_data=False)
        self.assertTrue(inspect.isgenerator(result))

        self.assertEqual(next(result), data1)
        self.assertEqual(next(result), data2)

        with self.assertRaises(StopIteration):
            next(result)

        self.assertEqual(data_yielded, [])
        self.assertFalse(cache_func.spy.called)
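A minimal usage sketch for cache_memoize_iter() outside the test harness, assuming hypothetical build_header(), build_body(), and process() helpers:

    def generate_report():
        # Each yielded value is pickled and appended to the chunked cache
        # entry as it is produced.
        yield build_header()
        yield build_body()

    # cache_memoize_iter() returns a generator. On a cache hit, the chunks
    # are replayed from cache and generate_report() is never called.
    for chunk in cache_memoize_iter('report-key', generate_report,
                                    compress_large_data=False):
        process(chunk)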
Example #39
    def get_file_exists(self,
                        path,
                        revision,
                        base_commit_id=None,
                        request=None):
        """Return whether or not a file exists in the repository.

        If the repository is backed by a hosting service, this will go
        through that. Otherwise, it will attempt to directly access the
        repository.

        The result of this call will be cached, making future lookups
        of this path and revision on this repository faster.

        This will send the
        :py:data:`~reviewboard.scmtools.signals.checking_file_exists` signal
        before beginning a file fetch from the repository (if not cached), and
        the :py:data:`~reviewboard.scmtools.signals.checked_file_exists` signal
        after.

        Args:
            path (unicode):
                The path to the file in the repository.

            revision (unicode):
                The revision of the file to check.

            base_commit_id (unicode, optional):
                The ID of the commit containing the revision of the file
                to check. This is required for some types of repositories
                where the revision of a file and the ID of a commit differ.

            request (django.http.HttpRequest, optional):
                The current HTTP request from the client. This is used for
                logging purposes.

        Returns:
            bool:
            ``True`` if the file exists in the repository. ``False`` if it
            does not.

        Raises:
            TypeError:
                One or more of the provided arguments is an invalid type.
                Details are contained in the error message.
        """
        if not isinstance(path, six.text_type):
            raise TypeError('"path" must be a Unicode string, not %s' %
                            type(path))

        if not isinstance(revision, six.text_type):
            raise TypeError('"revision" must be a Unicode string, not %s' %
                            type(revision))

        if (base_commit_id is not None
                and not isinstance(base_commit_id, six.text_type)):
            raise TypeError('"base_commit_id" must be a Unicode string, '
                            'not %s' % type(base_commit_id))

        key = self._make_file_exists_cache_key(path, revision, base_commit_id)

        if cache.get(make_cache_key(key)) == '1':
            return True

        exists = self._get_file_exists_uncached(path, revision, base_commit_id,
                                                request)

        if exists:
            cache_memoize(key, lambda: '1')

        return exists
Example #40
    def test_cache_memoize_iter_compressed(self):
        """Testing cache_memoize_iter with compression"""
        cache_key = 'abc123'
        data_yielded = []

        # This takes into account the size of the pickle data, and will
        # get us to exactly 2 chunks of data in cache, each.
        data1, pickled_data_1 = self._build_test_chunk_data(num_chunks=2)
        data2, pickled_data_2 = self._build_test_chunk_data(num_chunks=2)

        def cache_func():
            data_yielded.append('data1')
            yield data1

            data_yielded.append('data2')
            yield data2

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize_iter(cache_key,
                                    cache_func,
                                    compress_large_data=True)
        self.assertTrue(inspect.isgenerator(result))
        self.assertEqual(data_yielded, [])

        self.assertEqual(next(result), data1)
        self.assertEqual(data_yielded, ['data1'])

        self.assertEqual(next(result), data2)
        self.assertEqual(data_yielded, ['data1', 'data2'])

        with self.assertRaises(StopIteration):
            next(result)

        self.assertTrue(cache_func.spy.called)

        cache_key_main = make_cache_key(cache_key)
        cache_key_0 = make_cache_key('%s-0' % cache_key)

        self.assertTrue(cache_key_main in cache)
        self.assertTrue(cache_key_0 in cache)
        self.assertFalse(make_cache_key('%s-1' % cache_key) in cache)

        # Verify the contents of the stored data.
        self.assertEqual(cache.get(cache_key_main), '1')
        self.assertEqual(
            cache.get(cache_key_0)[0],
            zlib.compress(pickled_data_1 + pickled_data_2))

        # Try fetching the data we stored.
        cache_func.spy.reset_calls()
        data_yielded = []

        result = cache_memoize_iter(cache_key,
                                    cache_func,
                                    compress_large_data=True)
        self.assertTrue(inspect.isgenerator(result))
        self.assertEqual(next(result), data1)
        self.assertEqual(next(result), data2)

        with self.assertRaises(StopIteration):
            next(result)

        self.assertEqual(data_yielded, [])
        self.assertFalse(cache_func.spy.called)
Example #41
    def test_cache_memoize_iter_uncompressed(self):
        """Testing cache_memoize_iter without compression"""
        cache_key = 'abc123'
        data_yielded = []

        data1, pickled_data_1 = self._build_test_chunk_data(num_chunks=2)
        data2, pickled_data_2 = self._build_test_chunk_data(num_chunks=2)

        def cache_func():
            data_yielded.append('data1')
            yield data1

            data_yielded.append('data2')
            yield data2

        self.spy_on(cache_func, call_original=True)

        result = cache_memoize_iter(cache_key,
                                    cache_func,
                                    compress_large_data=False)
        self.assertTrue(inspect.isgenerator(result))
        self.assertEqual(data_yielded, [])

        self.assertEqual(next(result), data1)
        self.assertEqual(data_yielded, ['data1'])

        self.assertEqual(next(result), data2)
        self.assertEqual(data_yielded, ['data1', 'data2'])

        with self.assertRaises(StopIteration):
            next(result)

        self.assertTrue(cache_func.spy.called)

        cache_key_main = make_cache_key(cache_key)
        cache_key_0 = make_cache_key('%s-0' % cache_key)
        cache_key_1 = make_cache_key('%s-1' % cache_key)
        cache_key_2 = make_cache_key('%s-2' % cache_key)
        cache_key_3 = make_cache_key('%s-3' % cache_key)

        self.assertTrue(cache_key_main in cache)
        self.assertTrue(cache_key_0 in cache)
        self.assertTrue(cache_key_1 in cache)
        self.assertTrue(cache_key_2 in cache)
        self.assertTrue(cache_key_3 in cache)
        self.assertFalse(make_cache_key('%s-4' % cache_key) in cache)

        # Verify the contents of the stored data.
        stored_data = b''.join(
            cache.get(cache_key_0) + cache.get(cache_key_1) +
            cache.get(cache_key_2) + cache.get(cache_key_3))
        self.assertEqual(cache.get(cache_key_main), '4')
        self.assertEqual(stored_data, pickled_data_1 + pickled_data_2)

        # Try fetching the data we stored.
        cache_func.spy.reset_calls()
        data_yielded = []

        result = cache_memoize_iter(cache_key,
                                    cache_func,
                                    compress_large_data=False)
        self.assertTrue(inspect.isgenerator(result))

        self.assertEqual(next(result), data1)
        self.assertEqual(next(result), data2)

        with self.assertRaises(StopIteration):
            next(result)

        self.assertEqual(data_yielded, [])
        self.assertFalse(cache_func.spy.called)
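
Both cache_memoize_iter tests also verify the storage layout for chunked data: the main key holds the chunk count as a string, and each '<key>-<n>' entry holds a one-element list wrapping a chunk of the pickled (and, when requested, zlib-compressed) payload. A sketch of reassembling that payload by hand, purely to illustrate the layout (the make_cache_key import path is an assumption):

import zlib

from django.core.cache import cache
from djblets.cache.backend import make_cache_key


def read_chunked_payload(key, compressed=False):
    # The main key stores the number of chunks as a string (e.g. '4').
    chunk_count = int(cache.get(make_cache_key(key)))

    # Each chunk key wraps its bytes in a one-element list, so unwrap with
    # [0] and concatenate the chunks in order.
    payload = b''.join(
        cache.get(make_cache_key('%s-%d' % (key, i)))[0]
        for i in range(chunk_count)
    )

    if compressed:
        # Compression is applied to the whole payload before chunking, so
        # decompress only after all chunks are joined.
        payload = zlib.decompress(payload)

    # The result is the concatenated pickle stream of every yielded item.
    return payload
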
Exemplo n.º 42
0
    def _get_file_exists_uncached(self, path, revision, context):
        """Check for file existence, bypassing cache.

        This is called internally by :py:meth:`get_file_exists` if the file
        isn't already in the cache.

        This function also checks whether the file's contents are already in
        the cache from a previous fetch, and if so treats that as proof of
        existence instead of querying the repository.

        This will send the
        :py:data:`~reviewboard.scmtools.signals.checking_file_exists` signal
        before beginning a file fetch from the repository, and the
        :py:data:`~reviewboard.scmtools.signals.checked_file_exists` signal
        after.

        Args:
            path (unicode):
                The path to the file in the repository.

            revision (unicode):
                The revision of the file to check.

            context (reviewboard.scmtools.core.FileLookupContext):
                Extra context used to help look up this file.

                Version Added:
                    4.0.5

        Returns:
            bool:
            ``True`` if the file exists. ``False`` if it does not.
        """
        request = context.request
        base_commit_id = context.base_commit_id

        # First we check to see if we've fetched the file before. If so,
        # it's in there and we can just return that we have it.
        file_cache_key = make_cache_key(
            self._make_file_cache_key(path=path,
                                      revision=revision,
                                      base_commit_id=base_commit_id))

        if file_cache_key in cache:
            exists = True
        else:
            # We didn't have that in the cache, so check from the repository.
            checking_file_exists.send(sender=self,
                                      path=path,
                                      revision=revision,
                                      base_commit_id=base_commit_id,
                                      request=request,
                                      context=context)

            hosting_service = self.hosting_service

            if hosting_service:
                exists = hosting_service.get_file_exists(
                    self,
                    path,
                    revision,
                    base_commit_id=base_commit_id,
                    context=context)
            else:
                tool = self.get_scmtool()
                exists = tool.file_exists(path,
                                          revision,
                                          base_commit_id=base_commit_id,
                                          context=context)

            checked_file_exists.send(sender=self,
                                     path=path,
                                     revision=revision,
                                     base_commit_id=base_commit_id,
                                     request=request,
                                     exists=exists,
                                     context=context)

        return exists
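
The docstring above names the checking_file_exists and checked_file_exists signals sent around each repository lookup. A small listener sketch; the receiver names and logging are illustrative, but the signal arguments mirror those sent by the code above:

import logging

from reviewboard.scmtools.signals import (checked_file_exists,
                                          checking_file_exists)


def _on_checking_file_exists(sender, path, revision, **kwargs):
    # 'sender' is the Repository whose file is being checked.
    logging.debug('Checking existence of %s (rev %s) in %s',
                  path, revision, sender)


def _on_checked_file_exists(sender, path, revision, exists, **kwargs):
    logging.debug('Checked existence of %s (rev %s) in %s: exists=%s',
                  path, revision, sender, exists)


checking_file_exists.connect(_on_checking_file_exists)
checked_file_exists.connect(_on_checked_file_exists)
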
Exemplo n.º 43
0
    def get_commits(self, branch=None, start=None):
        """Return a list of commits.

        This will fetch a batch of commits from the repository for use in the
        API and New Review Request page.

        The resulting commits will be in order from newest to oldest, and
        each batch is limited to a fixed number of commits (usually 30,
        though this depends on the type of repository and its limitations).
        The results may also be limited to commits that exist on a given
        branch (if supported by the repository).

        This can be called multiple times in succession using the
        :py:attr:`Commit.parent` of the last entry as the ``start`` parameter
        in order to paginate through the history of commits in the repository.

        Args:
            branch (unicode, optional):
                The branch to limit commits to. This may not be supported by
                all repositories.

            start (unicode, optional):
                The commit to start at. If not provided, this will fetch
                commits starting from the most recent commit in the
                repository.

        Returns:
            list of reviewboard.scmtools.core.Commit:
            The retrieved commits.

        Raises:
            reviewboard.hostingsvcs.errors.HostingServiceError:
                The hosting service backing the repository encountered an
                error.

            reviewboard.scmtools.errors.SCMError:
                The repository tool encountered an error.

            NotImplementedError:
                Commits retrieval is not available for this type of repository.
        """
        hosting_service = self.hosting_service

        commits_kwargs = {
            'branch': branch,
            'start': start,
        }

        if hosting_service:
            commits_callable = \
                lambda: hosting_service.get_commits(self, **commits_kwargs)
        else:
            commits_callable = \
                lambda: self.get_scmtool().get_commits(**commits_kwargs)

        # We cache both the entire list for 'start' and each individual
        # commit. This allows us to reduce API load when people are looking
        # at the "new review request" page more frequently than they're
        # pushing code, and will usually save 1 API request when they go to
        # actually create a new review request.
        if branch and start:
            cache_period = self.COMMITS_CACHE_PERIOD_LONG
        else:
            cache_period = self.COMMITS_CACHE_PERIOD_SHORT

        cache_key = make_cache_key('repository-commits:%s:%s:%s' %
                                   (self.pk, branch, start))
        commits = cache_memoize(cache_key, commits_callable, cache_period)

        for commit in commits:
            cache.set(self.get_commit_cache_key(commit.id), commit,
                      self.COMMITS_CACHE_PERIOD_LONG)

        return commits
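
The docstring describes paginating by passing the Commit.parent of the last entry as the next start value. A short sketch of walking the full history that way (iter_all_commits is an illustrative helper name; repository is any Repository instance):

def iter_all_commits(repository, branch=None):
    # Walk the repository's history page by page, newest to oldest, using
    # the parent of the last commit in each batch as the next start point.
    start = None

    while True:
        commits = repository.get_commits(branch=branch, start=start)

        if not commits:
            break

        for commit in commits:
            yield commit

        start = commits[-1].parent

        if not start:
            # The oldest commit has no parent, so the history is exhausted.
            break
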
Exemplo n.º 44
0
def get_usage_count(request, increment=False, limit_type=RATE_LIMIT_LOGIN):
    """Return rate limit status for a given user or IP address.

    This function validates the input parameters and builds the cache key
    used to track the number of attempts made by the user or IP address.
    If ``increment`` is set, it stores a new attempt count under that key
    (or updates the existing one). It then returns the count, limit, and
    time left in the current rate limit window.

    Args:
        request (django.http.HttpRequest):
            The HTTP request from the client.

        increment (bool, optional):
            Whether the number of login attempts should be incremented.

        limit_type (int, optional):
            The type of rate limit to check.

    Returns:
        dict:
        A dictionary with the following keys, or ``None`` if rate limiting
        is disabled for this limit type:

        ``count`` (:py:class:`int`):
            The number of attempts made, counting the current one.

        ``limit`` (:py:class:`int`):
            The maximum number of attempts allowed per time window.

        ``time_left`` (:py:class:`int`):
            The number of seconds left before the current window ends.
    """
    try:
        try:
            settings_key, default_value, cache_key_prefix = \
                _RATE_LIMIT_DATA[limit_type]
        except KeyError:
            raise ValueError(
                '"limit_type" argument had unexpected value "%s"' % limit_type)

        limit_str = getattr(settings, settings_key, default_value)

        if limit_str is None:
            # If the setting is explicitly None, don't do any rate limiting.
            return None

        rate_limit = Rate.parse(limit_str)
    except ValueError:
        raise ImproperlyConfigured('LOGIN_LIMIT_RATE setting could not '
                                   'be parsed.')

    limit = rate_limit.count
    period = rate_limit.seconds

    # Determine user ID or IP address from HTTP request.
    user_id_or_ip = get_user_id_or_ip(request)

    # Prepare cache key to add or update to cache and determine remaining time
    # period left.
    cache_key = make_cache_key(
        '%s:%d/%d%s%s' %
        (cache_key_prefix, limit, period, user_id_or_ip, _get_window(period)))
    time_left = _get_window(period) - int(time.time())

    count = None

    if increment:
        try:
            count = cache.incr(cache_key)
        except ValueError:
            cache.add(cache_key, 1)

    if count is None:
        count = cache.get(cache_key, 0)

    if not increment:
        # Add one to the returned value, even if we aren't incrementing the
        # stored value. This makes it so that we're consistent in how many
        # tries per period regardless of whether we're incrementing now or
        # later.
        count += 1

    return {
        'count': count,
        'limit': limit,
        'time_left': time_left,
    }
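
Given the return value documented above (a dict with count, limit, and time_left, or None when the rate limit setting is None), a login view might gate attempts as sketched below; the helper name and threshold check are illustrative, not part of the original code:

def is_login_rate_limited(request):
    # Record this attempt and fetch the current usage for the client.
    usage = get_usage_count(request, increment=True,
                            limit_type=RATE_LIMIT_LOGIN)

    if usage is None:
        # The rate limit setting was explicitly None, so rate limiting is
        # disabled entirely.
        return False

    # Block the attempt once the client has used up its allowance for the
    # current time window.
    return usage['count'] > usage['limit']
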