def handle(self, *args, **options):
        api = slumber.API(settings.FUM_API_ENDPOINT, auth=_slumber_auth.TokenAuth(settings.FUM_ACCESS_TOKEN))
        cache = get_cache("user_mapping")
        user_cache = get_cache("users")

        c = 1
        while True:
            data = api.users.get(page=c)
            for user in data["results"]:
                if not "username" in user:
                    continue
                username = user["username"]
                email = user["email"]
                user_aliases = ["%s@futu" % username, email]
                cache.set("email-to-username-%s@futu" % username, username, self.KEY_EXPIRE)
                if "email" in user:
                    cache.set("email-to-username-%s" % email, username, self.KEY_EXPIRE)
                    cache.set("username-to-email-%s" % username, email, self.KEY_EXPIRE)
                for email_alias in user.get("email_aliases", []):
                    cache.set("email-to-username-%s" % email_alias, username, self.KEY_EXPIRE)
                    user_aliases.append(email_alias)
                user_cache.set("%s-aliases" % username, user_aliases, self.KEY_EXPIRE)
            c += 1
            if "next" not in data or data["next"] is None:
                break
Example #2
def get_cache_backend():
    if hasattr(settings, 'CACHES'):
        if "post_office" in settings.CACHES:
            return get_cache("post_office")
        else:
            return get_cache("default")
    return None
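A small illustration of the fallback above, with an assumed CACHES setting (not from the source): when a dedicated "post_office" alias is configured it is returned, otherwise the "default" cache is used.

# Hypothetical settings under which get_cache_backend() picks the dedicated alias;
# both backend paths are stock Django cache backends.
CACHES = {
    "default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
    "post_office": {"BACKEND": "django.core.cache.backends.db.DatabaseCache",
                    "LOCATION": "post_office_cache_table"},
}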
Example #3
    def _cache_support(self, expire_time, fragm_name, vary_on, lineno, cache_name, caller):
        try:
            expire_time = int(expire_time)
        except (ValueError, TypeError):
            raise TemplateSyntaxError('"%s" tag got a non-integer timeout '
                'value: %r' % (list(self.tags)[0], expire_time), lineno)

        if cache_name:
            try:
                cache = get_cache(cache_name)
            except InvalidCacheBackendError:
                raise TemplateSyntaxError('Invalid cache name specified for cache tag: {}'.format(cache_name))
        else:
            try:
                cache = get_cache('template_fragments')
            except InvalidCacheBackendError:
                cache = get_cache('default')

        cache_key = make_template_fragment_key(fragm_name, vary_on)

        value = cache.get(cache_key)
        if value is None:
            value = caller()
            cache.set(cache_key, force_text(value), expire_time)

        return value
Example #4
    def test_user_genres_in_recommendation_size_25(self):
        """
        [recommendation.api.GetRecommendation] At least 19 of the top genres in the size 25 recommendation
        """
        get_cache("default").clear()
        LogEntry.objects.all().delete()
        size = 25
        response = \
            self.client.get("/api/v2/recommend/%d/"
                            "00b65a359307654a7deee7c71a7563d2816d6b7e522377a66aaefe8848da5961/" % size)
        user_id = User.get_user_id_by_external_id("00b65a359307654a7deee7c71a7563d2816d6b7e522377a66aaefe8848da5961")

        user_genres = sorted(ItemGenre.genre_in(
            Item.get_item_by_id(item_id) for item_id in User.get_user_items(user_id)
        ).items(), key=lambda x: x[1], reverse=True)
        recommendation_genres = ItemGenre.genre_in(
            Item.get_item_by_external_id(item_eid) for item_eid in json.loads(response.content)["recommendations"]
        )
        measure = []
        for no, (genre, _) in enumerate(user_genres[:int(size)], start=1):
            if genre not in recommendation_genres:
                measure.append(no)
        assert len(measure) < 6, "Major genres failing by index: %s." \
                                 "\nUser %s" \
                                 "\nRecommendation %s" % (
            measure, user_genres, [ItemGenre.genre_in([Item.get_item_by_external_id(item)])
                                   for item in json.loads(response.content)["recommendations"]])
Example #5
    def handle(self, *args, **options):
        if options["all"]:
            if args:
                raise CommandError("cannot use --all with a cache name")

            args = sorted(settings.CACHES.keys())
        else:
            if not args:
                raise CommandError("specify at least one cache to ping")

            # Make sure all names given exist
            for name in args:
                get_cache(name)

        failed = False
        for name in args:
            cache = get_cache(name)
            try:
                "foo" in cache
            except:  # pylint: disable=bare-except
                result = "... unsuccessful"
                failed = True
            else:
                result = "... successful"
            self.stdout.write("Pinging " + name + result)

        if failed:
            raise CommandError("ping failed")
Example #6
    def test_valid_urls(self):
        test_data = [
            "example.com",
            "example.com:80",
            "example.com:80/foo",
            "http://example.com",
            "http://example.com/foo",
            "http://example.com:80",
            "http://example.com:80/foo",
            "https://example.com",
            "https://example.com/foo",
            "https://example.com:80",
            "https://example.com:80/foo",
            "ftp://example.com",
            "about:mozilla",
            "chrome://foo",
        ]
        for url in test_data:
            data = {
                "happy": True,
                "channel": u"stable",
                "version": u"1.1",
                "description": u"Great!",
                "product": u"Firefox OS",
                "platform": u"Firefox OS",
                "url": url,
                "locale": "en-US",
            }

            r = self.client.post(reverse("feedback-api"), content_type="application/json", data=json.dumps(data))
            eq_(r.status_code, 201, msg=("%s != 201 (%s)" % (r.status_code, url)))

            get_cache("default").clear()
Example #7
def create_modulestore_instance(engine, options):
    """
    This will return a new instance of a modulestore given an engine and options
    """
    class_ = load_function(engine)

    _options = {}
    _options.update(options)

    for key in FUNCTION_KEYS:
        if key in _options and isinstance(_options[key], basestring):
            _options[key] = load_function(_options[key])

    if HAS_REQUEST_CACHE:
        request_cache = RequestCache.get_request_cache()
    else:
        request_cache = None

    try:
        metadata_inheritance_cache = get_cache('mongo_metadata_inheritance')
    except InvalidCacheBackendError:
        metadata_inheritance_cache = get_cache('default')

    return class_(
        metadata_inheritance_cache_subsystem=metadata_inheritance_cache,
        request_cache=request_cache,
        modulestore_update_signal=Signal(providing_args=['modulestore', 'course_id', 'location']),
        **_options
    )
Example #8
    def wrapper(request, **kwargs):
        if 'HTTP_DEVICE_ID' not in request.META:
            result = dict(success=False, message=u'device id is not in the request header')
            return HttpResponse(dumps(result))
        if 'HTTP_SERVER_KEY' not in request.META:
            result = dict(success=False, message=u'server key is not in the request header')
            return HttpResponse(dumps(result))

        device_id = request.META['HTTP_DEVICE_ID']
        server_key = request.META['HTTP_SERVER_KEY']
        key_cache = get_cache('user_key')
        saved_server_key = key_cache.get(device_id)
        
        if server_key != saved_server_key:
            result = dict(success=False, message=u'server key does not match')
            return HttpResponse(dumps(result))
        
        user_session = get_cache('user_session')
        user_info = user_session.get(server_key)
        if not user_info:
            result = dict(success=False, message=u'need login')
            return HttpResponse(dumps(result))
        user_info = loads(user_info)
        if not user_info['is_admin']:
            result = dict(success=False, message=u'you are not an admin')
            return HttpResponse(dumps(result))
        return f(request, **kwargs)
Example #9
    def test_non_default_cache(self):
        # Re-initialize the session backend to make use of the overridden settings.
        self.session = self.backend()

        self.session.save()
        self.assertEqual(get_cache('default').get(self.session.cache_key), None)
        self.assertNotEqual(get_cache('sessions').get(self.session.cache_key), None)
Example #10
    def handle(self, *args, **options):
        if options["all"]:
            if args:
                raise CommandError("cannot use --all with a cache name")

            args = settings.CACHES.keys()
        else:
            if not args:
                raise CommandError("specify at least one cache to clear")

            # Make sure all names given exist
            for name in args:
                get_cache(name)

        method = options["method"]
        noop = options["noop"]

        action = "Clearing " if not noop else "Not clearing "

        if method == "conservative":
            for name in args:
                config = settings.CACHES[name]
                backend = config["BACKEND"]
                failed = False
                if backend.startswith("django_redis."):
                    try:
                        import django_redis
                        django_redis.get_redis_connection(name)
                        # Yes, we grab all exceptions. It is up to the
                        # user to diagnose how their configuration is
                        # wrong.
                    except:  # pylint: disable=bare-except
                        failed = True
                else:
                    failed = True

                if failed:
                    raise CommandError(
                        "clearcache does not know how to "
                        "conservatively clear a "
                        "cache with backend {0}".format(backend))

            for name in args:
                self.stdout.write(action + name)
                config = settings.CACHES[name]
                cache = get_cache(name)
                prefix = cache.key_prefix
                backend = config["BACKEND"]
                if backend.startswith("django_redis."):
                    import django_redis
                    con = django_redis.get_redis_connection(name)
                    keys = con.keys(prefix + ':*')
                    if keys and not noop:
                        con.delete(*keys)
        else:
            for name in args:
                self.stdout.write(action + name)
                cache = get_cache(name)
                if not noop:
                    cache.clear()
Example #11
def get_avatar_image(user, size):
    """
    Returns avatar image from cache (if available) or downloads it.
    """

    cache_key = u'avatar-img-{0}-{1}'.format(
        user.username,
        size
    )

    # Try using avatar specific cache if available
    try:
        cache = get_cache('avatar')
    except InvalidCacheBackendError:
        cache = get_cache('default')

    image = cache.get(cache_key)
    if image is None:
        try:
            image = download_avatar_image(user, size)
            cache.set(cache_key, image)
        except IOError as error:
            weblate.logger.error(
                'Failed to fetch avatar for %s: %s',
                user.username,
                str(error)
            )
            fallback = os.path.join(
                appsettings.WEB_ROOT,
                'media/weblate-{0}.png'.format(size)
            )
            with open(fallback, 'rb') as handle:
                return handle.read()

    return image
Example #12
def get_avatar_image(user, size):
    """
    Returns avatar image from cache (if available) or downloads it.
    """

    cache_key = u'avatar-img-{0}-{1}'.format(
        user.username,
        size
    )

    # Try using avatar specific cache if available
    try:
        cache = get_cache('avatar')
    except InvalidCacheBackendError:
        cache = get_cache('default')

    image = cache.get(cache_key)
    if image is None:
        try:
            image = download_avatar_image(user, size)
            cache.set(cache_key, image)
        except IOError as error:
            weblate.logger.error(
                'Failed to fetch avatar for %s: %s',
                user.username,
                str(error)
            )
            return get_fallback_avatar(size)

    return image
Example #13
 def setUp(self):
     self.dirname = tempfile.mkdtemp()
     self.cache = get_cache('file://%s?max_entries=30' % self.dirname)
     self.prefix_cache = get_cache('file://%s' % self.dirname, key_prefix='cacheprefix')
     self.v2_cache = get_cache('file://%s' % self.dirname, version=2)
     self.custom_key_cache = get_cache('file://%s' % self.dirname, key_func=custom_key_func)
     self.custom_key_cache2 = get_cache('file://%s' % self.dirname, key_func='regressiontests.cache.tests.custom_key_func')
Example #14
def create_modulestore_instance(engine, doc_store_config, options, i18n_service=None):
    """
    This will return a new instance of a modulestore given an engine and options
    """
    class_ = load_function(engine)

    _options = {}
    _options.update(options)

    for key in FUNCTION_KEYS:
        if key in _options and isinstance(_options[key], basestring):
            _options[key] = load_function(_options[key])

    if HAS_REQUEST_CACHE:
        request_cache = RequestCache.get_request_cache()
    else:
        request_cache = None

    try:
        metadata_inheritance_cache = get_cache('mongo_metadata_inheritance')
    except InvalidCacheBackendError:
        metadata_inheritance_cache = get_cache('default')

    return class_(
        metadata_inheritance_cache_subsystem=metadata_inheritance_cache,
        request_cache=request_cache,
        modulestore_update_signal=Signal(providing_args=['modulestore', 'course_id', 'location']),
        xblock_mixins=getattr(settings, 'XBLOCK_MIXINS', ()),
        xblock_select=getattr(settings, 'XBLOCK_SELECT_FUNCTION', None),
        doc_store_config=doc_store_config,
        i18n_service=i18n_service or ModuleI18nService(),
        **_options
    )
Example #15
def create_modulestore_instance(
    engine,
    content_store,
    doc_store_config,
    options,
    i18n_service=None,
    fs_service=None,
    user_service=None,
    signal_handler=None,
):
    """
    This will return a new instance of a modulestore given an engine and options
    """
    class_ = load_function(engine)

    _options = {}
    _options.update(options)

    FUNCTION_KEYS = ["render_template"]
    for key in FUNCTION_KEYS:
        if key in _options and isinstance(_options[key], basestring):
            _options[key] = load_function(_options[key])

    if HAS_REQUEST_CACHE:
        request_cache = RequestCache.get_request_cache()
    else:
        request_cache = None

    try:
        metadata_inheritance_cache = get_cache("mongo_metadata_inheritance")
    except InvalidCacheBackendError:
        metadata_inheritance_cache = get_cache("default")

    if issubclass(class_, MixedModuleStore):
        _options["create_modulestore_instance"] = create_modulestore_instance

    if issubclass(class_, BranchSettingMixin):
        _options["branch_setting_func"] = _get_modulestore_branch_setting

    if HAS_USER_SERVICE and not user_service:
        xb_user_service = DjangoXBlockUserService(get_current_user())
    else:
        xb_user_service = None

    if "read_preference" in doc_store_config:
        doc_store_config["read_preference"] = getattr(ReadPreference, doc_store_config["read_preference"])

    return class_(
        contentstore=content_store,
        metadata_inheritance_cache_subsystem=metadata_inheritance_cache,
        request_cache=request_cache,
        xblock_mixins=getattr(settings, "XBLOCK_MIXINS", ()),
        xblock_select=getattr(settings, "XBLOCK_SELECT_FUNCTION", None),
        doc_store_config=doc_store_config,
        i18n_service=i18n_service or ModuleI18nService(),
        fs_service=fs_service or xblock.reference.plugins.FSService(),
        user_service=user_service or xb_user_service,
        signal_handler=signal_handler or SignalHandler(class_),
        **_options
    )
Example #16
    def instrument_course_progress_render(self, course_width, enable_ccx, queries, reads, xblocks):
        """
        Renders the progress page, instrumenting Mongo reads and SQL queries.
        """
        self.setup_course(course_width, enable_ccx)

        # Switch to published-only mode to simulate the LMS
        with self.settings(MODULESTORE_BRANCH='published-only'):
            # Clear all caches before measuring
            for cache in settings.CACHES:
                get_cache(cache).clear()

            # Refill the metadata inheritance cache
            modulestore().get_course(self.course.id, depth=None)

            # We clear the request cache to simulate a new request in the LMS.
            RequestCache.clear_request_cache()

            # Reset the list of provider classes, so that our django settings changes
            # can actually take effect.
            OverrideFieldData.provider_classes = None

            with self.assertNumQueries(queries):
                with check_mongo_calls(reads):
                    with check_sum_of_calls(XBlock, ['__init__'], xblocks, xblocks, include_arguments=False):
                        self.grade_course(self.course)
Example #17
    def wrapper(request, user_id, **kwargs):
        if 'HTTP_DEVICE_ID' not in request.META:
            result = dict(success=False, message=u'device id is not in the request header')
            return HttpResponse(dumps(result))
        if 'HTTP_SERVER_KEY' not in request.META:
            result = dict(success=False, message=u'server key is not in the request header')
            return HttpResponse(dumps(result))

        device_id = request.META['HTTP_DEVICE_ID']
        server_key = request.META['HTTP_SERVER_KEY']
        key_cache = get_cache('user_key')
        saved_server_key = key_cache.get(device_id)
        
        if server_key != saved_server_key:
            result = dict(success=False, message=u'server key does not match')
            return HttpResponse(dumps(result))

        user_session = get_cache('user_session')
        user_info = user_session.get(server_key)
        if not user_info:
            result = dict(success=False, message=u'need login')
            return HttpResponse(dumps(result))

        saved_user_id = get_user_id(request.META)
        user_id = int(user_id)
        if user_id != saved_user_id:
            result = dict(success=False, message=u'user id does not match with saved user id')
            return HttpResponse(dumps(result))
        return f(request, user_id=user_id, **kwargs)
Example #18
    def test_valid_urls(self):
        test_data = [
            'example.com',
            'example.com:80',
            'example.com:80/foo',
            'http://example.com',
            'http://example.com/foo',
            'http://example.com:80',
            'http://example.com:80/foo',
            'https://example.com',
            'https://example.com/foo',
            'https://example.com:80',
            'https://example.com:80/foo',
            'ftp://example.com',
            'about:mozilla',
            'chrome://foo'
        ]
        for url in test_data:
            data = {
                'happy': True,
                'channel': u'stable',
                'version': u'1.1',
                'description': u'Great!',
                'product': u'Firefox OS',
                'platform': u'Firefox OS',
                'url': url,
                'locale': 'en-US',
            }

            r = self.client.post(reverse('feedback-api'), data)
            eq_(r.status_code, 201,
                msg=('%s != 201 (%s)' % (r.status_code, url)))

            get_cache('default').clear()
Example #19
 def test_recording_get_cache(self):
     self.assertEqual(len(self.panel.calls), 0)
     default_cache = cache.get_cache(cache.DEFAULT_CACHE_ALIAS)
     second_cache = cache.get_cache('second')
     default_cache.set('foo', 'bar')
     second_cache.get('foo')
     self.assertEqual(len(self.panel.calls), 2)
Example #20
    def get_most_federated_entities(self, maxlength=TOP_LENGTH, cache_expire=None):
        entities = None
        if cache_expire:
            cache = get_cache("default")
            entities = cache.get("most_federated_entities")

        if not entities or len(entities) < maxlength:
            # Entities with count how many federations belongs to, and sorted by most first
            ob_entities = Entity.objects.all().annotate(federationslength=Count("federations")).order_by("-federationslength")
            ob_entities = ob_entities.prefetch_related('types', 'federations')
            ob_entities = ob_entities[:maxlength]

            entities = []
            for entity in ob_entities:
                entities.append({
                    'entityid': entity.entityid,
                    'name': entity.name,
                    'absolute_url': entity.get_absolute_url(),
                    'types': [unicode(item) for item in entity.types.all()],
                    'federations': [(unicode(item.name), item.get_absolute_url()) for item in entity.federations.all()],
                })

        if cache_expire:
            cache = get_cache("default")
            cache.set("most_federated_entities", entities, cache_expire)

        return entities[:maxlength]
Example #21
 def test_multiple_connection_pool_connections(self):
     pool._connection_pools = {}
     c1 = get_cache('redis_cache.cache://127.0.0.1:6379?db=15')
     self.assertEqual(len(pool._connection_pools), 1)
     c2 = get_cache('redis_cache.cache://127.0.0.1:6379?db=14')
     self.assertEqual(len(pool._connection_pools), 2)
     c3 = get_cache('redis_cache.cache://127.0.0.1:6379?db=15')
     self.assertEqual(len(pool._connection_pools), 2)
Example #22
 def teardown_class(cls, *args, **kwargs):
     """
     Remove all test elements from the db
     """
     Item.objects.all().delete()
     User.objects.all().delete()
     Matrix.objects.all().delete()
     get_cache("default").clear()
Example #23
 def test_multiple_connection_pool_connections(self):
     pool._connection_pools = {}
     c1 = get_cache('redis_cache.cache://%s:%s?db=15' % (server.host, server.port))
     self.assertEqual(len(pool._connection_pools), 1)
     c2 = get_cache('redis_cache.cache://%s:%s?db=14' % (server.host, server.port))
     self.assertEqual(len(pool._connection_pools), 2)
     c3 = get_cache('redis_cache.cache://%s:%s?db=15' % (server.host, server.port))
     self.assertEqual(len(pool._connection_pools), 2)
Example #24
 def get_cache(self, backend=None):
     if VERSION[0] == 1 and VERSION[1] < 3:
         cache = get_cache(backend or 'redis_cache.cache://127.0.0.1:6379?db=15')
     elif VERSION[0] == 1 and VERSION[1] >= 3 and VERSION[1] <= 7:
         cache = get_cache(backend or 'default')
     else:
         cache = get_cache(backend or 'redis_cache.cache.CacheClass', LOCATION='127.0.0.1:6379')
     return cache
Example #25
def get_pages_cache():
    try:
        return get_cache('pages')
    except:
        try:
            return get_cache('default')
        except:
            return None
Example #26
 def setUp(self):
     # Spaces are used in the table name to ensure quoting/escaping is working
     self._table_name = 'test cache table'
     management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False)
     self.cache = get_cache('db://%s?max_entries=30' % self._table_name)
     self.prefix_cache = get_cache('db://%s' % self._table_name, key_prefix='cacheprefix')
     self.v2_cache = get_cache('db://%s' % self._table_name, version=2)
     self.custom_key_cache = get_cache('db://%s' % self._table_name, key_func=custom_key_func)
     self.custom_key_cache2 = get_cache('db://%s' % self._table_name, key_func='regressiontests.cache.tests.custom_key_func')
Example #27
 def clearCaches(self):
     # flush everything on redis
     import redis
     self.redis_client = redis.StrictRedis(host=settings.REDIS_CONFIGURATION["host"],
                                           port=settings.REDIS_CONFIGURATION["port"],
                                           db=settings.REDIS_CONFIGURATION["db"])
     self.redis_client.flushall()
     # To be safe, also clear django cache
     get_cache("default").clear()
Example #28
    def test_non_default_cache(self):
        # The session backend initialized in setUp() of SessionTestsMixin
        # does not make use of the overridden settings. Hence, we initialize
        # it here again.
        session = self.backend()

        session.save()
        self.assertEqual(get_cache('default').get(session.cache_key), None)
        self.assertNotEqual(get_cache('sessions').get(session.cache_key), None)
Example #29
    def setUp(self):
        self.client = APIClient()
        self.profile = ProfileFactory()

        client_oauth2 = create_oauth2_client(self.profile.user)
        self.client_authenticated = APIClient()
        authenticate_client(self.client_authenticated, client_oauth2, self.profile.user.username, 'hostel77')

        get_cache(extensions_api_settings.DEFAULT_USE_CACHE).clear()
Example #30
 def setUp(self):
     self.testbed = testbed.Testbed()
     self.testbed.activate()
     self.testbed.init_memcache_stub()
     self.cache = cache
     random_prefix = ''.join(random.choice(string.ascii_letters) for x in range(10))
     self.prefix_cache = get_cache(self.backend_name, KEY_PREFIX=random_prefix)
     self.v2_cache = cache
     self.custom_key_cache = get_cache(self.backend_name, KEY_FUNCTION=custom_key_func)
Example #31
 def test_create_cache(self):
     response = self.client.get('/test/')
     key = custom_caching.generate_cache_key('/test/', ['anon'])
     filecache = cache.get_cache('filecache')
     cached = filecache.get(key)
     self.assertTrue(cached)
Example #32
from django.core.cache import get_cache
from django.conf import settings
from django.utils import importlib
from django.core.exceptions import ImproperlyConfigured
from rosetta.conf import settings as rosetta_settings
import hashlib
import time
import six
import django


cache = get_cache(rosetta_settings.ROSETTA_CACHE_NAME)


class BaseRosettaStorage(object):
    def __init__(self, request):
        self.request = request

    def get(self, key, default=None):
        raise NotImplementedError

    def set(self, key, val):
        raise NotImplementedError

    def has(self, key):
        raise NotImplementedError

    def delete(self, key):
        raise NotImplementedError

Example #33
These overwrite
- user
- browser
- username
- emulate_legacy
- first_name
- last_name
"""

from django.contrib.auth.models import User as DjangoUser
from login_frontend.models import Browser
from django.utils.functional import SimpleLazyObject
from django_statsd.clients import statsd as sd
from django.core.cache import get_cache

dcache = get_cache("default")

__all__ = ["add_static_timestamp", "add_browser", "add_user", "session_info"]


@sd.timer("login_frontend.context_processors.add_static_timestamp")
def add_static_timestamp(request):
    """ Adds unique number used for static files. """
    #TODO: determine automatically
    return {"static_timestamp": 1}


@sd.timer("login_frontend.context_processors.add_browser")
def add_browser(request):
    """ Adds "browser" to context, if available. """
    ret = {}
Example #34
 def clear_cache(self):
     cache_name = getattr(settings, "SOLO_CACHE", settings.SOLO_CACHE)
     if cache_name:
         cache = get_cache(cache_name)
         cache_key = self.get_cache_key()
         cache.delete(cache_key)
Example #35
from casexml.apps.case.xml import V2
from corehq.apps.callcenter.indicator_sets import AAROHI_MOTHER_FORM, CallCenterIndicators, \
    cache_key, CachedIndicators
from corehq.apps.callcenter.utils import sync_call_center_user_case
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.callcenter.tests.sql_fixture import load_data, load_custom_data, clear_data
from corehq.apps.groups.models import Group
from corehq.apps.hqcase.utils import submit_case_blocks, get_case_by_domain_hq_user_id
from corehq.apps.users.models import CommCareUser
from django.test import TestCase

from django.core import cache

CASE_TYPE = 'cc_flw'

locmem_cache = cache.get_cache('django.core.cache.backends.locmem.LocMemCache')


def create_domain_and_user(domain_name, username):
    domain = create_domain(domain_name)
    user = CommCareUser.create(domain_name, username, '***')

    domain.call_center_config.enabled = True
    domain.call_center_config.case_owner_id = user.user_id
    domain.call_center_config.case_type = CASE_TYPE
    domain.save()

    sync_call_center_user_case(user)
    return domain, user

Example #36
 def test_default_cache(self):
     self.session.save()
     self.assertNotEqual(
         get_cache('default').get(self.session.cache_key), None)
Example #37
def is_ratelimited(request,
                   group=None,
                   fn=None,
                   key=None,
                   rate=None,
                   method=ALL,
                   increment=False):
    if not key:
        raise ImproperlyConfigured('Ratelimit key must be specified')
    if group is None:
        if hasattr(fn, '__self__'):
            parts = fn.__module__, fn.__self__.__class__.__name__, fn.__name__
        else:
            parts = (fn.__module__, fn.__name__)
        group = '.'.join(parts)

    if not getattr(settings, 'RATELIMIT_ENABLE', True):
        request.limited = False
        return False

    if not _method_match(request, method):
        return False

    old_limited = getattr(request, 'limited', False)

    if callable(rate):
        rate = rate(group, request)

    if rate is None:
        request.limited = old_limited
        return False

    limit, period = _split_rate(rate)

    cache_name = getattr(settings, 'RATELIMIT_USE_CACHE', 'default')
    cache = get_cache(cache_name)

    if callable(key):
        value = key(group, request)
    elif key in _SIMPLE_KEYS:
        value = _SIMPLE_KEYS[key](request)
    elif ':' in key:
        accessor, k = key.split(':', 1)
        if accessor not in _ACCESSOR_KEYS:
            raise ImproperlyConfigured('Unknown ratelimit key: %s' % key)
        value = _ACCESSOR_KEYS[accessor](request, k)
    elif '.' in key:
        mod, attr = key.rsplit('.', 1)
        keyfn = getattr(import_module(mod), attr)
        value = keyfn(group, request)
    else:
        raise ImproperlyConfigured('Could not understand ratelimit key: %s' %
                                   key)

    cache_key = _make_cache_key(group, rate, value, method)
    cache.add(cache_key, 0)
    if increment:
        count = cache.incr(cache_key)
    else:
        count = cache.get(cache_key)
    limited = count > limit
    if increment:
        request.limited = old_limited or limited
    return limited
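A minimal usage sketch for the helper above; the view name, the 'ip' key and the '10/m' rate are illustrative values and assume the surrounding module's _SIMPLE_KEYS and _split_rate accept them.

from django.http import HttpResponse

def my_view(request):
    # Hypothetical view: refuse more than 10 requests per minute per client IP.
    if is_ratelimited(request, group='myapp.my_view', key='ip',
                      rate='10/m', increment=True):
        return HttpResponse('Too many requests', status=429)
    return HttpResponse('OK')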
Example #38
from django.core.files.base import ContentFile
from django.core.cache import get_cache

from .ActivityManager import ActivityManager
from ..models import Verb, Statement, StatementRef, StatementAttachment, SubStatement, Agent 

att_cache = get_cache('attachment_cache')

class StatementManager():
    def __init__(self, stmt_data, auth_info):
        # auth_info contains define, endpoint, user, and request authority
        if self.__class__.__name__ == 'StatementManager':
            # Full statement is for a statement only, same with authority
            self.set_authority(auth_info, stmt_data)
        self.populate(auth_info, stmt_data)

    def set_authority(self, auth_info, stmt_data):
        # Could still have no authority in stmt if HTTP_AUTH and OAUTH are disabled
        # Have to set auth in kwarg dict for Agent auth object to be saved in statement
        # Also have to save auth in full_statement kwargs for when returning exact statements
        # Set object auth as well for when creating other objects in a substatement
        if auth_info['agent']:
            stmt_data['authority'] = auth_info['agent']
            stmt_data['full_statement']['authority'] = auth_info['agent'].to_dict()
        # If no auth in request, look in statement
        else:
            # If authority is given in statement
            if 'authority' in stmt_data:
                auth_info['agent'] = stmt_data['authority'] = Agent.objects.retrieve_or_create(**stmt_data['full_statement']['authority'])[0]
            # Empty auth in request or statement
            else:
Example #39
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        # Don't import django.core.management if it isn't needed.
        from django.core.management import call_command

        test_database_name = self._get_test_db_name()

        if verbosity >= 1:
            test_db_repr = ''
            if verbosity >= 2:
                test_db_repr = " ('%s')" % test_database_name
            print "Creating test database for alias '%s'%s..." % (
                self.connection.alias, test_db_repr)

        self._create_test_db(verbosity, autoclobber)

        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name

        # Confirm the feature set of the test database
        self.connection.features.confirm()

        # Report syncdb messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded)
        call_command('syncdb',
                     verbosity=max(verbosity - 1, 0),
                     interactive=False,
                     database=self.connection.alias,
                     load_initial_data=False)

        # We need to then do a flush to ensure that any data installed by
        # custom SQL has been removed. The only test data should come from
        # test fixtures, or autogenerated from post_syncdb triggers.
        # This has the side effect of loading initial data (which was
        # intentionally skipped in the syncdb).
        call_command('flush',
                     verbosity=max(verbosity - 1, 0),
                     interactive=False,
                     database=self.connection.alias)

        from django.core.cache import get_cache
        from django.core.cache.backends.db import BaseDatabaseCache
        for cache_alias in settings.CACHES:
            cache = get_cache(cache_alias)
            if isinstance(cache, BaseDatabaseCache):
                from django.db import router
                if router.allow_syncdb(self.connection.alias,
                                       cache.cache_model_class):
                    call_command('createcachetable',
                                 cache._table,
                                 database=self.connection.alias)

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Example #40
# distributedlock settings
import distributedlock
from django.core.cache import get_cache

distributedlock.DEFAULT_TIMEOUT = 60 * 5
distributedlock.DEFAULT_MEMCACHED_CLIENT = get_cache('default')

# import after distributedlock settings
from distributedlock import distributedlock, LockNotAcquiredError
Example #41
# -*- coding: utf-8 -*-
import math

from django.core.cache import get_cache

from locations.models import Location
from places_core.helpers import round_to_ten

from models import MapPointer

cache = get_cache('default')


def round_to(n, precision):
    """ Round floating point number to selected precision.
    """
    correction = 0.5 if n >= 0 else -0.5
    return int(n / precision + correction) * precision


def round_to_05(n):
    """ Round float to nearest 0.5.
    """
    return round_to(n, 0.5)
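Worked values for the rounding helpers above, computed directly from the definitions:

# round_to(7.26, 0.5) -> 7.5    round_to(7.2, 0.5) -> 7.0
# round_to_05(-1.3)   -> -1.5   (int() truncates toward zero: int(-3.1) == -3)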


def get_boundaries(ne, sw):
    """ A little helper to decipher GET params into tuples.
    """
    return {
        'ne': tuple(float(x) for x in ne.split('x')),
Example #42
def write_redis_for_log(task_id, log, error=False):
    cache = get_cache('deploy',
                      **{'LOCATION': CACHES['deploy']['LOCATION'] + '2'})
    i2(cache, task_id, log)
Example #43
def pre_delete_uncache(sender, instance, **kwargs):
    "General post-delete handler for removing cache for model instances."
    cache = get_cache(settings.DATA_CACHE)
    cache.delete(instance_cache_key(instance))
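A hedged wiring sketch (the model is illustrative, not from the source): a handler like the one above is typically connected to Django's pre_delete signal so cached instances are evicted when rows are removed.

from django.db.models.signals import pre_delete

# Hypothetical registration; MyModel stands in for any model whose instances are cached.
pre_delete.connect(pre_delete_uncache, sender=MyModel)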
Example #44
from django.core.cache import get_cache
from django.core.cache.backends.base import InvalidCacheBackendError

try:
    cache = get_cache('debug-panel')
except InvalidCacheBackendError:
    from django.core.cache import cache
Example #45
 def __init__(self):
     self.no_cache_found = False
     try:
         self.cache = get_cache('course_structure_cache')
     except InvalidCacheBackendError:
         self.no_cache_found = True
Example #46
        from .models import StateLog
        StateLog.pending_objects.commit_for_object(instance)


class SimpleBackend(BaseBackend):

    @staticmethod
    def setup_model(model):
        pass

    @staticmethod
    def pre_transition_callback(sender, **kwargs):
        pass

    @staticmethod
    def post_transition_callback(sender, instance, name, source, target, **kwargs):
        from .models import StateLog
        return _pre_transition_callback(sender, instance, name, source, target, StateLog.objects, **kwargs)


if settings.DJANGO_FSM_LOG_STORAGE_METHOD == 'django_fsm_log.backends.CachedBackend':
    try:
        from django.core.cache import caches
    except ImportError:
        from django.core.cache import get_cache  # Deprecated, removed in 1.9.
        cache = get_cache(settings.DJANGO_FSM_LOG_CACHE_BACKEND)
    else:
        cache = caches[settings.DJANGO_FSM_LOG_CACHE_BACKEND]
else:
    cache = None
Example #47
 def test_close(self):
     cache = get_cache('default')
     cache.set("f", "1")
     cache.close()
Example #48
 def get_cache(self, backend=None):
     if VERSION[0] == 1 and VERSION[1] < 3:
         cache = get_cache(backend or 'redis_cache.cache://127.0.0.1:6379?db=15')
     elif VERSION[0] == 1 and VERSION[1] >= 3:
         cache = get_cache(backend or 'default')
     return cache
Example #49
 def setUp(self):
     self._orig_setting = redis_cache.cache.DJANGO_REDIS_IGNORE_EXCEPTIONS
     redis_cache.cache.DJANGO_REDIS_IGNORE_EXCEPTIONS = True
     self.cache = get_cache('doesnotexist')
Example #50
 def get_cache(self):
     try:
         return cache.get_cache(settings.REPORT_CACHE)
     except ValueError:
         return cache.cache
Example #51
 def cache(self):
     if not getattr(self, '_cache', None):
         from django.conf import settings
         self._cache = get_cache(settings.IMAGEKIT_CACHE_BACKEND)
     return self._cache
Example #52
"""
Django Model baseclass for database-backed configuration.
"""
from django.db import models
from django.contrib.auth.models import User
from django.core.cache import get_cache, InvalidCacheBackendError

try:
    cache = get_cache('configuration')  # pylint: disable=invalid-name
except InvalidCacheBackendError:
    from django.core.cache import cache


class ConfigurationModel(models.Model):
    """
    Abstract base class for model-based configuration

    Properties:
        cache_timeout (int): The number of seconds that this configuration
            should be cached
    """
    class Meta(object):  # pylint: disable=missing-docstring
        abstract = True

    # The number of seconds
    cache_timeout = 600

    change_date = models.DateTimeField(auto_now_add=True)
    changed_by = models.ForeignKey(User,
                                   editable=False,
                                   null=True,
Example #53
 def __init__(self):
     self.cache_alias = getattr(settings, 'CACHEBACK_CACHE_ALIAS',
                                DEFAULT_CACHE_ALIAS)
     self.cache = get_cache(self.cache_alias)
Example #54
def home(request):

    try:
        cache = get_cache('default')

        if not cache.get('flickr_main'):
            # from flickr photos get one tagged "main"(should have only one)
            main = flickr.api.walk(user_id=flickr.user_id,
                                   tags="main",
                                   tag_mode='all',
                                   sort="date-posted-desc")

            cache.set('flickr_main', list(iter(main)), 3600)

        main = cache.get('flickr_main')

        if not cache.get('flickr_favorites'):
            # from flickr get all photo elements tagged "favorite"
            favorites = flickr.api.walk(user_id=flickr.user_id,
                                        tags="favorite, -main",
                                        tag_mode='all',
                                        sort="date-posted-desc")
            cache.set('flickr_favorites', list(iter(favorites)), 3600)

        favorites = cache.get('flickr_favorites')

        images = []
        sizes = ['496x374', '296x224', '296x146', '194x146', '194x224']

        main_photo = main[0]
        images.append((flickr.get_url(main_photo,
                                      'b'), sizes[0], main_photo.get('title')))

        sizes = sizes[1:]

        j = 0
        # create an image file from every favorite
        for i, favorite in enumerate(favorites):

            if main_photo.get('id') != favorite.get('id'):
                images.append((flickr.get_url(favorite,
                                              'b'), sizes[j % len(sizes)],
                               favorite.get('title')))
                j += 1
    except Exception as e:
        import traceback
        traceback.print_exc()
        images = []

    # get recent blog posts
    recent = Post.objects.filter(is_active=True).order_by('-created_on')[:5]

    # get upcoming events
    upcoming = Event.objects.filter(is_active=True,
                                    date__gte=datetime.today())[:5]

    videos = Video.objects.filter(is_active=True)[:3]

    context = dict(images=images,
                   recent=recent,
                   upcoming=upcoming,
                   videos=videos)
    return render(request, 'public/home.html', context)
Example #55
#coding:utf-8
import logging
import md5
try:
    from collections import OrderedDict
except ImportError:
    from ordereddict import OrderedDict

from time import time
from django.core.cache import get_cache

logger = logging.getLogger(__name__)

try:
    cache = get_cache('redis')
except ImportError as e:
    logger.warn(u'Error loading the redis cache: [%s]; falling back to the default (in-memory) cache', e)
    cache = get_cache('default')


def cache_decorator(expiration=3 * 60):
    def wrapper(func):
        def news(*args, **kwargs):
            unique_str = repr((func, args, kwargs))
            m = md5.new(unique_str)
            key = m.hexdigest()
            value = cache.get(key)
            if value:
                return value
            else:
                value = func(*args, **kwargs)