コード例 #1
0
def test_sqla_cache(app, db, blueprint, request):
    """End-to-end check that SQLAlchemyStorage cooperates with Flask-Caching."""
    flask_cache = Cache(app)

    class OAuth(OAuthConsumerMixin, db.Model):
        pass

    blueprint.storage = SQLAlchemyStorage(OAuth, db.session, cache=flask_cache)

    db.create_all()

    def teardown():
        db.session.remove()
        db.drop_all()

    request.addfinalizer(teardown)

    with record_queries(db.engine) as recorded:
        with app.test_client() as client:
            # seed the OAuth state into the session before making the request
            with client.session_transaction() as sess:
                sess["test-service_oauth_state"] = "random-string"
            # complete the OAuth authorization step
            response = client.get(
                "/login/test-service/authorized?code=secret-code&state=random-string",
                base_url="https://a.b.c",
            )
            # the client should have been redirected afterwards
            assert response.status_code == 302
            assert response.headers["Location"] == "https://a.b.c/oauth_done"

    assert len(recorded) == 2

    token_expected = {"access_token": "foobar", "token_type": "bearer", "scope": [""]}

    # the token must have been persisted to the database
    rows = OAuth.query.all()
    assert len(rows) == 1
    stored = rows[0]
    assert stored.provider == "test-service"
    assert isinstance(stored.token, dict)
    assert stored.token == token_expected

    # saving the token should have invalidated any cached copy
    assert flask_cache.get("flask_dance_token|test-service|None") is None

    # the first token access goes to the database ...
    with record_queries(db.engine) as recorded:
        assert blueprint.token == token_expected
    assert len(recorded) == 1

    # ... and populates the cache
    assert flask_cache.get("flask_dance_token|test-service|None") == token_expected

    # subsequent accesses are served from the cache with no SQL at all
    with record_queries(db.engine) as recorded:
        assert blueprint.token == token_expected
    assert len(recorded) == 0
コード例 #2
0
ファイル: test_cache.py プロジェクト: ktosiu/flask-caching
 def test_21_redis_url_custom_db(self):
     """A redis:// URL with an explicit database number selects that database."""
     settings = {
         'CACHE_TYPE': 'redis',
         'CACHE_REDIS_URL': 'redis://localhost:6379/2',
     }
     c = Cache()
     c.init_app(self.app, config=settings)
     conn = (self.app.extensions['cache'][c]
             ._client.connection_pool.get_connection('foo'))
     assert conn.db == 2
コード例 #3
0
ファイル: test_cache.py プロジェクト: sh4nks/flask-caching
def test_cache_delete_many_ignored(app):
    """With CACHE_IGNORE_ERRORS set, delete_many still removes the keys that exist."""
    c = Cache(config={"CACHE_TYPE": "simple", "CACHE_IGNORE_ERRORS": True})
    c.init_app(app)

    c.set("hi", "hello")
    assert c.get("hi") == "hello"
    # "ho" was never stored; the miss is ignored and "hi" is still deleted
    c.delete_many("ho", "hi")
    assert c.get("hi") is None
コード例 #4
0
ファイル: test_cache.py プロジェクト: ktosiu/flask-caching
 def test_20_redis_url_default_db(self):
     """A redis:// URL without a database number falls back to database 0."""
     settings = {
         'CACHE_TYPE': 'redis',
         'CACHE_REDIS_URL': 'redis://localhost:6379',
     }
     c = Cache()
     c.init_app(self.app, config=settings)
     from werkzeug.contrib.cache import RedisCache
     assert isinstance(self.app.extensions['cache'][c], RedisCache)
     conn = (self.app.extensions['cache'][c]
             ._client.connection_pool.get_connection('foo'))
     assert conn.db == 0
コード例 #5
0
ファイル: test_cache.py プロジェクト: ktosiu/flask-caching
    def test_06a_memoize(self):
        """An explicit memoize timeout overrides CACHE_DEFAULT_TIMEOUT."""
        self.app.config['CACHE_DEFAULT_TIMEOUT'] = 1
        self.cache = Cache(self.app)

        with self.app.test_request_context():
            @self.cache.memoize(50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 100000)

            first = big_foo(5, 2)
            time.sleep(2)
            # still cached: the explicit 50-second timeout wins over the 1s default
            assert big_foo(5, 2) == first
コード例 #6
0
 def set(self, *args, **kwargs):
     """Proxy to the internal cache object; a no-op when caching is disabled."""
     if self.cache_mode:
         return Cache.set(self, *args, **kwargs)
     # caching disabled via config: do nothing
     return None
コード例 #7
0
ファイル: test_cache.py プロジェクト: ktosiu/flask-caching
 def test_21_init_app_sets_app_attribute(self):
     """init_app must remember the application it was initialised with."""
     c = Cache()
     c.init_app(self.app)
     assert c.app == self.app
コード例 #8
0
ファイル: test_cache.py プロジェクト: ktosiu/flask-caching
 def test_19_dict_config_both(self):
     """The config passed to init_app overrides the constructor's config."""
     c = Cache(config={'CACHE_TYPE': 'null'})
     c.init_app(self.app, config={'CACHE_TYPE': 'simple'})
     from werkzeug.contrib.cache import SimpleCache
     assert isinstance(self.app.extensions['cache'][c], SimpleCache)
コード例 #9
0
ファイル: test_cache.py プロジェクト: ktosiu/flask-caching
    def test_17_dict_config(self):
        """A config dict given to the constructor is applied on init_app."""
        c = Cache(config={'CACHE_TYPE': 'simple'})
        c.init_app(self.app)
        assert c.config['CACHE_TYPE'] == 'simple'
コード例 #10
0
ファイル: test_cache.py プロジェクト: ktosiu/flask-caching
 def setUp(self):
     """Create a fresh, debug-enabled Flask app with a Cache before each test."""
     flask_app = Flask(__name__, template_folder=os.path.dirname(__file__))
     flask_app.debug = True
     self._set_app_config(flask_app)
     self.cache = Cache(flask_app)
     self.app = flask_app
コード例 #11
0
ファイル: test_cache.py プロジェクト: ktosiu/flask-caching
class CacheTestCase(unittest.TestCase):
    """Exercise Flask-Caching's ``Cache`` extension end to end.

    ``setUp`` builds a fresh debug Flask app with a ``simple`` cache backend
    (subclasses can switch backends by overriding ``_set_app_config``).  The
    tests cover set/add/delete, cached views, memoization (including kwargs,
    instance/class methods and forced updates), cache-key properties,
    config-dict handling, and the Jinja2 ``{% cache %}`` extension.  Several
    tests rely on real ``time.sleep`` calls to observe timeout expiry.
    """

    def _set_app_config(self, app):
        # Hook for subclasses: override to test a different cache backend.
        app.config['CACHE_TYPE'] = 'simple'

    def setUp(self):
        app = Flask(__name__, template_folder=os.path.dirname(__file__))
        app.debug = True
        self._set_app_config(app)
        self.cache = Cache(app)
        self.app = app

    def tearDown(self):
        self.app = None
        self.cache = None
        # NOTE(review): self.tc is never assigned in setUp — presumably a
        # leftover from an older version of this suite; confirm before removing.
        self.tc = None

    def test_00_set(self):
        self.cache.set('hi', 'hello')

        assert self.cache.get('hi') == 'hello'

    def test_01_add(self):
        # add() only stores when the key is absent; the second call is a no-op.
        self.cache.add('hi', 'hello')
        assert self.cache.get('hi') == 'hello'

        self.cache.add('hi', 'foobar')
        assert self.cache.get('hi') == 'hello'

    def test_02_delete(self):
        self.cache.set('hi', 'hello')
        self.cache.delete('hi')
        assert self.cache.get('hi') is None

    def test_03_cached_view(self):
        # A view cached for 5s serves the same body within the window and a
        # new one after expiry.
        @self.app.route('/')
        @self.cache.cached(5)
        def cached_view():
            return str(time.time())

        tc = self.app.test_client()

        rv = tc.get('/')
        the_time = rv.data.decode('utf-8')

        time.sleep(2)

        rv = tc.get('/')

        assert the_time == rv.data.decode('utf-8')

        time.sleep(5)

        rv = tc.get('/')
        assert the_time != rv.data.decode('utf-8')

    def test_04_cached_view_unless(self):
        # unless=True disables caching for that view; unless=False keeps it.
        @self.app.route('/a')
        @self.cache.cached(5, unless=lambda: True)
        def non_cached_view():
            return str(time.time())

        @self.app.route('/b')
        @self.cache.cached(5, unless=lambda: False)
        def cached_view():
            return str(time.time())

        tc = self.app.test_client()

        rv = tc.get('/a')
        the_time = rv.data.decode('utf-8')

        time.sleep(1)

        rv = tc.get('/a')
        assert the_time != rv.data.decode('utf-8')

        rv = tc.get('/b')
        the_time = rv.data.decode('utf-8')

        time.sleep(1)
        rv = tc.get('/b')

        assert the_time == rv.data.decode('utf-8')

    def test_05_cached_function(self):
        with self.app.test_request_context():
            @self.cache.cached(2, key_prefix='MyBits')
            def get_random_bits():
                return [random.randrange(0, 2) for i in range(50)]

            my_list = get_random_bits()
            his_list = get_random_bits()

            assert my_list == his_list

            time.sleep(4)

            his_list = get_random_bits()

            assert my_list != his_list

    def test_06_memoize(self):
        with self.app.test_request_context():
            @self.cache.memoize(5)
            def big_foo(a, b):
                return a + b + random.randrange(0, 100000)

            result = big_foo(5, 2)

            time.sleep(1)

            assert big_foo(5, 2) == result

            # different arguments produce an independent cache entry
            result2 = big_foo(5, 3)
            assert result2 != result

            time.sleep(6)

            assert big_foo(5, 2) != result

            time.sleep(1)

            assert big_foo(5, 3) != result2

    def test_06a_memoize(self):
        # The explicit memoize timeout (50s) must win over the 1s default.
        self.app.config['CACHE_DEFAULT_TIMEOUT'] = 1
        self.cache = Cache(self.app)

        with self.app.test_request_context():
            @self.cache.memoize(50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 100000)

            result = big_foo(5, 2)

            time.sleep(2)

            assert big_foo(5, 2) == result

    def test_06b_memoize_annotated(self):
        # Function annotations must not break cache-key generation (py3 only).
        if sys.version_info >= (3, 0):
            with self.app.test_request_context():
                @self.cache.memoize(50)
                def big_foo_annotated(a, b):
                    return a + b + random.randrange(0, 100000)
                big_foo_annotated.__annotations__ = {'a': int, 'b': int, 'return': int}

                result = big_foo_annotated(5, 2)

                time.sleep(2)

                assert big_foo_annotated(5, 2) == result

    def test_06c_memoize_utf8_arguments(self):
        # Non-ASCII byte-string arguments must hash cleanly into a cache key.
        with self.app.test_request_context():
            @self.cache.memoize()
            def big_foo(a, b):
                return "{}-{}".format(a, b)

            big_foo("æøå", "chars")

    def test_06d_memoize_unicode_arguments(self):
        with self.app.test_request_context():
            @self.cache.memoize()
            def big_foo(a, b):
                return u"{}-{}".format(a, b)

            big_foo(u"æøå", "chars")

    def test_07_delete_memoize(self):
        # delete_memoized with no arguments clears every entry of the function.
        with self.app.test_request_context():
            @self.cache.memoize(5)
            def big_foo(a, b):
                return a + b + random.randrange(0, 100000)

            result = big_foo(5, 2)
            result2 = big_foo(5, 3)

            time.sleep(1)

            assert big_foo(5, 2) == result
            assert big_foo(5, 2) == result
            assert big_foo(5, 3) != result
            assert big_foo(5, 3) == result2

            self.cache.delete_memoized(big_foo)

            assert big_foo(5, 2) != result
            assert big_foo(5, 3) != result2

    def test_07b_delete_memoized_verhash(self):
        # delete_memoized_verhash invalidates by dropping the function's
        # version hash; the next call regenerates it.
        with self.app.test_request_context():
            @self.cache.memoize(5)
            def big_foo(a, b):
                return a + b + random.randrange(0, 100000)

            result = big_foo(5, 2)
            result2 = big_foo(5, 3)

            time.sleep(1)

            assert big_foo(5, 2) == result
            assert big_foo(5, 2) == result
            assert big_foo(5, 3) != result
            assert big_foo(5, 3) == result2

            self.cache.delete_memoized_verhash(big_foo)

            _fname, _fname_instance = function_namespace(big_foo)
            version_key = self.cache._memvname(_fname)
            assert self.cache.get(version_key) is None

            assert big_foo(5, 2) != result
            assert big_foo(5, 3) != result2

            assert self.cache.get(version_key) is not None

    def test_07c_delete_memoized_annotated(self):
            with self.app.test_request_context():
                @self.cache.memoize(5)
                def big_foo_annotated(a, b):
                    return a + b + random.randrange(0, 100000)

                big_foo_annotated.__annotations__ = {'a': int, 'b': int, 'return': int}

                result = big_foo_annotated(5, 2)
                result2 = big_foo_annotated(5, 3)

                time.sleep(1)

                assert big_foo_annotated(5, 2) == result
                assert big_foo_annotated(5, 2) == result
                assert big_foo_annotated(5, 3) != result
                assert big_foo_annotated(5, 3) == result2

                self.cache.delete_memoized_verhash(big_foo_annotated)

                _fname, _fname_instance = function_namespace(big_foo_annotated)
                version_key = self.cache._memvname(_fname)
                assert self.cache.get(version_key) is None

                assert big_foo_annotated(5, 2) != result
                assert big_foo_annotated(5, 3) != result2

                assert self.cache.get(version_key) is not None

    def test_08_delete_memoize(self):
        # delete_memoized with arguments deletes only that one entry.
        with self.app.test_request_context():
            @self.cache.memoize()
            def big_foo(a, b):
                return a + b + random.randrange(0, 100000)

            result_a = big_foo(5, 1)
            result_b = big_foo(5, 2)

            assert big_foo(5, 1) == result_a
            assert big_foo(5, 2) == result_b
            self.cache.delete_memoized(big_foo, 5, 2)

            assert big_foo(5, 1) == result_a
            assert big_foo(5, 2) != result_b

            # Clean up the (5, 1) and (5, 2) entries so they cannot collide
            # with a following run on a shared backend such as memcached.
            self.cache.delete_memoized(big_foo, 5, 2)
            self.cache.delete_memoized(big_foo, 5, 1)

    def test_09_args_memoize(self):
        # List arguments must also be hashable into distinct cache keys.
        with self.app.test_request_context():
            @self.cache.memoize()
            def big_foo(a, b):
                return sum(a) + sum(b) + random.randrange(0, 100000)

            result_a = big_foo([5, 3, 2], [1])
            result_b = big_foo([3, 3], [3, 1])

            assert big_foo([5, 3, 2], [1]) == result_a
            assert big_foo([3, 3], [3, 1]) == result_b

            self.cache.delete_memoized(big_foo, [5, 3, 2], [1])

            assert big_foo([5, 3, 2], [1]) != result_a
            assert big_foo([3, 3], [3, 1]) == result_b

            # Clean up so leftover entries cannot collide with a following
            # run on a shared backend such as memcached.
            self.cache.delete_memoized(big_foo, [5, 3, 2], [1])
            # NOTE(review): this deletes the key for ([3, 3], [1]) but
            # result_b was stored under ([3, 3], [3, 1]) — likely a typo in
            # the cleanup; confirm upstream.
            self.cache.delete_memoized(big_foo, [3, 3], [1])

    def test_10_kwargs_memoize(self):
        with self.app.test_request_context():
            @self.cache.memoize()
            def big_foo(a, b=None):
                return a + sum(b.values()) + random.randrange(0, 100000)

            result_a = big_foo(1, dict(one=1, two=2))
            result_b = big_foo(5, dict(three=3, four=4))

            assert big_foo(1, dict(one=1, two=2)) == result_a
            assert big_foo(5, dict(three=3, four=4)) == result_b

            self.cache.delete_memoized(big_foo, 1, dict(one=1, two=2))

            assert big_foo(1, dict(one=1, two=2)) != result_a
            assert big_foo(5, dict(three=3, four=4)) == result_b

    def test_10a_kwargonly_memoize(self):
        with self.app.test_request_context():
            @self.cache.memoize()
            def big_foo(a=None):
                if a is None:
                    a = 0
                return a + random.random()

            result_a = big_foo()
            result_b = big_foo(5)

            assert big_foo() == result_a
            assert big_foo() < 1
            assert big_foo(5) == result_b
            assert big_foo(5) >= 5 and big_foo(5) < 6

    def test_10a_arg_kwarg_memoize(self):
        # The same call expressed positionally, by keyword, or via defaults
        # must map onto a single cache entry.
        with self.app.test_request_context():
            @self.cache.memoize()
            def f(a, b, c=1):
                return a + b + c + random.randrange(0, 100000)

            assert f(1, 2) == f(1, 2, c=1)
            assert f(1, 2) == f(1, 2, 1)
            assert f(1, 2) == f(1, 2)
            assert f(1, 2, 3) != f(1, 2)
            with self.assertRaises(TypeError):
                f(1)

    def test_10b_classarg_memoize(self):
        # The cache key is derived from the instance identity, so mutating
        # adder.value does NOT invalidate the memoized result.
        @self.cache.memoize()
        def bar(a):
            return a.value + random.random()

        class Adder(object):
            def __init__(self, value):
                self.value = value

        adder = Adder(15)
        adder2 = Adder(20)

        y = bar(adder)
        z = bar(adder2)

        assert y != z
        assert bar(adder) == y
        assert bar(adder) != z
        adder.value = 14
        assert bar(adder) == y
        assert bar(adder) != z

        assert bar(adder) != bar(adder2)
        assert bar(adder2) == z

    def test_10c_classfunc_memoize(self):
        class Adder(object):
            def __init__(self, initial):
                self.initial = initial

            @self.cache.memoize()
            def add(self, b):
                return self.initial + b

        adder1 = Adder(1)
        adder2 = Adder(2)

        x = adder1.add(3)
        assert adder1.add(3) == x
        assert adder1.add(4) != x
        assert adder1.add(3) != adder2.add(3)

    def test_10d_classfunc_memoize_delete(self):
        # Deleting via a bound method clears only that instance's entries;
        # deleting via the unbound method clears every instance's entries.
        with self.app.test_request_context():
            class Adder(object):
                def __init__(self, initial):
                    self.initial = initial

                @self.cache.memoize()
                def add(self, b):
                    return self.initial + b + random.random()

            adder1 = Adder(1)
            adder2 = Adder(2)

            a1 = adder1.add(3)
            a2 = adder2.add(3)

            assert a1 != a2
            assert adder1.add(3) == a1
            assert adder2.add(3) == a2

            self.cache.delete_memoized(adder1.add)

            a3 = adder1.add(3)
            a4 = adder2.add(3)

            self.assertNotEqual(a1, a3)
            assert a1 != a3
            self.assertEqual(a2, a4)

            self.cache.delete_memoized(Adder.add)

            a5 = adder1.add(3)
            a6 = adder2.add(3)

            self.assertNotEqual(a5, a6)
            self.assertNotEqual(a3, a5)
            self.assertNotEqual(a4, a6)

    def test_10e_delete_memoize_classmethod(self):
        with self.app.test_request_context():
            class Mock(object):
                @classmethod
                @self.cache.memoize(5)
                def big_foo(cls, a, b):
                    return a + b + random.randrange(0, 100000)

            result = Mock.big_foo(5, 2)
            result2 = Mock.big_foo(5, 3)

            time.sleep(1)

            assert Mock.big_foo(5, 2) == result
            assert Mock.big_foo(5, 2) == result
            assert Mock.big_foo(5, 3) != result
            assert Mock.big_foo(5, 3) == result2

            self.cache.delete_memoized(Mock.big_foo)

            assert Mock.big_foo(5, 2) != result
            assert Mock.big_foo(5, 3) != result2

    def test_11_cache_key_property(self):
        # Cached views expose make_cache_key so callers can look entries up.
        @self.app.route('/')
        @self.cache.cached(5)
        def cached_view():
            return str(time.time())

        assert hasattr(cached_view, "make_cache_key")
        assert callable(cached_view.make_cache_key)

        tc = self.app.test_client()

        rv = tc.get('/')
        the_time = rv.data.decode('utf-8')

        with self.app.test_request_context():
            cache_data = self.cache.get(cached_view.make_cache_key())
            assert the_time == cache_data

    def test_12_make_cache_key_function_property(self):
        @self.app.route('/<foo>/<bar>')
        @self.cache.memoize(5)
        def cached_view(foo, bar):
            return str(time.time())

        assert hasattr(cached_view, "make_cache_key")
        assert callable(cached_view.make_cache_key)

        tc = self.app.test_client()

        rv = tc.get('/a/b')
        the_time = rv.data.decode('utf-8')

        cache_key = cached_view.make_cache_key(cached_view.uncached, foo=u"a", bar=u"b")
        cache_data = self.cache.get(cache_key)
        assert the_time == cache_data

        different_key = cached_view.make_cache_key(cached_view.uncached, foo=u"b", bar=u"a")
        different_data = self.cache.get(different_key)
        assert the_time != different_data

    def test_13_cache_timeout_property(self):
        # cache_timeout is a read-write attribute on the decorated view.
        @self.app.route('/')
        @self.cache.memoize(5)
        def cached_view1():
            return str(time.time())

        @self.app.route('/<foo>/<bar>')
        @self.cache.memoize(10)
        def cached_view2(foo, bar):
            return str(time.time())

        assert hasattr(cached_view1, "cache_timeout")
        assert hasattr(cached_view2, "cache_timeout")
        assert cached_view1.cache_timeout == 5
        assert cached_view2.cache_timeout == 10

        # test that this is a read-write property
        cached_view1.cache_timeout = 15
        cached_view2.cache_timeout = 30

        assert cached_view1.cache_timeout == 15
        assert cached_view2.cache_timeout == 30

        tc = self.app.test_client()

        rv1 = tc.get('/')
        time1 = rv1.data.decode('utf-8')
        time.sleep(1)
        rv2 = tc.get('/a/b')
        time2 = rv2.data.decode('utf-8')

        # VIEW1
        # it's been 1 second, cache is still active
        assert time1 == tc.get('/').data.decode('utf-8')
        time.sleep(16)
        # it's been >15 seconds, cache is not still active
        assert time1 != tc.get('/').data.decode('utf-8')

        # VIEW2
        # it's been >17 seconds, cache is still active
        self.assertEqual(time2, tc.get('/a/b').data.decode('utf-8'))
        time.sleep(30)
        # it's been >30 seconds, cache is not still active
        assert time2 != tc.get('/a/b').data.decode('utf-8')

    def test_14_memoized_multiple_arg_kwarg_calls(self):
        # The mutable default lists are intentional here: the test exercises
        # how kwargs and defaults are folded into one canonical cache key.
        with self.app.test_request_context():
            @self.cache.memoize()
            def big_foo(a, b, c=[1, 1], d=[1, 1]):
                return sum(a) + sum(b) + sum(c) + sum(d) + random.randrange(0, 100000)

            result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3])

            assert big_foo([5, 3, 2], [1], d=[3, 3], c=[3, 3]) == result_a
            assert big_foo(b=[1], a=[5, 3, 2], c=[3, 3], d=[3, 3]) == result_a
            assert big_foo([5, 3, 2], [1], [3, 3], [3, 3]) == result_a

    def test_15_memoize_multiple_arg_kwarg_delete(self):
        # delete_memoized must hit the same canonical key regardless of how
        # the arguments are spelled (positional vs. keyword).
        with self.app.test_request_context():
            @self.cache.memoize()
            def big_foo(a, b, c=[1, 1], d=[1, 1]):
                return sum(a) + sum(b) + sum(c) + sum(d) + random.randrange(0, 100000)

            result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3])
            self.cache.delete_memoized(big_foo, [5, 3, 2], [1], [3, 3], [3, 3])
            result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3])
            assert result_a != result_b

            self.cache.delete_memoized(big_foo, [5, 3, 2], b=[1], c=[3, 3], d=[3, 3])
            result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3])
            assert result_a != result_b

            self.cache.delete_memoized(big_foo, [5, 3, 2], [1], c=[3, 3], d=[3, 3])
            result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3])
            assert result_a != result_b

            self.cache.delete_memoized(big_foo, [5, 3, 2], b=[1], c=[3, 3], d=[3, 3])
            result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3])
            assert result_a != result_b

            self.cache.delete_memoized(big_foo, [5, 3, 2], [1], c=[3, 3], d=[3, 3])
            result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3])
            assert result_a != result_b

            self.cache.delete_memoized(big_foo, [5, 3, 2], [1], [3, 3], [3, 3])
            result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3])
            assert result_a != result_b

    def test_16_memoize_kwargs_to_args(self):
        # _memoize_kwargs_to_args normalizes every calling convention to the
        # same positional tuple.
        with self.app.test_request_context():
            def big_foo(a, b, c=None, d=None):
                return sum(a) + sum(b) + random.randrange(0, 100000)

            expected = (1, 2, 'foo', 'bar')

            args, kwargs = self.cache._memoize_kwargs_to_args(big_foo, 1, 2, 'foo', 'bar')
            assert (args == expected)
            args, kwargs = self.cache._memoize_kwargs_to_args(big_foo, 2, 'foo', 'bar', a=1)
            assert (args == expected)
            args, kwargs = self.cache._memoize_kwargs_to_args(big_foo, a=1, b=2, c='foo', d='bar')
            assert (args == expected)
            args, kwargs = self.cache._memoize_kwargs_to_args(big_foo, d='bar', b=2, a=1, c='foo')
            assert (args == expected)
            args, kwargs = self.cache._memoize_kwargs_to_args(big_foo, 1, 2, d='bar', c='foo')
            assert (args == expected)

    def test_17_dict_config(self):
        cache = Cache(config={'CACHE_TYPE': 'simple'})
        cache.init_app(self.app)

        assert cache.config['CACHE_TYPE'] == 'simple'

    def test_18_dict_config_initapp(self):
        cache = Cache()
        cache.init_app(self.app, config={'CACHE_TYPE': 'simple'})
        from werkzeug.contrib.cache import SimpleCache
        assert isinstance(self.app.extensions['cache'][cache], SimpleCache)

    def test_19_dict_config_both(self):
        # The init_app config wins over the constructor config.
        cache = Cache(config={'CACHE_TYPE': 'null'})
        cache.init_app(self.app, config={'CACHE_TYPE': 'simple'})
        from werkzeug.contrib.cache import SimpleCache
        assert isinstance(self.app.extensions['cache'][cache], SimpleCache)

    def test_20_jinja2ext_cache(self):
        # Exercises the {% cache %} Jinja2 tag rendered from
        # test_template.html (in this directory) and from inline strings.
        somevar = ''.join([random.choice(string.ascii_letters) for x in range(6)])

        testkeys = [
            make_template_fragment_key("fragment1"),
            make_template_fragment_key("fragment1", vary_on=["key1"]),
            make_template_fragment_key("fragment1", vary_on=["key1", somevar]),
        ]
        delkey = make_template_fragment_key("fragment2")

        with self.app.test_request_context():
            #: Test if elements are cached
            render_template("test_template.html", somevar=somevar, timeout=60)
            for k in testkeys:
                assert self.cache.get(k) == somevar
            assert self.cache.get(delkey) == somevar

            #: Test timeout=del to delete key
            render_template("test_template.html", somevar=somevar, timeout="del")
            for k in testkeys:
                assert self.cache.get(k) == somevar
            assert self.cache.get(delkey) is None

            #: Test rendering templates from strings
            output = render_template_string(
                """{% cache 60, "fragment3" %}{{somevar}}{% endcache %}""",
                somevar=somevar
            )
            assert self.cache.get(make_template_fragment_key("fragment3")) == somevar
            assert output == somevar

            #: Test backwards compatibility
            output = render_template_string(
                """{% cache 30 %}{{somevar}}{% endcache %}""",
                somevar=somevar)
            assert self.cache.get(make_template_fragment_key("None1")) == somevar
            assert output == somevar

            output = render_template_string(
                """{% cache 30, "fragment4", "fragment5"%}{{somevar}}{% endcache %}""",
                somevar=somevar)
            k = make_template_fragment_key("fragment4", vary_on=["fragment5"])
            assert self.cache.get(k) == somevar
            assert output == somevar

    def test_21_init_app_sets_app_attribute(self):
        cache = Cache()
        cache.init_app(self.app)
        assert cache.app == self.app

    def test_22_cached_view_forced_update(self):
        # forced_update=True must bypass a still-valid cache entry and
        # replace it with the fresh response.
        forced_update = False

        def forced_update_fn():
            return forced_update

        @self.app.route('/a')
        @self.cache.cached(5, forced_update=lambda: forced_update)
        def view():
            return str(time.time())

        tc = self.app.test_client()

        rv = tc.get('/a')
        the_time = rv.data.decode('utf-8')
        time.sleep(1)
        rv = tc.get('/a')
        assert the_time == rv.data.decode('utf-8')

        forced_update = True
        rv = tc.get('/a')
        new_time = rv.data.decode('utf-8')
        assert new_time != the_time

        forced_update = False
        time.sleep(1)
        rv = tc.get('/a')
        assert new_time == rv.data.decode('utf-8')

    def test_23_memoize_forced_update(self):
        with self.app.test_request_context():
            forced_update = False

            @self.cache.memoize(5, forced_update=lambda: forced_update)
            def big_foo(a, b):
                return a + b + random.randrange(0, 100000)

            result = big_foo(5, 2)
            time.sleep(1)
            assert big_foo(5, 2) == result

            forced_update = True
            new_result = big_foo(5, 2)
            assert new_result != result

            forced_update = False
            time.sleep(1)
            assert big_foo(5, 2) == new_result

    def test_24_generate_cache_key_from_different_view(self):
        # make_cache_key for the same argument must be identical no matter
        # which view computes it.
        @self.app.route('/cake/<flavor>')
        @self.cache.cached()
        def view_cake(flavor):
            # What's the cache key for apple cake? thanks for making me hungry
            view_cake.cake_cache_key = view_cake.make_cache_key('apple')
            # print view_cake.cake_cache_key

            return str(time.time())
        view_cake.cake_cache_key = ''

        @self.app.route('/pie/<flavor>')
        @self.cache.cached()
        def view_pie(flavor):
            # What's the cache key for apple cake?
            view_pie.cake_cache_key = view_cake.make_cache_key('apple')
            # print view_pie.cake_cache_key

            return str(time.time())
        view_pie.cake_cache_key = ''

        tc = self.app.test_client()
        rv1 = tc.get('/cake/chocolate')
        rv2 = tc.get('/pie/chocolate')

        # print view_cake.cake_cache_key
        # print view_pie.cake_cache_key
        assert view_cake.cake_cache_key == view_pie.cake_cache_key
コード例 #12
0
import dash_core_components as dcc
import datetime
from flask_caching import Cache
import numpy as np
import os
import pandas as pd
import time


# NOTE(review): `dash` itself is not imported in this snippet (only
# dash_core_components) — confirm the full file imports it.
app = dash.Dash(__name__)

# Cache backed by Redis; REDIS_URL can point elsewhere in deployment.
CACHE_CONFIG = {
    # try 'filesystem' if you don't want to setup redis
    'CACHE_TYPE': 'redis',
    'CACHE_REDIS_URL': os.environ.get('REDIS_URL', 'localhost:6379')
}
cache = Cache()
cache.init_app(app.server, config=CACHE_CONFIG)

# Scale factor for the synthetic dataset below (5000 rows total at N=100).
N = 100

# Synthetic dataset: a categorical column plus random x/y coordinates.
df = pd.DataFrame({
    'category': (
        (['apples'] * 5 * N) +
        (['oranges'] * 10 * N) +
        (['figs'] * 20 * N) +
        (['pineapples'] * 15 * N)
    )
})
df['x'] = np.random.randn(len(df['category']))
df['y'] = np.random.randn(len(df['category']))
コード例 #13
0
from shelljob import proc
import time
import os
import sys
import eventlet

# Patch the stdlib for cooperative (green-thread) I/O before anything else.
eventlet.monkey_patch()

# Python 2 only: force UTF-8 as the default string encoding.
# NOTE(review): reload/setdefaultencoding do not exist on Python 3.
reload(sys)
sys.setdefaultencoding("utf-8")


####### Flask application setup
# NOTE(review): Flask, config_dir, password, Limiter, get_remote_address and
# Redis are not defined in this snippet — presumably imported/defined earlier
# in the full file; confirm.
app=Flask(__name__)
app.secret_key=os.path.join(config_dir,'PyOne'+password)
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
limiter = Limiter(
    app,
    key_func=get_remote_address,
    default_limits=["200/minute", "50/second"],
)

rd=Redis(host='localhost',port=6379)

################################################################################
############################# utility functions ################################
################################################################################
def md5(string):
    """Return the hexadecimal MD5 digest of *string* encoded as UTF-8."""
    return hashlib.md5(string.encode(encoding='utf-8')).hexdigest()
Code example #14
0
    def configure_extensions(self):
        """Set up Celery and the app's background machinery.

        Builds the Celery app (with celery_once Redis locking), makes every
        task run inside this app's context, registers inference and importer
        tasks, creates the NanopublicationManager, and enables flask_caching
        when CACHE_TYPE is configured.
        """
        Empty.configure_extensions(self)
        self.celery = Celery(self.name,
                             broker=self.config['CELERY_BROKER_URL'],
                             beat=True)
        self.celery.conf.update(self.config)
        # celery_once: deduplication locks stored in Redis, expiring after 24h.
        self.celery.conf.ONCE = {
            'backend': 'celery_once.backends.Redis',
            'settings': {
                'url': self.config['CELERY_BROKER_URL'],
                'default_timeout': 60 * 60 * 24
            }
        }

        # Run every task body inside the Flask application context.  `app`
        # is this function's local (bound to self further below); the closure
        # resolves it at call time, after the assignment has happened.
        class ContextTask(self.celery.Task):
            def __call__(self, *args, **kwargs):
                with app.app_context():
                    return self.run(*args, **kwargs)

        self.celery.Task = ContextTask

        # Make QueueOnce app context aware.
        class ContextQueueOnce(QueueOnce):
            def __call__(self, *args, **kwargs):
                with app.app_context():
                    return super(ContextQueueOnce,
                                 self).__call__(*args, **kwargs)

        # Attach to celery object for easy access.
        self.celery.QueueOnce = ContextQueueOnce

        app = self

        if 'root_path' in self.config:
            self.root_path = self.config['root_path']

        # Prepend any configured template directories to the Jinja search path.
        if 'WHYIS_TEMPLATE_DIR' in self.config and app.config[
                'WHYIS_TEMPLATE_DIR'] is not None:
            my_loader = jinja2.ChoiceLoader([
                jinja2.FileSystemLoader(p)
                for p in self.config['WHYIS_TEMPLATE_DIR']
            ] + [app.jinja_loader])
            app.jinja_loader = my_loader

        # Task: run one configured inference service over the whole graph.
        @self.celery.task(base=QueueOnce, once={'graceful': True})
        def process_resource(service_name, taskid=None):
            service = self.config['inferencers'][service_name]
            service.process_graph(app.db)

        # Task: run one inference service over a single nanopublication,
        # skipping nanopubs retired since the task was queued.
        @self.celery.task
        def process_nanopub(nanopub_uri, service_name, taskid=None):
            service = self.config['inferencers'][service_name]
            print(service, nanopub_uri)
            if app.nanopub_manager.is_current(nanopub_uri):
                nanopub = app.nanopub_manager.get(nanopub_uri)
                service.process_graph(nanopub)
            else:
                print("Skipping retired nanopub", nanopub_uri)

        # Wrap one inference-task config dict with two celery tasks: one that
        # finds matching instances, and one that processes a single instance.
        def setup_periodic_task(task):
            @self.celery.task
            def find_instances():
                print("Triggered task", task['name'])
                for x, in task['service'].getInstances(app.db):
                    task['do'](x)

            @self.celery.task
            def do_task(uri):
                print("Running task", task['name'], 'on', uri)
                resource = app.get_resource(uri)

                # result never used
                task['service'].process_graph(resource.graph)

            task['service'].app = app
            task['find_instances'] = find_instances
            task['do'] = do_task

            return task

        app.inference_tasks = []
        if 'inference_tasks' in self.config:
            app.inference_tasks = [
                setup_periodic_task(task)
                for task in self.config['inference_tasks']
            ]

        for name, task in list(self.config['inferencers'].items()):
            task.app = app

        # Tasks with a crontab schedule register with celery beat; the rest
        # are kicked off once, immediately.
        for task in app.inference_tasks:
            if 'schedule' in task:
                #print "Scheduling task", task['name'], task['schedule']
                self.celery.add_periodic_task(crontab(**task['schedule']),
                                              task['find_instances'].s(),
                                              name=task['name'])
            else:
                task['find_instances'].delay()

        @self.celery.task()
        def update(nanopub_uri):
            '''gets called whenever there is a change in the knowledge graph.
            Performs a breadth-first knowledge expansion of the current change.'''
            #print "Updating on", nanopub_uri
            #if not app.nanopub_manager.is_current(nanopub_uri):
            #    print("Skipping retired nanopub", nanopub_uri)
            #    return
            nanopub = app.nanopub_manager.get(nanopub_uri)
            nanopub_graph = ConjunctiveGraph(nanopub.store)
            if 'inferencers' in self.config:
                # Change-scoped services run (low priority) only when the
                # changed nanopub contains matching instances.
                for name, service in list(self.config['inferencers'].items()):
                    service.app = self
                    if service.query_predicate == self.NS.whyis.updateChangeQuery:
                        if service.getInstances(nanopub_graph):
                            print("invoking", name, nanopub_uri)
                            process_nanopub.apply_async(kwargs={
                                'nanopub_uri': nanopub_uri,
                                'service_name': name
                            },
                                                        priority=1)
                # Global-change services always re-run over the whole graph.
                for name, service in list(self.config['inferencers'].items()):
                    service.app = self
                    if service.query_predicate == self.NS.whyis.globalChangeQuery:
                        process_resource.apply_async(
                            kwargs={'service_name': name}, priority=5)

        def run_update(nanopub_uri):
            update.apply_async(args=[nanopub_uri], priority=9)

        self.nanopub_update_listener = run_update

        app = self

        # Importer task: auto-retries with backoff on any exception, and is
        # deduplicated via QueueOnce so an entity is not imported concurrently.
        @self.celery.task(base=self.celery.QueueOnce,
                          once={'graceful': True},
                          retry_backoff=True,
                          retry_jitter=True,
                          autoretry_for=(Exception, ),
                          max_retries=4,
                          bind=True)
        def run_importer(self, entity_name):
            # NOTE: with bind=True, `self` here is the celery task, not the
            # app; the app is reached through the `app` closure variable.
            entity_name = URIRef(entity_name)
            print('importing', entity_name)
            importer = app.find_importer(entity_name)
            if importer is None:
                return
            importer.app = app
            modified = importer.last_modified(entity_name, app.db,
                                              app.nanopub_manager)
            updated = importer.modified(entity_name)
            if updated is None:
                updated = datetime.now(pytz.utc)
            print("Remote modified:", updated, type(updated),
                  "Local modified:", modified, type(modified))
            # Re-import only when never imported locally, or when the remote
            # copy is newer by at least the importer's minimum interval.
            if modified is None or (updated - modified
                                    ).total_seconds() > importer.min_modified:
                importer.load(entity_name, app.db, app.nanopub_manager)

        self.run_importer = run_importer

        # Modules exposed to templates under configured names.
        self.template_imports = {}
        if 'template_imports' in self.config:
            for name, imp in list(self.config['template_imports'].items()):
                try:
                    m = importlib.import_module(imp)
                    self.template_imports[name] = m
                except Exception:
                    print(
                        "Error importing module %s into template variable %s."
                        % (imp, name))
                    raise

        self.nanopub_manager = NanopublicationManager(
            self.db.store,
            Namespace('%s/pub/' % (self.config['lod_prefix'])),
            self,
            update_listener=self.nanopub_update_listener)

        # Optional flask_caching support, driven entirely by app config.
        if 'CACHE_TYPE' in self.config:
            from flask_caching import Cache
            self.cache = Cache(self)
        else:
            self.cache = None
Code example #15
0
class App(Empty):

    managed = False

    def configure_extensions(self):
        """Set up Celery and the app's background machinery.

        Builds the Celery app (with celery_once Redis locking), makes every
        task run inside this app's context, registers inference and importer
        tasks, creates the NanopublicationManager, and enables flask_caching
        when CACHE_TYPE is configured.
        """
        Empty.configure_extensions(self)
        self.celery = Celery(self.name,
                             broker=self.config['CELERY_BROKER_URL'],
                             beat=True)
        self.celery.conf.update(self.config)
        # celery_once: deduplication locks stored in Redis, expiring after 24h.
        self.celery.conf.ONCE = {
            'backend': 'celery_once.backends.Redis',
            'settings': {
                'url': self.config['CELERY_BROKER_URL'],
                'default_timeout': 60 * 60 * 24
            }
        }

        # Run every task body inside the Flask application context.  `app`
        # is this function's local (bound to self further below); the closure
        # resolves it at call time, after the assignment has happened.
        class ContextTask(self.celery.Task):
            def __call__(self, *args, **kwargs):
                with app.app_context():
                    return self.run(*args, **kwargs)

        self.celery.Task = ContextTask

        # Make QueueOnce app context aware.
        class ContextQueueOnce(QueueOnce):
            def __call__(self, *args, **kwargs):
                with app.app_context():
                    return super(ContextQueueOnce,
                                 self).__call__(*args, **kwargs)

        # Attach to celery object for easy access.
        self.celery.QueueOnce = ContextQueueOnce

        app = self

        if 'root_path' in self.config:
            self.root_path = self.config['root_path']

        # Prepend any configured template directories to the Jinja search path.
        if 'WHYIS_TEMPLATE_DIR' in self.config and app.config[
                'WHYIS_TEMPLATE_DIR'] is not None:
            my_loader = jinja2.ChoiceLoader([
                jinja2.FileSystemLoader(p)
                for p in self.config['WHYIS_TEMPLATE_DIR']
            ] + [app.jinja_loader])
            app.jinja_loader = my_loader

        # Task: run one configured inference service over the whole graph.
        @self.celery.task(base=QueueOnce, once={'graceful': True})
        def process_resource(service_name, taskid=None):
            service = self.config['inferencers'][service_name]
            service.process_graph(app.db)

        # Task: run one inference service over a single nanopublication,
        # skipping nanopubs retired since the task was queued.
        @self.celery.task
        def process_nanopub(nanopub_uri, service_name, taskid=None):
            service = self.config['inferencers'][service_name]
            print(service, nanopub_uri)
            if app.nanopub_manager.is_current(nanopub_uri):
                nanopub = app.nanopub_manager.get(nanopub_uri)
                service.process_graph(nanopub)
            else:
                print("Skipping retired nanopub", nanopub_uri)

        # Wrap one inference-task config dict with two celery tasks: one that
        # finds matching instances, and one that processes a single instance.
        def setup_periodic_task(task):
            @self.celery.task
            def find_instances():
                print("Triggered task", task['name'])
                for x, in task['service'].getInstances(app.db):
                    task['do'](x)

            @self.celery.task
            def do_task(uri):
                print("Running task", task['name'], 'on', uri)
                resource = app.get_resource(uri)

                # result never used
                task['service'].process_graph(resource.graph)

            task['service'].app = app
            task['find_instances'] = find_instances
            task['do'] = do_task

            return task

        app.inference_tasks = []
        if 'inference_tasks' in self.config:
            app.inference_tasks = [
                setup_periodic_task(task)
                for task in self.config['inference_tasks']
            ]

        for name, task in list(self.config['inferencers'].items()):
            task.app = app

        # Tasks with a crontab schedule register with celery beat; the rest
        # are kicked off once, immediately.
        for task in app.inference_tasks:
            if 'schedule' in task:
                #print "Scheduling task", task['name'], task['schedule']
                self.celery.add_periodic_task(crontab(**task['schedule']),
                                              task['find_instances'].s(),
                                              name=task['name'])
            else:
                task['find_instances'].delay()

        @self.celery.task()
        def update(nanopub_uri):
            '''gets called whenever there is a change in the knowledge graph.
            Performs a breadth-first knowledge expansion of the current change.'''
            #print "Updating on", nanopub_uri
            #if not app.nanopub_manager.is_current(nanopub_uri):
            #    print("Skipping retired nanopub", nanopub_uri)
            #    return
            nanopub = app.nanopub_manager.get(nanopub_uri)
            nanopub_graph = ConjunctiveGraph(nanopub.store)
            if 'inferencers' in self.config:
                # Change-scoped services run (low priority) only when the
                # changed nanopub contains matching instances.
                for name, service in list(self.config['inferencers'].items()):
                    service.app = self
                    if service.query_predicate == self.NS.whyis.updateChangeQuery:
                        if service.getInstances(nanopub_graph):
                            print("invoking", name, nanopub_uri)
                            process_nanopub.apply_async(kwargs={
                                'nanopub_uri': nanopub_uri,
                                'service_name': name
                            },
                                                        priority=1)
                # Global-change services always re-run over the whole graph.
                for name, service in list(self.config['inferencers'].items()):
                    service.app = self
                    if service.query_predicate == self.NS.whyis.globalChangeQuery:
                        process_resource.apply_async(
                            kwargs={'service_name': name}, priority=5)

        def run_update(nanopub_uri):
            update.apply_async(args=[nanopub_uri], priority=9)

        self.nanopub_update_listener = run_update

        app = self

        # Importer task: auto-retries with backoff on any exception, and is
        # deduplicated via QueueOnce so an entity is not imported concurrently.
        @self.celery.task(base=self.celery.QueueOnce,
                          once={'graceful': True},
                          retry_backoff=True,
                          retry_jitter=True,
                          autoretry_for=(Exception, ),
                          max_retries=4,
                          bind=True)
        def run_importer(self, entity_name):
            # NOTE: with bind=True, `self` here is the celery task, not the
            # app; the app is reached through the `app` closure variable.
            entity_name = URIRef(entity_name)
            print('importing', entity_name)
            importer = app.find_importer(entity_name)
            if importer is None:
                return
            importer.app = app
            modified = importer.last_modified(entity_name, app.db,
                                              app.nanopub_manager)
            updated = importer.modified(entity_name)
            if updated is None:
                updated = datetime.now(pytz.utc)
            print("Remote modified:", updated, type(updated),
                  "Local modified:", modified, type(modified))
            # Re-import only when never imported locally, or when the remote
            # copy is newer by at least the importer's minimum interval.
            if modified is None or (updated - modified
                                    ).total_seconds() > importer.min_modified:
                importer.load(entity_name, app.db, app.nanopub_manager)

        self.run_importer = run_importer

        # Modules exposed to templates under configured names.
        self.template_imports = {}
        if 'template_imports' in self.config:
            for name, imp in list(self.config['template_imports'].items()):
                try:
                    m = importlib.import_module(imp)
                    self.template_imports[name] = m
                except Exception:
                    print(
                        "Error importing module %s into template variable %s."
                        % (imp, name))
                    raise

        self.nanopub_manager = NanopublicationManager(
            self.db.store,
            Namespace('%s/pub/' % (self.config['lod_prefix'])),
            self,
            update_listener=self.nanopub_update_listener)

        # Optional flask_caching support, driven entirely by app config.
        if 'CACHE_TYPE' in self.config:
            from flask_caching import Cache
            self.cache = Cache(self)
        else:
            self.cache = None

    # Lazily-resolved depot for archived file uploads.
    _file_depot = None

    @property
    def file_depot(self):
        """File-upload depot, configured from config['file_archive'] on first access."""
        if self._file_depot is not None:
            return self._file_depot
        depot = DepotManager.get('files')
        if depot is None:
            DepotManager.configure('files', self.config['file_archive'])
            depot = DepotManager.get('files')
        self._file_depot = depot
        return depot

    # Lazily-resolved depot for archived nanopublications.
    _nanopub_depot = None

    @property
    def nanopub_depot(self):
        """Nanopublication depot, configured from config['nanopub_archive'] on first access."""
        if self._nanopub_depot is not None:
            return self._nanopub_depot
        depot = DepotManager.get('nanopublications')
        if depot is None:
            DepotManager.configure('nanopublications',
                                   self.config['nanopub_archive'])
            depot = DepotManager.get('nanopublications')
        self._nanopub_depot = depot
        return depot

    def configure_database(self):
        """
        Database configuration should be set here
        """
        # Namespace helpers; NS.local is rooted at this deployment's LOD prefix.
        self.NS = NS
        self.NS.local = rdflib.Namespace(self.config['lod_prefix'] + '/')

        # Two separate stores: admin data vs. the knowledge graph itself.
        self.admin_db = database.engine_from_config(self.config, "admin_")
        self.db = database.engine_from_config(self.config, "knowledge_")
        self.db.app = self

        # Vocabulary graph = packaged defaults + site-specific custom file,
        # both parsed into the same backing store.
        self.vocab = ConjunctiveGraph()
        #print URIRef(self.config['vocab_file'])
        default_vocab = Graph(store=self.vocab.store)
        default_vocab.parse(source=os.path.abspath(
            os.path.join(os.path.dirname(__file__), "default_vocab.ttl")),
                            format="turtle",
                            publicID=str(self.NS.local))
        custom_vocab = Graph(store=self.vocab.store)
        custom_vocab.parse(self.config['vocab_file'],
                           format="turtle",
                           publicID=str(self.NS.local))

        # Flask-Security user store backed by the admin database.
        self.datastore = WhyisUserDatastore(self.admin_db, {},
                                            self.config['lod_prefix'])
        self.security = Security(self,
                                 self.datastore,
                                 register_form=ExtendedRegisterForm)

    def __weighted_route(self, *args, **kwargs):
        """Register a route whose Rule gets a custom ``match_compare_key``.

        Only works on the app, not in a blueprint, because blueprints add
        their Rules lazily when registered on an app.
        """
        def decorator(view_func):
            compare_key = kwargs.pop('compare_key', None)
            # Register the view with the (remaining) route arguments.
            self.route(*args, **kwargs)(view_func)

            if compare_key is not None:
                # The rule just appended by route() is the one to patch.
                newest_rule = self.url_map._rules[-1]
                newest_rule.match_compare_key = lambda: compare_key

            return view_func

        return decorator

    def map_entity(self, name):
        """Return ``(mapped_name, importer)`` for the first configured
        namespace importer matching *name*, or ``(None, None)``."""
        for candidate in self.config['namespaces']:
            if not candidate.matches(name):
                continue
            return candidate.map(name), candidate
        return None, None

    def find_importer(self, name):
        """Return the first namespace importer whose resources match *name*,
        or ``None`` when no importer applies."""
        candidates = (imp for imp in self.config['namespaces']
                      if imp.resource_matches(name))
        return next(candidates, None)

    class Entity(rdflib.resource.Resource):
        # Cached fully-resolved entity from the owning app's get_entity().
        _this = None

        def this(self):
            """Return (and cache) the fully-resolved entity for this resource."""
            if self._this is None:
                self._this = self._graph.app.get_entity(self.identifier)
            return self._this

        # Cached description graph of this entity.
        _description = None

        def description(self):
            """Return (and cache) a resource over a CONSTRUCTed description.

            The query collects this entity's asserted triples, labels/titles/
            names of its objects, and one further hop through
            sio:hasAttribute / sio:hasPart with labels for those attributes.
            """
            if self._description is None:
                #                try:
                result = Graph()
                #                try:
                for quad in self._graph.query(
                        '''
construct {
    ?e ?p ?o.
    ?o rdfs:label ?label.
    ?o skos:prefLabel ?prefLabel.
    ?o dc:title ?title.
    ?o foaf:name ?name.
    ?o ?pattr ?oattr.
    ?oattr rdfs:label ?oattrlabel
} where {
    graph ?g {
      ?e ?p ?o.
    }
    ?g a np:Assertion.
    optional {
      ?e sio:hasAttribute|sio:hasPart ?o.
      ?o ?pattr ?oattr.
      optional {
        ?oattr rdfs:label ?oattrlabel.
      }
    }
    optional {
      ?o rdfs:label ?label.
    }
    optional {
      ?o skos:prefLabel ?prefLabel.
    }
    optional {
      ?o dc:title ?title.
    }
    optional {
      ?o foaf:name ?name.
    }
}''',
                        initNs=NS.prefixes,
                        initBindings={'e': self.identifier}):
                    # Results may arrive as triples or quads; drop the
                    # context term when present.
                    if len(quad) == 3:
                        s, p, o = quad
                    else:
                        # Last term is never used
                        s, p, o, _ = quad
                    result.add((s, p, o))
#                except:
#                    pass
                self._description = result.resource(self.identifier)
#                except Exception as e:
#                    print str(e), self.identifier
#                    raise e
            return self._description

    def get_resource(self, entity, async_=True, retrieve=True):
        """Resolve *entity* to an Entity, optionally triggering an import.

        With retrieve enabled, the entity is first mapped through the
        configured namespace importers; a never-imported entity (or a
        synchronous call) imports inline, otherwise a background refresh is
        queued unless the importer is import-once.
        """
        if not retrieve:
            return self.Entity(self.db, entity)

        mapped_name, importer = self.map_entity(entity)
        if mapped_name is not None:
            entity = mapped_name
        if importer is None:
            importer = self.find_importer(entity)
        print(entity, importer)

        if importer is not None:
            modified = importer.last_modified(entity, self.db,
                                              self.nanopub_manager)
            if modified is None or async_ is False:
                self.run_importer(entity)
            elif not importer.import_once:
                print("Type of modified is", type(modified))
                self.run_importer.delay(entity)

        return self.Entity(self.db, entity)

    def configure_template_filters(self):
        """Install the standard template filters plus any configured ones."""
        filters.configure(self)
        for filter_name, filter_fn in self.config.get('filters', {}).items():
            self.template_filter(filter_name)(filter_fn)

    def add_file(self, f, entity, nanopub):
        """Store upload *f* in the file depot and describe it on *entity*.

        Finds nanopubs that already attach a file to *entity* (raising
        Unauthorized when the current user may not edit one of them), stores
        the file, and adds file-id / contributor / created / content-type
        assertions to *nanopub*.  Returns the superseded
        ``(nanopub, assertion)`` pairs for the caller to retire.
        """
        entity = rdflib.URIRef(entity)
        old_nanopubs = []
        # Every nanopub already asserting a file id for this entity is a
        # candidate for retirement; edit rights are checked per nanopub.
        for np_uri, np_assertion, in self.db.query(
                '''select distinct ?np ?assertion where {
    hint:Query hint:optimizer "Runtime" .
    graph ?assertion {?e whyis:hasFileID ?fileid}
    ?np np:hasAssertion ?assertion.
}''',
                initNs=NS.prefixes,
                initBindings=dict(e=rdflib.URIRef(entity))):
            if not self._can_edit(np_uri):
                raise Unauthorized()
            old_nanopubs.append((np_uri, np_assertion))
        # Persist the upload; the depot id links the entity to the bytes.
        fileid = self.file_depot.create(f.stream, f.filename, f.mimetype)
        nanopub.add((nanopub.identifier, NS.sio.isAbout, entity))
        nanopub.assertion.add((entity, NS.whyis.hasFileID, Literal(fileid)))
        if current_user._get_current_object() is not None and hasattr(
                current_user, 'identifier'):
            nanopub.assertion.add(
                (entity, NS.dc.contributor, current_user.identifier))
        nanopub.assertion.add(
            (entity, NS.dc.created, Literal(datetime.utcnow())))
        # Record both the full media type and its major type (e.g. image/png
        # and image) as classes of the entity.
        nanopub.assertion.add(
            (entity, NS.ov.hasContentType, Literal(f.mimetype)))
        nanopub.assertion.add((entity, NS.RDF.type, NS.mediaTypes[f.mimetype]))
        nanopub.assertion.add(
            (NS.mediaTypes[f.mimetype], NS.RDF.type, NS.dc.FileFormat))
        nanopub.assertion.add(
            (entity, NS.RDF.type, NS.mediaTypes[f.mimetype.split('/')[0]]))
        nanopub.assertion.add((NS.mediaTypes[f.mimetype.split('/')[0]],
                               NS.RDF.type, NS.dc.FileFormat))
        nanopub.assertion.add((entity, NS.RDF.type, NS.pv.File))

        if current_user._get_current_object() is not None and hasattr(
                current_user, 'identifier'):
            nanopub.pubinfo.add((nanopub.assertion.identifier,
                                 NS.dc.contributor, current_user.identifier))
        nanopub.pubinfo.add((nanopub.assertion.identifier, NS.dc.created,
                             Literal(datetime.utcnow())))

        return old_nanopubs

    def delete_file(self, entity):
        """Retire every nanopub that attaches a file to *entity*.

        Raises Unauthorized when the current user may not edit one of them.
        """
        for np_uri, in self.db.query('''select distinct ?np where {
    hint:Query hint:optimizer "Runtime" .
    graph ?np_assertion {?e whyis:hasFileID ?fileid}
    ?np np:hasAssertion ?np_assertion.
}''',
                                     initNs=NS.prefixes,
                                     initBindings=dict(e=entity)):
            if not self._can_edit(np_uri):
                raise Unauthorized()
            self.nanopub_manager.retire(np_uri)

    def add_files(self, uri, files, upload_type=NS.pv.File):
        """Attach uploaded *files* to *uri* in a fresh nanopublication.

        Collections get dc:hasPart links per file, dcat:Datasets get
        distribution triples per file, and any other type stores only the
        first non-empty file on *uri* itself.  Superseded nanopubs are
        retired and recorded as revisions before publishing.
        """
        nanopub = self.nanopub_manager.new()
        old_nanopubs = []
        added_files = False
        nanopub.assertion.add((uri, self.NS.RDF.type, upload_type))

        if upload_type == URIRef("http://purl.org/dc/dcmitype/Collection"):
            for f in files:
                filename = secure_filename(f.filename)
                if filename == '':
                    continue
                file_uri = URIRef(uri + "/" + filename)
                old_nanopubs.extend(self.add_file(f, file_uri, nanopub))
                nanopub.assertion.add((uri, NS.dc.hasPart, file_uri))
                added_files = True
        elif upload_type == NS.dcat.Dataset:
            for f in files:
                filename = secure_filename(f.filename)
                if filename == '':
                    continue
                file_uri = URIRef(uri + "/" + filename)
                old_nanopubs.extend(self.add_file(f, file_uri, nanopub))
                nanopub.assertion.add(
                    (uri, NS.dcat.distribution, file_uri))
                nanopub.assertion.add(
                    (file_uri, NS.RDF.type, NS.dcat.Distribution))
                nanopub.assertion.add(
                    (file_uri, NS.dcat.downloadURL, file_uri))
                added_files = True
        else:
            # Generic upload: only the first non-empty file is stored.
            for f in files:
                if f.filename == '':
                    continue
                old_nanopubs.extend(self.add_file(f, uri, nanopub))
                nanopub.assertion.add((uri, NS.RDF.type, NS.pv.File))
                added_files = True
                break

        if not added_files:
            return

        for old_np, old_np_assertion in old_nanopubs:
            nanopub.pubinfo.add((nanopub.assertion.identifier,
                                 NS.prov.wasRevisionOf, old_np_assertion))
            self.nanopub_manager.retire(old_np)

        for prepared in self.nanopub_manager.prepare(nanopub):
            self.nanopub_manager.publish(prepared)

    def _can_edit(self, uri):
        if self.managed:
            return True
        if current_user._get_current_object() is None:
            # This isn't null even when not authenticated, unless we are an autonomic agent.
            return True
        if not hasattr(current_user,
                       'identifier'):  # This is an anonymous user.
            return False
        if current_user.has_role('Publisher') or current_user.has_role(
                'Editor') or current_user.has_role('Admin'):
            return True
        if self.db.query('''ask {
    ?nanopub np:hasAssertion ?assertion; np:hasPublicationInfo ?info.
    graph ?info { ?assertion dc:contributor ?user. }
}''',
                         initBindings=dict(nanopub=uri,
                                           user=current_user.identifier),
                         initNs=dict(np=self.NS.np, dc=self.NS.dc)):
            #print "Is owner."
            return True
        return False

    def configure_views(self):
        def sort_by(resources, property):
            return sorted(resources, key=lambda x: x.value(property))

        def camel_case_split(identifier):
            matches = finditer(
                '.+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)',
                identifier)
            return [m.group(0) for m in matches]

        label_properties = [
            self.NS.skos.prefLabel, self.NS.RDFS.label, self.NS.schema.name,
            self.NS.dc.title, self.NS.foaf.name, self.NS.schema.name,
            self.NS.skos.notation
        ]

        @lru_cache(maxsize=1000)
        def get_remote_label(uri):
            for db in [self.db, self.admin_db]:
                g = Graph()
                try:
                    db.nsBindings = {}
                    g += db.query('''select ?s ?p ?o where {
                        hint:Query hint:optimizer "Runtime" .

                         ?s ?p ?o.}''',
                                  initNs=self.NS.prefixes,
                                  initBindings=dict(s=uri))
                    db.nsBindings = {}
                except:
                    pass
                resource_entity = g.resource(uri)
                if len(resource_entity.graph) == 0:
                    #print "skipping", db
                    continue
                for property in label_properties:
                    labels = self.lang_filter(resource_entity[property])
                    if len(labels) > 0:
                        return labels[0]

                if len(labels) == 0:
                    name = [
                        x.value for x in [
                            resource_entity.value(self.NS.foaf.givenName),
                            resource_entity.value(self.NS.foaf.familyName)
                        ] if x is not None
                    ]
                    if len(name) > 0:
                        label = ' '.join(name)
                        return label
            try:
                label = self.db.qname(uri).split(":")[1].replace("_", " ")
                return ' '.join(camel_case_split(label)).title()
            except Exception as e:
                print(str(e), uri)
                return str(uri)

        def get_label(resource):
            for property in label_properties:
                labels = self.lang_filter(resource[property])
                #print "mem", property, label
                if len(labels) > 0:
                    return labels[0]
            return get_remote_label(resource.identifier)

        self.get_label = get_label

        def initialize_g():
            if not hasattr(g, "initialized"):
                g.initialized = True
                g.ns = self.NS
                g.get_summary = get_summary
                g.get_label = get_label
                g.labelize = self.labelize
                g.get_resource = self.get_resource
                g.get_entity = self.get_entity
                g.rdflib = rdflib
                g.isinstance = isinstance
                g.current_user = current_user
                g.slugify = slugify
                g.db = self.db

        self.initialize_g = initialize_g

        @self.before_request
        def load_forms():
            if 'authenticators' in self.config:
                for authenticator in self.config['authenticators']:
                    user = authenticator.authenticate(request, self.datastore,
                                                      self.config)
                    if user is not None:
                        #    login_user(user)
                        break
            initialize_g()

        @self.login_manager.user_loader
        def load_user(user_id):
            if user_id != None:
                #try:
                user = self.datastore.find_user(id=user_id)
                return user
                #except:
                #    return None
            else:
                return None

        # def get_graphs(graphs):
        #     query = '''select ?s ?p ?o ?g where {
        #         hint:Query hint:optimizer "Runtime" .
        #
        #         graph ?g {?s ?p ?o}
        #         } values ?g { %s }'''
        #     query = query % ' '.join([graph.n3() for graph in graphs])
        #     #print query
        #     quads = self.db.store.query(query, initNs=self.NS.prefixes)
        #     result = rdflib.Dataset()
        #     result.addN(quads)
        #     return result

#         def explain(graph):
#             values = ')\n  ('.join([' '.join([x.n3() for x in triple]) for triple in graph.triples((None,None,None))])
#             values = 'VALUES (?s ?p ?o)\n{\n('+ values + ')\n}'
#
#             try:
#                 nanopubs = self.db.query('''select distinct ?np where {
#     hint:Query hint:optimizer "Runtime" .
#     ?np np:hasAssertion?|np:hasProvenance?|np:hasPublicationInfo? ?g;
#         np:hasPublicationInfo ?pubinfo;
#         np:hasAssertion ?assertion;
#     graph ?assertion { ?s ?p ?o.}
# }''' + values, initNs=self.NS.prefixes)
#                 result = ConjunctiveGraph()
#                 for nanopub_uri, in nanopubs:
#                     self.nanopub_manager.get(nanopub_uri, result)
#             except Exception as e:
#                 print(str(e), entity)
#                 raise e
#             return result.resource(entity)

        def get_entity_sparql(entity):
            """Fetch every quad describing *entity* from the nanopub store.

            One SPARQL query collects the graphs of any nanopublication that
            either declares itself to be about *entity* (sio:isAbout) or whose
            assertion graph has *entity* as a subject, and loads the resulting
            quads into an in-memory ConjunctiveGraph.

            :param entity: rdflib term bound to ?e in the query.
            :return: rdflib Resource for *entity* backed by the result graph.
            """
            try:
                statements = self.db.query(
                    '''select distinct ?s ?p ?o ?g where {
    hint:Query hint:optimizer "Runtime" .
            ?np np:hasAssertion?|np:hasProvenance?|np:hasPublicationInfo? ?g;
                np:hasPublicationInfo ?pubinfo;
                np:hasAssertion ?assertion;

            {graph ?np { ?np sio:isAbout ?e.}}
            UNION
            {graph ?assertion { ?e ?p ?o.}}
            graph ?g { ?s ?p ?o }
        }''',
                    initBindings={'e': entity},
                    initNs=self.NS.prefixes)
                result = ConjunctiveGraph()
                result.addN(statements)
            except Exception as e:
                # Log which entity failed, then re-raise for the caller.
                print(str(e), entity)
                raise e
            #print result.serialize(format="trig")
            return result.resource(entity)

#         def get_entity_disk(entity):
#             try:
#                 nanopubs = self.db.query('''select distinct ?np where {
#     hint:Query hint:optimizer "Runtime" .
#             ?np np:hasAssertion?|np:hasProvenance?|np:hasPublicationInfo? ?g;
#                 np:hasPublicationInfo ?pubinfo;
#                 np:hasAssertion ?assertion;
#
#             {graph ?np { ?np sio:isAbout ?e.}}
#             UNION
#             {graph ?assertion { ?e ?p ?o.}}
#         }''',initBindings={'e':entity}, initNs=self.NS.prefixes)
#                 result = ConjunctiveGraph()
#                 for nanopub_uri, in nanopubs:
#                     self.nanopub_manager.get(nanopub_uri, result)
# #                result.addN(nanopubs)
#             except Exception as e:
#                 print(str(e), entity)
#                 raise e
#             #print result.serialize(format="trig")
#             return result.resource(entity)

        # Select the pure-SPARQL implementation and expose it on the app.
        get_entity = get_entity_sparql

        self.get_entity = get_entity

        def get_summary(resource):
            """Yield (property, literal) pairs usable as a textual summary.

            Tries a fixed list of well-known description properties (plus any
            configured under 'summary_properties'), filtering each property's
            values through the app's language filter.

            :param resource: rdflib Resource to summarize.
            """
            summary_properties = [
                self.NS.skos.definition, self.NS.schema.description,
                self.NS.dc.abstract, self.NS.dc.description,
                self.NS.dc.summary, self.NS.RDFS.comment,
                self.NS.dcelements.description,
                URIRef("http://purl.obolibrary.org/obo/IAO_0000115"),
                self.NS.prov.value, self.NS.sio.hasValue
            ]
            if 'summary_properties' in self.config:
                summary_properties.extend(self.config['summary_properties'])
            # Renamed loop variable: `property` shadowed the builtin.
            for prop in summary_properties:
                for term in self.lang_filter(resource[prop]):
                    yield (prop, term)

        self.get_summary = get_summary

        # Register a /cdn/ route only when a CDN directory is configured.
        if self.config.get('WHYIS_CDN_DIR') is not None:

            @self.route('/cdn/<path:filename>')
            def cdn(filename):
                """Serve a static asset from the configured CDN directory."""
                return send_from_directory(self.config['WHYIS_CDN_DIR'],
                                           filename)

        def render_view(resource, view=None, args=None, use_cache=True):
            self.initialize_g()
            if view is None and 'view' in request.args:
                view = request.args['view']

            if view is None:
                view = 'view'

            if use_cache and self.cache is not None:
                key = str((str(resource.identifier), view))
                result = self.cache.get(key)
                if result is not None:
                    r, headers = result
                    return r, 200, headers
            template_args = dict()
            template_args.update(self.template_imports)
            template_args.update(
                dict(ns=self.NS,
                     this=resource,
                     g=g,
                     current_user=current_user,
                     isinstance=isinstance,
                     args=request.args if args is None else args,
                     url_for=url_for,
                     app=self,
                     get_entity=get_entity,
                     get_summary=get_summary,
                     search=search,
                     rdflib=rdflib,
                     config=self.config,
                     hasattr=hasattr,
                     set=set))

            types = []
            if 'as' in request.args:
                types = [URIRef(request.args['as']), 0]

            types.extend(
                (x, 1) for x in self.vocab[resource.identifier:NS.RDF.type])
            if not types:  # KG types cannot override vocab types. This should keep views stable where critical.
                types.extend([(x.identifier, 1)
                              for x in resource[NS.RDF.type]])
            #if len(types) == 0:
            types.append([self.NS.RDFS.Resource, 100])
            type_string = ' '.join(
                ["(%s %d '%s')" % (x.n3(), i, view) for x, i in types])
            view_query = '''select ?id ?view (count(?mid)+?priority as ?rank) ?class ?c ?content_type where {
    values (?c ?priority ?id) { %s }
    ?c rdfs:subClassOf* ?mid.
    ?mid rdfs:subClassOf* ?class.
    ?class ?viewProperty ?view.
    ?viewProperty rdfs:subPropertyOf* whyis:hasView.
    ?viewProperty dc:identifier ?id.
    optional {
        ?viewProperty dc:format ?content_type
    }
} group by ?c ?class ?content_type order by ?rank
''' % type_string

            #print view_query
            views = list(
                self.vocab.query(view_query,
                                 initNs=dict(whyis=self.NS.whyis,
                                             dc=self.NS.dc)))
            if len(views) == 0:
                abort(404)

            headers = {'Content-Type': "text/html"}
            extension = views[0]['view'].value.split(".")[-1]
            if extension in DATA_EXTENSIONS:
                headers['Content-Type'] = DATA_EXTENSIONS[extension]
            print(views[0]['view'], views[0]['content_type'])
            if views[0]['content_type'] is not None:
                headers['Content-Type'] = views[0]['content_type']

            # default view (list of nanopubs)
            # if available, replace with class view
            # if available, replace with instance view
            return render_template(views[0]['view'].value,
                                   **template_args), 200, headers

        self.render_view = render_view

        # Register blueprints: nanopub CRUD, SPARQL endpoint, entity pages,
        # and the tabular view routes.
        self.register_blueprint(nanopub_blueprint)
        self.register_blueprint(sparql_blueprint)
        self.register_blueprint(entity_blueprint)
        self.register_blueprint(tableview_blueprint)

    def get_entity_uri(self, name, format):
        """Resolve the requested entity URI and its negotiated content type.

        A recognized *format* extension selects a content type; an
        unrecognized one is folded back into the name.  The entity itself
        comes from *name*, the ?uri request arg, or the local Home page.
        """
        content_type = None
        if format is not None:
            if format in DATA_EXTENSIONS:
                content_type = DATA_EXTENSIONS[format]
            else:
                # Not a data extension: treat it as part of the entity name.
                name = '.'.join([name, format])
        if name is not None:
            entity = self.NS.local[name]
        elif 'uri' in request.args:
            entity = URIRef(request.args['uri'])
        else:
            entity = self.NS.local.Home
        return entity, content_type

    def get_send_file_max_age(self, filename):
        """Disable static-file caching in debug mode; otherwise defer to
        the Empty base implementation."""
        if self.debug:
            return 0
        return Empty.get_send_file_max_age(self, filename)
コード例 #16
0
from flask_caching import Cache
import flask
from flask import request

import re
from layout import layout as desktop_layout
# from mobile_layout import layout as mobile_layout -> Only a single layout for development

# NOTE(review): `dbc` and `dash` are not imported in the visible lines —
# presumably imported above this snippet; verify.
external_stylesheets = [dbc.themes.BOOTSTRAP,
"https://use.fontawesome.com/releases/v5.9.0/css/all.css"]

# Underlying Flask server hosting the Dash app.
server = flask.Flask(__name__)

app = dash.Dash(__name__, external_stylesheets=external_stylesheets,
                server=server)
# Cache attaches to the Flask instance, not the Dash wrapper.
cache = Cache(app.server)
app.title = "Automated EDA"

def register_before_request(app):
    """Attach a before_request hook that sniffs mobile user agents.

    NOTE(review): the hook appears truncated in this copy — `is_mobile` is
    computed but unused in the visible lines; confirm the remainder.
    """

    @app.server.before_request
    def before_request_func():      # pylint: disable=W0612
        """Checks if user agent is from mobile to determine which layout
        to serve before user makes any requests.
        """
        # NOTE(review): lookup uses "User_Agent" (underscore); the canonical
        # header is "User-Agent" — confirm the framework maps underscores.
        agent = request.headers.get("User_Agent")
        mobile_string = ("(?i)android|fennec|iemobile|iphone|opera"
                        " (?:mini|mobi)|mobile")
        re_mobile = re.compile(mobile_string)
        # NOTE(review): `agent` may be None when the header is absent, which
        # would make findall raise TypeError — verify upstream guarantees.
        is_mobile = len(re_mobile.findall(agent)) > 0
コード例 #17
0
 def set(self, *args, **kwargs):
     """Proxy for the internal cache object's set().

     Returns None without touching the cache when caching is disabled
     via configuration; otherwise delegates to Cache.set.
     """
     return Cache.set(self, *args, **kwargs) if self.cache_mode else None
コード例 #18
0
from flask_bcrypt import Bcrypt
from flask_caching import Cache
from flask_mongoengine import MongoEngine

# NOTE(review): `Flask` and `CORS` are not imported in the visible lines —
# presumably imported above this snippet; verify.
app = Flask(__name__)

from .config import Config

app.config.from_object(Config)

# Shorthand logger; coerces its argument to str before logging.
log_info = lambda x: app.logger.info(str(x))

bcrypt = Bcrypt(app)
# Allow any origin on the /api/ routes.
cors = CORS(app, resources={r"/api/*": {"origins": "*"}})
db = MongoEngine(app)
cache = Cache(app, with_jinja2_ext=False)

# Start every process with an empty cache.
with app.app_context():
    cache.clear()

from . import models
from .models import Buyer, Seller, BlacklistToken

models.init_app(app)

from .api import add_resources

# add_resources returns the app with the REST resources attached.
app = add_resources(app)

def create_app():
コード例 #19
0
def setup_cache(app, cache_config):
    """Setup the flask-cache on a flask app"""
    # No cache when config is missing/empty or explicitly set to 'null'.
    if not cache_config or cache_config.get('CACHE_TYPE') == 'null':
        return None
    return Cache(app, config=cache_config)
コード例 #20
0
ファイル: cache.py プロジェクト: tbri867/CS235FLIX-A2
from flask_caching import Cache

# Module-level cache using the in-process "simple" backend with a
# 300-second default entry timeout; bound to the app elsewhere.
cache = Cache(config={"CACHE_TYPE": "simple",
                      "CACHE_DEFAULT_TIMEOUT": 300})
コード例 #21
0
# -*- coding: utf-8 -*-

from flask import Flask, request
from flask_nemo import Nemo
from capitains_nautilus.flask_ext import FlaskNautilus
from capitains_nautilus.cts.resolver import NautilusCTSResolver
from werkzeug.contrib.cache import FileSystemCache
from flask_caching import Cache
import os
from config import nemo_config
     
# Root directory containing one sub-directory per text repository.
d = "/opt/data"
app = Flask("Nautilus")
# File-system cache used internally by the Nautilus resolver.
nautilus_cache = FileSystemCache("/opt/cache")
# Flask-Caching instance for HTTP-level caching of API responses.
http_cache = Cache(config={'CACHE_TYPE': "simple"})
nautilus_resolver = NautilusCTSResolver(
    [os.path.join(d,o) for o in os.listdir(d) if os.path.isdir(os.path.join(d,o))],
    cache=nautilus_cache
)
nautilus = FlaskNautilus(
    app=app,
    prefix="/api",
    name="nautilus",
    resolver=nautilus_resolver,
    flask_caching=http_cache
)

# We set up Nemo
nemo = Nemo(
    app=app,
    name="nemo",
コード例 #22
0
ファイル: __init__.py プロジェクト: honmaple/maple-bbs
# ********************************************************************************
from flask import request
from flask_wtf.csrf import CSRFProtect
from flask_avatar import Avatar
from flask_maple.models import db
from flask_maple.redis import Redis
from flask_maple.mail import Mail
from flask_principal import Principal
from flask_msearch import Search
from flask_caching import Cache
from . import babel, login, maple

# Re-export the shared SQLAlchemy instance imported from flask_maple.models.
db = db
csrf = CSRFProtect()
redis_data = Redis()
cache = Cache()
mail = Mail()
principal = Principal()
search = Search(db=db)
# Avatar images are cached for 3 days (259200 s), keyed by request URL.
avatar = Avatar(cache=cache.cached(
    timeout=259200, key_prefix=lambda: "avatar:{}".format(request.url)))


def init_app(app):
    """Bind the module-level extensions above to the given Flask app.

    NOTE(review): this copy may be truncated — extensions such as `search`
    are constructed above but not initialized in the visible lines; verify.
    """
    db.init_app(app)
    cache.init_app(app)
    avatar.init_app(app)
    csrf.init_app(app)
    principal.init_app(app)
    redis_data.init_app(app)
    mail.init_app(app)
コード例 #23
0
ファイル: manage.py プロジェクト: qxbug/zsky
# NOTE(review): `app` and the extension classes used below are defined
# above this visible chunk; verify imports before editing.
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['SQLALCHEMY_POOL_SIZE']=5000
db = SQLAlchemy(app)
manager = Manager(app)
migrate = Migrate(app, db)
babel = Babel(app)
app.config['BABEL_DEFAULT_LOCALE'] = 'zh_CN'
loginmanager=LoginManager()
loginmanager.init_app(app)
loginmanager.session_protection='strong'
loginmanager.login_view='login'
loginmanager.login_message = "请先登录!"
# Redis-backed cache on localhost; DB index and password left empty.
cache = Cache(app,config = {
    'CACHE_TYPE': 'redis',
    'CACHE_REDIS_HOST': '127.0.0.1',
    'CACHE_REDIS_PORT': 6379,
    'CACHE_REDIS_DB': '',
    'CACHE_REDIS_PASSWORD': ''
})
# NOTE(review): Cache(app, ...) already initializes the extension, so this
# second init_app call looks redundant — confirm before removing.
cache.init_app(app)


# Connection settings for MySQL and the Sphinx search daemon.
DB_HOST='127.0.0.1'
DB_NAME_MYSQL='zsky'
DB_PORT_MYSQL=3306
DB_NAME_SPHINX='film'
DB_PORT_SPHINX=9306
# NOTE(review): credentials appear redacted ('******') in this copy.
DB_USER='******'
DB_PASS='******'
DB_CHARSET='utf8mb4'
コード例 #24
0
ファイル: app.py プロジェクト: plantly/scale-ai-templates
# NOTE(review): `connection`, `flake_db`, `get_unique`, and the loan_*
# values come from earlier (not visible) lines; verify before editing.
ownerships = get_unique(connection, flake_db, "HOME_OWNERSHIP")[1:]

# Close connection
connection.close()

# Make some calculations based on value range retrieved
loan_marks = loan_max // 4
loan_min //= loan_marks

# Define app
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
server = app.server
# Redis-backed cache; REDIS_URL falls back to "" when the env var is unset.
cache = Cache(
    app.server,
    config={
        "CACHE_TYPE": "redis",
        "CACHE_REDIS_URL": os.environ.get("REDIS_URL", "")
    },
)


# Cache functions
@cache.memoize(timeout=300)
def connect_read_sql(query, engine):
    """Run *query* against *engine*, returning a DataFrame (memoized 300 s).

    :param query: SQL string passed to pandas.read_sql.
    :param engine: SQLAlchemy-style engine providing .connect().
    """
    connection = engine.connect()
    try:
        return pd.read_sql(query, connection)
    finally:
        # Bug fix: the original leaked the connection when read_sql raised.
        connection.close()


# Build component parts