def test_generate_key(self):
    random_token = pretend.call_recorder(
        lambda: "EUmoN-Hsp0CFMcULe2KD5c3LjB_otLG-aXZueTkY3DM"
    )
    store = RedisSessionStore(pretend.stub(), _random_token=random_token)

    assert (
        store.generate_key()
        == "EUmoN-Hsp0CFMcULe2KD5c3LjB_otLG-aXZueTkY3DM"
    )

def test_refresh(self):
    store = RedisSessionStore(
        pretend.stub(expire=pretend.call_recorder(lambda key, ttl: None)),
    )

    store.refresh(pretend.stub(sid="EUmoN"))

    assert store.redis.expire.calls == [
        pretend.call("warehouse/session/data/EUmoN", 12 * 60 * 60),
    ]

def test_delete(self):
    store = RedisSessionStore(
        pretend.stub(delete=pretend.call_recorder(lambda key: None)),
    )

    store.delete(pretend.stub(sid="EUmoN"))

    assert store.redis.delete.calls == [
        pretend.call("warehouse/session/data/EUmoN"),
    ]

def test_cycle(self):
    store = RedisSessionStore(pretend.stub())
    store.delete = pretend.call_recorder(lambda session: None)

    old_session = Session({"user.csrf": "ok"}, "123456", False)
    new_session = store.cycle(old_session)

    assert store.delete.calls == [pretend.call(old_session)]
    assert new_session == old_session
    assert new_session.new
    assert new_session.sid != old_session.sid

def test_get(self):
    store = RedisSessionStore(
        pretend.stub(
            # msgpack encoding of {"user.csrf": "wat"}
            get=lambda key: b"\x81\xa9user.csrf\xa3wat",
        ),
    )
    store.refresh = pretend.call_recorder(lambda session: None)

    session = store.get("EUmoN-Hsp0CFMcULe2KD5c3LjB_otLG-aXZueTkY3DM")

    assert store.refresh.calls == [pretend.call(session)]
    assert not session.new
    assert session == {"user.csrf": "wat"}
    assert session.sid == "EUmoN-Hsp0CFMcULe2KD5c3LjB_otLG-aXZueTkY3DM"

def test_save(self):
    store = RedisSessionStore(
        pretend.stub(
            setex=pretend.call_recorder(lambda key, ttl, data: None),
        ),
    )

    session = Session({"user.csrf": "wat"}, "EUmoN", False)
    store.save(session)

    assert store.redis.setex.calls == [
        pretend.call(
            "warehouse/session/data/EUmoN",
            12 * 60 * 60,
            # msgpack encoding of {"user.csrf": "wat"}
            b"\x81\xa9user.csrf\xa3wat",
        ),
    ]

def test_get_invalid_data_in_redis(self):
    store = RedisSessionStore(pretend.stub(get=lambda key: b"asdsa"))
    assert store.get("EUmoN-Hsp0CFMcULe2KD5c3LjB_otLG-aXZueTkY3DM").new

def test_get_invalid_session(self):
    store = RedisSessionStore(pretend.stub())
    assert store.get("invalid key").new

def test_is_valid_key(self, key, valid):
    # ``key`` and ``valid`` are expected to be supplied by a
    # pytest.mark.parametrize decorator that is not shown in this excerpt.
    store = RedisSessionStore(pretend.stub())
    assert store.is_valid_key(key) is valid

def test_redis_key(self):
    store = RedisSessionStore(pretend.stub())
    assert store._redis_key("123456") == "warehouse/session/data/123456"

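# The tests above pin down the store's observable behaviour: keys live under
# "warehouse/session/data/", payloads are msgpack-encoded mappings, and the
# TTL is 12 hours. The following is a minimal sketch inferred from those
# assertions only; the actual warehouse RedisSessionStore may differ (key
# validation, ``get``/``cycle``, and error handling are omitted here).
import base64
import os

import msgpack


class RedisSessionStoreSketch:

    key_prefix = "warehouse/session/data/"
    ttl = 12 * 60 * 60  # refresh/save keep sessions alive for 12 hours

    def __init__(self, redis, session_class=dict, _random_token=None):
        self.redis = redis
        self.session_class = session_class
        if _random_token is not None:
            self._random_token = _random_token

    @staticmethod
    def _random_token():
        # 32 random bytes, URL-safe base64 without padding -- the same shape
        # as the 43 character token asserted in test_generate_key
        return base64.urlsafe_b64encode(os.urandom(32)).rstrip(b"=").decode()

    def generate_key(self):
        return self._random_token()

    def _redis_key(self, sid):
        return self.key_prefix + sid

    def refresh(self, session):
        self.redis.expire(self._redis_key(session.sid), self.ttl)

    def delete(self, session):
        self.redis.delete(self._redis_key(session.sid))

    def save(self, session):
        self.redis.setex(
            self._redis_key(session.sid),
            self.ttl,
            msgpack.packb(dict(session)),
        )
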
def __init__(self, config, engine=None, redis_class=redis.StrictRedis):
    self.config = AttributeDict(config)

    self.metadata = db.metadata

    # Configure logging
    logging.config.dictConfig(self.config.logging)

    # Connect to the database
    if engine is None and self.config.get("database", {}).get("url"):
        engine = sqlalchemy.create_engine(self.config.database.url)
    self.engine = engine

    # Create our redis connections
    self.redises = {
        key: redis_class.from_url(url)
        for key, url in self.config.redis.items()
    }

    # Create our Store instance and associate our store modules with it
    self.db = AttributeDict()
    for name, klass in self.db_classes.items():
        self.db[name] = klass(
            self,
            self.metadata,
            self.engine,
            self.redises["downloads"],
        )

    # Create our Search Index instance and associate our mappings with it
    self.search = Index(self.db, self.config.search)
    self.search.register(ProjectMapping)

    # Set up our URL routing
    self.urls = urls.urls

    # Initialize our Translations engine
    self.translations = babel.support.NullTranslations()

    # Set up our Jinja2 Environment
    self.templates = jinja2.Environment(
        autoescape=True,
        auto_reload=self.config.debug,
        extensions=[
            "jinja2.ext.i18n",
        ],
        loader=jinja2.PackageLoader("warehouse"),
    )

    # Install our template filters (mostly Babel formatting helpers)
    self.templates.filters.update({
        "package_type_display": packaging_helpers.package_type_display,
        "format_number": babel.numbers.format_number,
        "format_decimal": babel.numbers.format_decimal,
        "format_percent": babel.numbers.format_percent,
        "format_date": babel.dates.format_date,
        "format_datetime": babel.dates.format_datetime,
        "format_time": babel.dates.format_time,
    })

    # Install our translations
    self.templates.install_gettext_translations(
        self.translations,
        newstyle=True,
    )

    # Set up our password hasher
    self.passlib = passlib.context.CryptContext(
        schemes=[
            "bcrypt_sha256",
            "bcrypt",
            "django_bcrypt",
            "unix_disabled",
        ],
        default="bcrypt_sha256",
        deprecated=["auto"],
    )

    # Set up our session storage
    self.session_store = RedisSessionStore(
        self.redises["sessions"],
        session_class=Session,
    )

    # Add our Content Security Policy Middleware
    img_src = ["'self'"]
    if self.config.camo:
        camo_parsed = urllib.parse.urlparse(self.config.camo.url)
        img_src += [
            "{}://{}".format(camo_parsed.scheme, camo_parsed.netloc),
            "https://secure.gravatar.com",
        ]
    else:
        img_src += ["*"]

    self.wsgi_app = guard.ContentSecurityPolicy(
        self.wsgi_app,
        {
            "default-src": ["'self'"],
            "font-src": ["'self'", "data:"],
            "img-src": img_src,
            "style-src": ["'self'", "cloud.typography.com"],
        },
    )

    if "sentry" in self.config:
        self.wsgi_app = Sentry(self.wsgi_app, Client(**self.config.sentry))

    # Serve the static files that are packaged as part of Warehouse
    self.wsgi_app = WhiteNoise(
        self.wsgi_app,
        root=self.static_dir,
        prefix=self.static_path,
        max_age=31557600,
    )

    # Add our Powered By Middleware
    self.wsgi_app = HeaderRewriterFix(
        self.wsgi_app,
        add_headers=[
            (
                "X-Powered-By",
                "Warehouse {__version__} ({__build__})".format(
                    __version__=warehouse.__version__,
                    __build__=warehouse.__build__,
                ),
            ),
        ],
    )

    # Previously PyPI used a hand-written dispatch method which depended on
    # things like the request's content type or URL parameters. In order to
    # sanely support that in Warehouse we use this middleware to rewrite
    # those requests to "internal" URLs which we can then dispatch based on.
    self.wsgi_app = LegacyRewriteMiddleware(self.wsgi_app)

    # This is last because we want it processed first in the stack of
    # middlewares. It ensures that we strip X-Forwarded-* headers if the
    # request doesn't come from Fastly.
    self.wsgi_app = XForwardedTokenMiddleware(
        self.wsgi_app,
        self.config.site.access_token,
    )

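# The comment above describes XForwardedTokenMiddleware only at a high level.
# As a rough illustration (not the real implementation), a WSGI middleware
# with that behaviour could compare a shared token sent by Fastly and drop
# any X-Forwarded-* headers when it is missing or wrong. The header name
# "X-Warehouse-Access-Token" below is a made-up placeholder.
class XForwardedTokenMiddlewareSketch:

    def __init__(self, app, access_token):
        self.app = app
        self.access_token = access_token

    def __call__(self, environ, start_response):
        # WSGI surfaces request headers as HTTP_* keys in the environ
        token = environ.get("HTTP_X_WAREHOUSE_ACCESS_TOKEN")
        if token != self.access_token:
            # Request did not come through Fastly; strip the spoofable
            # X-Forwarded-* headers before anything downstream trusts them.
            forwarded = [
                key for key in environ if key.startswith("HTTP_X_FORWARDED_")
            ]
            for key in forwarded:
                del environ[key]
        return self.app(environ, start_response)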