Example #1
from werkzeug.contrib.fixers import ProxyFix, HeaderRewriterFix


def make_server(config, debug=False):
    app = create_server(config, debug)
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.wsgi_app = HeaderRewriterFix(app.wsgi_app,
                                     remove_headers=['Date'],
                                     add_headers=[('X-Powered-By', 'WSGI')])
    return app
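A minimal usage sketch (not part of the example's project): it assumes an older Werkzeug (< 1.0), where werkzeug.contrib.fixers still ships HeaderRewriterFix; plain_app and the sample Date value are illustrative only.

from werkzeug.contrib.fixers import HeaderRewriterFix
from werkzeug.test import Client
from werkzeug.wrappers import Response


def plain_app(environ, start_response):
    # The inner app sets a Date header so the removal is observable.
    response = Response("ok", headers={"Date": "Tue, 01 Jan 2019 00:00:00 GMT"})
    return response(environ, start_response)


wrapped = HeaderRewriterFix(plain_app,
                            remove_headers=['Date'],
                            add_headers=[('X-Powered-By', 'WSGI')])

client = Client(wrapped, Response)
resp = client.get('/')
assert 'Date' not in resp.headers
assert resp.headers['X-Powered-By'] == 'WSGI'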
Example #2
def setup(settings=None):
    global config

    try:
        config = startup.read_settings(
            filename=settings,
            defs={
                "name": ["--process_num", "--process"],
                "help": "Additional port offset (for multiple Flask processes)",
                "type": int,
                "dest": "process_num",
                "default": 0,
                "required": False
            })
        constants.set(config.constants)
        Log.start(config.debug)

        if config.args.process_num and config.flask.port:
            config.flask.port += config.args.process_num

        # PIPE REQUEST LOGS TO ES DEBUG
        if config.request_logs:
            request_logger = elasticsearch.Cluster(
                config.request_logs).get_or_create_index(config.request_logs)
            active_data.request_log_queue = request_logger.threaded_queue(
                max_size=2000)

        # SETUP DEFAULT CONTAINER, SO THERE IS SOMETHING TO QUERY
        containers.config.default = {
            "type": "elasticsearch",
            "settings": config.elasticsearch.copy()
        }

        # TURN ON /exit FOR WINDOWS DEBUGGING
        if config.flask.debug or config.flask.allow_exit:
            config.flask.allow_exit = None
            Log.warning("ActiveData is in debug mode")
            app.add_url_rule('/exit', 'exit', _exit)

        # TRIGGER FIRST INSTANCE
        FromESMetadata(config.elasticsearch)
        if config.saved_queries:
            setattr(save_query, "query_finder",
                    SaveQueries(config.saved_queries))
        HeaderRewriterFix(app, remove_headers=['Date', 'Server'])

        if config.flask.ssl_context:
            if config.args.process_num:
                Log.error(
                    "can not serve ssl and multiple Flask instances at once")
            setup_ssl()

        return app
    except Exception as e:
        Log.error(
            "Serious problem with ActiveData service construction!  Shutdown!",
            cause=e)
Example #3
def main():
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument(
            *["--settings", "--settings-file", "--settings_file"], **{
                "help": "path to JSON file with settings",
                "type": str,
                "dest": "filename",
                "default": "./settings.json",
                "required": False
            })
        namespace = parser.parse_args()
        args = {k: getattr(namespace, k) for k in vars(namespace)}

        if not os.path.exists(args["filename"]):
            raise Except("Can not find settings file {filename}".format(
                filename=args["filename"]))

        with codecs.open(args["filename"], "r", encoding="utf-8") as file:
            json_data = file.read()
        globals()["settings"] = json.loads(json_data)
        settings["args"] = args
        settings["whitelist"] = listwrap(settings.get("whitelist", None))

        globals()["logger"] = logging.getLogger('esFrontLine')
        logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

        for d in listwrap(settings["debug"]["log"]):
            if d.get("filename", None):
                fh = RotatingFileHandler(**d)
                fh.setLevel(logging.DEBUG)
                fh.setFormatter(formatter)
                logger.addHandler(fh)
            elif d.get("stream", None) in ("sys.stdout", "sys.stderr"):
                ch = logging.StreamHandler(stream=eval(d["stream"]))
                ch.setLevel(logging.DEBUG)
                ch.setFormatter(formatter)
                logger.addHandler(ch)

        HeaderRewriterFix(app, remove_headers=['Date', 'Server'])
        app.run(**settings["flask"])
    except Exception as e:
        print(str(e))
Example #4
    def __init__(self,
                 listen_port,
                 handlers,
                 parameters,
                 shard=0,
                 listen_address=""):
        super(WebService, self).__init__(shard)

        static_files = parameters.pop('static_files', [])
        rpc_enabled = parameters.pop('rpc_enabled', False)
        rpc_auth = parameters.pop('rpc_auth', None)
        auth_middleware = parameters.pop('auth_middleware', None)
        is_proxy_used = parameters.pop('is_proxy_used', False)

        self.wsgi_app = tornado.wsgi.WSGIApplication(handlers, **parameters)
        self.wsgi_app.service = self

        for entry in static_files:
            self.wsgi_app = SharedDataMiddleware(self.wsgi_app,
                                                 {"/static": entry})

        if rpc_enabled:
            self.wsgi_app = DispatcherMiddleware(
                self.wsgi_app, {"/rpc": RPCMiddleware(self, rpc_auth)})

        # Remove any authentication header that a user may try to fake.
        self.wsgi_app = HeaderRewriterFix(
            self.wsgi_app,
            remove_headers=[WebService.AUTHENTICATED_USER_HEADER])

        if auth_middleware is not None:
            self.wsgi_app = auth_middleware(self.wsgi_app)

        # If is_proxy_used is set to True we'll use the content of the
        # X-Forwarded-For HTTP header (if provided) to determine the
        # client IP address, ignoring the one the request came from.
        # This allows the IP lock to be used behind a proxy. Activate it
        # only if all requests come from a trusted source (if clients
        # were allowed to communicate directly with the server they
        # could fake their IP and compromise the security of the IP lock).
        if is_proxy_used:
            self.wsgi_app = ProxyFix(self.wsgi_app)

        self.web_server = WSGIServer((listen_address, listen_port),
                                     self.wsgi_app)
Example #5
    def create_wsgi(app, **kwargs):
        application = wsgi_factory(app, **kwargs)

        # Remove X-Forwarded-For headers because Flask-Security doesn't know
        # how to deal with them properly. Note REMOTE_ADDR has already been
        # set correctly at this point by the ``wsgi_proxyfix`` factory.
        if app.config.get('WSGI_PROXIES'):
            application = HeaderRewriterFix(application,
                                            remove_headers=['X-Forwarded-For'])

        host = app.config.get('STATSD_HOST')
        port = app.config.get('STATSD_PORT', 8125)
        prefix = app.config.get('STATSD_PREFIX')

        if host and port and prefix:
            client = StatsClient(prefix=prefix, host=host, port=port)
            return StatsdTimingMiddleware(application, client)
        return application
Example #6
def setup():
    global config

    config = startup.read_settings(
        filename=os.environ.get('ACTIVEDATA_CONFIG'),
        defs=[
            {
                "name": ["--process_num", "--process"],
                "help": "Additional port offset (for multiple Flask processes",
                "type": int,
                "dest": "process_num",
                "default": 0,
                "required": False
            }
        ]
    )

    constants.set(config.constants)
    Log.start(config.debug)

    # PIPE REQUEST LOGS TO ES DEBUG
    if config.request_logs:
        cluster = elasticsearch.Cluster(config.request_logs)
        request_logger = cluster.get_or_create_index(config.request_logs)
        active_data.request_log_queue = request_logger.threaded_queue(max_size=2000)

    if config.dockerflow:
        def backend_check():
            http.get_json(config.elasticsearch.host + ":" + text_type(config.elasticsearch.port))
        dockerflow(flask_app, backend_check)


    # SETUP DEFAULT CONTAINER, SO THERE IS SOMETHING TO QUERY
    container.config.default = {
        "type": "elasticsearch",
        "settings": config.elasticsearch.copy()
    }

    # TRIGGER FIRST INSTANCE
    if config.saved_queries:
        setattr(save_query, "query_finder", SaveQueries(config.saved_queries))

    HeaderRewriterFix(flask_app, remove_headers=['Date', 'Server'])
Example #7
spheres = cycle(
    ['../assets/' + x for x in os.listdir('../assets') if 'JPG' in x])


class SnapShots(WebApp):
    def endpoint_get_image(self, adapter, request, **values):
        (rgb, ) = c.execute('select rgb from snapshot where id={}'.format(
            next(alright)))
        # rgb = np.fromstring(rgb[0], dtype=np.uint8)
        with open(rgb[0], 'rb') as f:
            return Response(f.read(), status=200)

    def endpoint_get_sphere(self, adapter, request, **values):
        s = next(spheres)
        with open(s, 'rb') as f:
            return Response(f.read(), status=200)

    url_map = Map([
        Rule('/get_image', endpoint='get_image', methods=['GET']),
        Rule('/get_sphere', endpoint='get_sphere', methods=['GET'])
    ])


app = SharedDataMiddleware(SnapShots(), {'/': '../assets'}, cache=True)
app = HeaderRewriterFix(app,
                        add_headers=[('Access-Control-Allow-Origin', '*'),
                                     ('Content-Security-Policy', '*')])

http = WSGIServer(('0.0.0.0', 8080), app)
http.serve_forever()
Example #8
    def __init__(self, config, engine=None, redis_class=redis.StrictRedis):
        self.config = AttributeDict(config)

        self.metadata = db.metadata

        # configure logging
        logging.config.dictConfig(self.config.logging)

        # Connect to the database
        if engine is None and self.config.get("database", {}).get("url"):
            engine = sqlalchemy.create_engine(self.config.database.url)
        self.engine = engine

        # Create our redis connections
        self.redises = {
            key: redis_class.from_url(url)
            for key, url in self.config.redis.items()
        }

        # Create our Store instance and associate our store modules with it
        self.db = AttributeDict()
        for name, klass in self.db_classes.items():
            self.db[name] = klass(
                self,
                self.metadata,
                self.engine,
                self.redises["downloads"],
            )

        # Create our Search Index instance and associate our mappings with it
        self.search = Index(self.db, self.config.search)
        self.search.register(ProjectMapping)

        # Set up our URL routing
        self.urls = urls.urls

        # Initialize our Translations engine
        self.translations = babel.support.NullTranslations()

        # Setup our Jinja2 Environment
        self.templates = jinja2.Environment(
            autoescape=True,
            auto_reload=self.config.debug,
            extensions=[
                "jinja2.ext.i18n",
            ],
            loader=jinja2.PackageLoader("warehouse"),
        )

        # Register Babel formatting helpers as Jinja2 template filters
        self.templates.filters.update({
            "package_type_display": packaging_helpers.package_type_display,
            "format_number": babel.numbers.format_number,
            "format_decimal": babel.numbers.format_decimal,
            "format_percent": babel.numbers.format_percent,
            "format_date": babel.dates.format_date,
            "format_datetime": babel.dates.format_datetime,
            "format_time": babel.dates.format_time,
        })

        # Install our translations
        self.templates.install_gettext_translations(
            self.translations,
            newstyle=True,
        )

        # Setup our password hasher
        self.passlib = passlib.context.CryptContext(
            schemes=[
                "bcrypt_sha256",
                "bcrypt",
                "django_bcrypt",
                "unix_disabled",
            ],
            default="bcrypt_sha256",
            deprecated=["auto"],
        )

        # Setup our session storage
        self.session_store = RedisSessionStore(
            self.redises["sessions"],
            session_class=Session,
        )

        # Add our Content Security Policy Middleware
        img_src = ["'self'"]
        if self.config.camo:
            camo_parsed = urllib.parse.urlparse(self.config.camo.url)
            img_src += [
                "{}://{}".format(camo_parsed.scheme, camo_parsed.netloc),
                "https://secure.gravatar.com",
            ]
        else:
            img_src += ["*"]

        self.wsgi_app = guard.ContentSecurityPolicy(
            self.wsgi_app,
            {
                "default-src": ["'self'"],
                "font-src": ["'self'", "data:"],
                "img-src": img_src,
                "style-src": ["'self'", "cloud.typography.com"],
            },
        )

        if "sentry" in self.config:
            self.wsgi_app = Sentry(self.wsgi_app, Client(**self.config.sentry))

        # Serve the static files that are packaged as part of Warehouse
        self.wsgi_app = WhiteNoise(
            self.wsgi_app,
            root=self.static_dir,
            prefix=self.static_path,
            max_age=31557600,
        )

        # Add our Powered By Middleware
        self.wsgi_app = HeaderRewriterFix(
            self.wsgi_app,
            add_headers=[
                (
                    "X-Powered-By",
                    "Warehouse {__version__} ({__build__})".format(
                        __version__=warehouse.__version__,
                        __build__=warehouse.__build__,
                    ),
                ),
            ],
        )

        # Previously PyPI used a hand-written dispatch method which depended
        # on things like the request's content type or URL parameters. In order
        # to sanely support that in Warehouse we use this middleware to rewrite
        # those to "internal" URLs which we can then dispatch based on.
        self.wsgi_app = LegacyRewriteMiddleware(self.wsgi_app)

        # This is last because we want it processed first in the stack of
        # middlewares. This will ensure that we strip X-Forwarded-* headers
        # if the request doesn't come from Fastly
        self.wsgi_app = XForwardedTokenMiddleware(
            self.wsgi_app,
            self.config.site.access_token,
        )
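The closing comment in this example ("This is last because we want it processed first") relies on how WSGI wrapping composes: each middleware wraps the previous app, so the wrapper applied last is the outermost one and sees the request first. A minimal, self-contained sketch of that ordering (the names are illustrative, not taken from Warehouse):

def make_tagger(tag):
    # Returns a middleware that records the order in which it sees a request
    # before delegating to the app it wraps.
    def middleware(app):
        def wrapped(environ, start_response):
            environ.setdefault("seen_by", []).append(tag)
            return app(environ, start_response)
        return wrapped
    return middleware


def app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [", ".join(environ["seen_by"]).encode()]


# The wrapper applied last is outermost and therefore processed first.
stack = make_tagger("applied_first")(app)
stack = make_tagger("applied_last")(stack)

print(stack({}, lambda status, headers: None))  # [b'applied_last, applied_first']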
Example #9
from flask import Flask
from werkzeug.contrib.fixers import LighttpdCGIRootFix, HeaderRewriterFix

app = Flask(__name__)
app.config.from_object('config')
app.wsgi_app = LighttpdCGIRootFix(app.wsgi_app)
app.wsgi_app = HeaderRewriterFix(
    app.wsgi_app,
    remove_headers=['Date'],
    add_headers=[('X-Powered-By', 'WSGI'), ('Server', 'Noname Server')])

from app import views
Example #10
                    headers={
                        "access-control-allow-origin": "*",
                        "content-type": "text/html"
                    })


if __name__ == "__main__":
    try:
        config = startup.read_settings()
        constants.set(config.constants)
        Log.start(config.debug)

        # SETUP TREEHERDER CACHE
        hg = HgMozillaOrg(use_cache=True, settings=config.hg)
        th = TreeherderService(hg, settings=config.treeherder)
        app.add_url_rule('/treeherder',
                         None,
                         th.get_treeherder_job,
                         methods=['GET'])

        HeaderRewriterFix(app, remove_headers=['Date', 'Server'])

        app.run(**config.flask)
    except Exception as e:
        Log.error("Serious problem with service construction!  Shutdown!",
                  cause=e)
    finally:
        Log.stop()

    sys.exit(0)