def app_config(
    app,
    settings="fence.settings",
    root_dir=None,
    config_path=None,
    file_name=None,
):
    """
    Set up the config for the Flask app.

    Loads settings from the given settings module into the fence config
    singleton, then overlays values from a configuration file, copies the
    merged result back into ``app.config``, and runs all per-app setup
    (arborist/audit/boto clients, keys, authlib, prometheus, storage,
    OIDC clients, and AWS/Azure checks).

    Args:
        app (flask.Flask): app to configure
        settings (str): import path of the settings module to load first
        root_dir (str): directory containing the ``keys/`` folder; defaults
            to the parent of this file's directory
        config_path (str): explicit path to a configuration file, if any
        file_name (str): configuration file name to search for when no
            explicit path is given
    """
    if root_dir is None:
        # default to the repository root (parent of this file's directory)
        root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    logger.info("Loading settings...")
    # not using app.config.from_object because we don't want all the extra flask cfg
    # vars inside our singleton when we pass these through in the next step
    settings_cfg = flask.Config(app.config.root_path)
    settings_cfg.from_object(settings)

    # dump the settings into the config singleton before loading a configuration file
    config.update(dict(settings_cfg))

    # load the configuration file, this overwrites anything from settings/local_settings
    config.load(
        config_path=config_path,
        search_folders=CONFIG_SEARCH_FOLDERS,
        file_name=file_name,
    )

    # load all config back into flask app config for now, we should PREFER getting config
    # directly from the fence config singleton in the code though.
    app.config.update(**config._configs)

    # per-app service clients and key material; order matters: keys must be
    # loaded before the authlib configuration reads the default private key
    _setup_arborist_client(app)
    _setup_audit_service_client(app)
    _setup_data_endpoint_and_boto(app)
    _load_keys(app, root_dir)
    _set_authlib_cfgs(app)

    app.prometheus_counters = {}
    if config["ENABLE_PROMETHEUS_METRICS"]:
        logger.info("Enabling Prometheus metrics...")
        _setup_prometheus(app)
    else:
        logger.info("Prometheus metrics are NOT enabled.")

    app.storage_manager = StorageManager(config["STORAGE_CREDENTIALS"], logger=logger)

    app.debug = config["DEBUG"]
    # Following will update logger level, propagate, and handlers
    get_logger(__name__, log_level="debug" if config["DEBUG"] is True else "info")

    _setup_oidc_clients(app)

    # these checks need an application context to run
    with app.app_context():
        _check_aws_creds_and_region(app)
        _check_azure_storage(app)
def restore_config():
    """
    Snapshot the config singleton before a test runs, then restore the
    snapshot at teardown so tests cannot leak config changes into each
    other.
    """
    # deep copy so nested dicts inside the config are insulated from
    # in-place mutation by the test
    snapshot = copy.deepcopy(config._configs)

    yield

    # teardown: push the pre-test values back into the singleton
    config.update(snapshot)
def config_idp_in_client(
    app, db_session, kid_2, rsa_private_key_2, rsa_public_key_2, restore_config
):
    """
    Set info about this fence's (client fence's) IDP in config.
    Reset when done.

    Swaps the app's keypairs, JWT public keys, and DB session for
    test-specific values, configures a multi-tenant "fence" IDP in the
    config singleton, and yields the other fence's client credentials.
    The ``restore_config`` fixture handles undoing the config changes;
    the app-level attributes are restored explicitly at teardown.
    """
    # replace the app's signing keypair with the test fixture keypair
    saved_keypairs = app.keypairs
    keypair = Keypair(
        kid=kid_2, public_key=rsa_public_key_2, private_key=rsa_private_key_2
    )
    app.keypairs = [keypair]

    # publish the matching public key under the (test) base URL "/"
    saved_jwtpks = app.jwt_public_keys
    app.jwt_public_keys["/"] = OrderedDict([(kid_2, rsa_public_key_2)])

    # route all DB access through the test session
    saved_db_Session = app.db.Session
    app.db.Session = lambda: db_session

    config.update(
        {
            "BASE_URL": "/",
            "MOCK_AUTH": False,
            "DEFAULT_LOGIN_IDP": "fence",
            "LOGIN_OPTIONS": [
                {
                    "name": "InCommon login",
                    "idp": "fence",
                    "fence_idp": "shibboleth",
                    "shib_idps": ["some-incommon-entity-id"],
                }
            ],
            "OPENID_CONNECT": {
                "fence": {
                    "client_id": "other_fence_client_id",
                    "client_secret": "other_fence_client_secret",
                    "api_base_url": "http://other-fence",
                    "authorize_url": "http://other-fence/oauth2/authorize",
                }
            },
        }
    )
    app.fence_client = OAuthClient(**config["OPENID_CONNECT"]["fence"])

    # hand the test the other fence's client credentials
    yield Dict(
        client_id=config["OPENID_CONNECT"]["fence"]["client_id"],
        client_secret=config["OPENID_CONNECT"]["fence"]["client_secret"],
    )

    # teardown: restore the app attributes we replaced above
    app.keypairs = saved_keypairs
    app.jwt_public_keys = saved_jwtpks
    app.db.Session = saved_db_Session
def _set_authlib_cfgs(app):
    """
    Configure authlib OAuth2/OIDC settings on both the fence config
    singleton and the Flask app config.

    The JWT signing key is always (re)set from the app's default private
    key; the remaining settings are defaults that are only applied when
    not already provided.

    Args:
        app (flask.Flask): app whose config is updated in place
    """
    # authlib OIDC settings
    # key will need to be added
    settings = {"OAUTH2_JWT_KEY": keys.default_private_key(app)}
    app.config.update(settings)
    config.update(settings)

    # only add the following if not already provided; apply the same
    # defaults to both the config singleton and the flask app config from
    # a single dict so the two copies cannot drift apart
    defaults = {
        "OAUTH2_JWT_ENABLED": True,
        "OAUTH2_JWT_ALG": "RS256",
        "OAUTH2_JWT_ISS": app.config["BASE_URL"],
        "OAUTH2_PROVIDER_ERROR_URI": "/api/oauth2/errors",
    }
    for key, value in defaults.items():
        config.setdefault(key, value)
        app.config.setdefault(key, value)
def test_google_link_redirect_no_google_idp(
    client, app, restore_config, encoded_creds_jwt
):
    """
    Test that even if Google is not configured as an IDP, when we hit the link
    endpoint with valid creds, we get a redirect response.
    This should be redirecting to google's oauth

    The ``restore_config`` fixture undoes the config override at teardown.
    """
    # Don't include google in the enabled idps, but leave it configured
    # in the openid connect clients:
    override_settings = {
        "ENABLED_IDENTITY_PROVIDERS": {
            # ID for which of the providers to default to.
            "default": "fence",
            # Information for identity providers.
            "providers": {
                "fence": {"name": "Fence Multi-Tenant OAuth"},
                "shibboleth": {"name": "NIH Login"},
            },
        },
        "OPENID_CONNECT": {
            "google": {
                "client_id": "123",
                "client_secret": "456",
                "redirect_url": "789",
            }
        },
    }
    config.update(override_settings)

    encoded_credentials_jwt = encoded_creds_jwt["jwt"]
    redirect = "http://localhost"
    r = client.get(
        "/link/google",
        query_string={"redirect": redirect},
        headers={"Authorization": "Bearer " + encoded_credentials_jwt},
    )
    assert r.status_code == 302

    # only the URL (not the query params, which contain per-request state)
    # is compared against google's auth URL, so discard the params
    url, _ = split_url_and_query_params(r.location)
    google_url, _ = split_url_and_query_params(app.google_client.get_auth_url())
    assert google_url == url
def app(kid, rsa_private_key, rsa_public_key):
    """
    Flask application fixture.

    Initializes the shared ``fence.app`` with the test settings/config,
    installs the fixture RSA keypair as the default signing key, and
    yields the app. Mocked functions are unmocked at teardown.
    """
    mocker = Mocker()
    mocker.mock_functions()
    root_dir = os.path.dirname(os.path.realpath(__file__))

    # delete the record operation from the data blueprint, because right now it calls a
    # whole bunch of stuff on the arborist client to do some setup for the uploader role
    fence.blueprints.data.blueprint.deferred_functions = [
        f
        for f in fence.blueprints.data.blueprint.deferred_functions
        if f.__name__ != "record"
    ]
    app_init(
        fence.app,
        test_settings,
        root_dir=root_dir,
        config_path=os.path.join(root_dir, "test-fence-config.yaml"),
    )

    # We want to set up the keys so that the test application can load keys
    # from the test keys directory, but the default keypair used will be the
    # one using the fixtures. So, stick the keypair at the front of the
    # keypairs list and reverse the ordered dictionary of public keys after
    # inserting the fixture keypair.
    fixture_keypair = Keypair(
        kid=kid, public_key=rsa_public_key, private_key=rsa_private_key
    )
    fence.app.keypairs = [fixture_keypair] + fence.app.keypairs
    fence.app.jwt_public_keys[config["BASE_URL"]][kid] = rsa_public_key
    # reversing puts the just-inserted fixture key first in iteration order
    fence.app.jwt_public_keys[config["BASE_URL"]] = OrderedDict(
        reversed(list(fence.app.jwt_public_keys[config["BASE_URL"]].items()))
    )

    config.update(BASE_URL=config["BASE_URL"])
    # fresh encryption key per test session
    config.update(ENCRYPTION_KEY=Fernet.generate_key().decode("utf-8"))

    yield fence.app

    # teardown: undo the function mocks installed above
    mocker.unmock_functions()
def app_config(
    app, settings="fence.settings", root_dir=None, config_path=None, file_name=None
):
    """
    Set up the config for the Flask app.

    Loads the settings module into the fence config singleton, overlays a
    configuration file on top, copies the merged config into
    ``app.config``, and runs per-app setup (arborist client, data/boto
    endpoints, keys, authlib, storage, logging, OIDC clients).

    Args:
        app (flask.Flask): app to configure
        settings (str): import path of the settings module to load first
        root_dir (str): directory containing the ``keys/`` folder; defaults
            to the parent of this file's directory
        config_path (str): explicit path to a configuration file, if any
        file_name (str): configuration file name to search for
    """
    if root_dir is None:
        root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    logger.info("Loading settings...")
    # not using app.config.from_object because we don't want all the extra flask cfg
    # vars inside our singleton when we pass these through in the next step
    settings_cfg = flask.Config(app.config.root_path)
    settings_cfg.from_object(settings)

    # dump the settings into the config singleton before loading a configuration file
    config.update(dict(settings_cfg))

    # load the configuration file, this overwrites anything from settings/local_settings.
    # use keyword args so the call cannot silently misbind if config.load
    # grows parameters (e.g. a search_folders parameter) between them
    config.load(config_path=config_path, file_name=file_name)

    # load all config back into flask app config for now, we should PREFER getting config
    # directly from the fence config singleton in the code though.
    app.config.update(**config._configs)

    _setup_arborist_client(app)
    _setup_data_endpoint_and_boto(app)
    _load_keys(app, root_dir)
    _set_authlib_cfgs(app)

    app.storage_manager = StorageManager(config["STORAGE_CREDENTIALS"], logger=logger)

    app.debug = config["DEBUG"]
    # Following will update logger level, propagate, and handlers
    get_logger(__name__, log_level="debug" if config["DEBUG"] is True else "info")

    _setup_oidc_clients(app)
def main():
    """
    Entry point for the fence admin CLI.

    Parses arguments, loads configuration (environment variables take
    precedence over the config file / settings), then dispatches on
    ``args.action`` to the matching admin operation (client management,
    user sync, Google bucket/key management, token creation, migrations,
    visa updates, etc.).
    """
    args = parse_arguments()

    # get database information
    sys.path.append(args.path)

    # replicate cfg loading done in flask app to maintain backwards compatibility
    # TODO (DEPRECATE LOCAL_SETTINGS): REMOVE this when putting cfg in
    # settings/local_settings is deprecated
    import flask

    settings_cfg = flask.Config(".")
    settings_cfg.from_object("fence.settings")
    config.update(dict(settings_cfg))
    # END - TODO (DEPRECATE LOCAL_SETTINGS): REMOVE

    config.load(search_folders=CONFIG_SEARCH_FOLDERS)

    # environment variables override config values throughout this section
    DB = os.environ.get("FENCE_DB") or config.get("DB")

    # attempt to get from settings, this is backwards-compatibility for integration
    # tests
    if DB is None:
        try:
            from fence.settings import DB
        except ImportError:
            pass

    BASE_URL = os.environ.get("BASE_URL") or config.get("BASE_URL")
    ROOT_DIR = os.environ.get("ROOT_DIR") or os.path.dirname(
        os.path.dirname(os.path.realpath(__file__))
    )
    dbGaP = os.environ.get("dbGaP") or config.get("dbGaP")
    # downstream sync code expects a list of dbGaP configs
    if not isinstance(dbGaP, list):
        dbGaP = [dbGaP]
    STORAGE_CREDENTIALS = os.environ.get("STORAGE_CREDENTIALS") or config.get(
        "STORAGE_CREDENTIALS"
    )
    usersync = config.get("USERSYNC", {})
    sync_from_visas = usersync.get("sync_from_visas", False)
    fallback_to_dbgap_sftp = usersync.get("fallback_to_dbgap_sftp", False)

    # optional arborist client, only built when an arborist URL was given
    arborist = None
    if args.arborist:
        arborist = ArboristClient(
            arborist_base_url=args.arborist,
            logger=get_logger("user_syncer.arborist_client"),
            authz_provider="user-sync",
        )

    # dispatch on the requested admin action
    if args.action == "create":
        # argparse stores the positional as "yaml-file-path", which is not a
        # valid attribute name, hence the __dict__ lookup
        yaml_input = args.__dict__["yaml-file-path"]
        create_sample_data(DB, yaml_input)
    elif args.action == "client-create":
        confidential = not args.public
        create_client_action(
            DB,
            username=args.username,
            client=args.client,
            urls=args.urls,
            auto_approve=args.auto_approve,
            grant_types=args.grant_types,
            confidential=confidential,
            arborist=arborist,
            policies=args.policies,
            allowed_scopes=args.allowed_scopes,
        )
    elif args.action == "client-modify":
        modify_client_action(
            DB,
            client=args.client,
            delete_urls=args.delete_urls,
            urls=args.urls,
            name=args.name,
            description=args.description,
            set_auto_approve=args.set_auto_approve,
            unset_auto_approve=args.unset_auto_approve,
            arborist=arborist,
            policies=args.policies,
            allowed_scopes=args.allowed_scopes,
            append=args.append,
        )
    elif args.action == "client-delete":
        delete_client_action(DB, args.client)
    elif args.action == "client-list":
        list_client_action(DB)
    elif args.action == "user-delete":
        delete_users(DB, args.users)
    elif args.action == "expired-service-account-delete":
        delete_expired_service_accounts(DB)
    elif args.action == "bucket-access-group-verify":
        verify_bucket_access_group(DB)
    elif args.action == "sync":
        sync_users(
            dbGaP,
            STORAGE_CREDENTIALS,
            DB,
            projects=args.project_mapping,
            is_sync_from_dbgap_server=str2bool(args.sync_from_dbgap),
            sync_from_local_csv_dir=args.csv_dir,
            sync_from_local_yaml_file=args.yaml,
            folder=args.folder,
            arborist=arborist,
            sync_from_visas=sync_from_visas,
            fallback_to_dbgap_sftp=fallback_to_dbgap_sftp,
        )
    elif args.action == "dbgap-download-access-files":
        download_dbgap_files(
            dbGaP,
            STORAGE_CREDENTIALS,
            DB,
            folder=args.folder,
        )
    elif args.action == "google-manage-keys":
        remove_expired_google_service_account_keys(DB)
    elif args.action == "google-init":
        google_init(DB)
    elif args.action == "google-manage-user-registrations":
        verify_user_registration(DB)
    elif args.action == "google-manage-account-access":
        remove_expired_google_accounts_from_proxy_groups(DB)
    elif args.action == "google-bucket-create":
        # true if true provided, false if anything else provided, leave as
        # None if not provided at all (policy will remain unchanged)
        if args.public and args.public.lower().strip() == "true":
            args.public = True
        elif args.public is not None:
            args.public = False
        create_or_update_google_bucket(
            DB,
            args.unique_name,
            storage_class=args.storage_class,
            public=args.public,
            requester_pays=args.requester_pays,
            google_project_id=args.google_project_id,
            project_auth_id=args.project_auth_id,
            access_logs_bucket=args.access_logs_bucket,
            allowed_privileges=args.allowed_privileges,
        )
    elif args.action == "google-logging-bucket-create":
        create_google_logging_bucket(
            args.unique_name,
            storage_class=args.storage_class,
            google_project_id=args.google_project_id,
        )
    elif args.action == "link-external-bucket":
        link_external_bucket(DB, name=args.bucket_name)
    elif args.action == "link-bucket-to-project":
        link_bucket_to_project(
            DB,
            bucket_id=args.bucket_id,
            bucket_provider=args.bucket_provider,
            project_auth_id=args.project_auth_id,
        )
    elif args.action == "google-list-authz-groups":
        google_list_authz_groups(DB)
    elif args.action == "token-create":
        # "keys-dir" contains a dash, so getattr is used instead of attribute
        # access; default to the repo's keys/ directory
        keys_path = getattr(args, "keys-dir", os.path.join(ROOT_DIR, "keys"))
        keypairs = keys.load_keypairs(keys_path)
        # Default to the most recent one, but try to find the keypair with
        # matching ``kid`` to the argument provided.
        keypair = keypairs[-1]
        kid = getattr(args, "kid")
        if kid:
            for try_keypair in keypairs:
                if try_keypair.kid == kid:
                    keypair = try_keypair
                    break
        jwt_creator = JWTCreator(
            DB,
            BASE_URL,
            kid=keypair.kid,
            private_key=keypair.private_key,
            username=args.username,
            scopes=args.scopes,
            expires_in=args.exp,
        )
        token_type = str(args.type).strip().lower()
        if token_type == "access_token" or token_type == "access":
            print(jwt_creator.create_access_token().token)
        elif token_type == "refresh_token" or token_type == "refresh":
            print(jwt_creator.create_refresh_token().token)
        else:
            print(
                'invalid token type "{}"; expected "access" or "refresh"'.format(
                    token_type
                )
            )
            sys.exit(1)
    elif args.action == "force-link-google":
        exp = force_update_google_link(
            DB,
            username=args.username,
            google_email=args.google_email,
            expires_in=args.expires_in,
        )
        print(exp)
    elif args.action == "notify-problem-users":
        notify_problem_users(
            DB, args.emails, args.auth_ids, args.check_linking, args.google_project_id
        )
    elif args.action == "migrate":
        migrate_database(DB)
    elif args.action == "update-visas":
        update_user_visas(
            DB,
            chunk_size=args.chunk_size,
            concurrency=args.concurrency,
            thread_pool_size=args.thread_pool_size,
            buffer_size=args.buffer_size,
        )