# Module-level imports this snippet relies on (reconstructed):
import connexion
import six
from flask import current_app
from jose.exceptions import JWTError
from keycloak.realm import KeycloakRealm
from werkzeug.exceptions import Unauthorized


def info_from_jwt(token):
    """
    Check and retrieve authentication information from a custom bearer token.
    The returned value will be passed in the 'token_info' parameter of your
    operation function, if there is one. 'sub' or 'uid' will be set in the
    'user' parameter of your operation function, if there is one.

    :param token: Token provided by the Authorization header
    :type token: str
    :return: Decoded token information or None if the token is invalid
    :rtype: dict | None
    """
    server_url = connexion.request.headers["Host"] \
        if not connexion.request.headers.get("X-Forwarded-Host") \
        else connexion.request.headers["X-Forwarded-Host"]
    server_scheme = connexion.request.scheme \
        if not connexion.request.headers.get("X-Forwarded-Proto") \
        else connexion.request.headers["X-Forwarded-Proto"]

    # Configure client
    realm = KeycloakRealm(
        server_url="{}://{}".format(server_scheme, server_url),
        realm_name=current_app.config.get('KEYCLOAK_REALM'))
    oidc_client = realm.open_id_connect(
        client_id=current_app.config.get('KEYCLOAK_CLIENT_ID'),
        client_secret=current_app.config.get('KEYCLOAK_CLIENT_SECRET'))

    # Decode token against the realm's published signing keys
    certs = oidc_client.certs()
    options = {"verify_signature": True, "verify_aud": True, "exp": True}
    try:
        return oidc_client.decode_token(token, key=certs, options=options)
    except JWTError as e:
        six.raise_from(Unauthorized, e)

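# A hedged sketch of how Connexion typically wires a bearer-info function like
# info_from_jwt; the module path in the spec is an assumption. In the OpenAPI
# spec (YAML, reproduced here as a comment):
#
#   components:
#     securitySchemes:
#       jwt:
#         type: http
#         scheme: bearer
#         bearerFormat: JWT
#         x-bearerInfoFunc: app.auth.info_from_jwt
#
# Connexion then calls info_from_jwt(token) for every request carrying an
# "Authorization: Bearer <token>" header and passes the decoded claims to the
# operation function via its token_info / user parameters.
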
def get_keycloak_users(server_url):
    realm = KeycloakRealm(
        server_url=server_url,
        realm_name=current_app.config.get('KEYCLOAK_REALM'))
    oidc_client = realm.open_id_connect(
        client_id=current_app.config.get('KEYCLOAK_CLIENT_ID'),
        client_secret=current_app.config.get('KEYCLOAK_CLIENT_SECRET'))
    credentials = oidc_client.client_credentials()
    admin_client = realm.admin.set_token(credentials.get("access_token"))
    return admin_client.realms.by_name(
        current_app.config.get('KEYCLOAK_REALM')).users

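# A hedged usage sketch of get_keycloak_users; the server URL is a placeholder,
# and iterating via .all() on the returned admin users collection is an
# assumption about python-keycloak-client's admin resource API.
users_resource = get_keycloak_users('https://sso.example.com')
for user in users_resource.all():
    print(user.get('username'))
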
def new_realm(self):
    """
    :return keycloak.realm.Realm:
    """
    return KeycloakRealm(server_url=self.server_url,
                         realm_name=self.realm_name,
                         headers={})

def __init__(self, server_url, realm_name, client_id, client_secret, **kwargs):
    self._realm = KeycloakRealm(
        server_url,
        realm_name,
    )
    self._open_id_connect = self._realm.open_id_connect(
        client_id,
        client_secret,
    )
    self._username = kwargs.get('username', None)
    self._password = kwargs.get('password', None)
    self._audience = kwargs.get('audience', None)
    self._access_token = None
    self._access_token_expire_datetime = None
    self._refresh_token = None
    self._jwt_token = None
    self._jwt_token_expire_datetime = None

def __get_server(self) -> KeycloakRealm:
    server = self.__server.get('server', None)
    if server is None:
        self.__server.update({
            'server': KeycloakRealm(server_url=self.server_url,
                                    realm_name=self.server_realm)
        })
        server = self.__server.get('server', None)
    return server

def __configure(request):
    full_uri_with_path = request.build_absolute_uri()
    parsed_full_uri_with_path = urlparse(full_uri_with_path)
    extracted_full_uri_with_path = extract(full_uri_with_path)
    current_root_uri = '{}://{}'.format(parsed_full_uri_with_path.scheme,
                                        parsed_full_uri_with_path.netloc)
    realm_name = __get_realm(request)

    master_realm = KeycloakRealm(server_url=settings.KEYCLOAK_AUTH_URI,
                                 realm_name=settings.KEYCLOAK_MASTER_REALM)
    master_realm_client = master_realm.open_id_connect(
        client_id=settings.KEYCLOAK_ADMIN_CLIENT_ID,
        client_secret=settings.KEYCLOAK_ADMIN_CLIENT_SECRET
    )
    token = master_realm_client.client_credentials()
    access_token = token['access_token']

    admin_client = master_realm.admin
    admin_client.set_token(access_token)

    clients = admin_client.realms.by_name(realm_name).clients.all()
    clientId = settings.KEYCLOAK_CLIENT_ID
    client_id = None
    for client in clients:
        if client['clientId'] == clientId:
            client_id = client['id']
            break

    client_secret = None
    if client_id is not None:
        client_secret = admin_client.realms.by_name(realm_name) \
            .clients.by_id(client_id).client_secret()['value']

    if client_secret is not None:
        KEYCLOAK_CLIENT_ID = clientId
        KEYCLOAK_CLIENT_SECRET = client_secret
        PUBLIC_URI_FOR_KEYCLOAK = current_root_uri
        __configure_oidc(
            '{}/auth/realms/{}'.format(settings.KEYCLOAK_AUTH_URI, realm_name),
            KEYCLOAK_CLIENT_ID,
            PUBLIC_URI_FOR_KEYCLOAK,
            client_secret=KEYCLOAK_CLIENT_SECRET)
        CLIENTS = OIDCClients(oc_settings)
        return CLIENTS

def __init__(self):
    self.authorization_endpoint = config.get(
        'ckan.sso.authorization_endpoint', None)
    self.client_id = config.get('ckan.sso.client_id', None)
    self.client_secret = config.get('ckan.sso.client_secret', None)
    self.realm = config.get('ckan.sso.realm', 'ckan')
    self.profile_username_field = config.get(
        'ckan.sso.profile_username_field', None)
    self.profile_fullname_field = config.get(
        'ckan.sso.profile_fullname_field', None)
    self.profile_email_field = config.get('ckan.sso.profile_email_field', None)
    self.profile_group_field = config.get('ckan.sso.profile_group_field', None)
    self.sysadmin_group_name = config.get('ckan.sso.sysadmin_group_name', None)

    realm = KeycloakRealm(server_url=self.authorization_endpoint,
                          realm_name=self.realm)
    self.oidc_client = realm.open_id_connect(
        client_id=self.client_id,
        client_secret=self.client_secret)

def __init__(self, server_url, realm_name, client_id, client_secret,
             audience=None):
    self._realm = KeycloakRealm(
        server_url,
        realm_name,
    )
    self._open_id_connect = self._realm.open_id_connect(
        client_id,
        client_secret,
    )
    self._audience = audience
    self._access_token = None
    self._access_token_expire_datetime = None
    self._refresh_token = None
    self._jwt_token = None
    self._jwt_token_expire_datetime = None

def __get_server(self) -> KeycloakRealm:
    server = self.__server.get('server', None)
    if server is None:
        server_url = ConfigurationManager.get_config_value(
            ConfigProp.auth_server_url())
        server_realm = ConfigurationManager.get_config_value(
            ConfigProp.auth_env())
        self.__server.update({
            'server': KeycloakRealm(server_url=server_url,
                                    realm_name=server_realm)
        })
        server = self.__server.get('server', None)
    return server

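# The two __get_server variants above hand-roll a one-slot cache in a dict.
# A minimal sketch of the same memoization via functools.cached_property
# (Python 3.8+); the server_url/server_realm attributes mirror the snippets,
# and the class name is invented for illustration.
from functools import cached_property

from keycloak.realm import KeycloakRealm


class CachedRealmHolder:
    def __init__(self, server_url: str, server_realm: str):
        self.server_url = server_url
        self.server_realm = server_realm

    @cached_property
    def server(self) -> KeycloakRealm:
        # Built once on first access, then cached on the instance.
        return KeycloakRealm(server_url=self.server_url,
                             realm_name=self.server_realm)
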
def refresh_well_known_oidc(realm):
    """
    Refresh the OpenID Connect .well-known configuration.

    :param django_keycloak.models.Realm realm:
    :rtype: django_keycloak.models.Realm
    """
    server_url = realm.server.internal_url or realm.server.url

    # While fetching the well-known we should not use the prepared URL
    openid_api_client = KeycloakRealm(
        server_url=server_url,
        realm_name=realm.name
    ).open_id_connect(client_id='', client_secret='')

    realm.well_known_oidc = openid_api_client.well_known.contents
    realm.save(update_fields=['_well_known_oidc'])
    return realm

def get_realm_api_client(realm):
    """
    :param django_keycloak.models.Realm realm:
    :return keycloak.realm.Realm:
    """
    headers = {}
    server_url = realm.server.url

    if realm.server.internal_url:
        # An internal URL is configured. We add some additional settings to let
        # Keycloak think that we access it using the server_url.
        server_url = realm.server.internal_url
        parsed_url = urlparse(realm.server.url)
        headers['Host'] = parsed_url.netloc

        if parsed_url.scheme == 'https':
            headers['X-Forwarded-Proto'] = 'https'

    return KeycloakRealm(server_url=server_url, realm_name=realm.name,
                         headers=headers)

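# A short usage sketch of get_realm_api_client; SimpleNamespace stands in for
# the django_keycloak Realm/Server models, and all values are placeholders.
from types import SimpleNamespace

realm = SimpleNamespace(
    name='example',
    server=SimpleNamespace(
        url='https://sso.example.com',        # public URL
        internal_url='http://keycloak:8080',  # e.g. a Docker-internal hostname
    ),
)

api_client = get_realm_api_client(realm)
# Requests now go to http://keycloak:8080 but carry "Host: sso.example.com"
# and "X-Forwarded-Proto: https", so Keycloak generates public-facing URLs.
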
def start(self):
    self.started = True

    # thread name
    threading.currentThread().setName('CORE')

    # patch modules with encoding kludge
    patch_modules()

    # init core classes
    self.notifier_providers = NotifierProviders()
    self.metadata_providers = MetadataProviders()
    self.search_providers = SearchProviders()
    self.log = Logger()
    self.config = Config()
    self.alerts = Notifications()
    self.main_db = MainDB()
    self.cache_db = CacheDB()
    self.scheduler = TornadoScheduler()
    self.wserver = WebServer()
    self.name_cache = NameCache()
    self.show_queue = ShowQueue()
    self.search_queue = SearchQueue()
    self.postprocessor_queue = PostProcessorQueue()
    self.version_updater = VersionUpdater()
    self.show_updater = ShowUpdater()
    self.daily_searcher = DailySearcher()
    self.failed_snatch_searcher = FailedSnatchSearcher()
    self.backlog_searcher = BacklogSearcher()
    self.proper_searcher = ProperSearcher()
    self.trakt_searcher = TraktSearcher()
    self.subtitle_searcher = SubtitleSearcher()
    self.auto_postprocessor = AutoPostProcessor()
    self.upnp_client = UPNPClient()
    self.quicksearch_cache = QuicksearchCache()

    # setup oidc client
    realm = KeycloakRealm(server_url='https://auth.sickrage.ca', realm_name='sickrage')
    self.oidc_client = realm.open_id_connect(
        client_id='sickrage-app',
        client_secret='5d4710b2-ca70-4d39-b5a3-0705e2c5e703')

    # Check if we need to perform a restore first
    if os.path.exists(os.path.abspath(os.path.join(self.data_dir, 'restore'))):
        success = restoreSR(os.path.abspath(os.path.join(self.data_dir, 'restore')), self.data_dir)
        print("Restoring SiCKRAGE backup: %s!\n" % ("FAILED", "SUCCESSFUL")[success])
        if success:
            shutil.rmtree(os.path.abspath(os.path.join(self.data_dir, 'restore')), ignore_errors=True)

    # migrate old database file names to new ones
    if os.path.isfile(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db'))):
        if os.path.isfile(os.path.join(self.data_dir, 'sickrage.db')):
            helpers.moveFile(
                os.path.join(self.data_dir, 'sickrage.db'),
                os.path.join(self.data_dir, '{}.bak-{}'.format(
                    'sickrage.db',
                    datetime.datetime.now().strftime('%Y%m%d_%H%M%S'))))
        helpers.moveFile(
            os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db')),
            os.path.abspath(os.path.join(self.data_dir, 'sickrage.db')))

    # load config
    self.config.load()

    # set language
    self.config.change_gui_lang(self.config.gui_lang)

    # set socket timeout
    socket.setdefaulttimeout(self.config.socket_timeout)

    # setup logger settings
    self.log.logSize = self.config.log_size
    self.log.logNr = self.config.log_nr
    self.log.logFile = os.path.join(self.data_dir, 'logs', 'sickrage.log')
    self.log.debugLogging = self.config.debug
    self.log.consoleLogging = not self.quite

    # start logger
    self.log.start()

    # user agent
    if self.config.random_user_agent:
        self.user_agent = UserAgent().random

    urlparse.uses_netloc.append('scgi')
    urllib.FancyURLopener.version = self.user_agent

    # set torrent client web url
    torrent_webui_url(True)

    # Check available space
    try:
        total_space, available_space = getFreeSpace(self.data_dir)
        if available_space < 100:
            self.log.error('Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data '
                           'otherwise. Only %sMB left', available_space)
            return
    except Exception:
        self.log.error('Failed getting disk space: %s', traceback.format_exc())

    # perform database startup actions
    for db in [self.main_db, self.cache_db]:
        # initialize database
        db.initialize()

        # check integrity of database
        db.check_integrity()

        # migrate database
        db.migrate()

        # misc database cleanups
        db.cleanup()

        # upgrade database
        db.upgrade()

    # compact main database
    if self.config.last_db_compact < time.time() - 604800:  # 7 days
        self.main_db.compact()
        self.config.last_db_compact = int(time.time())

    # load name cache
    self.name_cache.load()

    # load data for shows from database
    self.load_shows()

    if self.config.default_page not in ('schedule', 'history', 'IRC'):
        self.config.default_page = 'home'

    # cleanup cache folder
    for folder in ['mako', 'sessions', 'indexers']:
        try:
            shutil.rmtree(os.path.join(sickrage.app.cache_dir, folder), ignore_errors=True)
        except Exception:
            continue

    # init anidb connection
    if self.config.use_anidb:
        def anidb_logger(msg):
            return self.log.debug("AniDB: {} ".format(msg))

        try:
            self.adba_connection = adba.Connection(keepAlive=True, log=anidb_logger)
            self.adba_connection.auth(self.config.anidb_username, self.config.anidb_password)
        except Exception as e:
            self.log.warning("AniDB exception msg: %r " % repr(e))

    if self.config.web_port < 21 or self.config.web_port > 65535:
        self.config.web_port = 8081

    if not self.config.web_cookie_secret:
        self.config.web_cookie_secret = generate_secret()

    # attempt to help prevent users from breaking links by using a bad url
    if not self.config.anon_redirect.endswith('?'):
        self.config.anon_redirect = ''

    if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', self.config.root_dirs):
        self.config.root_dirs = ''

    self.config.naming_force_folders = check_force_season_folders()

    if self.config.nzb_method not in ('blackhole', 'sabnzbd', 'nzbget'):
        self.config.nzb_method = 'blackhole'

    if self.config.torrent_method not in ('blackhole', 'utorrent', 'transmission', 'deluge', 'deluged',
                                          'download_station', 'rtorrent', 'qbittorrent', 'mlnet', 'putio'):
        self.config.torrent_method = 'blackhole'

    if self.config.autopostprocessor_freq < self.config.min_autopostprocessor_freq:
        self.config.autopostprocessor_freq = self.config.min_autopostprocessor_freq

    if self.config.daily_searcher_freq < self.config.min_daily_searcher_freq:
        self.config.daily_searcher_freq = self.config.min_daily_searcher_freq

    self.config.min_backlog_searcher_freq = get_backlog_cycle_time()
    if self.config.backlog_searcher_freq < self.config.min_backlog_searcher_freq:
        self.config.backlog_searcher_freq = self.config.min_backlog_searcher_freq

    if self.config.version_updater_freq < self.config.min_version_updater_freq:
        self.config.version_updater_freq = self.config.min_version_updater_freq

    if self.config.subtitle_searcher_freq < self.config.min_subtitle_searcher_freq:
        self.config.subtitle_searcher_freq = self.config.min_subtitle_searcher_freq

    if self.config.failed_snatch_age < self.config.min_failed_snatch_age:
        self.config.failed_snatch_age = self.config.min_failed_snatch_age

    if self.config.proper_searcher_interval not in ('15m', '45m', '90m', '4h', 'daily'):
        self.config.proper_searcher_interval = 'daily'

    if self.config.showupdate_hour < 0 or self.config.showupdate_hour > 23:
        self.config.showupdate_hour = 0

    if self.config.subtitles_languages[0] == '':
        self.config.subtitles_languages = []

    # add version checker job
    self.scheduler.add_job(
        self.version_updater.run,
        IntervalTrigger(hours=self.config.version_updater_freq),
        name=self.version_updater.name,
        id=self.version_updater.name)

    # add network timezones updater job
    self.scheduler.add_job(update_network_dict,
                           IntervalTrigger(days=1),
                           name="TZUPDATER",
                           id="TZUPDATER")

    # add show updater job
    self.scheduler.add_job(self.show_updater.run,
                           IntervalTrigger(
                               days=1,
                               start_date=datetime.datetime.now().replace(hour=self.config.showupdate_hour)),
                           name=self.show_updater.name,
                           id=self.show_updater.name)

    # add daily search job
    self.scheduler.add_job(
        self.daily_searcher.run,
        IntervalTrigger(minutes=self.config.daily_searcher_freq,
                        start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)),
        name=self.daily_searcher.name,
        id=self.daily_searcher.name)

    # add failed snatch search job
    self.scheduler.add_job(
        self.failed_snatch_searcher.run,
        IntervalTrigger(hours=1,
                        start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)),
        name=self.failed_snatch_searcher.name,
        id=self.failed_snatch_searcher.name)

    # add backlog search job
    self.scheduler.add_job(
        self.backlog_searcher.run,
        IntervalTrigger(minutes=self.config.backlog_searcher_freq,
                        start_date=datetime.datetime.now() + datetime.timedelta(minutes=30)),
        name=self.backlog_searcher.name,
        id=self.backlog_searcher.name)

    # add auto-postprocessing job
    self.scheduler.add_job(
        self.auto_postprocessor.run,
        IntervalTrigger(minutes=self.config.autopostprocessor_freq),
        name=self.auto_postprocessor.name,
        id=self.auto_postprocessor.name)

    # add find proper job
    self.scheduler.add_job(
        self.proper_searcher.run,
        IntervalTrigger(minutes={
            '15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60
        }[self.config.proper_searcher_interval]),
        name=self.proper_searcher.name,
        id=self.proper_searcher.name)

    # add trakt.tv checker job
    self.scheduler.add_job(self.trakt_searcher.run,
                           IntervalTrigger(hours=1),
                           name=self.trakt_searcher.name,
                           id=self.trakt_searcher.name)

    # add subtitles finder job
    self.scheduler.add_job(
        self.subtitle_searcher.run,
        IntervalTrigger(hours=self.config.subtitle_searcher_freq),
        name=self.subtitle_searcher.name,
        id=self.subtitle_searcher.name)

    # add upnp client job
    self.scheduler.add_job(
        self.upnp_client.run,
        IntervalTrigger(seconds=self.upnp_client._nat_portmap_lifetime),
        name=self.upnp_client.name,
        id=self.upnp_client.name)

    # start scheduler service
    self.scheduler.start()

    # start queues
    self.search_queue.start()
    self.show_queue.start()
    self.postprocessor_queue.start()

    # start webserver
    self.wserver.start()

    # start ioloop
    self.io_loop.start()

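# For orientation, a hedged sketch of exercising an oidc_client configured as
# in start() above. authorization_url()/authorization_code() are the
# authorization-code helpers exposed by python-keycloak-client; the redirect
# URI is an assumption about the local web UI.
from keycloak.realm import KeycloakRealm

realm = KeycloakRealm(server_url='https://auth.sickrage.ca', realm_name='sickrage')
oidc_client = realm.open_id_connect(client_id='sickrage-app',
                                    client_secret='...')  # as configured in start()

redirect_uri = 'http://localhost:8081/login'  # assumed local web UI address
login_url = oidc_client.authorization_url(redirect_uri=redirect_uri)
# Send the user's browser to login_url; Keycloak redirects back with ?code=...
# tokens = oidc_client.authorization_code(code, redirect_uri=redirect_uri)
# access_token = tokens['access_token']
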
def realm(self):
    if self._realm is None:
        self._realm = KeycloakRealm(server_url=self.server_url,
                                    realm_name=self.realm_name,
                                    headers={})
    return self._realm

def _get_admin_client(self):
    realm = KeycloakRealm(server_url=self.server_url, realm_name="master")
    token = self._get_token(realm)
    admin_client = realm.admin
    admin_client.set_token(token.get("access_token"))
    return admin_client

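# _get_token is not shown alongside _get_admin_client; a plausible sketch,
# assuming a confidential admin client and the library's client-credentials
# grant. The admin_client_id/admin_client_secret attributes are invented for
# illustration and are not part of the original snippet.
def _get_token(self, realm):
    oidc_client = realm.open_id_connect(
        client_id=self.admin_client_id,
        client_secret=self.admin_client_secret)
    # Returns a dict containing, among other fields, "access_token".
    return oidc_client.client_credentials()
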
def start(self):
    self.started = True
    self.io_loop = IOLoop.current()

    # thread name
    threading.currentThread().setName('CORE')

    # init core classes
    self.main_db = MainDB(self.db_type, self.db_prefix, self.db_host, self.db_port, self.db_username,
                          self.db_password)
    self.cache_db = CacheDB(self.db_type, self.db_prefix, self.db_host, self.db_port, self.db_username,
                            self.db_password)
    self.notifier_providers = NotifierProviders()
    self.metadata_providers = MetadataProviders()
    self.search_providers = SearchProviders()
    self.log = Logger()
    self.config = Config()
    self.alerts = Notifications()
    self.scheduler = TornadoScheduler({'apscheduler.timezone': 'UTC'})
    self.wserver = WebServer()
    self.name_cache = NameCache()
    self.show_queue = ShowQueue()
    self.search_queue = SearchQueue()
    self.postprocessor_queue = PostProcessorQueue()
    self.version_updater = VersionUpdater()
    self.show_updater = ShowUpdater()
    self.tz_updater = TimeZoneUpdater()
    self.rsscache_updater = RSSCacheUpdater()
    self.daily_searcher = DailySearcher()
    self.failed_snatch_searcher = FailedSnatchSearcher()
    self.backlog_searcher = BacklogSearcher()
    self.proper_searcher = ProperSearcher()
    self.trakt_searcher = TraktSearcher()
    self.subtitle_searcher = SubtitleSearcher()
    self.auto_postprocessor = AutoPostProcessor()
    self.upnp_client = UPNPClient()
    self.quicksearch_cache = QuicksearchCache()

    # setup oidc client
    realm = KeycloakRealm(server_url='https://auth.sickrage.ca', realm_name='sickrage')
    self.oidc_client = realm.open_id_connect(client_id=self.oidc_client_id,
                                             client_secret=self.oidc_client_secret)

    # Check if we need to perform a restore first
    if os.path.exists(os.path.abspath(os.path.join(self.data_dir, 'restore'))):
        success = restore_app_data(os.path.abspath(os.path.join(self.data_dir, 'restore')), self.data_dir)
        self.log.info("Restoring SiCKRAGE backup: %s!" % ("FAILED", "SUCCESSFUL")[success])
        if success:
            shutil.rmtree(os.path.abspath(os.path.join(self.data_dir, 'restore')), ignore_errors=True)

    # migrate old database file names to new ones
    if os.path.isfile(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db'))):
        if os.path.isfile(os.path.join(self.data_dir, 'sickrage.db')):
            helpers.move_file(os.path.join(self.data_dir, 'sickrage.db'),
                              os.path.join(self.data_dir, '{}.bak-{}'
                                           .format('sickrage.db',
                                                   datetime.datetime.now().strftime('%Y%m%d_%H%M%S'))))
        helpers.move_file(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db')),
                          os.path.abspath(os.path.join(self.data_dir, 'sickrage.db')))

    # init encryption public and private keys
    encryption.initialize()

    # load config
    self.config.load()

    # set language
    self.config.change_gui_lang(self.config.gui_lang)

    # set socket timeout
    socket.setdefaulttimeout(self.config.socket_timeout)

    # setup logger settings
    self.log.logSize = self.config.log_size
    self.log.logNr = self.config.log_nr
    self.log.logFile = os.path.join(self.data_dir, 'logs', 'sickrage.log')
    self.log.debugLogging = self.config.debug
    self.log.consoleLogging = not self.quiet

    # start logger
    self.log.start()

    # user agent
    if self.config.random_user_agent:
        self.user_agent = UserAgent().random

    uses_netloc.append('scgi')
    FancyURLopener.version = self.user_agent

    # set torrent client web url
    torrent_webui_url(True)

    # Check available space
    try:
        total_space, available_space = get_free_space(self.data_dir)
        if available_space < 100:
            self.log.warning('Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data '
                             'otherwise. Only %sMB left', available_space)
            return
    except Exception:
        self.log.error('Failed getting disk space: %s', traceback.format_exc())

    # perform database startup actions
    for db in [self.main_db, self.cache_db]:
        # perform integrity check
        db.integrity_check()

        # migrate database
        db.migrate()

        # sync database repo
        db.sync_db_repo()

        # cleanup
        db.cleanup()

    # load name cache
    self.name_cache.load()

    if self.config.default_page not in ('schedule', 'history', 'IRC'):
        self.config.default_page = 'home'

    # cleanup cache folder
    for folder in ['mako', 'sessions', 'indexers']:
        try:
            shutil.rmtree(os.path.join(sickrage.app.cache_dir, folder), ignore_errors=True)
        except Exception:
            continue

    if self.config.web_port < 21 or self.config.web_port > 65535:
        self.config.web_port = 8081

    if not self.config.web_cookie_secret:
        self.config.web_cookie_secret = generate_secret()

    # attempt to help prevent users from breaking links by using a bad url
    if not self.config.anon_redirect.endswith('?'):
        self.config.anon_redirect = ''

    if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', self.config.root_dirs):
        self.config.root_dirs = ''

    self.config.naming_force_folders = check_force_season_folders()

    if self.config.nzb_method not in ('blackhole', 'sabnzbd', 'nzbget'):
        self.config.nzb_method = 'blackhole'

    if self.config.torrent_method not in ('blackhole', 'utorrent', 'transmission', 'deluge', 'deluged',
                                          'download_station', 'rtorrent', 'qbittorrent', 'mlnet', 'putio'):
        self.config.torrent_method = 'blackhole'

    if self.config.autopostprocessor_freq < self.config.min_autopostprocessor_freq:
        self.config.autopostprocessor_freq = self.config.min_autopostprocessor_freq

    if self.config.daily_searcher_freq < self.config.min_daily_searcher_freq:
        self.config.daily_searcher_freq = self.config.min_daily_searcher_freq

    if self.config.backlog_searcher_freq < self.config.min_backlog_searcher_freq:
        self.config.backlog_searcher_freq = self.config.min_backlog_searcher_freq

    if self.config.version_updater_freq < self.config.min_version_updater_freq:
        self.config.version_updater_freq = self.config.min_version_updater_freq

    if self.config.subtitle_searcher_freq < self.config.min_subtitle_searcher_freq:
        self.config.subtitle_searcher_freq = self.config.min_subtitle_searcher_freq

    if self.config.failed_snatch_age < self.config.min_failed_snatch_age:
        self.config.failed_snatch_age = self.config.min_failed_snatch_age

    if self.config.proper_searcher_interval not in ('15m', '45m', '90m', '4h', 'daily'):
        self.config.proper_searcher_interval = 'daily'

    if self.config.showupdate_hour < 0 or self.config.showupdate_hour > 23:
        self.config.showupdate_hour = 0

    # add API token refresh job
    self.scheduler.add_job(
        API().refresh_token,
        IntervalTrigger(
            hours=1,
        ),
        name='SR-API',
        id='SR-API'
    )

    # add version checker job
    self.scheduler.add_job(
        self.version_updater.run,
        IntervalTrigger(
            hours=self.config.version_updater_freq,
        ),
        name=self.version_updater.name,
        id=self.version_updater.name
    )

    # add network timezones updater job
    self.scheduler.add_job(
        self.tz_updater.run,
        IntervalTrigger(
            days=1,
        ),
        name=self.tz_updater.name,
        id=self.tz_updater.name
    )

    # add show updater job
    self.scheduler.add_job(
        self.show_updater.run,
        IntervalTrigger(
            days=1,
            start_date=datetime.datetime.now().replace(hour=self.config.showupdate_hour)
        ),
        name=self.show_updater.name,
        id=self.show_updater.name
    )

    # add rss cache updater job
    self.scheduler.add_job(
        self.rsscache_updater.run,
        IntervalTrigger(
            minutes=15,
        ),
        name=self.rsscache_updater.name,
        id=self.rsscache_updater.name
    )

    # add daily search job
    self.scheduler.add_job(
        self.daily_searcher.run,
        IntervalTrigger(
            minutes=self.config.daily_searcher_freq,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)
        ),
        name=self.daily_searcher.name,
        id=self.daily_searcher.name
    )

    # add failed snatch search job
    self.scheduler.add_job(
        self.failed_snatch_searcher.run,
        IntervalTrigger(
            hours=1,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)
        ),
        name=self.failed_snatch_searcher.name,
        id=self.failed_snatch_searcher.name
    )

    # add backlog search job
    self.scheduler.add_job(
        self.backlog_searcher.run,
        IntervalTrigger(
            minutes=self.config.backlog_searcher_freq,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=30)
        ),
        name=self.backlog_searcher.name,
        id=self.backlog_searcher.name
    )

    # add auto-postprocessing job
    self.scheduler.add_job(
        self.auto_postprocessor.run,
        IntervalTrigger(
            minutes=self.config.autopostprocessor_freq
        ),
        name=self.auto_postprocessor.name,
        id=self.auto_postprocessor.name
    )

    # add find proper job
    self.scheduler.add_job(
        self.proper_searcher.run,
        IntervalTrigger(
            minutes={'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60}[
                self.config.proper_searcher_interval]
        ),
        name=self.proper_searcher.name,
        id=self.proper_searcher.name
    )

    # add trakt.tv checker job
    self.scheduler.add_job(
        self.trakt_searcher.run,
        IntervalTrigger(
            hours=1
        ),
        name=self.trakt_searcher.name,
        id=self.trakt_searcher.name
    )

    # add subtitles finder job
    self.scheduler.add_job(
        self.subtitle_searcher.run,
        IntervalTrigger(
            hours=self.config.subtitle_searcher_freq
        ),
        name=self.subtitle_searcher.name,
        id=self.subtitle_searcher.name
    )

    # add upnp client job
    self.scheduler.add_job(
        self.upnp_client.run,
        IntervalTrigger(
            seconds=self.upnp_client._nat_portmap_lifetime
        ),
        name=self.upnp_client.name,
        id=self.upnp_client.name
    )

    # add namecache update job
    self.scheduler.add_job(
        self.name_cache.build_all,
        IntervalTrigger(
            days=1,
        ),
        name=self.name_cache.name,
        id=self.name_cache.name
    )

    # start scheduler service
    self.scheduler.start()

    # start queues
    self.io_loop.add_callback(self.search_queue.watch)
    self.io_loop.add_callback(self.show_queue.watch)
    self.io_loop.add_callback(self.postprocessor_queue.watch)

    # fire off startup events
    self.io_loop.run_in_executor(None, self.quicksearch_cache.run)
    self.io_loop.run_in_executor(None, self.name_cache.run)
    self.io_loop.run_in_executor(None, self.version_updater.run)
    self.io_loop.run_in_executor(None, self.tz_updater.run)

    # start web server
    self.wserver.start()

    # launch browser window
    if all([not sickrage.app.no_launch, sickrage.app.config.launch_browser]):
        self.io_loop.run_in_executor(None, functools.partial(launch_browser,
                                                             ('http', 'https')[sickrage.app.config.enable_https],
                                                             sickrage.app.config.web_host,
                                                             sickrage.app.config.web_port))

    def started():
        self.log.info("SiCKRAGE :: STARTED")
        self.log.info("SiCKRAGE :: APP VERSION:[{}]".format(sickrage.version()))
        self.log.info("SiCKRAGE :: CONFIG VERSION:[v{}]".format(self.config.config_version))
        self.log.info("SiCKRAGE :: DATABASE VERSION:[v{}]".format(self.main_db.version))
        self.log.info("SiCKRAGE :: DATABASE TYPE:[{}]".format(self.db_type))
        self.log.info("SiCKRAGE :: URL:[{}://{}:{}{}]".format(('http', 'https')[self.config.enable_https],
                                                              self.config.web_host, self.config.web_port,
                                                              self.config.web_root))

    # start io_loop
    self.io_loop.add_callback(started)
    self.io_loop.start()

from keycloak.realm import KeycloakRealm

realm = KeycloakRealm(server_url='http://localhost:8080', realm_name='test')
print(realm)

oidc_client = realm.open_id_connect(client_id='admin', client_secret='admin')
print(oidc_client)

# Drop into an interactive debugger to poke at the realm/client objects.
import pdb
pdb.set_trace()

class Sso:

    def __init__(self, server_url, realm_name, client_id, client_secret, **kwargs):
        self._realm = KeycloakRealm(
            server_url,
            realm_name,
        )
        self._open_id_connect = self._realm.open_id_connect(
            client_id,
            client_secret,
        )
        self._username = kwargs.get('username', None)
        self._password = kwargs.get('password', None)
        self._audience = kwargs.get('audience', None)
        self._access_token = None
        self._access_token_expire_datetime = None
        self._refresh_token = None
        self._jwt_token = None
        self._jwt_token_expire_datetime = None

    def __init_access_token(self):
        logger.info('Init the access token')
        payload_access_token = self._open_id_connect.client_credentials()
        self._access_token = payload_access_token['access_token']
        self._refresh_token = payload_access_token['refresh_token']
        self._access_token_expire_datetime = datetime.now() + timedelta(
            seconds=payload_access_token['expires_in'])

    def __refresh_access_token(self):
        logger.info('Refresh the access token')
        payload_refresh_access_token = self._open_id_connect.refresh_token(
            refresh_token=self._refresh_token)
        self._access_token = payload_refresh_access_token['access_token']
        self._refresh_token = payload_refresh_access_token['refresh_token']
        self._access_token_expire_datetime = datetime.now() + timedelta(
            seconds=payload_refresh_access_token['expires_in'])

    def __refresh_jwt_token(self):
        logger.info('Refresh the jwt token')
        payload_jwt_token = None
        if self._username is not None:
            payload_jwt_token = self._open_id_connect.password_credentials(
                username=self._username,
                password=self._password,
            )
        if payload_jwt_token is None:
            payload_jwt_token = self._open_id_connect.token_exchange(
                subject_token=self._access_token,
                audience=self._audience,
            )
        self._jwt_token = payload_jwt_token['access_token']
        self._jwt_token_expire_datetime = datetime.now() + timedelta(
            seconds=payload_jwt_token['expires_in'])

    def jwt_token(self):
        """
        Refresh the jwt token if necessary and return it.
        """
        if self._access_token is None:
            self.__init_access_token()
        if self._access_token is not None and self._access_token_expire_datetime < datetime.now():
            self.__refresh_access_token()
        if self._jwt_token is None:
            self.__refresh_jwt_token()
        if self._jwt_token is not None and self._jwt_token_expire_datetime < datetime.now():
            self.__refresh_jwt_token()
        return self._jwt_token

    def logout(self):
        """
        Invalidate the jwt token and log out the user.
        """
        if self._refresh_token:
            logger.info('Logout the authenticated user')
            self._open_id_connect.logout(self._refresh_token)

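# A short usage sketch of the Sso helper above; every value is a placeholder.
sso = Sso(
    'https://sso.example.com',
    'example-realm',
    'my-client',
    'my-client-secret',
    username='service-user',     # optional: switches to the password grant
    password='service-password',
    audience='target-client',    # used for token exchange when no username is set
)

token = sso.jwt_token()  # fetched lazily, refreshed once expired
# ...call downstream APIs with "Authorization: Bearer <token>"...
sso.logout()
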
class KeycloakRealmTestCase(TestCase):

    def setUp(self):
        self.realm = KeycloakRealm('https://example.com', 'some-realm',
                                   headers={'some': 'header'})

    def test_instance(self):
        """
        Case: Realm is instantiated
        Expected: Name and server_url are exposed
        """
        self.assertEqual(self.realm.realm_name, 'some-realm')
        self.assertEqual(self.realm.server_url, 'https://example.com')

    @mock.patch('keycloak.realm.KeycloakClient', autospec=True)
    def test_client(self, mocked_client):
        """
        Case: Client gets requested
        Expected: Client gets returned, and the second request returns the
            same client
        """
        client = self.realm.client

        self.assertIsInstance(client, KeycloakClient)
        self.assertEqual(client, self.realm.client)
        mocked_client.assert_called_once_with(server_url='https://example.com',
                                              headers={'some': 'header'})

    @mock.patch('keycloak.realm.KeycloakOpenidConnect', autospec=True)
    def test_openid_connect(self, mocked_openid_client):
        """
        Case: OpenID client gets requested
        Expected: OpenID client gets returned
        """
        openid_client = self.realm.open_id_connect(
            client_id='client-id',
            client_secret='client-secret')

        self.assertIsInstance(openid_client, KeycloakOpenidConnect)
        self.assertEqual(openid_client, mocked_openid_client.return_value)
        mocked_openid_client.assert_called_once_with(
            realm=self.realm,
            client_id='client-id',
            client_secret='client-secret')

    @mock.patch('keycloak.realm.KeycloakAdmin', autospec=True)
    def test_admin(self, mocked_admin_client):
        """
        Case: Admin client gets requested
        Expected: Admin client gets returned
        """
        admin_client = self.realm.admin

        self.assertIsInstance(admin_client, KeycloakAdmin)
        mocked_admin_client.assert_called_once_with(realm=self.realm)

    @mock.patch('keycloak.realm.KeycloakAuthz', autospec=True)
    def test_authz(self, mocked_authz_client):
        """
        Case: Authz client gets requested
        Expected: Authz client gets returned
        """
        authz_client = self.realm.authz(client_id='client-id')

        self.assertIsInstance(authz_client, KeycloakAuthz)
        mocked_authz_client.assert_called_once_with(realm=self.realm,
                                                    client_id='client-id')

import sys
import re
import json

from keycloak.realm import KeycloakRealm
from keycloak.well_known import KeycloakWellKnown
# from keycloak.keycloak_openid import KeycloakOpenID

client_id = 'python-ckan-datagate'
secret = '...secret...'

# token = json.loads("'''" + sys.argv[1] + "'''")
access_token = sys.argv[1].split("access_token':")[1].split("'")[1]
refresh_token = sys.argv[1].split("refresh_token':")[1].split("'")[1]

realm = KeycloakRealm(server_url='https://www.snap4city.org', realm_name='master')
oidc_client = realm.open_id_connect(client_id=client_id, client_secret=secret)

userinfo = {}
try:
    userinfo = oidc_client.userinfo(access_token)
except Exception as e:
    # The access token may be expired; refresh it and retry once.
    new_token = oidc_client.refresh_token(refresh_token)
    userinfo = oidc_client.userinfo(new_token['access_token'])

print(userinfo)

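# The argv parsing above scrapes tokens out of a stringified Python dict by
# splitting on quotes. A sturdier sketch, assuming argv[1] really is a repr'd
# dict with access_token/refresh_token keys:
import ast
import sys

token = ast.literal_eval(sys.argv[1])
access_token = token['access_token']
refresh_token = token['refresh_token']
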
def new_realm(self):
    return KeycloakRealm(
        server_url=self.server_url,
        realm_name=self.realm_name,
        headers={}
    )

def run_config(config):
    config = ConfigLoader(config)
    CustomDownloaderMiddleware.driver = config.driver
    DocumentationSpider.NB_INDEXED = 0

    strategy = DefaultStrategy(config)

    meilisearch_helper = MeiliSearchHelper(
        config.app_id,
        config.api_key,
        config.index_uid,
        config.custom_settings
    )

    root_module = 'src.' if __name__ == '__main__' else 'scraper.src.'
    DOWNLOADER_MIDDLEWARES_PATH = root_module + 'custom_downloader_middleware.' + CustomDownloaderMiddleware.__name__
    DUPEFILTER_CLASS_PATH = root_module + 'custom_dupefilter.' + CustomDupeFilter.__name__

    # Defaults for scrapy https://docs.scrapy.org/en/latest/topics/settings.html#default-request-headers
    headers = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "en",
    }

    if os.getenv("CF_ACCESS_CLIENT_ID") and os.getenv("CF_ACCESS_CLIENT_SECRET"):
        headers.update({
            "CF-Access-Client-Id": os.getenv("CF_ACCESS_CLIENT_ID"),
            "CF-Access-Client-Secret": os.getenv("CF_ACCESS_CLIENT_SECRET"),
        })
    elif os.getenv("IAP_AUTH_CLIENT_ID") and os.getenv("IAP_AUTH_SERVICE_ACCOUNT_JSON"):
        iap_token = IAPAuth(
            client_id=os.getenv("IAP_AUTH_CLIENT_ID"),
            service_account_secret_dict=json.loads(os.getenv("IAP_AUTH_SERVICE_ACCOUNT_JSON")),
        )(requests.Request()).headers["Authorization"]
        headers.update({"Authorization": iap_token})
    elif os.getenv("KC_URL") and os.getenv("KC_REALM") and os.getenv("KC_CLIENT_ID") and os.getenv("KC_CLIENT_SECRET"):
        realm = KeycloakRealm(server_url=os.getenv("KC_URL"),
                              realm_name=os.getenv("KC_REALM"))
        oidc_client = realm.open_id_connect(
            client_id=os.getenv("KC_CLIENT_ID"),
            client_secret=os.getenv("KC_CLIENT_SECRET"))
        token_response = oidc_client.client_credentials()
        token = token_response["access_token"]
        headers.update({"Authorization": 'bearer ' + token})

    DEFAULT_REQUEST_HEADERS = headers

    process = CrawlerProcess({
        'LOG_ENABLED': '1',
        'LOG_LEVEL': 'ERROR',
        'USER_AGENT': config.user_agent,
        'DOWNLOADER_MIDDLEWARES': {DOWNLOADER_MIDDLEWARES_PATH: 900},  # Need to be > 600 to be after the redirectMiddleware
        'DUPEFILTER_USE_ANCHORS': config.use_anchors,
        # Use our custom dupefilter in order to be scheme agnostic regarding link provided
        'DUPEFILTER_CLASS': DUPEFILTER_CLASS_PATH,
        'DEFAULT_REQUEST_HEADERS': DEFAULT_REQUEST_HEADERS,
    })

    process.crawl(DocumentationSpider,
                  config=config,
                  meilisearch_helper=meilisearch_helper,
                  strategy=strategy)

    process.start()
    process.stop()

    # Kill browser if needed
    BrowserHandler.destroy(config.driver)

    if len(config.extra_records) > 0:
        meilisearch_helper.add_records(config.extra_records, "Extra records", False)

    print("")

    if DocumentationSpider.NB_INDEXED > 0:
        # meilisearch_helper.commit_tmp_index()
        print('Nb hits: {}'.format(DocumentationSpider.NB_INDEXED))
    else:
        print('Crawling issue: nbHits 0 for ' + config.index_uid)
        # meilisearch_helper.report_crawling_issue()
        sys.exit(EXIT_CODE_NO_RECORD)

    print("")

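# For the KC_* branch above, a sketch of the environment the crawler might be
# launched with; every value is a placeholder. With these set, each crawled
# request carries a bearer token obtained via the client-credentials grant.
import os

os.environ.update({
    "KC_URL": "https://sso.example.com",
    "KC_REALM": "docs",
    "KC_CLIENT_ID": "docs-scraper",
    "KC_CLIENT_SECRET": "placeholder-secret",
})
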
def start(self):
    self.started = True
    self.io_loop = IOLoop.current()

    # thread name
    threading.currentThread().setName('CORE')

    # patch modules with encoding kludge
    patch_modules()

    # init core classes
    self.notifier_providers = NotifierProviders()
    self.metadata_providers = MetadataProviders()
    self.search_providers = SearchProviders()
    self.log = Logger()
    self.config = Config()
    self.alerts = Notifications()
    self.main_db = MainDB()
    self.cache_db = CacheDB()
    self.scheduler = TornadoScheduler()
    self.wserver = WebServer()
    self.name_cache = NameCache()
    self.show_queue = ShowQueue()
    self.search_queue = SearchQueue()
    self.postprocessor_queue = PostProcessorQueue()
    self.event_queue = EventQueue()
    self.version_updater = VersionUpdater()
    self.show_updater = ShowUpdater()
    self.tz_updater = TimeZoneUpdater()
    self.rsscache_updater = RSSCacheUpdater()
    self.daily_searcher = DailySearcher()
    self.failed_snatch_searcher = FailedSnatchSearcher()
    self.backlog_searcher = BacklogSearcher()
    self.proper_searcher = ProperSearcher()
    self.trakt_searcher = TraktSearcher()
    self.subtitle_searcher = SubtitleSearcher()
    self.auto_postprocessor = AutoPostProcessor()
    self.upnp_client = UPNPClient()
    self.quicksearch_cache = QuicksearchCache()

    # setup oidc client
    realm = KeycloakRealm(server_url='https://auth.sickrage.ca', realm_name='sickrage')
    self.oidc_client = realm.open_id_connect(client_id='sickrage-app',
                                             client_secret='5d4710b2-ca70-4d39-b5a3-0705e2c5e703')

    # Check if we need to perform a restore first
    if os.path.exists(os.path.abspath(os.path.join(self.data_dir, 'restore'))):
        success = restoreSR(os.path.abspath(os.path.join(self.data_dir, 'restore')), self.data_dir)
        self.log.info("Restoring SiCKRAGE backup: {}!".format(("FAILED", "SUCCESSFUL")[success]))
        if success:
            shutil.rmtree(os.path.abspath(os.path.join(self.data_dir, 'restore')), ignore_errors=True)

    # migrate old database file names to new ones
    if os.path.isfile(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db'))):
        if os.path.isfile(os.path.join(self.data_dir, 'sickrage.db')):
            helpers.move_file(os.path.join(self.data_dir, 'sickrage.db'),
                              os.path.join(self.data_dir, '{}.bak-{}'
                                           .format('sickrage.db',
                                                   datetime.datetime.now().strftime('%Y%m%d_%H%M%S'))))
        helpers.move_file(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db')),
                          os.path.abspath(os.path.join(self.data_dir, 'sickrage.db')))

    # load config
    self.config.load()

    # set language
    self.config.change_gui_lang(self.config.gui_lang)

    # set socket timeout
    socket.setdefaulttimeout(self.config.socket_timeout)

    # setup logger settings
    self.log.logSize = self.config.log_size
    self.log.logNr = self.config.log_nr
    self.log.logFile = os.path.join(self.data_dir, 'logs', 'sickrage.log')
    self.log.debugLogging = self.config.debug
    self.log.consoleLogging = not self.quiet

    # start logger
    self.log.start()

    # user agent
    if self.config.random_user_agent:
        self.user_agent = UserAgent().random

    urlparse.uses_netloc.append('scgi')
    urllib.FancyURLopener.version = self.user_agent

    # set torrent client web url
    torrent_webui_url(True)

    # Check available space
    try:
        total_space, available_space = getFreeSpace(self.data_dir)
        if available_space < 100:
            self.log.error('Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data '
                           'otherwise. Only %sMB left', available_space)
            return
    except Exception:
        self.log.error('Failed getting disk space: %s', traceback.format_exc())

    # perform database startup actions
    for db in [self.main_db, self.cache_db]:
        # initialize database
        db.initialize()

        # check integrity of database
        db.check_integrity()

        # migrate database
        db.migrate()

        # misc database cleanups
        db.cleanup()

        # upgrade database
        db.upgrade()

    # compact main database
    if self.config.last_db_compact < time.time() - 604800:  # 7 days
        self.main_db.compact()
        self.config.last_db_compact = int(time.time())

    # load name cache
    self.name_cache.load()

    # load data for shows from database
    self.load_shows()

    if self.config.default_page not in ('schedule', 'history', 'IRC'):
        self.config.default_page = 'home'

    # cleanup cache folder
    for folder in ['mako', 'sessions', 'indexers']:
        try:
            shutil.rmtree(os.path.join(sickrage.app.cache_dir, folder), ignore_errors=True)
        except Exception:
            continue

    if self.config.web_port < 21 or self.config.web_port > 65535:
        self.config.web_port = 8081

    if not self.config.web_cookie_secret:
        self.config.web_cookie_secret = generate_secret()

    # attempt to help prevent users from breaking links by using a bad url
    if not self.config.anon_redirect.endswith('?'):
        self.config.anon_redirect = ''

    if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', self.config.root_dirs):
        self.config.root_dirs = ''

    self.config.naming_force_folders = check_force_season_folders()

    if self.config.nzb_method not in ('blackhole', 'sabnzbd', 'nzbget'):
        self.config.nzb_method = 'blackhole'

    if self.config.torrent_method not in ('blackhole', 'utorrent', 'transmission', 'deluge', 'deluged',
                                          'download_station', 'rtorrent', 'qbittorrent', 'mlnet', 'putio'):
        self.config.torrent_method = 'blackhole'

    if self.config.autopostprocessor_freq < self.config.min_autopostprocessor_freq:
        self.config.autopostprocessor_freq = self.config.min_autopostprocessor_freq

    if self.config.daily_searcher_freq < self.config.min_daily_searcher_freq:
        self.config.daily_searcher_freq = self.config.min_daily_searcher_freq

    if self.config.backlog_searcher_freq < self.config.min_backlog_searcher_freq:
        self.config.backlog_searcher_freq = self.config.min_backlog_searcher_freq

    if self.config.version_updater_freq < self.config.min_version_updater_freq:
        self.config.version_updater_freq = self.config.min_version_updater_freq

    if self.config.subtitle_searcher_freq < self.config.min_subtitle_searcher_freq:
        self.config.subtitle_searcher_freq = self.config.min_subtitle_searcher_freq

    if self.config.failed_snatch_age < self.config.min_failed_snatch_age:
        self.config.failed_snatch_age = self.config.min_failed_snatch_age

    if self.config.proper_searcher_interval not in ('15m', '45m', '90m', '4h', 'daily'):
        self.config.proper_searcher_interval = 'daily'

    if self.config.showupdate_hour < 0 or self.config.showupdate_hour > 23:
        self.config.showupdate_hour = 0

    # add version checker job
    self.scheduler.add_job(
        self.version_updater.run,
        IntervalTrigger(
            hours=self.config.version_updater_freq,
        ),
        name=self.version_updater.name,
        id=self.version_updater.name
    )

    # add network timezones updater job
    self.scheduler.add_job(
        self.tz_updater.run,
        IntervalTrigger(
            days=1,
        ),
        name=self.tz_updater.name,
        id=self.tz_updater.name
    )

    # add show updater job
    self.scheduler.add_job(
        self.show_updater.run,
        IntervalTrigger(
            days=1,
            start_date=datetime.datetime.now().replace(hour=self.config.showupdate_hour)
        ),
        name=self.show_updater.name,
        id=self.show_updater.name
    )

    # add rss cache updater job
    self.scheduler.add_job(
        self.rsscache_updater.run,
        IntervalTrigger(
            minutes=15,
        ),
        name=self.rsscache_updater.name,
        id=self.rsscache_updater.name
    )

    # add daily search job
    self.scheduler.add_job(
        self.daily_searcher.run,
        IntervalTrigger(
            minutes=self.config.daily_searcher_freq,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)
        ),
        name=self.daily_searcher.name,
        id=self.daily_searcher.name
    )

    # add failed snatch search job
    self.scheduler.add_job(
        self.failed_snatch_searcher.run,
        IntervalTrigger(
            hours=1,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)
        ),
        name=self.failed_snatch_searcher.name,
        id=self.failed_snatch_searcher.name
    )

    # add backlog search job
    self.scheduler.add_job(
        self.backlog_searcher.run,
        IntervalTrigger(
            minutes=self.config.backlog_searcher_freq,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=30)
        ),
        name=self.backlog_searcher.name,
        id=self.backlog_searcher.name
    )

    # add auto-postprocessing job
    self.scheduler.add_job(
        self.auto_postprocessor.run,
        IntervalTrigger(
            minutes=self.config.autopostprocessor_freq
        ),
        name=self.auto_postprocessor.name,
        id=self.auto_postprocessor.name
    )

    # add find proper job
    self.scheduler.add_job(
        self.proper_searcher.run,
        IntervalTrigger(
            minutes={'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60}[
                self.config.proper_searcher_interval]
        ),
        name=self.proper_searcher.name,
        id=self.proper_searcher.name
    )

    # add trakt.tv checker job
    self.scheduler.add_job(
        self.trakt_searcher.run,
        IntervalTrigger(
            hours=1
        ),
        name=self.trakt_searcher.name,
        id=self.trakt_searcher.name
    )

    # add subtitles finder job
    self.scheduler.add_job(
        self.subtitle_searcher.run,
        IntervalTrigger(
            hours=self.config.subtitle_searcher_freq
        ),
        name=self.subtitle_searcher.name,
        id=self.subtitle_searcher.name
    )

    # add upnp client job
    self.scheduler.add_job(
        self.upnp_client.run,
        IntervalTrigger(
            seconds=self.upnp_client._nat_portmap_lifetime
        ),
        name=self.upnp_client.name,
        id=self.upnp_client.name
    )

    # add namecache update job
    self.scheduler.add_job(
        self.name_cache.build_all,
        IntervalTrigger(
            days=1,
        ),
        name=self.name_cache.name,
        id=self.name_cache.name
    )

    # start scheduler service
    self.scheduler.start()

    # start queues
    self.search_queue.start()
    self.show_queue.start()
    self.postprocessor_queue.start()
    self.event_queue.start()

    # fire off startup events
    self.event_queue.fire_event(self.name_cache.build_all)
    self.event_queue.fire_event(self.version_updater.run)
    self.event_queue.fire_event(self.tz_updater.run)

    # start webserver
    self.wserver.start()

    # launch browser window
    if all([not sickrage.app.no_launch, sickrage.app.config.launch_browser]):
        self.event_queue.fire_event(lambda: launch_browser(('http', 'https')[sickrage.app.config.enable_https],
                                                           sickrage.app.config.web_host,
                                                           sickrage.app.config.web_port))

    # start ioloop
    self.io_loop.start()