def test_from_environment_dict_required(monkeypatch):
    """Raise an error where we require the environment to provide a value."""
    EXPECTED_CONFIG = {
        'HOME': None,
        'LANGUAGE': 'en_US'
    }
    monkeypatch.delenv("HOME", raising=False)
    monkeypatch.setenv("LANGUAGE", "ja_JP")
    with pytest.raises(OSError):
        from_environment(EXPECTED_CONFIG)

def log_configuration() -> None:
    """Write the daemon configuration to the log."""
    # log the way this component has been configured
    config = from_environment(EXPECTED_CONFIG)
    LOG.info("Zoom Meeting List is configured:")
    for name in config:
        LOG.info(f"{name} = {config[name]}")

def delete_service_role(client_id, token=None):
    cfg = from_environment({
        'KEYCLOAK_URL': None,
    })
    url = f'{cfg["KEYCLOAK_URL"]}/auth/admin/realms/master/clients'
    r = requests.get(url, headers={'Authorization': f'bearer {token}'})
    r.raise_for_status()
    clients = r.json()

    # get actual system id
    system_id = None
    for c in clients:
        if c['clientId'] == client_id:
            system_id = c['id']
            break

    if not system_id:
        print(f'client "{client_id}" does not exist')
    else:
        print(f'deleting client "{client_id}"')
        url = f'{cfg["KEYCLOAK_URL"]}/auth/admin/realms/master/clients/{system_id}'
        r = requests.delete(url, headers={'Authorization': f'bearer {token}'})
        try:
            r.raise_for_status()
        except Exception:
            print(r.text)
            raise

async def clear_catalog():
    # configure a RestClient from the environment
    config = from_environment(EXPECTED_CONFIG)
    rc = RestClient(config["FILE_CATALOG_REST_URL"], token=config["FILE_CATALOG_REST_TOKEN"])
    # while there are still files
    clearing = True
    while clearing:
        try:
            # get a list of up to 50 files
            response = await rc.request("GET", "/api/files?start=0&limit=50")
            files = response["files"]
            # for each file that we found
            for x in files:
                # remove it from the file catalog
                uuid = x["uuid"]
                logical_name = x["logical_name"]
                print(f"DELETE /api/files/{uuid} - {logical_name}")
                response2 = await rc.request("DELETE", f"/api/files/{uuid}")
            # if we didn't get any files back, we're done
            if len(files) < 1:
                clearing = False
        except Exception as e:
            # whoopsy daisy...
            clearing = False
            print(e)

def main() -> None:
    """Configure a monitoring component from the environment and set it running."""
    config = from_environment(EXPECTED_CONFIG)

    # configure structured logging for the application
    structured_formatter = StructuredFormatter(component_type='Monitoring', ndjson=True)
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(structured_formatter)
    root_logger = logging.getLogger(None)
    root_logger.setLevel(logging.NOTSET)
    root_logger.addHandler(stream_handler)
    logger = logging.getLogger("lta.monitoring")

    monitors = []
    loop = asyncio.get_event_loop()
    for name in MONITOR_NAMES:
        if check_bool(cast(str, config['ENABLE_' + name])):
            logger.info(f"Setting up monitor {name}")
            kwargs = {
                n.split('_', 1)[-1].lower(): config[n]
                for n in config if n.startswith(name)
            }
            kwargs.update({
                'lta_rest_url': config['LTA_REST_URL'],
                'lta_rest_token': config['LTA_REST_TOKEN'],
            })
            m = MONITOR_NAMES[name](**kwargs)  # type: ignore[arg-type]
            monitors.append(m)
            loop.create_task(m.run())

    logger.info("Starting asyncio loop")
    loop.run_forever()

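# Sketch only: main() above expects a module-level MONITOR_NAMES mapping an
# environment-variable prefix (e.g. ENABLE_EXAMPLE) to the monitor class it
# should instantiate. The stub class and mapping below are hypothetical and
# stand in for whatever Monitor implementations the real module defines.
class ExampleMonitor:
    def __init__(self, **kwargs):
        # keyword arguments are the prefix-stripped, lower-cased config keys
        self.kwargs = kwargs

    async def run(self):
        pass


MONITOR_NAMES = {
    'EXAMPLE': ExampleMonitor,
}
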
async def clear_lta_transfer_requests():
    # configure a RestClient from the environment
    config = from_environment(EXPECTED_CONFIG)
    rc = RestClient(config["LTA_REST_URL"], token=config["LTA_REST_TOKEN"])
    # while there are still transfer requests
    clearing = True
    while clearing:
        try:
            # get a list of up to 50 transfer requests
            # technically a lie; the LTA DB honors neither start nor limit
            response = await rc.request("GET", "/TransferRequests?start=0&limit=50")
            results = response["results"]
            # for each transfer request that we found
            for x in results:
                # remove it from the LTA DB
                uuid = x["uuid"]
                print(f"DELETE /TransferRequests/{uuid}")
                response2 = await rc.request("DELETE", f"/TransferRequests/{uuid}")
            # if we didn't get any transfer requests back, we're done
            if len(results) < 1:
                clearing = False
        except Exception as e:
            # whoopsy daisy...
            clearing = False
            print(e)

def test_from_environment_list(monkeypatch):
    """Return a dictionary with a list of environment variables."""
    monkeypatch.setenv("HOME", "/home/tux")
    monkeypatch.setenv("LANGUAGE", "ja_JP")
    obj = from_environment(["HOME", "LANGUAGE"])
    assert len(obj.keys()) == 2
    assert obj["HOME"] == "/home/tux"
    assert obj["LANGUAGE"] == "ja_JP"

def main():
    config = from_environment(default_config)
    logformat = '%(asctime)s %(levelname)s %(name)s %(module)s:%(lineno)s - %(message)s'
    logging.basicConfig(format=logformat, level=setlevel[config['LOG_LEVEL'].upper()])

    # start server
    create_server(config)
    asyncio.get_event_loop().run_forever()

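# Illustrative sketch of the module-level names main() above relies on; the
# real module defines its own default_config and setlevel, so the values here
# are assumptions, not the actual defaults.
import logging

default_config = {
    'LOG_LEVEL': 'INFO',  # assumed default; override via the LOG_LEVEL env var
}

setlevel = {
    'CRITICAL': logging.CRITICAL,
    'ERROR': logging.ERROR,
    'WARNING': logging.WARNING,
    'INFO': logging.INFO,
    'DEBUG': logging.DEBUG,
}
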
async def test_create_indexes(mongo_client):
    default_config = {
        'DB_URL': None,
    }
    config = from_environment(default_config)
    db_url, db_name = config['DB_URL'].rsplit('/', 1)
    pubs.utils.create_indexes(db_url, db_name, background=False)
    indexes = await mongo_client.publications.index_information()
    assert 'text_index' in indexes

def configure_logging() -> None:
    """Configure the logging object according to the supplied configuration."""
    # figure out how we want to configure the logging for the service daemon
    config = from_environment(EXPECTED_CONFIG)
    format = config["LOGGING_FORMAT"]
    level = config["LOGGING_LEVEL"]
    # configure the logging appropriately
    logging.basicConfig(format=format)
    LOG = logging.getLogger(__name__)
    LOG.setLevel(level)

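# Hypothetical sketch of the EXPECTED_CONFIG that configure_logging() reads.
# The key names match the lookups above; the default values are assumptions
# and would normally be overridden by LOGGING_FORMAT / LOGGING_LEVEL env vars.
EXPECTED_CONFIG = {
    "LOGGING_FORMAT": "%(asctime)s %(levelname)s %(name)s - %(message)s",
    "LOGGING_LEVEL": "INFO",
}
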
def start(debug: bool = False) -> RestServer:
    """Start a Mad Dash REST service."""
    config = from_environment(EXPECTED_CONFIG)
    for name in config:
        logging.info(f"{name} = {config[name]}")

    args = RestHandlerSetup(
        {
            "auth": {
                "secret": config["MAD_DASH_AUTH_SECRET"],
                "issuer": config["MAD_DASH_AUTH_ISSUER"],
                "algorithm": config["MAD_DASH_AUTH_ALGORITHM"],
            },
            "debug": debug,
        }
    )

    # configure access to MongoDB as a backing store
    mongo_user = quote_plus(config["MAD_DASH_MONGODB_AUTH_USER"])
    mongo_pass = quote_plus(config["MAD_DASH_MONGODB_AUTH_PASS"])
    mongo_host = config["MAD_DASH_MONGODB_HOST"]
    mongo_port = int(config["MAD_DASH_MONGODB_PORT"])
    mongodb_url = f"mongodb://{mongo_host}:{mongo_port}"
    if mongo_user and mongo_pass:
        mongodb_url = f"mongodb://{mongo_user}:{mongo_pass}@{mongo_host}:{mongo_port}"

    # ensure indexes
    md_mc = MadDashMotorClient(MotorClient(mongodb_url))
    asyncio.get_event_loop().run_until_complete(md_mc.ensure_all_databases_indexes())
    args["motor_client"] = MotorClient(mongodb_url)

    # configure REST routes
    server = RestServer(debug=debug)
    server.add_route(r"/$", MainHandler, args)
    server.add_route(
        r"/databases/names$", DatabasesNamesHandler, args
    )  # get database names
    server.add_route(
        r"/collections/names$", CollectionsNamesHandler, args
    )  # get collection names
    server.add_route(
        r"/collections/histograms/names$", CollectionsHistogramsNamesHandler, args
    )  # get all histogram names in collection
    server.add_route(
        r"/collections/histograms$", CollectionsHistogramsHandler, args
    )  # get all histogram objects in collection
    server.add_route(r"/histogram$", HistogramHandler, args)  # get histogram object
    server.add_route(r"/files/names$", FileNamesHandler, args)  # get file names

    server.startup(
        address=config["MAD_DASH_REST_HOST"], port=int(config["MAD_DASH_REST_PORT"])
    )
    return server

def __init__(self):
    self.config = from_environment({
        'LDAP_URL': None,
        'LDAP_ADMIN_USER': '******',
        'LDAP_ADMIN_PASSWORD': '******',
        'LDAP_USER_BASE': 'ou=people,dc=icecube,dc=wisc,dc=edu',
    })

def test_from_environment_dict(monkeypatch):
    """Return a dictionary where we override one default but leave the other."""
    EXPECTED_CONFIG = {
        'HOME': '/home/tux',
        'LANGUAGE': 'en_US'
    }
    monkeypatch.delenv("HOME", raising=False)
    monkeypatch.setenv("LANGUAGE", "ja_JP")
    obj = from_environment(EXPECTED_CONFIG)
    assert len(obj.keys()) == 2
    assert obj["HOME"] == "/home/tux"
    assert obj["LANGUAGE"] == "ja_JP"

async def sync_zoom_to_google() -> None:
    """Synchronize upcoming Zoom Events to our Google Calendar."""
    # configure ourselves and figure out what we want to do
    config = from_environment(EXPECTED_CONFIG)
    calendarId = config["CALENDAR_ID"]
    max_days = int(config["MAX_DAYS"])
    max_page_size = int(config["MAX_PAGE_SIZE"])
    token = config["ZOOM_TOKEN"]
    LOG.info(f"Synchronizing {max_days} days worth of Zoom Meetings to Google Calendar")

    # query Zoom and get the current list of all meetings
    meetings = await get_all_upcoming_zoom_meetings(token, max_page_size)
    meetings = filter_and_sort_zoom_meetings(meetings, max_days)
    LOG.info(f"Found {len(meetings)} Zoom Meetings to synchronize")
    # print_zoom_meetings_as_json(meetings)

    # query Google and get the current list of all calendar events
    service = get_google_calendar_service()
    events = await get_all_google_events(service, calendarId, max_page_size * 3)
    LOG.info(f"Found {len(events)} Google Calendar Events to synchronize")

    # for each meeting
    for meeting in meetings:
        # try to find the google calendar event that corresponds
        event = get_corresponding_event(events, meeting)
        # if we didn't find one, create one
        if not event:
            new_event = as_calendar_event(meeting)
            new_event = service.events().insert(calendarId=calendarId,
                                                conferenceDataVersion=0,
                                                sendUpdates="none",
                                                body=new_event).execute()
            LOG.info(f"Created new Google Calendar Event: {new_event.get('htmlLink')}")
        # otherwise, remove it from the event list
        else:
            events.remove(event)

    # for each event that wasn't matched to an upcoming Zoom meeting
    LOG.info(f"Will delete {len(events)} obsolete Google Calendar Events")
    for event in events:
        # delete that event from the calendar
        service.events().delete(calendarId=calendarId,
                                eventId=event["id"],
                                sendUpdates="none").execute()

    LOG.info("Finished synchronization work")

def wait_for_keycloak(timeout=300):
    cfg = from_environment({
        'KEYCLOAK_URL': None,
    })
    url = f'{cfg["KEYCLOAK_URL"]}/auth/'
    for _ in range(timeout):
        try:
            r = requests.get(url)
            r.raise_for_status()
            break
        except requests.exceptions.RequestException:
            time.sleep(1)
    else:
        raise Exception('Keycloak did not start')

def delete_realm(realm, token=None):
    cfg = from_environment({
        'KEYCLOAK_URL': None,
    })
    try:
        url = f'{cfg["KEYCLOAK_URL"]}/auth/admin/realms/{realm}'
        r = requests.get(url, headers={'Authorization': f'bearer {token}'})
        r.raise_for_status()
    except requests.exceptions.HTTPError:
        print(f'realm "{realm}" does not exist')
    else:
        print(f'deleting realm "{realm}"')
        url = f'{cfg["KEYCLOAK_URL"]}/auth/admin/realms/{realm}'
        r = requests.delete(url, headers={'Authorization': f'bearer {token}'})
        r.raise_for_status()
        print(f'realm "{realm}" deleted')

async def work_loop() -> None:
    """Perform the synchronization task and sleep the configured number of seconds."""
    # configure the work loop
    config = from_environment(EXPECTED_CONFIG)
    run_once_and_die = config["RUN_ONCE_AND_DIE"]
    work_sleep_duration_seconds = int(config["WORK_SLEEP_DURATION_SECONDS"])
    # until forever...
    while True:
        # perform the work of synchronizing Zoom to Google
        LOG.info("Starting work cycle")
        await sync_zoom_to_google()
        # if we only wanted a one-shot run, then bail out of the loop
        if run_once_and_die:
            break
        # otherwise, sleep until it's time to perform work again
        LOG.info(f"Sleeping for {work_sleep_duration_seconds} seconds")
        await asyncio.sleep(work_sleep_duration_seconds)

def create_server():
    static_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static')
    template_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates')

    default_config = {
        'HOST': 'localhost',
        'PORT': 8080,
        'DEBUG': False,
        'DB_URL': 'mongodb://localhost/pub_db',
        'COOKIE_SECRET': binascii.hexlify(b'secret').decode('utf-8'),
        'BASIC_AUTH': '',  # user:pass,user:pass
    }
    config = from_environment(default_config)

    logging.info(f'DB: {config["DB_URL"]}')
    db_url, db_name = config['DB_URL'].rsplit('/', 1)
    logging.info(f'DB name: {db_name}')
    db = motor.motor_asyncio.AsyncIOMotorClient(db_url)
    create_indexes(db_url, db_name)

    users = {v.split(':')[0]: v.split(':')[1] for v in config['BASIC_AUTH'].split(',') if v}
    logging.info(f'BASIC_AUTH users: {users.keys()}')

    main_args = {
        'debug': config['DEBUG'],
        'db': db[db_name],
        'basic_auth': users,
    }

    server = RestServer(static_path=static_path, template_path=template_path,
                        cookie_secret=config['COOKIE_SECRET'], xsrf_cookies=True,
                        debug=config['DEBUG'])
    server.add_route(r'/', Main, main_args)
    server.add_route(r'/csv', CSV, main_args)
    server.add_route(r'/manage', Manage, main_args)
    server.add_route(r'/api/publications', APIPubs, main_args)
    server.add_route(r'/api/publications/count', APIPubsCount, main_args)
    server.add_route(r'/api/filter_defaults', APIFilterDefaults, main_args)
    server.add_route(r'/api/types', APITypes, main_args)
    server.add_route(r'/api/projects', APIProjects, main_args)

    server.startup(address=config['HOST'], port=config['PORT'])
    return server

async def mongo_client(monkeypatch):
    if 'DB_URL' not in os.environ:
        monkeypatch.setenv('DB_URL', 'mongodb://localhost/pub_db_test')
    default_config = {
        'DB_URL': None,
    }
    config = from_environment(default_config)
    db_url, db_name = config['DB_URL'].rsplit('/', 1)
    db = motor.motor_asyncio.AsyncIOMotorClient(db_url)
    ret = db[db_name]

    await ret.publications.drop()
    create_indexes(db_url, db_name, background=False)
    try:
        yield ret
    finally:
        await ret.publications.drop()

def get_token():
    cfg = from_environment({
        'KEYCLOAK_URL': None,
        'USERNAME': None,
        'PASSWORD': None,
    })
    url = f'{cfg["KEYCLOAK_URL"]}/auth/realms/master/protocol/openid-connect/token'
    args = {
        'client_id': 'admin-cli',
        'grant_type': 'password',
        'username': cfg['USERNAME'],
        'password': cfg['PASSWORD'],
    }
    r = requests.post(url, data=args)
    r.raise_for_status()
    req = r.json()
    return req['access_token']

def __init__(self, duration: Optional[int] = None):
    """Create a SiteGlobusProxy object."""
    # load what we can from the environment
    self.cfg = from_environment(PROXY_CONFIG)
    # remove anything optional that wasn't specified
    cfg_keys = list(self.cfg.keys())
    for key in cfg_keys:
        if self.cfg[key] == EMPTY_STRING_SENTINEL_VALUE:
            del self.cfg[key]
    # ensure duration is converted to an integer value
    if "GLOBUS_PROXY_DURATION" in self.cfg:
        self.cfg["GLOBUS_PROXY_DURATION"] = int(self.cfg["GLOBUS_PROXY_DURATION"])
    # ensure we have at least an empty string for passphrase
    if "GLOBUS_PROXY_PASSPHRASE" not in self.cfg:
        self.cfg["GLOBUS_PROXY_PASSPHRASE"] = ""
    # override the duration if specified during construction
    if duration:
        self.cfg['GLOBUS_PROXY_DURATION'] = duration

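# Illustrative only: the constructor above relies on module-level PROXY_CONFIG
# and EMPTY_STRING_SENTINEL_VALUE definitions that are not shown here. A
# plausible shape, with assumed keys and an empty-string sentinel marking
# "optional, not specified", might look like this.
EMPTY_STRING_SENTINEL_VALUE = ""

PROXY_CONFIG = {
    "GLOBUS_PROXY_DURATION": EMPTY_STRING_SENTINEL_VALUE,    # optional; hours
    "GLOBUS_PROXY_PASSPHRASE": EMPTY_STRING_SENTINEL_VALUE,  # optional
}
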
def get_rest_client():
    config = from_environment({
        'KEYCLOAK_REALM': None,
        'KEYCLOAK_URL': None,
        'KEYCLOAK_CLIENT_ID': 'rest-access',
        'KEYCLOAK_CLIENT_SECRET': None,
    })
    token_func = partial(
        get_token,
        config["KEYCLOAK_URL"],
        client_id=config['KEYCLOAK_CLIENT_ID'],
        client_secret=config['KEYCLOAK_CLIENT_SECRET'],
    )
    return RestClient(
        f'{config["KEYCLOAK_URL"]}/auth/admin/realms/{config["KEYCLOAK_REALM"]}',
        token=token_func,
        timeout=10,
    )

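# A minimal usage sketch, assuming KEYCLOAK_URL, KEYCLOAK_REALM, and
# KEYCLOAK_CLIENT_SECRET are exported in the environment; the '/users' route
# is only an example endpoint under the realm admin base URL configured above.
async def list_users_example():
    rc = get_rest_client()
    users = await rc.request('GET', '/users')
    print(users)
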
async def mongo_client():
    default_config = {
        'DB_URL': 'mongodb://localhost/keycloak_user_mgmt',
    }
    config = from_environment(default_config)
    db = motor.motor_asyncio.AsyncIOMotorClient(config['DB_URL'])
    db_name = config['DB_URL'].split('/')[-1]
    ret = db[db_name]

    await ret.user_registrations.drop()
    await ret.inst_approvals.drop()
    await ret.group_approvals.drop()
    try:
        yield ret
    finally:
        await ret.user_registrations.drop()
        await ret.inst_approvals.drop()
        await ret.group_approvals.drop()

async def main():
    # make sure we were given source, destination, and path
    if len(sys.argv) < 4:
        print("Usage: make_transfer_request.py <source_site> <dest_site> <path>")
        return
    # construct the TransferRequest body
    request_body = {
        "source": sys.argv[1],
        "dest": sys.argv[2],
        "path": sys.argv[3],
    }
    # configure a RestClient from the environment
    config = from_environment(EXPECTED_CONFIG)
    rc = RestClient(config["LTA_REST_URL"], token=config["LTA_REST_TOKEN"])
    # attempt to post the TransferRequest to the LTA DB
    try:
        response = await rc.request("POST", "/TransferRequests", request_body)
        print(response)
    except Exception as e:
        print(e)

def create_realm(realm, token=None):
    cfg = from_environment({
        'KEYCLOAK_URL': None,
    })
    try:
        url = f'{cfg["KEYCLOAK_URL"]}/auth/admin/realms/{realm}'
        r = requests.get(url, headers={'Authorization': f'bearer {token}'})
        r.raise_for_status()
    except requests.exceptions.HTTPError:
        print(f'creating realm "{realm}"')
        url = f'{cfg["KEYCLOAK_URL"]}/auth/admin/realms/'
        r = requests.post(url, json={'realm': realm, 'enabled': True},
                          headers={'Authorization': f'bearer {token}'})
        r.raise_for_status()
        print(f'realm "{realm}" created')
    else:
        print(f'realm "{realm}" already exists')

def bootstrap():
    cfg = from_environment({
        'KEYCLOAK_REALM': None,
        'KEYCLOAK_CLIENT_ID': 'rest-access',
    })
    wait_for_keycloak()
    token = get_token()
    print('Keycloak token obtained, setting up...')
    create_realm(cfg['KEYCLOAK_REALM'], token=token)
    create_public_app(cfg['KEYCLOAK_REALM'], token=token)
    client_secret = create_service_role(cfg['KEYCLOAK_CLIENT_ID'], realm=cfg['KEYCLOAK_REALM'], token=token)
    print(f'\nclient_id={cfg["KEYCLOAK_CLIENT_ID"]}')
    print(f'client_secret={client_secret}')
    return client_secret

def runner() -> None:
    """Configure a RateLimiter component from the environment and set it running."""
    # obtain our configuration from the environment
    config = from_environment(EXPECTED_CONFIG)
    # configure structured logging for the application
    structured_formatter = StructuredFormatter(
        component_type='RateLimiter',
        component_name=config["COMPONENT_NAME"],
        ndjson=True)
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(structured_formatter)
    root_logger = logging.getLogger(None)
    root_logger.setLevel(logging.NOTSET)
    root_logger.addHandler(stream_handler)
    logger = logging.getLogger("lta.rate_limiter")
    # create our RateLimiter service
    rate_limiter = RateLimiter(config, logger)
    # let's get to work
    rate_limiter.logger.info("Adding tasks to asyncio loop")
    loop = asyncio.get_event_loop()
    loop.create_task(status_loop(rate_limiter))
    loop.create_task(work_loop(rate_limiter))

async def add_catalog(site, path):
    # configure a RestClient from the environment
    config = from_environment(EXPECTED_CONFIG)
    rc = RestClient(config["FILE_CATALOG_REST_URL"], token=config["FILE_CATALOG_REST_TOKEN"])
    # for each (dirpath, dirnames, filenames) tuple in the walk
    for root, dirs, files in os.walk(path):
        # don't recurse into deeper subdirectories
        if root != path:
            continue
        # for each file in our directory
        for data_file in files:
            # determine the logical name of the file
            logical_name = os.path.join(root, data_file)
            # create a catalog record for it
            file_record = {
                "uuid": str(uuid4()),
                "logical_name": logical_name,
                "checksum": {
                    "sha512": token_hex(64),
                },
                "locations": [
                    {
                        "site": f"{site}",
                        "path": logical_name,
                    }
                ],
                "file_size": os.path.getsize(logical_name),
            }
            # if we're being pedantic about real checksums in test data
            if config["FAKE_CHECKSUM"] != "True":
                file_record["checksum"]["sha512"] = sha512sum(logical_name)
            # add the file to the File Catalog
            try:
                print(f"POST /api/files - {logical_name}")
                response = await rc.request("POST", "/api/files", file_record)
            except Exception as e:
                # whoopsy daisy...
                print(e)

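# add_catalog() above calls a sha512sum() helper that is not shown here; a
# streaming implementation along these lines is assumed (chunked reads keep
# memory use flat for large files).
import hashlib

def sha512sum(path: str) -> str:
    """Compute the SHA-512 hex digest of the file at `path`."""
    digest = hashlib.sha512()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            digest.update(chunk)
    return digest.hexdigest()
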
async def clear_lta_bundles():
    # configure a RestClient from the environment
    config = from_environment(EXPECTED_CONFIG)
    rc = RestClient(config["LTA_REST_URL"], token=config["LTA_REST_TOKEN"])
    # while there are still bundles
    clearing = True
    while clearing:
        try:
            # get a list of all the bundles in the LTA DB
            response = await rc.request("GET", "/Bundles")
            results = response["results"]
            # for each bundle that we found
            for uuid in results:
                # remove it from the LTA DB
                print(f"DELETE /Bundles/{uuid}")
                response2 = await rc.request("DELETE", f"/Bundles/{uuid}")
            # if we didn't get any bundles back, we're done
            if len(results) < 1:
                clearing = False
        except Exception as e:
            # whoopsy daisy...
            clearing = False
            print(e)

def get_google_calendar_service() -> Any:
    """Build an authenticated Google Calendar API service object."""
    LOG.debug("Establishing credentials to use Google API")
    # load the paths from the configuration
    config = from_environment(EXPECTED_CONFIG)
    api_creds_path = config["GOOGLE_API_CREDS_PATH"]
    client_creds_path = config["GOOGLE_CLIENT_CREDS_PATH"]
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists(api_creds_path):
        LOG.debug(f"Loading API credentials from {api_creds_path}")
        with open(api_creds_path, 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            LOG.debug("Refreshing expired API credentials")
            creds.refresh(Request())
        else:
            LOG.debug(f"Loading {client_creds_path} with client secret to obtain API credentials")
            flow = InstalledAppFlow.from_client_secrets_file(client_creds_path, GOOGLE_SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        LOG.debug(f"Writing API credentials to {api_creds_path}")
        with open(api_creds_path, 'wb') as token:
            pickle.dump(creds, token)
    # build the service object to query the Calendar API
    service = build('calendar', 'v3', credentials=creds)
    # return the service object to the caller
    return service

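# get_google_calendar_service() above depends on a module-level GOOGLE_SCOPES
# list that is not shown; a typical value granting read/write calendar access
# is given here as an assumption.
GOOGLE_SCOPES = ['https://www.googleapis.com/auth/calendar']
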