def init_auth_helper(self):
    """Return the process-wide AuthHelper singleton, creating it on first use.

    Uses the app client id/secret stored on this instance when the singleton
    does not exist yet; otherwise returns the existing instance.
    """
    # AuthHelper is a singleton: create() may only be called once per process.
    if not AuthHelper.isInitialized():
        auth_helper = AuthHelper.create(self.app_client_id, self.app_client_secret)
    else:
        auth_helper = AuthHelper.instance()
    return auth_helper
def get_provenance_data_object(self, token, groupUUID=None):
    """Build the provenance attribute dict for a new entity.

    If groupUUID is not supplied, the current user's groups are searched for
    one permitted to create entities (grp['generateuuid'] is True).

    Returns a dict holding the provenance group uuid and display name plus
    the user's sub, email and display name taken from the token's user info.
    Raises ValueError when the user belongs to multiple writable groups or
    to none at all.
    """
    provenance_group = None
    if groupUUID is not None:
        provenance_group = self.get_group_by_identifier(groupUUID)
    else:
        # Manually find the group id given the current user.
        entity = Entity(self.provenance_config['APP_CLIENT_ID'],
                        self.provenance_config['APP_CLIENT_SECRET'],
                        self.provenance_config['UUID_WEBSERVICE_URL'])
        for grp in entity.get_user_groups(token):
            if grp['generateuuid'] == True:
                groupUUID = grp['uuid']
                # BUG FIX: the original constructed this ValueError without
                # raising it, so membership in multiple writable groups was
                # silently ignored.
                if provenance_group is not None:
                    raise ValueError('Error: Current user is a member of multiple groups allowed to create new entities. The user must select which one to use')
                provenance_group = self.get_group_by_identifier(groupUUID)
                #TODO: THIS IS HARDCODED!! WE NEED TO CHANGE THIS TO TRACK TEST GROUPS DIFFERENTLY
                # for now if the group is the IEC Testing group, keep looking for a different group
                # only use the IEC Testing group if no other writable group is found for the user
                # NOTE: this code will simply return the first writable group it encounters
                if groupUUID != '5bd084c8-edc2-11e8-802f-0e368f3075e8':
                    break
        if groupUUID is None:
            raise ValueError('Unauthorized: Current user is not a member of a group allowed to create new entities')
    ret_provenance_group = {
        HubmapConst.PROVENANCE_GROUP_UUID_ATTRIBUTE: groupUUID,
        HubmapConst.PROVENANCE_GROUP_NAME_ATTRIBUTE: provenance_group['displayname']
    }
    # Create the AuthHelper singleton on first use.
    if AuthHelper.isInitialized():
        authcache = AuthHelper.instance()
    else:
        authcache = AuthHelper.create(self.provenance_config['appclientid'],
                                      self.provenance_config['appclientsecret'])
    userinfo = authcache.getUserInfo(token, True)
    ret_provenance_group[HubmapConst.PROVENANCE_SUB_ATTRIBUTE] = userinfo['sub']
    ret_provenance_group[HubmapConst.PROVENANCE_USER_EMAIL_ATTRIBUTE] = userinfo['email']
    ret_provenance_group[HubmapConst.PROVENANCE_USER_DISPLAYNAME_ATTRIBUTE] = userinfo['name']
    return ret_provenance_group
def __init__(self, property_file_name):
    """Load properties, validate the collections TSV path, read the TSV rows
    into self.collection_info and open a Neo4j Graph connection.

    Raises Exception when the collections file is missing or not a .tsv.
    """
    self.props = IngestProps(property_file_name,
                             required_props=['nexus.token', 'neo4j.server',
                                             'neo4j.username', 'neo4j.password',
                                             'collections.input.file', 'uuid.api.url'])
    self.uuid_helper = UUIDHelper(ingest_props=self.props)
    self.token = self.props.get('nexus.token')
    self.neo4j_server = self.props.get('neo4j.server')
    self.neo4j_user = self.props.get('neo4j.username')
    self.neo4j_password = self.props.get('neo4j.password')
    self.collections_tsv_path = self.props.get("collections.input.file")
    # AuthHelper singleton must already exist (UUIDHelper above sets it up --
    # TODO confirm).
    self.auth_helper = AuthHelper.instance()
    if string_helper.isBlank(self.collections_tsv_path) or not os.path.isfile(self.collections_tsv_path):
        # BUG FIX: message read "tsf file"; str() guards a None/blank path.
        raise Exception("collections tsv file does not exist:" + str(self.collections_tsv_path))
    if not self.collections_tsv_path.endswith(".tsv"):
        raise Exception("collections file must be of type .tsv : " + self.collections_tsv_path)
    # Read every TSV row into a plain dict (dict(row) replaces the original
    # manual key-by-key copy).
    self.collection_info = []
    with open(self.collections_tsv_path, newline='') as tsvfile:
        reader = csv.DictReader(tsvfile, delimiter='\t')
        for row in reader:
            self.collection_info.append(dict(row))
    self.graph = Graph(self.neo4j_server, auth=(self.neo4j_user, self.neo4j_password))
def __dataset_directory_relative_path(self, access_level, group_uuid, dataset_uuid, published):
    """Resolve a dataset's Globus endpoint id and endpoint-relative path.

    Protected data lives under the protected endpoint grouped by provider
    group display name; published data under the public endpoint; anything
    else under the consortium endpoint, also grouped by group display name.

    Returns {"rel_path": ..., "globus_endpoint_uuid": ...}.
    """
    group_display_name = AuthHelper.getGroupDisplayName(group_uuid)
    if access_level == 'protected':
        endpoint_id = self.appconfig['GLOBUS_PROTECTED_ENDPOINT_UUID']
        path_parts = (self.appconfig['RELATIVE_GLOBUS_PROTECTED_ENDPOINT_FILEPATH'],
                      group_display_name, dataset_uuid)
    elif published:
        endpoint_id = self.appconfig['GLOBUS_PUBLIC_ENDPOINT_UUID']
        path_parts = (self.appconfig['RELATIVE_GLOBUS_PUBLIC_ENDPOINT_FILEPATH'],
                      dataset_uuid)
    else:
        endpoint_id = self.appconfig['GLOBUS_CONSORTIUM_ENDPOINT_UUID']
        path_parts = (self.appconfig['RELATIVE_GLOBUS_CONSORTIUM_ENDPOINT_FILEPATH'],
                      group_display_name, dataset_uuid)
    return {"rel_path": str(os.path.join(*path_parts)),
            "globus_endpoint_uuid": endpoint_id}
def __init__(self, property_file_name):
    """Set up the dataset-publishing run: load properties, resolve the input
    uuid file, create error/recording log files, validate the caller's auth
    token and read the dataset ids to process.

    Raises ErrorMessage when the input file is missing or the token invalid.
    """
    self.props = IngestProps(property_file_name,
                             required_props=['nexus.token', 'ingest.api.url', 'search.api.url',
                                             'uuid.api.url', 'dataset.uuid.file',
                                             'globus.app.client.id', 'globus.app.client.secret'])
    # The input file may be given on the command line, overriding the property.
    if len(sys.argv) >= 2:
        self.id_file = sys.argv[1]
    else:
        self.id_file = self.props.get('dataset.uuid.file')
    if string_helper.isBlank(self.id_file):
        # BUG FIX: original referenced self.prop_file_name, an attribute that
        # was never set (AttributeError on this path); use the constructor
        # argument. Also fixes the doubled "as as" in the message.
        raise ErrorMessage("ERROR: A list of dataset uuids must be specified in " + property_file_name + " as a property 'dataset.uuid.file' or as the first argument on the command line")
    if not os.path.isfile(self.id_file):
        raise ErrorMessage("ERROR: Input file " + self.id_file + " does not exist.")
    base_file_name = os.path.splitext(os.path.basename(self.id_file))[0]
    dir_path = file_helper.ensureTrailingSlash(os.path.dirname(self.id_file))
    # Set up log files: first for errors, second to record all actions.
    cur_time = time.strftime("%d-%m-%Y-%H-%M-%S")
    error_log_filename = dir_path + base_file_name + "-errors." + cur_time + ".log"
    self.error_logger = logging.getLogger('publish.datasets.err')
    self.error_logger.setLevel(logging.INFO)
    self.error_logger.addHandler(logging.FileHandler(error_log_filename))
    recording_log_filename = dir_path + base_file_name + "-rcding." + cur_time + ".log"
    self.recording_logger = logging.getLogger('publish.datasets.rcd')
    self.recording_logger.setLevel(logging.INFO)
    self.recording_logger.addHandler(logging.FileHandler(recording_log_filename))
    # Initialize variables, get required values from property file.
    self.dataset_info = None
    self.dataset_info_tsv_path = None
    self.token = self.props.get('nexus.token')
    self.search_api_url = file_helper.ensureTrailingSlashURL(self.props.get('search.api.url'))
    self.ingest_api_url = file_helper.ensureTrailingSlashURL(self.props.get('ingest.api.url'))
    # Initialize the auth helper and use it to get the user information for
    # the person running the script; getUserInfo returns a Response on error.
    auth_helper = AuthHelper.create(self.props.get('globus.app.client.id'),
                                    self.props.get('globus.app.client.secret'))
    user_info = auth_helper.getUserInfo(self.token, getGroups=True)
    if isinstance(user_info, Response):
        raise ErrorMessage("error validating auth token: " + user_info.get_data(as_text=True))
    # FIX: use a context manager so the id file is closed on error too.
    with open(self.id_file, 'r') as id_f:
        id_lines = id_f.readlines()
    # Collect dataset ids, skipping blank lines and '#' comment lines.
    self.ds_ids = []
    for id_line in id_lines:
        if not string_helper.isBlank(id_line):
            tl = id_line.strip()
            if not tl.startswith('#'):
                self.ds_ids.append(tl)
    self.donors_to_reindex = []
    self.set_acl_commands = []
def __init__(self, ingest_props=None, auth_token=None, uuid_service_url=None,
             globus_app_id=None, globus_app_secret=None):
    """Resolve token, uuid-service URL and Globus app credentials (from the
    property file via _get_property, with the explicit arguments as
    overrides), then validate the token against Globus.

    Raises ErrorMessage when token validation fails.
    """
    self.props = ingest_props
    self.token = self._get_property("nexus.token", auth_token)
    self.uuid_url = self._get_property("uuid.api.url", uuid_service_url)
    self.globus_app_id = self._get_property("globus.app.client.id", globus_app_id).strip()
    self.globus_app_secret = self._get_property("globus.app.client.secret", globus_app_secret).strip()
    # Validate the token up front; getUserInfo returns a Response on failure.
    helper = AuthHelper(self.globus_app_id, self.globus_app_secret)
    user_info = helper.getUserInfo(self.token, getGroups=True)
    if isinstance(user_info, Response):
        raise ErrorMessage("error validating auth token: " + user_info.get_data(as_text=True))
def get_group_by_identifier(self, identifier):
    """Look up a HuBMAP group by key/name or by group uuid, case-insensitively.

    Raises ValueError when identifier is blank/empty or no group matches.
    """
    if not identifier:
        raise ValueError("identifier cannot be blank")
    # Create the AuthHelper singleton on first use.
    if AuthHelper.isInitialized():
        authcache = AuthHelper.instance()
    else:
        authcache = AuthHelper.create(self.md_config['APP_CLIENT_ID'],
                                      self.md_config['APP_CLIENT_SECRET'])
    groupinfo = authcache.getHuBMAPGroupInfo()
    ident_lower = str(identifier).lower()
    # Match either the group key (name) or the group record's uuid.
    for key, group in groupinfo.items():
        if str(key).lower() == ident_lower or str(group['uuid']).lower() == ident_lower:
            return group
    raise ValueError("cannot find a Hubmap group matching: [" + identifier + "]")
def init_app(self, flask_app):
    """Wire this auth manager into flask_app: login manager, Globus OAuth
    client, user loader, the /login route and the AuthHelper singleton.
    """
    client_id = get_config_param('APP_CLIENT_ID')
    client_secret = get_config_param('APP_CLIENT_SECRET')
    self.flask_app = flask_app
    self.login_manager.init_app(self.flask_app)
    # FIX: reuse the values fetched above instead of re-reading the config.
    self.globus_oauth = globus_sdk.ConfidentialAppAuthClient(client_id, client_secret)
    self.login_manager.user_loader(self.load_user)
    self.flask_app.add_url_rule('/login', 'login', self.login)
    # AuthHelper is a singleton: only create it if it does not exist yet.
    if AuthHelper.isInitialized():
        self.authHelper = AuthHelper.instance()
    else:
        self.authHelper = AuthHelper.create(clientId=client_id, clientSecret=client_secret)
def get_writeable_flag(self, token, writeable_uuid_list, current_record):
    """Decide whether the current user may modify current_record.

    Data curators may write only records in QA status (the curator role
    overrules the group-level rules). Everyone else must have write access
    to the record's provenance group AND the record must be in a writable
    status (NEW, ERROR or REOPENED).

    Returns True when the record is writable for this user, else False.
    """
    # Create the AuthHelper singleton on first use.
    if AuthHelper.isInitialized():
        authcache = AuthHelper.instance()
    else:
        authcache = AuthHelper.create(self.confdata['APP_CLIENT_ID'],
                                      self.confdata['APP_CLIENT_SECRET'])
    userinfo = authcache.getUserInfo(token, True)
    role_list = AuthCache.getHMRoles()
    data_curator_uuid = role_list['hubmap-data-curator']['uuid']
    # Membership test replaces the original manual scan loop.
    is_data_curator = data_curator_uuid in userinfo['hmroleids']
    metadata = current_record['metadata_properties']
    # The data curator role overrules the group level write rules.
    if is_data_curator:
        return metadata['status'] in [HubmapConst.DATASET_STATUS_QA]
    # Perform two checks:
    # 1. make sure the user has write access to the record's group
    # 2. make sure the record has a status that is writable
    if metadata['provenance_group_uuid'] in writeable_uuid_list:
        if metadata['status'] in [HubmapConst.DATASET_STATUS_NEW,
                                  HubmapConst.DATASET_STATUS_ERROR,
                                  HubmapConst.DATASET_STATUS_REOPENED]:
            return True
    return False
def __dataset_directory_absolute_path(self, access_level, group_uuid, dataset_uuid, published):
    """Return the absolute filesystem path of a dataset's directory.

    Protected datasets live under the protected base grouped by provider
    group display name; published datasets directly under the public base;
    everything else under the consortium base grouped by group display name.
    """
    group_display_name = AuthHelper.getGroupDisplayName(group_uuid)
    if access_level == 'protected':
        parts = (self.appconfig['GLOBUS_PROTECTED_ENDPOINT_FILEPATH'],
                 group_display_name, dataset_uuid)
    elif published:
        parts = (self.appconfig['GLOBUS_PUBLIC_ENDPOINT_FILEPATH'], dataset_uuid)
    else:
        parts = (self.appconfig['GLOBUS_CONSORTIUM_ENDPOINT_FILEPATH'],
                 group_display_name, dataset_uuid)
    return str(os.path.join(*parts))
def __init__(self, clientId, clientSecret, dbHost, dbName, dbUsername, dbPassword):
    """Validate Globus credentials, obtain the AuthHelper singleton and open
    the uuid-service database connection.

    Raises Exception when clientId/clientSecret are missing or blank.
    """
    if clientId is None or clientSecret is None or isBlank(clientId) or isBlank(clientSecret):
        raise Exception("Globus client id and secret are required in AuthHelper")
    if not AuthHelper.isInitialized():
        self.authHelper = AuthHelper.create(clientId=clientId, clientSecret=clientSecret)
    else:
        # BUG FIX: original called self.authHelper.instance() before
        # self.authHelper existed (AttributeError) and never stored the
        # singleton; fetch it from the AuthHelper class and assign it.
        self.authHelper = AuthHelper.instance()
    # Open the config file
    self.logger = logging.getLogger('uuid.service')
    self.dbHost = dbHost
    self.dbName = dbName
    self.dbUsername = dbUsername
    self.dbPassword = dbPassword
    # Lock guarding access shared across threads (used by callers of hmdb).
    self.lock = threading.RLock()
    self.hmdb = DBConn(self.dbHost, self.dbUsername, self.dbPassword, self.dbName)
from hubmap_commons.hm_auth import AuthHelper
from hubmap_commons.exceptions import HTTPException

# NOTE(review): hard-coded Globus client credentials checked into source --
# these should come from configuration/environment and this secret rotated.
APP_CLIENT_ID = '21f293b0-5fa5-4ee1-9e0e-3cf88bd70114'
APP_CLIENT_SECRET = 'gimzYEgm/jMtPmNJ0qoV11gdicAK8dgu+yigj2m3MTE='

helper = AuthHelper(APP_CLIENT_ID, APP_CLIENT_SECRET)

#provide nexus tokens for the following, BE CAREFUL TO NOT CHECK THESE IN!
#member of HuBMAP-Read group only
read_only = ''
#member of HuBMAP-Read and HuBMAP-Testing groups only
test_only = ''
#member of HuBMAP-Read and HuBMAP-Data-Admin groups only
data_admin_only = ''
#member of HuBMAP-Read and all HuBMAP data provider groups
all_write = ''
#member of no HuBMAP groups
nothing = ''

# Well-known group uuids used by the access checks below; the last one is
# deliberately malformed ('z' is not a hex digit) for negative testing.
read_group = '5777527e-ec11-11e8-ab41-0af86edb4424'
test_group = '5bd084c8-edc2-11e8-802f-0e368f3075e8'
vandy_group = '73bb26e4-ed43-11e8-8f19-0a7c1eab007a'
invalid_group = '73bb26e4-ed43-11e8-8f19-0a7c1eab007z'


# NOTE(review): definition cut off here -- the body of check_access
# continues beyond this chunk.
def check_access(token, group_uuid, access, access_msg):
def __init__(self, config):
    """Store the app configuration, create the ingest-service logger and
    obtain the configured AuthHelper singleton."""
    self.appconfig = config
    self.logger = logging.getLogger('ingest.service')
    self.auth_helper = AuthHelper.configured_instance(config['APP_CLIENT_ID'],
                                                      config['APP_CLIENT_SECRET'])
def get_upload_directory_absolute_path(self, group_uuid, upload_uuid):
    """Absolute path of an upload's directory: the protected endpoint base,
    then the owning group's display name, then the upload uuid."""
    parts = (self.appconfig['GLOBUS_PROTECTED_ENDPOINT_FILEPATH'],
             AuthHelper.getGroupDisplayName(group_uuid),
             upload_uuid)
    return str(os.path.join(*parts))
elif key.upper() in dct[section]: rslt = dct[section][key.upper()] else: raise AirflowConfigException('No config entry for [{}] {}'.format( section, key)) # airflow config reader leaves quotes, which we want to strip for qc in ['"', "'"]: if rslt.startswith(qc) and rslt.endswith(qc): rslt = rslt.strip(qc) return rslt else: raise AirflowConfigException('No config section [{}]'.format(section)) AUTH_HELPER = None if not AuthHelper.isInitialized(): AUTH_HELPER = AuthHelper.create(clientId=config('connections', 'app_client_id'), clientSecret=config( 'connections', 'app_client_secret')) else: AUTH_HELPER = authHelper.instance() class HubmapApiInputException(Exception): pass class HubmapApiConfigException(Exception): pass
# Log file path is timestamped at import time; '../log' is relative to the
# working directory -- TODO confirm the directory exists at startup.
LOG_FILE_NAME = "../log/ontology-api-" + time.strftime("%d-%m-%Y-%H-%M-%S") + ".log"
logger = None

# Specify the absolute path of the instance folder and use the config file relative to the instance path
app = Flask(__name__,
            instance_path=os.path.join(os.path.abspath(os.path.dirname(__file__)), 'instance'),
            instance_relative_config=True)
app.config.from_pyfile('app.cfg')

# Enable/disable CORS from configuration based on docker or non-docker deployment
if app.config['ENABLE_CORS']:
    CORS(app)

token_list = {}

# Initialize the AuthHelper
# This is used by the @secured decorator
if AuthHelper.isInitialized() == False:
    authcache = AuthHelper.create(
        app.config['APP_CLIENT_ID'], app.config['APP_CLIENT_SECRET'])
else:
    authcache = AuthHelper.instance()


# Set up the service logger before the first request is handled.
# NOTE(review): this handler is cut off here -- the body of the except
# clause continues beyond this chunk.
@app.before_first_request
def init():
    global logger
    try:
        logger = logging.getLogger('ontology.service')
        logger.setLevel(logging.INFO)
        logFH = logging.FileHandler(LOG_FILE_NAME)
        logger.addHandler(logFH)
        logger.info("started")
    except Exception as e:
# File path defined in app.config['REQUESTS_CACHE_SQLITE_NAME'] without the .sqlite extension # Use the same CACHE_TTL from configuration requests_cache.install_cache(app.config['REQUESTS_CACHE_SQLITE_NAME'], backend='sqlite', expire_after=app.config['CACHE_TTL']) # Suppress InsecureRequestWarning warning when requesting status on https with ssl cert verify disabled requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) #################################################################################################### ## AuthHelper initialization #################################################################################################### # Initialize AuthHelper class and ensure singleton try: if AuthHelper.isInitialized() == False: auth_helper_instance = AuthHelper.create( app.config['GLOBUS_APP_ID'], app.config['GLOBUS_APP_SECRET']) logger.info("Initialized AuthHelper class successfully :)") else: auth_helper_instance = AuthHelper.instance() except Exception: msg = "Failed to initialize the AuthHelper class" # Log the full stack trace, prepend a line with our message logger.exception(msg) #################################################################################################### ## Default route ####################################################################################################
            # NOTE(review): fragment -- the loop/try this belongs to begins
            # outside this chunk; indentation is reconstructed.
            dataset.modify_dataset(driver, header, uuid, dataset_record, dataset_record['group_uuid'])
    except Exception as e:
        print ('An exception occurred: while initializing dataset access levels: ' + str(e))
        # Roll back the open neo4j transaction on any failure.
        tx.rollback()


if __name__ == "__main__":
    # Template script: fill in connection/configuration values before running.
    NEO4J_SERVER = ''
    NEO4J_USERNAME = ''
    NEO4J_PASSWORD = ''
    APP_CLIENT_ID = ''
    APP_CLIENT_SECRET = ''
    UUID_WEBSERVICE_URL = ''
    HUBMAP_WEBSERVICE_FILEPATH = ''
    # Create the AuthHelper singleton if it does not already exist.
    if AuthHelper.isInitialized() == False:
        authcache = AuthHelper.create(
            APP_CLIENT_ID, APP_CLIENT_SECRET)
    else:
        authcache = AuthHelper.instance()
    processed_secret = AuthHelper.instance().getProcessSecret()
    conf_data = {'NEO4J_SERVER' : NEO4J_SERVER, 'NEO4J_USERNAME': NEO4J_USERNAME,
                 'NEO4J_PASSWORD': NEO4J_PASSWORD, 'APP_CLIENT_ID': APP_CLIENT_ID,
                 'APP_CLIENT_SECRET': processed_secret,
                 'UUID_WEBSERVICE_URL': UUID_WEBSERVICE_URL,
                 'HUBMAP_WEBSERVICE_FILEPATH': HUBMAP_WEBSERVICE_FILEPATH}
    nexus_token = ''
    initialize_all_entity_access_levels(conf_data)
redirect_uri = url_for('hello', _external=True) # build the logout URI with query params # there is no tool to help build this (yet!) globus_logout_url = ( 'https://auth.globus.org/v2/web/logout' + '?client={}'.format(app.config['APP_CLIENT_ID']) + #'?client={}'.format(app.config['PORTAL_CLIENT_ID']) + '&redirect_uri={}'.format(redirect_uri) + '&redirect_name=Globus Example App') # Redirect the user to the Globus Auth logout page return redirect(globus_logout_url) if __name__ == "__main__": try: #hm_auth.hm_application = app cId = app.config['APP_CLIENT_ID'] cSecret = app.config['APP_CLIENT_SECRET'] if not AuthHelper.isInitialized(): authHelper = AuthHelper.create(clientId=cId, clientSecret=cSecret) else: authHelper.instance() app.run() except Exception as e: traceback.print_exc() print("Error during startup.") print(str(e))