def update_collection(self, uuid, record):
    # ID attributes are immutable once assigned
    if (HubmapConst.UUID_ATTRIBUTE in record or
            HubmapConst.DOI_ATTRIBUTE in record or
            HubmapConst.DISPLAY_DOI_ATTRIBUTE in record or
            HubmapConst.ENTITY_TYPE_ATTRIBUTE in record):
        raise HTTPException("ID attributes cannot be changed", 400)

    not_allowed = []
    for attrib in record.keys():
        if attrib not in self.allowed_collection_update_attributes:
            not_allowed.append(attrib)
    if len(not_allowed) > 0:
        raise HTTPException(
            "Attribute(s) not allowed: " + string_helper.listToDelimited(not_allowed, " "), 400)

    # Validate the attributes of each creator record as well
    if HubmapConst.COLLECTION_CREATORS_ATTRIBUTE in record:
        creators = record[HubmapConst.COLLECTION_CREATORS_ATTRIBUTE]
        for creator in creators:
            for attrib in creator.keys():
                if attrib not in self.allowed_creator_attributes and attrib not in not_allowed:
                    not_allowed.append(attrib)
    if len(not_allowed) > 0:
        raise HTTPException(
            "Creator attribute(s) not allowed: " + string_helper.listToDelimited(not_allowed, " "), 400)

    # The creators list is stored as a JSON string on the node
    save_record = {}
    for attrib in record.keys():
        if attrib == HubmapConst.COLLECTION_CREATORS_ATTRIBUTE:
            save_record[attrib] = json.dumps(record[attrib])
        else:
            save_record[attrib] = record[attrib]

    rval = self.get_py2neo_conn().run(
        "match(c:Collection {uuid: {uuid}}) set c += {params} return c.uuid",
        uuid=uuid, params=save_record).data()
    if len(rval) == 0:
        raise HTTPException(
            "Update failed for collection with uuid " + uuid + ". UUID possibly not found.", 400)
    else:
        if rval[0]['c.uuid'] != uuid:
            raise HTTPException(
                "Update failed, wrong uuid returned while trying to update " + uuid +
                " returned: " + rval[0]['c.uuid'], 400)
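# Usage sketch (hypothetical, not part of the service code): assumes an already
# constructed helper instance `entity_helper` exposing update_collection(), and
# that 'title' and 'description' appear in allowed_collection_update_attributes.
def example_update_collection(entity_helper, collection_uuid):
    # Only non-ID attributes may be updated; ID attributes raise HTTPException(400)
    record = {
        'title': 'Updated collection title',
        'description': 'Updated description text'
    }
    entity_helper.update_collection(collection_uuid, record)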
def check_write_privs(self, groups_token, group_uuid):
    user_info = self.getUserInfo(groups_token, getGroups=True)
    if isinstance(user_info, Response):
        raise HTTPException(user_info.text, user_info.status_code)
    groups_by_id = self.getHMGroupsById()
    if group_uuid not in groups_by_id:
        raise HTTPException(f"{group_uuid} is not a valid group uuid", 400)
    grp = groups_by_id[group_uuid]
    if 'data_provider' not in grp or not grp['data_provider']:
        raise HTTPException(f"group with uuid {group_uuid} is not a valid data provider group", 400)
    if 'hmgroupids' in user_info:
        # Members of the data admin group can write on behalf of any group
        if data_admin_group_uuid in user_info['hmgroupids']:
            return True
        elif group_uuid not in user_info['hmgroupids']:
            raise HTTPException("User not authorized for group.", 403)
        else:
            return True
    else:
        raise HTTPException("User is not authorized, no group membership", 403)
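# Usage sketch (hypothetical): assumes `helper` is an instance of this class and
# `groups_token` is a valid Globus groups token. check_write_privs() either
# returns True or raises an HTTPException with a 400/403 status.
def example_check_write_privs(helper, groups_token, group_uuid):
    try:
        helper.check_write_privs(groups_token, group_uuid)
        return True
    except HTTPException as he:
        # Not a member of the group (403) or the group is not a data provider (400)
        logger.warning(f"write check failed: {he}")
        return False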
def move_dataset_files_for_publishing(self, uuid, group_uuid, dataset_access_level, trial_run=False):
    from_path = self.__dataset_directory_absolute_path(
        dataset_access_level, group_uuid, uuid, False)
    if not os.path.isdir(from_path):
        raise HTTPException(
            f"{uuid}: path to dataset not found, will not publish, path is {from_path}", 500)
    # Published datasets land in either the protected or the public area
    data_access_level = 'protected'
    if dataset_access_level != 'protected':
        data_access_level = 'public'
    to_path = self.__dataset_directory_absolute_path(
        data_access_level, group_uuid, uuid, True)
    if not trial_run:
        shutil.move(from_path, to_path)
    else:
        # Dry run: print the equivalent shell command instead of moving anything
        print(f"mv {from_path} {to_path}")
    return None
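# Usage sketch (hypothetical): a dry run prints the would-be move as an "mv"
# command instead of calling shutil.move(). Assumes `file_helper_instance` is an
# instance of this class and a 'consortium' dataset directory exists for the uuid.
def example_publish_move(file_helper_instance, dataset_uuid, group_uuid):
    # Preview the move first, then perform it
    file_helper_instance.move_dataset_files_for_publishing(
        dataset_uuid, group_uuid, 'consortium', trial_run=True)
    file_helper_instance.move_dataset_files_for_publishing(
        dataset_uuid, group_uuid, 'consortium', trial_run=False)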
def get_user_roles_deprecated(self, token):
    if not AuthHelper.isInitialized():
        authcache = AuthHelper.create(
            self.entity_config['APP_CLIENT_ID'], self.entity_config['APP_CLIENT_SECRET'])
    else:
        authcache = AuthHelper.instance()
    userinfo = authcache.getUserInfo(token, True)
    if isinstance(userinfo, Response) and userinfo.status_code == 401:
        raise HTTPException('token is invalid.', 401)
    if 'hmroleids' not in userinfo:
        raise ValueError("Cannot find Hubmap role information for token")
    # Map the user's role uuids back to the full role records from the auth cache
    return_list = []
    role_list = AuthCache.getHMRoles()
    for role_uuid in userinfo['hmroleids']:
        for role_name in role_list.keys():
            if role_list[role_name]['uuid'] == role_uuid:
                return_list.append(role_list[role_name])
                break
    return return_list
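# Usage sketch (hypothetical): the method is marked deprecated, so callers should
# prefer the AuthHelper/AuthCache role APIs directly where possible. Assumes
# `helper` is an instance of this class and `token` is a valid Globus token.
def example_get_role_uuids(helper, token):
    roles = helper.get_user_roles_deprecated(token)
    # Each entry is a full role record from AuthCache.getHMRoles(); collect the uuids
    return [role['uuid'] for role in roles]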
def get_write_group_uuid(self, request_or_token, group_uuid=None):
    if isinstance(request_or_token, str):
        user_info = self.getUserInfo(request_or_token, getGroups=True)
    else:
        user_info = self.getUserInfoUsingRequest(request_or_token, getGroups=True)
    if isinstance(user_info, Response):
        raise HTTPException("Error while getting user information from token. " +
                            user_info.get_data(as_text=True), user_info.status_code)

    if len(AuthCache.groupsById) == 0:
        AuthCache.getHMGroups()
    groups_by_id = AuthCache.groupsById

    if group_uuid is not None:
        if group_uuid in groups_by_id:
            if 'data_provider' not in groups_by_id[group_uuid] or not groups_by_id[group_uuid]['data_provider']:
                raise HTTPException(f"Group {groups_by_id[group_uuid]['displayname']} is not a valid group for submitting data.", 403)
            # user must be a member of the group or a member of the data admin group
            elif not (group_uuid in user_info['hmgroupids'] or data_admin_group_uuid in user_info['hmgroupids']):
                raise HTTPException(f"User is not a member of the group {groups_by_id[group_uuid]['displayname']}", 403)
            else:
                return group_uuid
        else:
            raise HTTPException("Invalid group_uuid", 400)
    else:
        # No group specified: find the single data provider group the user belongs to
        count = 0
        found_group_uuid = None
        for grp_id in groups_by_id.keys():
            grp_info = groups_by_id[grp_id]
            if grp_id in user_info['hmgroupids'] and 'data_provider' in grp_info and grp_info['data_provider'] == True:
                count = count + 1
                found_group_uuid = grp_id
        if count == 0:
            if data_admin_group_uuid in user_info['hmgroupids']:
                raise HTTPException("User is not a member of any groups that can provide data, but is a member of the data admin group. Please specify which group in the group_uuid field", 400)
            else:
                raise HTTPException("User is not a member of any groups that can provide data.", 403)
        elif count > 1:
            raise HTTPException("The user is a member of multiple groups that can provide data. Please specify which group in the group_uuid field", 400)
        else:
            return found_group_uuid
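# Usage sketch (hypothetical): resolves the group a new entity should be written
# under. Assumes `helper` is an instance of this class; `request_or_token` may be
# either a Flask request carrying an Authorization header or a raw token string.
def example_resolve_write_group(helper, request_or_token, requested_group_uuid=None):
    try:
        return helper.get_write_group_uuid(request_or_token, requested_group_uuid)
    except HTTPException as he:
        # 400: ambiguous or invalid group; 403: user not authorized for the group
        logger.error(f"could not resolve write group: {he}")
        raise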
def create_derived_datastage(self, nexus_token, json_data):
    auth_header = {'Authorization': 'Bearer ' + nexus_token}
    app_header = {'X-Hubmap-Application': 'ingest-api'}

    source_dataset_uuids = json_data['source_dataset_uuids']
    source_uuids = []
    if isinstance(source_dataset_uuids, str):
        # Create a list from this string
        source_uuids = [source_dataset_uuids]
    elif isinstance(source_dataset_uuids, list):
        source_uuids = source_dataset_uuids
    else:
        raise TypeError(
            "json_data['source_dataset_uuids'] must either be a string or a list")

    # All of the source datasets come from the same data provider.
    # Get the group_uuid based on the first source dataset via entity-api
    first_source_uuid = source_uuids[0]
    get_url = file_helper.ensureTrailingSlashURL(
        self.confdata['ENTITY_WEBSERVICE_URL']) + 'entities/' + first_source_uuid
    response = requests.get(get_url, headers=auth_header, verify=False)

    if response.status_code != 200:
        raise HTTPException(
            "Error retrieving source dataset " + first_source_uuid, response.status_code)

    first_source_dataset = response.json()

    # Create the derived dataset via entity-api.
    # The entity-api validates each of the provided source dataset uuids for existence.
    # The derived dataset will have the same group_uuid as the source datasets.
    derived_dataset_to_post = {
        'title': json_data['derived_dataset_name'],
        'data_types': json_data['derived_dataset_types'],
        'direct_ancestor_uuids': source_uuids,
        'contains_human_genetic_sequences': False,
        'group_uuid': first_source_dataset['group_uuid']
    }

    post_url = file_helper.ensureTrailingSlashURL(
        self.confdata['ENTITY_WEBSERVICE_URL']) + 'entities/dataset'

    # Merge the auth_header and app_header for creating the new Dataset
    response = requests.post(post_url, json=derived_dataset_to_post,
                             headers={**auth_header, **app_header}, verify=False)

    if response.status_code != 200:
        raise HTTPException(
            "Error creating derived dataset: " + response.text, response.status_code)

    derived_dataset = response.json()

    file_help = IngestFileHelper(self.confdata)
    sym_path = os.path.join(
        str(self.confdata['HUBMAP_WEBSERVICE_FILEPATH']), derived_dataset['uuid'])

    new_directory_path = file_help.get_dataset_directory_absolute_path(
        derived_dataset, derived_dataset['group_uuid'], derived_dataset['uuid'])
    new_path = IngestFileHelper.make_directory(new_directory_path, sym_path)

    # Set directory permissions in a background thread so the request isn't blocked
    try:
        x = threading.Thread(target=file_help.set_dir_permissions,
                             args=['consortium', new_path])
        x.start()
    except Exception as e:
        logger.error(e, exc_info=True)

    response_data = {
        'derived_dataset_uuid': derived_dataset['uuid'],
        'group_uuid': derived_dataset['group_uuid'],
        'group_display_name': derived_dataset['group_name'],
        'full_path': new_path
    }

    return response_data
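# Usage sketch (hypothetical): builds the json_data payload expected above and
# creates a derived dataset from two source datasets. Assumes `helper` is an
# instance of this class and `nexus_token` is a valid Globus token; the dataset
# name and type values are placeholders, not required values.
def example_create_derived(helper, nexus_token, source_uuid_1, source_uuid_2):
    json_data = {
        'source_dataset_uuids': [source_uuid_1, source_uuid_2],
        'derived_dataset_name': 'Derived dataset example',
        'derived_dataset_types': ['example_derived_type']
    }
    result = helper.create_derived_datastage(nexus_token, json_data)
    # result contains derived_dataset_uuid, group_uuid, group_display_name, full_path
    return result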