def currently_changing():
    """Return True if at least one layer in the workspace has an unfinished task chain.

    NOTE(review): `username` is not defined in this function; it presumably
    comes from an enclosing scope not visible here — confirm against caller.
    """
    infos = layman_util.get_publication_infos(username, LAYER_TYPE)
    for _, _, name in infos:
        if not is_layer_chain_ready(username, name):
            return True
    return False
def can_user_write_publication(username, workspace, publication_type, publication_name):
    """Return truthy iff `username` has write access to the given publication.

    Returns a falsy value (the missing info itself) when the publication
    does not exist, mirroring the short-circuit `and` of the original.
    """
    key = (workspace, publication_type, publication_name)
    infos = layman_util.get_publication_infos(workspace=workspace,
                                              publ_type=publication_type)
    info = infos.get(key)
    if not info:
        # preserve the original falsy return value (e.g. None)
        return info
    return is_user_in_access_rule(username, info['access_rights']['write'])
def import_uuids_to_redis():
    """Register the UUID of every known publication in Redis (idempotent)."""
    current_app.logger.info('Importing UUIDs to REDIS')
    for (workspace, publication_type, publication_name), info in layman_util.get_publication_infos().items():
        uuid_value = info["uuid"]
        # duplicates are tolerated so the import can be re-run safely
        register_publication_uuid(workspace, publication_type, publication_name,
                                  uuid_value, ignore_duplicate=True)
        current_app.logger.info(
            f'Import publication into redis: workspace {workspace}, type {publication_type}, name {publication_name}, uuid {uuid_value}')
def ensure_output_srs_for_all():
    """Re-save the QGS file of every QML-styled layer when the configured
    output SRS list differs from the one stored in the first layer's project."""
    qml_layers = layman_util.get_publication_infos(style_type='qml')
    if not qml_layers:
        return
    # sample one layer to detect whether the SRS list changed
    workspace, _, layer = next(iter(qml_layers))
    old_set = util.get_layer_wms_crs_list_values(workspace, layer)
    if old_set == set(settings.LAYMAN_OUTPUT_SRS_LIST):
        return
    logger.info(f' Update output SRS list for QGIS projects. Old set={old_set}, new list={settings.LAYMAN_OUTPUT_SRS_LIST}')
    for workspace, _, layer in qml_layers:
        wms.save_qgs_file(workspace, layer)
def upgrade_1_8():
    """Migrate Layman data to version 1.8.x.

    Steps:
      1. Ensure the prime DB schema exists.
      2. Re-ensure every known user (username check + full user setup).
      3. Rebuild GeoServer layer ACL rules: delete the old per-workspace
         wildcard rules, then recreate per-layer read/write rules from the
         access rights stored for each layer.
    """
    logger.info('Upgrade to version 1.8.x')
    with app.app_context():
        logger.info(' Creating prime_db_schema')
        schema_initialization.check_schema_name(settings.LAYMAN_PRIME_SCHEMA)
        schema_initialization.ensure_schema(settings.LAYMAN_PRIME_SCHEMA)

        logger.info(' Ensuring users')
        # local import mirrors the original — presumably avoids an import
        # cycle at module load time
        from ..util import get_usernames, ensure_whole_user, check_username
        all_usernames = get_usernames()
        for username in all_usernames:
            logger.info(f' Ensuring user {username}')
            check_username(username)
            ensure_whole_user(username)

        logger.info(' Ensuring GS rules')
        # Delete old per-workspace rules ("<user>.*.w" / "<user>.*.r").
        # Headers are loop-invariant, so build them once.
        headers_json = {
            'Accept': 'application/json',
            'Content-type': 'application/json',
        }
        for username in all_usernames:
            # renamed from `type` to avoid shadowing the builtin
            for rule_suffix in ['w', 'r']:
                response = requests.delete(
                    urljoin(settings.LAYMAN_GS_REST_SECURITY_ACL_LAYERS,
                            username + '.*.' + rule_suffix),
                    headers=headers_json,
                    auth=settings.LAYMAN_GS_AUTH,
                )
                # 404 means the rule was already absent — not an error
                if response.status_code != 404:
                    response.raise_for_status()

        # Create per-layer rules from the stored access rights.
        for username in all_usernames:
            logger.info(f' Ensuring GS rules for user {username}')
            for (_, _, layer), info in layman_util.get_publication_infos(username, LAYER_TYPE).items():
                logger.info(f' Ensuring GS rules for user {username} and layer {layer}')
                for access_type in ['read', 'write']:
                    security_roles = gs_common.layman_users_to_geoserver_roles(
                        info['access_rights'][access_type])
                    # rule type is the first letter: 'r' or 'w'
                    gs_util.ensure_layer_security_roles(
                        username, layer, security_roles, access_type[0],
                        settings.LAYMAN_GS_AUTH)
def ensure_all_publications():
    """Publish every test-data publication that does not exist yet.

    Publications are processed per type (layers first, then maps) and in
    repeated steps until nothing remains, because some publications depend
    on others published in an earlier step.
    """
    ensure_all_users()
    with app.app_context():
        already_created_publications = util.get_publication_infos()
    publications_to_publish = set(data.PUBLICATIONS) - set(already_created_publications)
    for p_type in [data.LAYER_TYPE, data.MAP_TYPE]:
        remaining = {key
                     for key in publications_to_publish
                     if key[1] == p_type}
        step_num = 0
        # each step publishes what it can; loop until everything is done
        while remaining:
            remaining -= publish_publications_step(remaining, step_num)
            step_num += 1
def ensure_output_srs_for_all():
    """Re-save the QGS file of every QML-styled layer when the configured
    output SRS list differs from the one stored in an existing QGIS project.

    Only layers whose QGS file actually exists are used for the change
    detection; failures while re-saving individual layers are logged and
    skipped so one broken layer does not abort the whole run.
    """
    qml_layers = layman_util.get_publication_infos(style_type='qml')
    # find the first layer whose QGS project file exists on disk
    first_layer_with_qgis_file = next(
        ((workspace, layer)
         for (workspace, _, layer) in qml_layers.keys()
         if os.path.exists(wms.get_layer_file_path(workspace, layer))),
        None)
    if first_layer_with_qgis_file is not None:
        workspace, layer = first_layer_with_qgis_file
        old_set = util.get_layer_wms_crs_list_values(workspace, layer)
        if old_set != set(settings.LAYMAN_OUTPUT_SRS_LIST):
            logger.info(f' Update output SRS list for QGIS projects. Old set={old_set},'
                        f' new list={settings.LAYMAN_OUTPUT_SRS_LIST}')
            for (workspace, _, layer) in qml_layers.keys():
                try:
                    wms.save_qgs_file(workspace, layer)
                # was `except BaseException`, which would also swallow
                # KeyboardInterrupt/SystemExit — narrowed to Exception
                except Exception as exc:
                    logger.warning(f" SRS list of layer {workspace}.{layer} not updated"
                                   f" because of following exception:")
                    logger.exception(exc)
def authorize(workspace, publication_type, publication_name, request_method, actor_name):
    """Authorize `actor_name` to perform `request_method` on a publication or workspace.

    Raises LaymanError on failure:
      * 40 — workspace not found
      * 30 — unauthorized request
      * 31 — unsupported HTTP method
      * 15/26 — layer/map not found (also used instead of 30 when the actor
        cannot even read the publication, to avoid leaking its existence)
    Returns None on success.
    """
    # empty publication_name means the request targets the whole workspace
    is_multi_publication_request = not publication_name

    # "not found" error code depends on publication type
    publication_not_found_code = {
        'layman.layer': 15,
        'layman.map': 26,
    }[publication_type]

    if is_multi_publication_request:
        if request_method.lower() in [common.REQUEST_METHOD_GET, common.REQUEST_METHOD_DELETE]:
            # listing/deleting in a workspace only requires the workspace to exist;
            # per-publication rights are filtered elsewhere
            if not workspaces.get_workspace_infos(workspace):
                raise LaymanError(40)  # Workspace not found
            return
        if request_method.lower() in [common.REQUEST_METHOD_POST]:
            # publishing into one's own workspace is always allowed
            if actor_name == workspace:
                return
            if ((not users.get_user_infos(workspace))  # public workspace
                    and can_user_publish_in_public_workspace(actor_name)):  # actor can publish in public workspace
                if workspaces.get_workspace_infos(workspace):  # workspaces exists
                    return
                if can_user_create_public_workspace(actor_name):  # workspaces can be created by actor
                    # raises exception if new workspace is not correct
                    layman_util.check_workspace_name(workspace)
                else:
                    raise LaymanError(30)  # unauthorized request
            else:
                raise LaymanError(30)  # unauthorized request
        else:
            raise LaymanError(31, {'method': request_method})  # unsupported method
    else:
        if not workspaces.get_workspace_infos(workspace):
            raise LaymanError(40)  # Workspace not found
        publ_info = layman_util.get_publication_infos(workspace, publication_type).get(
            (workspace, publication_type, publication_name))
        if not publ_info:
            raise LaymanError(publication_not_found_code)
        user_can_read = is_user_in_access_rule(actor_name, publ_info['access_rights']['read'])
        if request_method.lower() in [common.REQUEST_METHOD_GET]:
            if user_can_read:
                return
            # report "not found" rather than "unauthorized" so the publication's
            # existence is not leaked to actors without read access
            raise LaymanError(publication_not_found_code)
        if request_method.lower() in [common.REQUEST_METHOD_PATCH,
                                      common.REQUEST_METHOD_DELETE,
                                      common.REQUEST_METHOD_POST,
                                      common.REQUEST_METHOD_PUT,
                                      ]:
            if is_user_in_access_rule(actor_name, publ_info['access_rights']['write']):
                return
            if user_can_read:
                # actor may know the publication exists, so 30 is safe here
                raise LaymanError(30)  # unauthorized request
            raise LaymanError(publication_not_found_code)
        raise LaymanError(31, {'method': request_method})  # unsupported method
def check_redis_consistency(expected_publ_num_by_type=None):
    """Assert that Redis-held state is consistent with the non-Redis sources.

    Checks publication counts, UUID sets/metadata, Celery chain infos, current
    task names and publication locks against `layman_util.get_publication_infos()`.

    :param expected_publ_num_by_type: optional dict mapping publication type to
        the expected number of publications of that type
    :return: dict mapping publication type to list of (workspace, name) tuples
    """
    # get info from non-redis sources
    infos = layman_util.get_publication_infos()
    num_total_publs = len(infos)
    total_publs = list(infos.keys())

    # publication types and names
    redis = settings.LAYMAN_REDIS
    # key prefixes are the first two ':'-separated segments of the key templates
    user_publ_keys = redis.keys(':'.join(USER_TYPE_NAMES_KEY.split(':')[:2]) + ':*')
    uuid_keys = redis.keys(':'.join(UUID_METADATA_KEY.split(':')[:2]) + ':*')
    assert num_total_publs == len(uuid_keys), f"total_publs={total_publs}, uuid_keys={uuid_keys}"

    # group publications by type: {publ_type: [(workspace, name), ...]}
    total_publs_by_type = defaultdict(list)
    for publ in total_publs:
        total_publs_by_type[publ[1]].append((publ[0], publ[2]))
    if expected_publ_num_by_type is not None:
        for publ_type, publ_num in expected_publ_num_by_type.items():
            found_publ_num = len(total_publs_by_type[publ_type])
            assert publ_num == found_publ_num, f"expected {publ_num} of {publ_type}, found {found_publ_num}: {total_publs}"

    # per-user publication hashes must sum to the number of uuid keys
    num_publ = 0
    for user_publ_key in user_publ_keys:
        num_publ += redis.hlen(user_publ_key)
    assert num_publ == len(uuid_keys)

    # publication uuids
    uuids = redis.smembers(UUID_SET_KEY)
    assert len(uuids) == num_publ
    for uuid_str in uuids:
        assert get_uuid_metadata_key(uuid_str) in uuid_keys
    # each uuid metadata hash must point back to an existing publication entry
    for uuid_key in uuid_keys:
        uuid_dict = redis.hgetall(uuid_key)
        assert redis.hexists(
            get_user_type_names_key(
                uuid_dict['username'],
                uuid_dict['publication_type']
            ),
            uuid_dict['publication_name'],
        )

    # publication tasks
    chain_infos_len = redis.hlen(celery_util.PUBLICATION_CHAIN_INFOS)
    assert chain_infos_len == len(total_publs), f"task_infos_len={chain_infos_len}, total_publs={total_publs}"
    # current task names are stored as "type:username:name" strings
    task_names_tuples = [
        h.split(':') for h in redis.smembers(celery_util.REDIS_CURRENT_TASK_NAMES)
    ]
    for username, publ_type_name, pubname in total_publs:
        chain_info = celery_util.get_publication_chain_info(username, publ_type_name, pubname)
        is_ready = celery_util.is_chain_ready(chain_info)
        assert chain_info['finished'] is is_ready
        # a publication has a current task iff its chain is NOT ready
        assert (next((
            t for t in task_names_tuples
            if t[1] == username and t[2] == pubname and t[0].startswith(publ_type_name)
        ), None) is None) is is_ready, f"{username}, {publ_type_name}, {pubname}: {is_ready}, {task_names_tuples}"
        # the last-task-id mapping exists iff the chain is NOT ready
        assert (redis.hget(celery_util.LAST_TASK_ID_IN_CHAIN_TO_PUBLICATION,
                           chain_info['last'].task_id) is None) is is_ready

    # publication locks: exactly one lock per currently running task
    locks = redis.hgetall(redis_util.PUBLICATION_LOCKS_KEY)
    assert len(locks) == len(task_names_tuples), f"{locks} != {task_names_tuples}"
    for k, _ in locks.items():
        username, publication_type, publication_name = k.split(':')
        assert next((
            t for t in task_names_tuples
            if t[1] == username and t[2] == publication_name and t[0].startswith(publication_type)
        ), None) is not None
    return total_publs_by_type
def test_get_publication_infos():
    """Verify publication visibility per actor against expected access rights.

    Builds the expected read/write sets for every (actor, workspace, type)
    combination from test data, then checks them against the internal
    `get_publication_infos` (with and without workspace/type filters) and
    against the public GET Workspace Layers/Maps and GET Layers/Maps endpoints.
    """
    ensure_all_publications()
    # include the anonymous and unnamed pseudo-users as actors
    users = data.USERS | {settings.ANONYM_USER, settings.NONAME_USER}

    # prepare expected data: expected[actor][workspace][publ_type][access_type] -> set of names
    expected = dict()
    for actor in users:
        expected[actor] = dict()
        for workspace in data.WORKSPACES:
            expected[actor][workspace] = dict()
            for publ_type in process_client.PUBLICATION_TYPES:
                expected[actor][workspace][publ_type] = dict()
                for access_type in ['read', 'write']:
                    expected[actor][workspace][publ_type][access_type] = set()
    for (workspace, publ_type, publication), value in data.PUBLICATIONS.items():
        for access_type in ['read', 'write']:
            users_with_right = value[data.TEST_DATA].get('users_can_' + access_type)
            # no explicit restriction means everyone has the right
            users_with_right = users_with_right or users
            for actor in users_with_right:
                expected[actor][workspace][publ_type][access_type].add(publication)

    for actor in users:
        headers = data.HEADERS.get(actor)
        # test internal get_publication_infos only with actor and access type
        for access_type in ['read', 'write']:
            with app.app_context():
                publications = layman_util.get_publication_infos(context={
                    'actor_name': actor,
                    'access_type': access_type
                })
            assert {publ_type for _, publ_type, _ in publications.keys()
                    } == set(process_client.PUBLICATION_TYPES)
            for publ_type in process_client.PUBLICATION_TYPES:
                for workspace in data.WORKSPACES:
                    publications_set = {
                        name for ws, p_type, name in publications.keys()
                        if ws == workspace and p_type == publ_type
                    }
                    assert publications_set == expected[actor][workspace][publ_type][access_type]

        for publ_type in process_client.PUBLICATION_TYPES:
            for workspace in data.WORKSPACES:
                # test internal get_publication_infos with workspace, publication type,
                # actor and access type
                for access_type in ['read', 'write']:
                    with app.app_context():
                        publications = layman_util.get_publication_infos(workspace, publ_type, {
                            'actor_name': actor,
                            'access_type': access_type
                        })
                    assert all(p_workspace == workspace and p_type == publ_type
                               for p_workspace, p_type, _ in publications.keys())
                    publications_set = {name for _, _, name in publications.keys()}
                    assert publications_set == expected[actor][workspace][publ_type][access_type]

                # test authenticated GET Workspace Layers/Maps
                publications = process_client.get_workspace_publications(publ_type, workspace, headers=headers)
                publication_set = {publication['name'] for publication in publications}
                assert publication_set == expected[actor][workspace][publ_type]['read']

            # test authenticated GET Layers/Maps
            publications = process_client.get_publications(publ_type, headers=headers)
            for workspace in data.WORKSPACES:
                publication_set = {
                    publication['name'] for publication in publications
                    if publication['workspace'] == workspace
                }
                assert publication_set == expected[actor][workspace][publ_type]['read']