def test_user_workspace(workspace):
    # Verify that `workspace` is reported as a user (private) workspace by all
    # providers and utility functions iff it belongs to a known user.
    ensure_all_publications()
    # Workspaces listed in data.USERS are private (user) workspaces.
    is_private_workspace = workspace in data.USERS
    all_sources = []
    for type_def in layman_util.get_publication_types(use_cache=False).values():
        all_sources += type_def['internal_sources']
    providers = layman_util.get_providers_from_source_names(all_sources)
    for provider in providers:
        with app.app_context():
            usernames = provider.get_usernames()
        # A public (non-user) workspace must not be reported as a username
        # by any provider.
        if not is_private_workspace:
            assert workspace not in usernames, (workspace, provider)
    with app.app_context():
        usernames = layman_util.get_usernames(use_cache=False)
        workspaces = layman_util.get_workspaces(use_cache=False)
    if is_private_workspace:
        assert workspace in usernames
    else:
        assert workspace not in usernames
    # Every tested workspace, private or public, must be listed among workspaces.
    assert workspace in workspaces
def get_url_name_to_publication_type():
    """Return a mapping from REST path name to publication type definition.

    The mapping is built lazily on first call and memoized in
    _URL_NAME_TO_PUBLICATION_TYPE; subsequent calls return the cached value.
    """
    mapping = _URL_NAME_TO_PUBLICATION_TYPE.get()
    if mapping is None:
        mapping = {
            type_def['rest_path_name']: type_def
            for type_def in layman_util.get_publication_types().values()
        }
        _URL_NAME_TO_PUBLICATION_TYPE.set(mapping)
    return mapping
def _get_pub_type_pattern():
    """Build a named regex group that matches any known publication
    REST path name, e.g. r"(?P<publication_type>layers|maps")."""
    names = (
        type_def['rest_path_name']
        for type_def in layman_util.get_publication_types().values()
    )
    return r"(?P<publication_type>" + "|".join(names) + r")"
def setup_codelists_data():
    """Compose the SQL that seeds the right_types and publication_types
    codelist tables and return it as a single string."""
    # Start with the two fixed right-type rows.
    parts = [
        f"""insert into {DB_SCHEMA}.right_types (name) values ('{RIGHT_WRITE}'); insert into {DB_SCHEMA}.right_types (name) values ('{RIGHT_READ}');"""
    ]
    # One insert per known publication type.
    for type_def in get_publication_types(use_cache=False).values():
        type_name = type_def['type']
        parts.append(
            f""" insert into {DB_SCHEMA}.publication_types (name) values ('{type_name}');"""
        )
    return ''.join(parts)
def source_has_its_key_or_it_is_empty(workspace, publ_type, name):
    """Assert that, for every internal source of `publ_type`, requesting each
    of the source's info keys yields an info dict that either contains that
    key or is empty."""
    with app.app_context():
        source_defs = layman_util.get_publication_types(
        )[publ_type]['internal_sources'].values()
        for source_def in source_defs:
            for info_key in source_def.info_items:
                info = layman_util.get_publication_info(
                    workspace, publ_type, name, {'keys': [info_key]})
                # Either the requested key is present, or nothing is known
                # about the publication at all.
                assert info_key in info or not info, info
def on_task_postrun(**kwargs):
    """Celery task_postrun signal handler.

    If the finished task belongs to a known publication type (matched by
    task-name prefix), forward its result to layman.celery.task_postrun;
    otherwise do nothing.
    """
    # Imported lazily to avoid import cycles at module load time.
    from layman import app
    from layman.util import get_publication_types
    from layman.celery import task_postrun

    task_name = kwargs['task'].name
    with app.app_context():
        publication_type = None
        for type_key, type_def in get_publication_types().items():
            if task_name.startswith(type_key):
                publication_type = type_def['type']
                break
        if publication_type is None:
            # Not a publication-related task; ignore it.
            return
        workspace = kwargs['args'][0]
        publication_name = kwargs['args'][1]
        task_postrun(workspace, publication_type, publication_name,
                     kwargs['task_id'], task_name, kwargs['state'])
def source_internal_keys_are_subset_of_source_sibling_keys(
        workspace, publ_type, name):
    # For each info key of each internal source, assert that every internal
    # ('_'-prefixed) key in the returned publication info is also provided by
    # some sibling source that provides the inspected key.
    with app.app_context():
        all_items = layman_util.get_publication_types(
        )[publ_type]['internal_sources'].values()
        for source_def in all_items:
            for key in source_def.info_items:
                context = {'keys': [key]}
                info = layman_util.get_publication_info(
                    workspace, publ_type, name, context)
                # Keys offered by sibling sources that also offer `key`.
                all_sibling_keys = set(sibling_key for item_list in all_items
                                       for sibling_key in item_list.info_items
                                       if key in item_list.info_items)
                # Internal keys are marked by a leading underscore in info;
                # strip it before comparing. (The comprehension's `key` shadows
                # the outer loop variable only inside the comprehension scope.)
                internal_keys = [
                    key[1:] for key in info if key.startswith('_')
                ]
                assert set(internal_keys) <= all_sibling_keys, \
                    f'internal_keys={set(internal_keys)}, all_sibling_keys={all_sibling_keys}, key={key}, info={info}'
def get_syncable_prop_names(publ_type):
    """Return the list of syncable property names configured for the given
    publication type."""
    type_def = get_publication_types()[publ_type]
    return type_def[PUBL_TYPE_DEF_KEY]['syncable_properties']
def get_publications_dir(publ_type, workspace):
    """Return the filesystem directory holding publications of `publ_type`
    inside the given workspace's directory."""
    type_def = get_publication_types()[publ_type]
    dir_name = type_def[PUBL_TYPE_DEF_KEY]['publications_dir']
    return os.path.join(get_workspace_dir(workspace), dir_name)