def get_publication_status(
        workspace, publication_type, publication_name, complete_info, item_keys,
):
    """Derive the aggregated status of a publication.

    :param workspace: workspace (user) name
    :param publication_type: publication type identifier (e.g. layer/map type)
    :param publication_name: name of the publication
    :param complete_info: dict of per-item info dicts gathered from all sources
    :param item_keys: keys of ``complete_info`` to inspect for a pending 'status'
    :return: 'UPDATING' if an asynchronous chain is still running or the
        publication is locked; 'INCOMPLETE' if any inspected item still carries
        a truthy 'status' value; 'COMPLETE' otherwise.
    """
    chain_info = celery_util.get_publication_chain_info(
        workspace, publication_type, publication_name)
    current_lock = redis.get_publication_lock(
        workspace, publication_type, publication_name,
    )
    # A running chain or an existing lock means the publication is being changed.
    if (chain_info and not celery_util.is_chain_ready(chain_info)) or current_lock:
        return 'UPDATING'
    # Hoist the item lookup so each key is fetched only once; an item info dict
    # that still carries a truthy 'status' value has not finished successfully.
    for item_key in item_keys:
        item_info = complete_info.get(item_key, {})
        if isinstance(item_info, dict) and item_info.get('status'):
            return 'INCOMPLETE'
    return 'COMPLETE'
def test_patch_layer_title(client):
    """PATCH only title & description of an existing layer and verify that
    both the REST response and the CSW metadata record reflect the new values.
    """
    with app.app_context():
        workspace = 'testuser1'
        layername = 'ne_110m_admin_0_countries'
        rest_path = url_for('rest_workspace_layer.patch', workspace=workspace, layername=layername)
        new_title = "New Title of Countries"
        new_description = "and new description"
        response = client.patch(rest_path, data={
            'title': new_title,
            'description': new_description,
        })
        assert response.status_code == 200, response.get_json()
        # The asynchronous chain is expected to be already finished here
        # (metadata-only update) — NOTE(review): relies on PATCH processing speed.
        chain_info = util.get_layer_chain(workspace, layername)
        assert chain_info is not None and celery_util.is_chain_ready(chain_info)
        # The PATCH response itself must echo the new title/description.
        resp_json = response.get_json()
        assert resp_json['title'] == new_title
        assert resp_json['description'] == new_description
    with app.app_context():
        # Expected values of the CSW metadata record after the PATCH.
        expected_md_values = {
            'abstract': "and new description",
            'extent': [-180.0, -85.60903859383285, 180.0, 83.64513109859944],
            'graphic_url': url_for_external('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername),
            'identifier': {
                'identifier': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
                'label': 'ne_110m_admin_0_countries'
            },
            'language': ['eng'],
            'layer_endpoint': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
            'organisation_name': None,
            'publication_date': TODAY_DATE,
            'reference_system': EXP_REFERENCE_SYSTEMS,
            'revision_date': TODAY_DATE,
            'spatial_resolution': {
                'scale_denominator': 100000000,
            },
            'title': "New Title of Countries",
        }
        check_metadata(client, workspace, layername, METADATA_PROPERTIES_EQUAL, expected_md_values)
    # Publication count in Redis must stay consistent (no new publication created).
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
def test_post_layers_concurrent(client):
    """POST a new layer and, while its asynchronous chain is still running,
    POST the same layer again; the second request must fail with HTTP 409
    and error code 17.
    """
    workspace = 'testuser1'
    layername = 'countries_concurrent'
    rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
    file_paths = [
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson',
    ]
    for file_path in file_paths:
        assert os.path.isfile(file_path)
    files = []
    try:
        files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
        response = client.post(rest_path, data={
            'file': files,
            'name': layername,
        })
        assert response.status_code == 200
    finally:
        # files holds (file object, filename) tuples — close the file objects.
        for file_path in files:
            file_path[0].close()
    # The chain must still be running so the second POST hits the conflict.
    chain_info = util.get_layer_chain(workspace, layername)
    assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
    try:
        files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
        response = client.post(rest_path, data={
            'file': files,
            'name': layername,
        })
        # Concurrent POST of the same layer is rejected.
        assert response.status_code == 409
        resp_json = response.get_json()
        assert resp_json['code'] == 17
    finally:
        for file_path in files:
            file_path[0].close()
    # Exactly one new publication was created by the first POST.
    publication_counter.increase()
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
def test_post_layers_shp(client):
    """POST a layer from a complete ShapeFile set (including sidecar files)
    and verify it eventually appears among the workspace's WMS layers.
    """
    workspace = 'testuser1'
    layername = 'ne_110m_admin_0_countries_shp'
    rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
    # Full ShapeFile set: .shp plus all auxiliary files.
    file_paths = [
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.cpg',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.dbf',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.prj',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.README.html',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.shp',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.shx',
        'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.VERSION.txt',
    ]
    for file_path in file_paths:
        assert os.path.isfile(file_path)
    files = []
    try:
        files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
        response = client.post(rest_path, data={
            'file': files,
            'name': layername,
        })
        assert response.status_code == 200
    finally:
        # Close the opened file objects from the (file, name) tuples.
        for file_path in files:
            file_path[0].close()
    # Processing is asynchronous — the chain must exist and still be running.
    chain_info = util.get_layer_chain(workspace, layername)
    assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
    flask_client.wait_till_layer_ready(workspace, layername)
    # last_task['last'].get()
    # After completion the layer must be published via WMS.
    wms_url = geoserver_wms.get_wms_url(workspace)
    wms = wms_proxy(wms_url)
    assert 'ne_110m_admin_0_countries_shp' in wms.contents
    publication_counter.increase()
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
def test_post_layers_long_and_delete_it(client):
    """POST a large (slow-to-process) layer and DELETE it while its
    asynchronous chain is still running; subsequent GET must return 404.
    """
    workspace = 'testuser1'
    rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
    # 10m-resolution dataset is intentionally big so processing takes a while.
    file_paths = [
        'tmp/naturalearth/10m/cultural/ne_10m_admin_0_countries.geojson',
    ]
    for file_path in file_paths:
        assert os.path.isfile(file_path)
    files = []
    try:
        files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
        response = client.post(rest_path, data={
            'file': files,
        })
        assert response.status_code == 200
    finally:
        for file_path in files:
            file_path[0].close()
    # Layer name is inferred from the file name (no 'name' was sent).
    layername = 'ne_10m_admin_0_countries'
    # Give the chain a moment to start before inspecting it.
    time.sleep(1)
    chain_info = util.get_layer_chain(workspace, layername)
    assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
    # While processing, every internal source still reports a 'status'.
    layer_info = util.get_layer_info(workspace, layername)
    keys_to_check = ['db_table', 'wms', 'wfs', 'thumbnail', 'metadata']
    for key_to_check in keys_to_check:
        assert 'status' in layer_info[key_to_check]
    # Delete the layer in the middle of processing.
    rest_path = url_for('rest_workspace_layer.delete_layer', workspace=workspace, layername=layername)
    response = client.delete(rest_path)
    assert response.status_code == 200
    response = client.get(url_for('rest_workspace_layer.get', workspace=workspace, layername=layername))
    # print(resp_json)
    assert response.status_code == 404
    # Net publication count is unchanged (created and deleted in this test).
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
def test_patch_layer_concurrent_and_delete_it(client):
    """PATCH a layer, issue a second PATCH while the first is still processing
    (must fail with HTTP 400 / code 49), then DELETE the layer and verify all
    its traces (uuid file, Redis keys) are gone.
    """
    with app.app_context():
        workspace = 'testuser2'
        layername = 'countries'
        rest_path = url_for('rest_workspace_layer.patch', workspace=workspace, layername=layername)
        file_paths = [
            'tmp/naturalearth/10m/cultural/ne_10m_admin_0_countries.geojson',
        ]
        for file_path in file_paths:
            assert os.path.isfile(file_path)
        # Remember the layer's uuid to check its removal after DELETE.
        uuid_str = layer_uuid.get_layer_uuid(workspace, layername)
        assert uuid.is_valid_uuid(uuid_str)
        files = []
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.patch(rest_path, data={
                'file': files,
                'title': 'populated places'
            })
            assert response.status_code == 200
        finally:
            for file_path in files:
                file_path[0].close()
        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{LAYER_TYPE}': publication_counter.get()
        })
    # The first PATCH chain must still be running for the conflict below.
    chain_info = util.get_layer_chain(workspace, layername)
    assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
    with app.app_context():
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.patch(rest_path, data={
                'file': files,
            })
            # Concurrent PATCH of the same layer is rejected.
            assert response.status_code == 400, response.get_json()
            resp_json = response.get_json()
            assert resp_json['code'] == 49
        finally:
            for file_path in files:
                file_path[0].close()
        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{LAYER_TYPE}': publication_counter.get()
        })
    with app.app_context():
        rest_path = url_for('rest_workspace_layer.delete_layer', workspace=workspace, layername=layername)
        response = client.delete(rest_path)
        assert response.status_code == 200
        from layman.layer import get_layer_type_def
        from layman.common.filesystem import uuid as common_uuid
        # After DELETE, the uuid file and all Redis traces must be gone.
        uuid_filename = common_uuid.get_publication_uuid_file(
            get_layer_type_def()['type'], workspace, layername)
        assert not os.path.isfile(uuid_filename)
        assert not settings.LAYMAN_REDIS.sismember(uuid.UUID_SET_KEY, uuid_str)
        assert not settings.LAYMAN_REDIS.exists(uuid.get_uuid_metadata_key(uuid_str))
        assert not settings.LAYMAN_REDIS.hexists(
            uuid.get_workspace_type_names_key(workspace, '.'.join(__name__.split('.')[:-1])),
            layername
        )
    # The layer was deleted, so the expected publication count drops by one.
    publication_counter.decrease()
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
def test_patch_layer_data(client):
    """PATCH an existing layer with a different dataset (populated places
    instead of countries) and verify REST info, DB attributes and CSW
    metadata all reflect the new data.
    """
    with app.app_context():
        workspace = 'testuser2'
        layername = 'countries'
        rest_path = url_for('rest_workspace_layer.patch', workspace=workspace, layername=layername)
        file_paths = [
            'tmp/naturalearth/110m/cultural/ne_110m_populated_places.geojson',
        ]
        for file_path in file_paths:
            assert os.path.isfile(file_path)
        files = []
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.patch(rest_path, data={
                'file': files,
                'title': 'populated places'
            })
            assert response.status_code == 200
        finally:
            for file_path in files:
                file_path[0].close()
    # Data PATCH is asynchronous: chain running, all sources report 'status'.
    chain_info = util.get_layer_chain(workspace, layername)
    assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
    resp_json = response.get_json()
    keys_to_check = ['db_table', 'wms', 'wfs', 'thumbnail', 'metadata']
    for key_to_check in keys_to_check:
        assert 'status' in resp_json[key_to_check]
    flask_client.wait_till_layer_ready(workspace, layername)
    # last_task['last'].get()
    with app.app_context():
        rest_path = url_for('rest_workspace_layer.get', workspace=workspace, layername=layername)
        response = client.get(rest_path)
        assert 200 <= response.status_code < 300
        resp_json = response.get_json()
        assert resp_json['title'] == "populated places"
        # DB table must now hold populated-places attributes, not country ones.
        feature_type = get_feature_type(workspace, 'postgresql', layername)
        attributes = feature_type['attributes']['attribute']
        assert next((
            a for a in attributes if a['name'] == 'sovereignt'
        ), None) is None
        assert next((
            a for a in attributes if a['name'] == 'adm0cap'
        ), None) is not None
        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{LAYER_TYPE}': publication_counter.get()
        })
    with app.app_context():
        # Expected CSW metadata after the data PATCH; abstract is unchanged
        # because only file + title were patched.
        expected_md_values = {
            'abstract': "popis st\u00e1t\u016f",
            'extent': [-175.22056435043098, -41.29999116752133, 179.21664802661394, 64.15002486626597],
            'graphic_url': url_for_external('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername),
            'identifier': {
                'identifier': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
                "label": "countries"
            },
            'language': ["eng", 'chi', 'rus'],
            'layer_endpoint': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
            'organisation_name': None,
            'publication_date': TODAY_DATE,
            'reference_system': EXP_REFERENCE_SYSTEMS,
            'revision_date': TODAY_DATE,
            'spatial_resolution': None,  # it's point data now and we can't guess scale from point data
            'title': 'populated places',
        }
        check_metadata(client, workspace, layername, METADATA_PROPERTIES_EQUAL, expected_md_values)
def test_uppercase_attr(client):
    """POST a layer whose source data contains upper-case attribute names plus
    a custom SLD style; verify attributes are lower-cased in the DB, the style
    is stored as SLD 1.1.0, and the thumbnail is rendered; then delete it.
    """
    with app.app_context():
        workspace = 'testuser2'
        rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
        file_paths = [
            'sample/data/upper_attr.geojson',
        ]
        for file_path in file_paths:
            assert os.path.isfile(file_path)
        files = []
        sld_path = 'sample/data/upper_attr.sld'
        assert os.path.isfile(sld_path)
        layername = 'upper_attr'
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.post(rest_path, data={
                'file': files,
                'name': layername,
                'style': (open(sld_path, 'rb'), os.path.basename(sld_path)),
            })
            assert response.status_code == 200
            resp_json = response.get_json()
            # print(resp_json)
        finally:
            for file_path in files:
                file_path[0].close()
    # Wait until the asynchronous publication chain finishes.
    chain_info = util.get_layer_chain(workspace, layername)
    assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
    flask_client.wait_till_layer_ready(workspace, layername)
    # last_task['last'].get()
    assert celery_util.is_chain_ready(chain_info)
    with app.app_context():
        rest_path = url_for('rest_workspace_layer.get', workspace=workspace, layername=layername)
        response = client.get(rest_path)
        assert 200 <= response.status_code < 300
        resp_json = response.get_json()
        # print(resp_json)
        # No source may report an in-progress/failed 'status' anymore.
        for source in [
            'wms',
            'wfs',
            'thumbnail',
            'file',
            'db_table',
            'metadata',
        ]:
            assert 'status' not in resp_json[source], f"{source}: {resp_json[source]}"
        # The stored style must be valid SLD version 1.1.0.
        style_url = geoserver_sld.get_workspace_style_url(workspace, layername)
        response = requests.get(style_url + '.sld',
                                auth=settings.LAYMAN_GS_AUTH
                                )
        response.raise_for_status()
        sld_file = io.BytesIO(response.content)
        tree = ET.parse(sld_file)
        root = tree.getroot()
        assert root.attrib['version'] == '1.1.0'
        # All attributes must exist in the DB table under lower-case names.
        feature_type = get_feature_type(workspace, 'postgresql', layername)
        attributes = feature_type['attributes']['attribute']
        attr_names = ["id", "dpr_smer_k", "fid_zbg", "silnice", "silnice_bs", "typsil_p", "cislouseku", "jmeno",
                      "typsil_k", "peazkom1", "peazkom2", "peazkom3", "peazkom4", "vym_tahy_k", "vym_tahy_p",
                      "r_indsil7", "kruh_obj_k", "etah1", "etah2", "etah3", "etah4", "kruh_obj_p", "dpr_smer_p"]
        for attr_name in attr_names:
            assert next((
                a for a in attributes if a['name'] == attr_name
            ), None) is not None
        # Thumbnail must be non-trivial (threshold chosen by original author).
        th_path = get_layer_thumbnail_path(workspace, layername)
        assert os.path.getsize(th_path) > 5000
    with app.app_context():
        rest_path = url_for('rest_workspace_layer.delete_layer', workspace=workspace, layername=layername)
        response = client.delete(rest_path)
        assert 200 <= response.status_code < 300
    # Layer was created and deleted here, so the expected count is unchanged.
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
def test_post_layers_complex(client):
    """POST a layer with explicit name, title, description and SLD style;
    verify WMS capabilities, REST info, stored style (SLD 1.0.0), DB
    attributes and CSW metadata.
    """
    with app.app_context():
        workspace = 'testuser2'
        rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
        file_paths = [
            'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson',
        ]
        for file_path in file_paths:
            assert os.path.isfile(file_path)
        files = []
        sld_path = 'sample/style/generic-blue_sld.xml'
        assert os.path.isfile(sld_path)
        layername = ''
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.post(rest_path, data={
                'file': files,
                'name': 'countries',
                'title': 'staty',
                'description': 'popis států',
                'style': (open(sld_path, 'rb'), os.path.basename(sld_path)),
            })
            assert response.status_code == 200
            resp_json = response.get_json()
            # print(resp_json)
            # Server echoes the layer name it actually used.
            layername = resp_json[0]['name']
        finally:
            for file_path in files:
                file_path[0].close()
    # Wait for the asynchronous publication chain to finish.
    chain_info = util.get_layer_chain(workspace, layername)
    assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
    flask_client.wait_till_layer_ready(workspace, layername)
    # last_task['last'].get()
    assert celery_util.is_chain_ready(chain_info)
    # WMS capabilities must carry the posted title/abstract/style.
    wms_url = geoserver_wms.get_wms_url(workspace)
    wms = wms_proxy(wms_url)
    assert 'countries' in wms.contents
    assert wms['countries'].title == 'staty'
    assert wms['countries'].abstract == 'popis států'
    assert wms['countries'].styles[workspace + '_wms:countries']['title'] == 'Generic Blue'
    assert layername != ''
    rest_path = url_for('rest_workspace_layer.get', workspace=workspace, layername=layername)
    response = client.get(rest_path)
    assert 200 <= response.status_code < 300
    resp_json = response.get_json()
    # print(resp_json)
    assert resp_json['title'] == 'staty'
    assert resp_json['description'] == 'popis států'
    for source in [
        'wms',
        'wfs',
        'thumbnail',
        'file',
        'db_table',
        'metadata',
    ]:
        assert 'status' not in resp_json[source]
    # The uploaded generic-blue style is stored as SLD version 1.0.0.
    style_url = geoserver_sld.get_workspace_style_url(workspace, layername)
    response = requests.get(style_url + '.sld',
                            auth=settings.LAYMAN_GS_AUTH
                            )
    response.raise_for_status()
    sld_file = io.BytesIO(response.content)
    tree = ET.parse(sld_file)
    root = tree.getroot()
    assert root.attrib['version'] == '1.0.0'
    feature_type = get_feature_type(workspace, 'postgresql', layername)
    attributes = feature_type['attributes']['attribute']
    assert next((
        a for a in attributes if a['name'] == 'sovereignt'
    ), None) is not None
    publication_counter.increase()
    uuid.check_redis_consistency(expected_publ_num_by_type={
        f'{LAYER_TYPE}': publication_counter.get()
    })
    with app.app_context():
        # Expected CSW metadata right after first publication (no revision yet).
        expected_md_values = {
            'abstract': "popis st\u00e1t\u016f",
            'extent': [-180.0, -85.60903859383285, 180.0, 83.64513109859944],
            'graphic_url': url_for_external('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername),
            'identifier': {
                "identifier": url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
                "label": "countries"
            },
            'language': ["eng"],
            'layer_endpoint': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
            'organisation_name': None,
            'publication_date': TODAY_DATE,
            'reference_system': EXP_REFERENCE_SYSTEMS,
            'revision_date': None,
            'spatial_resolution': {
                'scale_denominator': 100000000,
            },
            'title': "staty",
        }
        check_metadata(client, workspace, layername, METADATA_PROPERTIES_EQUAL, expected_md_values)
def test_post_layers_simple(client):
    """POST a layer with only a file (name inferred from file name) and verify
    the whole publication pipeline: async chain, WMS, uuid file, Redis keys,
    REST metadata section, the Micka CSW record, and CSW metadata values.
    """
    with app.app_context():
        workspace = 'testuser1'
        rest_path = url_for('rest_workspace_layers.post', workspace=workspace)
        file_paths = [
            'tmp/naturalearth/110m/cultural/ne_110m_admin_0_countries.geojson',
        ]
        for file_path in file_paths:
            assert os.path.isfile(file_path)
        files = []
        try:
            files = [(open(fp, 'rb'), os.path.basename(fp)) for fp in file_paths]
            response = client.post(rest_path, data={
                'file': files,
            })
            assert response.status_code == 200
        finally:
            for file_path in files:
                file_path[0].close()
        # Layer name defaults to the file name since no 'name' was posted.
        layername = 'ne_110m_admin_0_countries'
        chain_info = util.get_layer_chain(workspace, layername)
        assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
        # While processing, every internal source reports a 'status'.
        layer_info = util.get_layer_info(workspace, layername)
        keys_to_check = ['db_table', 'wms', 'wfs', 'thumbnail', 'metadata']
        for key_to_check in keys_to_check:
            assert 'status' in layer_info[key_to_check]
        # For some reason this hangs forever on get() if run (either with src/layman/authz/read_everyone_write_owner_auth2_test.py::test_authn_map_access_rights or src/layman/authn/oauth2_test.py::test_patch_current_user_without_username) and with src/layman/common/metadata/util.csw_insert
        # last_task['last'].get()
        # e.g. python3 -m pytest -W ignore::DeprecationWarning -xsvv src/layman/authn/oauth2_test.py::test_patch_current_user_without_username src/layman/layer/rest_workspace_test.py::test_post_layers_simple
        # this can badly affect also .get(propagate=False) in layman.celery.abort_task_chain
        # but hopefully this is only related to magic flask&celery test suite
        flask_client.wait_till_layer_ready(workspace, layername)
        # After completion, no source reports a 'status' anymore.
        layer_info = util.get_layer_info(workspace, layername)
        for key_to_check in keys_to_check:
            assert isinstance(layer_info[key_to_check], str) \
                or 'status' not in layer_info[key_to_check]
        wms_url = geoserver_wms.get_wms_url(workspace)
        wms = wms_proxy(wms_url)
        assert layername in wms.contents
        from layman.layer import get_layer_type_def
        from layman.common.filesystem import uuid as common_uuid
        # The layer's uuid must be persisted both on filesystem and in Redis.
        uuid_filename = common_uuid.get_publication_uuid_file(
            get_layer_type_def()['type'], workspace, layername)
        assert os.path.isfile(uuid_filename)
        uuid_str = None
        with open(uuid_filename, "r") as file:
            uuid_str = file.read().strip()
        assert uuid.is_valid_uuid(uuid_str)
        assert settings.LAYMAN_REDIS.sismember(uuid.UUID_SET_KEY, uuid_str)
        assert settings.LAYMAN_REDIS.exists(uuid.get_uuid_metadata_key(uuid_str))
        assert settings.LAYMAN_REDIS.hexists(
            uuid.get_workspace_type_names_key(workspace, '.'.join(__name__.split('.')[:-1])),
            layername
        )
        # REST 'metadata' section must point at the CSW record of this uuid.
        layer_info = client.get(url_for('rest_workspace_layer.get', workspace=workspace, layername=layername)).get_json()
        assert set(layer_info['metadata'].keys()) == {'identifier', 'csw_url', 'record_url', 'comparison_url'}
        assert layer_info['metadata']['identifier'] == f"m-{uuid_str}"
        assert layer_info['metadata']['csw_url'] == settings.CSW_PROXY_URL
        md_record_url = f"http://micka:80/record/basic/m-{uuid_str}"
        assert layer_info['metadata']['record_url'].replace("http://localhost:3080", "http://micka:80") == md_record_url
        assert layer_info['metadata']['comparison_url'] == url_for_external('rest_workspace_layer_metadata_comparison.get', workspace=workspace,
                                                                            layername=layername)
        assert 'id' not in layer_info.keys()
        assert 'type' not in layer_info.keys()
        # The record must be reachable directly on Micka and mention the layer.
        response = requests.get(md_record_url, auth=settings.CSW_BASIC_AUTHN)
        response.raise_for_status()
        assert layername in response.text
        publication_counter.increase()
        uuid.check_redis_consistency(expected_publ_num_by_type={
            f'{LAYER_TYPE}': publication_counter.get()
        })
    with app.app_context():
        # Expected CSW metadata right after first publication (no revision yet).
        expected_md_values = {
            'abstract': None,
            'extent': [-180.0, -85.60903859383285, 180.0, 83.64513109859944],
            'graphic_url': url_for_external('rest_workspace_layer_thumbnail.get', workspace=workspace, layername=layername),
            'identifier': {
                'identifier': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
                'label': 'ne_110m_admin_0_countries'
            },
            'language': ['eng'],
            'layer_endpoint': url_for_external('rest_workspace_layer.get', workspace=workspace, layername=layername),
            'organisation_name': None,
            'publication_date': TODAY_DATE,
            'reference_system': EXP_REFERENCE_SYSTEMS,
            'revision_date': None,
            'spatial_resolution': {
                'scale_denominator': 100000000,
            },
            'title': 'ne_110m_admin_0_countries',
        }
        check_metadata(client, workspace, layername, METADATA_PROPERTIES_EQUAL, expected_md_values)
def wait_till_ready(username, mapname):
    """Block until the map's asynchronous chain has finished.

    Polls the chain every 0.1 s; returns immediately if no chain exists.
    """
    while True:
        chain_info = util.get_map_chain(username, mapname)
        if chain_info is None or celery_util.is_chain_ready(chain_info):
            break
        time.sleep(0.1)
def check_redis_consistency(expected_publ_num_by_type=None):
    """Assert that Redis bookkeeping is consistent with the publications known
    to non-Redis sources (filesystem, DB, GeoServer, ...).

    Checks, in order: number of uuid metadata keys, optional expected counts
    per publication type, per-user publication hashes, the uuid set, the
    uuid->publication mapping, Celery chain infos vs. currently running task
    names, and publication locks vs. running tasks.

    :param expected_publ_num_by_type: optional dict mapping publication type
        to the expected number of publications of that type
    :return: dict mapping publication type to list of (workspace, name) tuples
    """
    # get info from non-redis sources
    infos = layman_util.get_publication_infos()
    num_total_publs = len(infos)
    # Keys are (workspace, publication_type, publication_name) tuples.
    total_publs = list(infos.keys())
    # publication types and names
    redis = settings.LAYMAN_REDIS
    # Match all keys sharing the first two colon-separated segments of the
    # respective key template.
    user_publ_keys = redis.keys(':'.join(USER_TYPE_NAMES_KEY.split(':')[:2]) + ':*')
    uuid_keys = redis.keys(':'.join(UUID_METADATA_KEY.split(':')[:2]) + ':*')
    assert num_total_publs == len(uuid_keys), f"total_publs={total_publs}, uuid_keys={uuid_keys}"
    total_publs_by_type = defaultdict(list)
    for publ in total_publs:
        total_publs_by_type[publ[1]].append((publ[0], publ[2]))
    if expected_publ_num_by_type is not None:
        for publ_type, publ_num in expected_publ_num_by_type.items():
            found_publ_num = len(total_publs_by_type[publ_type])
            assert publ_num == found_publ_num, f"expected {publ_num} of {publ_type}, found {found_publ_num}: {total_publs}"
    # Sum of publications over all per-user hashes must equal uuid key count.
    num_publ = 0
    for user_publ_key in user_publ_keys:
        num_publ += redis.hlen(user_publ_key)
    assert num_publ == len(uuid_keys)
    # publication uuids
    uuids = redis.smembers(UUID_SET_KEY)
    assert len(uuids) == num_publ
    for uuid_str in uuids:
        assert get_uuid_metadata_key(uuid_str) in uuid_keys
    # Every uuid metadata hash must map back to a registered publication.
    for uuid_key in uuid_keys:
        uuid_dict = redis.hgetall(uuid_key)
        assert redis.hexists(
            get_user_type_names_key(
                uuid_dict['username'],
                uuid_dict['publication_type']
            ),
            uuid_dict['publication_name'],
        )
    # publication tasks
    chain_infos_len = redis.hlen(celery_util.PUBLICATION_CHAIN_INFOS)
    assert chain_infos_len == len(total_publs), f"task_infos_len={chain_infos_len}, total_publs={total_publs}"
    # Running task names are stored as '<task_name>:<username>:<pubname>'.
    task_names_tuples = [
        h.split(':') for h in redis.smembers(celery_util.REDIS_CURRENT_TASK_NAMES)
    ]
    for username, publ_type_name, pubname in total_publs:
        chain_info = celery_util.get_publication_chain_info(username, publ_type_name, pubname)
        is_ready = celery_util.is_chain_ready(chain_info)
        assert chain_info['finished'] is is_ready
        # A finished chain must have no running task entry, and vice versa.
        assert (next((
            t for t in task_names_tuples
            if t[1] == username and t[2] == pubname and t[0].startswith(publ_type_name)
        ), None) is None) is is_ready, f"{username}, {publ_type_name}, {pubname}: {is_ready}, {task_names_tuples}"
        # The last-task-id mapping exists exactly while the chain is running.
        assert (redis.hget(celery_util.LAST_TASK_ID_IN_CHAIN_TO_PUBLICATION, chain_info['last'].task_id) is None) is is_ready
    # publication locks
    locks = redis.hgetall(redis_util.PUBLICATION_LOCKS_KEY)
    assert len(locks) == len(task_names_tuples), f"{locks} != {task_names_tuples}"
    # Every lock must correspond to a currently running task.
    for k, _ in locks.items():
        username, publication_type, publication_name = k.split(':')
        assert next((
            t for t in task_names_tuples
            if t[1] == username and t[2] == publication_name and t[0].startswith(publication_type)
        ), None) is not None
    return total_publs_by_type
def is_map_chain_ready(workspace, mapname):
    """Return True if the map has no asynchronous chain or its chain finished."""
    info = get_map_chain(workspace, mapname)
    if info is None:
        return True
    return celery_util.is_chain_ready(info)
def wait_till_layer_ready(workspace, layername):
    """Block until the layer's asynchronous chain has finished.

    Polls the chain every 0.1 s; returns immediately if no chain exists.
    """
    while True:
        chain = util_layer.get_layer_chain(workspace, layername)
        if chain is None or celery_util.is_chain_ready(chain):
            break
        time.sleep(0.1)
def is_layer_chain_ready(username, layername):
    """Return True if the layer has no asynchronous chain or its chain finished."""
    info = get_layer_chain(username, layername)
    return True if info is None else celery_util.is_chain_ready(info)
def is_map_chain_ready(username, mapname):
    """Return True if the map has no asynchronous chain or its chain finished."""
    info = get_map_chain(username, mapname)
    return True if info is None else celery_util.is_chain_ready(info)