# Example 1
def test_merge_clusters():
    """Merging an intersecting cluster preserves total area and leaves
    non-intersecting dummy entries untouched."""

    import pandas as pd

    # Simple ice cap
    df = get_iceland_df(reduced=True)

    # Save the area for testing later
    area_ref = df.Area.sum()

    # Add dummy entries for testing: shifted copies of real outlines,
    # moved far enough away that they do not intersect the ice cap
    from shapely.affinity import translate
    idf = df.iloc[0].copy()
    idf['geometry'] = translate(idf.geometry, xoff=0.15, yoff=0.0)
    idf['RGIId'] = 'd1'
    # DataFrame.append was removed in pandas 2.0 -> use pd.concat
    df = pd.concat([df, idf.to_frame().T], ignore_index=True)

    idf = df.iloc[1].copy()
    idf['geometry'] = translate(idf.geometry, xoff=0.15, yoff=0.01)
    idf['RGIId'] = 'd2'
    df = pd.concat([df, idf.to_frame().T], ignore_index=True)

    # Intersects and go
    idf = funcs.compute_intersects(df)
    out = funcs.merge_clusters(df, idf)

    # One merged ice cap plus the two dummies
    assert len(out) == 3
    assert_allclose(out.iloc[0].Area, area_ref)

    # The dummies must come out of the merge unchanged
    s1 = df.iloc[-2]
    s2 = out.loc[out.RGIId == 'd1'].iloc[0]
    assert_equal(s1.CenLat, s2.CenLat)
    assert_equal(s1.CenLon, s2.CenLon)
    assert s1.geometry.equals(s2.geometry)
# Example 2
def test_merge_clusters_all():
    """Merged output geometries must all be valid single polygons."""

    # All glaciers
    df = get_iceland_df()

    # Intersects and go
    idf = funcs.compute_intersects(df)
    out = funcs.merge_clusters(df, idf)

    # Bug fix: np.all(<generator>) tests the truthiness of the generator
    # object itself (always True), so the original asserts were vacuous.
    # The builtin all() consumes the generator and checks every element.
    assert all(g.is_valid for g in out.geometry)
    assert all(g.type == 'Polygon' for g in out.geometry)
# Example 3
def test_find_clusters():
    """Dummy intersect pairs (d1-d2, d1-d3) must form one extra cluster
    of three members next to the real ice-cap cluster."""

    import pandas as pd

    # Simple ice cap
    df = get_iceland_df(reduced=True)
    idf = funcs.compute_intersects(df)

    # Add dummy entries for testing.
    # DataFrame.append was removed in pandas 2.0 -> use pd.concat
    dummies = pd.DataFrame([{'RGIId_1': 'd1', 'RGIId_2': 'd2'},
                            {'RGIId_1': 'd1', 'RGIId_2': 'd3'}])
    idf = pd.concat([idf, dummies], ignore_index=True)

    out = funcs.find_clusters(idf)
    assert len(out) == 2
    # d1 connects to d2 and d3 -> cluster of three
    assert len(out['d1']) == 3
# Example 4
def test_intersects(tmpdir):
    """Intersect computation writes a shapefile and covers every glacier."""

    # Simple ice cap
    df = get_iceland_df(reduced=True)
    shp_path = os.path.join(str(tmpdir), 'interfile.shp')
    out = funcs.compute_intersects(df, to_file=shp_path, job_id='test')

    # At least one intersect pair per glacier, and the file was written
    assert len(out) >= len(df)
    assert os.path.exists(shp_path)

    # Every glacier id must show up in at least one intersect pair
    paired_ids = np.concatenate([out.RGIId_1.values, out.RGIId_2.values])
    paired_ids = np.sort(np.unique(paired_ids))
    assert_equal(np.sort(np.unique(df.RGIId.values)), paired_ids)
# Example 5
    'RGI60-05.02310_2', 'RGI60-05.02328_2', 'RGI60-05.01987_2',
    'RGI60-05.02135'
]

# select only glaciers with geodetic MB
# (geod_list is defined above this chunk; presumably the RGI ids that
# have geodetic mass-balance observations -- TODO confirm)
entity = entity[entity['RGIId'].isin(geod_list)]

# exclude
# Drop outlines listed in the two removal lists (defined above; the
# visible tail suggests hand-picked problematic RGI ids).
entity = entity[~entity.RGIId.isin(rm_list)]
entity = entity[~entity.RGIId.isin(rm_list2)]

# test with one glacier
#entity = entity[entity.RGIId == 'RGI60-05.01920']

# compute intersects
new_intersects = compute_intersects(entity)
# store intersects to working dir
entity_intersects_path = os.path.join(cfg.PATHS['working_dir'],
                                      'entity_intersects.shp')
new_intersects.to_file(entity_intersects_path)

# set intersect file to use
# NOTE(review): registers the intersects with the cfg module so that
# downstream tasks pick them up -- looks like OGGM's cfg API, confirm
cfg.set_intersects_db(new_intersects)

# year where outlines are valid
# assumes the basename of `fp` (defined above) starts with a 4-digit
# year, e.g. '2019_...' -- TODO confirm against the input file naming
outline_year = os.path.basename(fp)[:4]

# check geometries
#for idx, row in entity.iterrows():
#    if entity.geometry.iloc[idx].type != 'Polygon':
#        print(row['RGIId'] + row.geometry.type)