Пример #1
0
def test_choropleth_multiple_dataset(client):
    """Render a choropleth built from two census-block-group datasets."""
    sv_id = '5a284f57c7d30d13bc08254c'
    dislocation_id = '5a284f58c7d30d13bc082566'

    sv_dataset = Dataset.from_data_service(sv_id, DataService(client))
    dislocation_dataset = Dataset.from_data_service(dislocation_id,
                                                    DataService(client))

    viz.plot_choropleth_multiple_dataset([sv_dataset, dislocation_dataset],
                                         ['tot_hh', 'p_16pyr'])

    assert True
Пример #2
0
def test_create_network_dataset(datasvc):
    """NetworkDataset creation: invalid-path dataset rejected, full one accepted."""
    dataset_id = "5cf696b05648c477129bfc21"

    # a dataset built from metadata alone has no valid local path and
    # must be rejected with a TypeError
    metadata_only = Dataset(datasvc.get_dataset_metadata(dataset_id))
    pytest.raises(TypeError, NetworkDataset, metadata_only)

    # a fully fetched dataset yields a complete network
    network = NetworkDataset(Dataset.from_data_service(dataset_id, datasvc))
    for component in (network.graph, network.link, network.node):
        assert component is not None
Пример #3
0
def test_multiple_vector_visualization(client):
    """Overlay the Centerville EPN link layer with a model tornado path."""
    centerville_model_tornado = '60c917b498a93232884f367d'
    centerville_epn_link = '5b1fdc2db1cf3e336d7cecc9'

    # resolve the tornado's backing dataset id from its hazard metadata
    tornado_metadata = HazardService(client).get_tornado_hazard_metadata(
        centerville_model_tornado)

    epn_dataset = Dataset.from_data_service(centerville_epn_link,
                                            DataService(client))
    tornado_dataset = Dataset.from_data_service(tornado_metadata["datasetId"],
                                                DataService(client))

    viz.plot_multiple_vector_dataset([epn_dataset, tornado_dataset])

    assert True
Пример #4
0
def test_create_dataset_from_json_str(datasvc):
    """Dataset.from_json_str needs either a data service or a file path."""
    metadata_path = os.path.join(pyglobals.PYINCORE_ROOT_FOLDER,
                                 "tests/data/dataset_metadata.json")
    with open(metadata_path, 'r') as f:
        metadata_json_str = f.read()

    # with neither data_service nor file_path the call must fail
    with pytest.raises(Exception):
        Dataset.from_json_str(metadata_json_str)

    # either source of files should produce a usable local_file_path
    from_service = Dataset.from_json_str(metadata_json_str,
                                         data_service=datasvc)
    assert from_service.local_file_path is not None

    from_path = Dataset.from_json_str(metadata_json_str,
                                      file_path="some_local_file_path")
    assert from_path.local_file_path is not None
Пример #5
0
def test_visualize_raster_file(client):
    """Smoke test: plotting the Galveston wave-height raster should not raise.

    The returned map object is intentionally discarded; completing the call
    without an exception is the test.
    """
    galvaston_wave_height_id = '5f11e503feef2d758c4df6db'
    dataset = Dataset.from_data_service(galvaston_wave_height_id,
                                        DataService(client))
    viz.plot_raster_from_path(dataset.get_file_path('tif'))

    assert True
Пример #6
0
def test_heatmap(client):
    """Smoke test: rendering a heatmap of 'str_prob' should not raise.

    Fix: the original bound the result to a local named ``map`` (shadowing
    the builtin) and never used it; the assignment is dropped.
    """
    shelby_hospital_inv_id = "5a284f0bc7d30d13bc081a28"
    dataset = Dataset.from_data_service(shelby_hospital_inv_id,
                                        DataService(client))
    viz.plot_heatmap(dataset, "str_prob")

    assert True
Пример #7
0
def test_seaside_roadways(client):
    """Plain (uncategorized) map plot of the Seaside roadway dataset."""
    roadway_dataset = Dataset.from_data_service("5ee7af50772cf80008577ae3",
                                                DataService(client))
    viz.plot_map(roadway_dataset, column=None, category=False, basemap=True)

    assert True
Пример #8
0
def test_seaside_bridges(client):
    """Plain (uncategorized) map plot of the Seaside bridge dataset."""
    bridge_dataset = Dataset.from_data_service("5d251172b9219c0692cd7523",
                                               DataService(client))
    viz.plot_map(bridge_dataset, column=None, category=False, basemap=True)

    assert True
Пример #9
0
def test_plot_map_table_dataset(client):
    """Smoke test: plotting a table dataset's 'meandamage' column should not raise.

    Fix: the original bound the result to a local named ``map`` (shadowing
    the builtin) and never used it; the assignment is dropped.
    """
    building_damage_id = '5a296b53c7d30d4af5378cd5'
    dataset = Dataset.from_data_service(building_damage_id,
                                        DataService(client))
    viz.plot_table_dataset(dataset, client, 'meandamage')

    assert True
Пример #10
0
def test_choropleth_sinlge_dataset(client):
    """Choropleth of two fields drawn from one census-block-group dataset."""
    # NOTE(review): "sinlge" in the function name is a typo; kept as-is so the
    # test id pytest reports does not change.
    sv_dataset = Dataset.from_data_service('5a284f57c7d30d13bc08254c',
                                           DataService(client))
    viz.plot_choropleth_multiple_fields_from_single_dataset(
        sv_dataset, ['tot_hh', 'totpop'])

    assert True
Пример #11
0
    def get_mapped_result_from_dataset_id(client,
                                          inventory_id: str,
                                          dmg_result_id: str,
                                          bldg_func_id,
                                          archetype_mapping_id: str,
                                          groupby_col_name: str = "max_state"):
        """Map damage and functionality results to archetype clusters, by ids.

        Use this if your damage results are already stored in the data service
        and you have their dataset ids. All the inputs (except
        groupby_col_name) are dataset ids.

        Args:
            client: Service client with authentication info
            inventory_id: Inventory dataset id
            dmg_result_id: Damage result dataset id
            bldg_func_id: Incore dataset for building func id
            archetype_mapping_id: Mapping id dataset for archetype
            groupby_col_name: column name to group by, default to max_state

        Returns:
            dmg_ret_json: JSON of the damage state results ordered by cluster and category.
            func_ret_json: JSON of the building functionality results ordered by cluster and category.
            max_state_df: Dataframe of max damage state

        """
        # the inventory is a shapefile; flatten it to a plain DataFrame
        inventory_dataset = Dataset.from_data_service(inventory_id,
                                                      DataService(client))
        inventory = pd.DataFrame(
            gpd.read_file(inventory_dataset.local_file_path))

        dmg_result = Dataset.from_data_service(
            dmg_result_id, DataService(client)).get_dataframe_from_csv()

        bldg_func_df = Dataset.from_data_service(
            bldg_func_id, DataService(client)).get_dataframe_from_csv()
        # align the key column name with the inventory's 'guid'
        bldg_func_df.rename(columns={'building_guid': 'guid'}, inplace=True)

        arch_mapping = Dataset.from_data_service(
            archetype_mapping_id, DataService(client)).get_dataframe_from_csv()

        max_state_df = DataProcessUtil.get_max_damage_state(dmg_result)
        dmg_ret_json = DataProcessUtil.create_mapped_dmg_result(
            inventory, max_state_df, arch_mapping, groupby_col_name)
        func_ret_json = DataProcessUtil.create_mapped_func_result(
            inventory, bldg_func_df, arch_mapping)

        return dmg_ret_json, func_ret_json, max_state_df
Пример #12
0
def test_visualize_network(client):
    """Plot the Centerville EPN network dataset."""
    centerville_epn_network_id = "5d25fb355648c40482a80e1c"

    epn_dataset = Dataset.from_data_service(centerville_epn_network_id,
                                            DataService(client))
    viz.plot_network_dataset(NetworkDataset(epn_dataset))

    assert True
Пример #13
0
    def get_mapped_result_from_analysis(client,
                                        inventory_id: str,
                                        dmg_result_dataset,
                                        bldg_func_dataset,
                                        archetype_mapping_id: str,
                                        groupby_col_name: str = "max_state"):
        """Use this if you want to load results directly from the output files of the analysis, rather than
        storing the results to data service and loading from there using ids.
        It takes the static inputs: inventory & archetypes as dataset ids. The result inputs are taken as
        Dataset class objects, which are created by reading the output result files.

        Args:
            client: Service client with authentication info
            inventory_id: Inventory dataset id
            dmg_result_dataset: Incore dataset for damage result
            bldg_func_dataset: Incore dataset for building func dataset
            archetype_mapping_id: Mapping id dataset for archetype
            groupby_col_name: column name to group by, default to max_state

        Returns:
            dmg_ret_json: JSON of the damage state results ordered by cluster and category.
            func_ret_json: JSON of the building functionality results ordered by cluster and category.
            max_state_df: Dataframe of max damage state

        """
        # the inventory is a shapefile; flatten it to a plain DataFrame
        bldg_inv = Dataset.from_data_service(inventory_id, DataService(client))
        inventory = pd.DataFrame(gpd.read_file(bldg_inv.local_file_path))

        dmg_result = dmg_result_dataset.get_dataframe_from_csv()

        bldg_func_df = bldg_func_dataset.get_dataframe_from_csv()
        # align the key column name with the inventory's 'guid'
        bldg_func_df.rename(columns={'building_guid': 'guid'}, inplace=True)

        arch_mapping = Dataset.from_data_service(
            archetype_mapping_id,
            DataService(client)).get_dataframe_from_csv()

        max_state_df = DataProcessUtil.get_max_damage_state(dmg_result)
        dmg_ret_json = DataProcessUtil.create_mapped_dmg_result(
            inventory, max_state_df, arch_mapping, groupby_col_name)
        func_ret_json = DataProcessUtil.create_mapped_func_result(
            inventory, bldg_func_df, arch_mapping)

        return dmg_ret_json, func_ret_json, max_state_df
Пример #14
0
def test_visualize_earthquake(client):
    """Smoke test: plot_earthquake should render the hazard without raising.

    Fix: the original fetched the hazard metadata and built an ``eq_dataset``
    that was never used — viz.plot_earthquake resolves the raster dataset from
    the hazard id itself — so that dead code is removed.
    """
    eq_hazard_id = "5b902cb273c3371e1236b36b"

    viz.plot_earthquake(eq_hazard_id, client)

    assert True
Пример #15
0
def test_visualize_inventory(client):
    """Exercise the map helpers on Shelby hospital and road inventories."""
    shelby_hospital_inv_id = "5a284f0bc7d30d13bc081a28"
    shelby_road_id = "5a284f2bc7d30d13bc081eb6"

    # fetch the Shelby building inventory and road datasets
    sh_bldg_inv = Dataset.from_data_service(shelby_hospital_inv_id,
                                            DataService(client))
    sh_road = Dataset.from_data_service(shelby_road_id, DataService(client))

    # static map of the building inventory, colored by structure type
    viz.plot_map(sh_bldg_inv, column="struct_typ", category=False,
                 basemap=True)

    # the same layers rendered through the geoserver-backed helpers
    viz.get_wms_map([sh_bldg_inv, sh_road])
    viz.get_gdf_map([sh_bldg_inv, sh_road])
    viz.get_gdf_wms_map([sh_bldg_inv], [sh_road])

    assert True
Пример #16
0
def test_visualize_joplin_tornado_building(client):
    """Joplin tornado path plus building inventory, via gdf and wms maps."""
    tornado_hazard_id = "5dfa32bbc0601200080893fb"
    joplin_bldg_inv_id = "5df7d0de425e0b00092d0082"

    viz.plot_tornado(tornado_hazard_id, client, basemap=False)

    # resolve and fetch the tornado-path dataset behind the hazard
    tornado_metadata = HazardService(client).get_tornado_hazard_metadata(
        tornado_hazard_id)
    tornado_dataset = Dataset.from_data_service(tornado_metadata['datasetId'],
                                                DataService(client))
    viz.get_gdf_map([tornado_dataset])

    # get the Joplin building inventory
    joplin_bldg_inv = Dataset.from_data_service(joplin_bldg_inv_id,
                                                DataService(client))

    # use a wms layer for the Joplin building inventory; gdf will crash the browser
    viz.get_gdf_wms_map([tornado_dataset], [joplin_bldg_inv])

    assert True
Пример #17
0
def test_plot_table_dataset_list_from_single_source(client):
    """Smoke test: plotting a list of table datasets against one source should not raise.

    Fix: the original bound the result to a local named ``map`` (shadowing
    the builtin) and never used it; the assignment is dropped.
    """
    seaside_building_polygon_id = '5f7c95d681c8dd4d309d5a46'
    dataset_id_list = ['5f7c9b4f81c8dd4d309d5b62', '5f7c9af781c8dd4d309d5b5e']

    dataset_list = [
        Dataset.from_data_service(dataset_id, DataService(client))
        for dataset_id in dataset_id_list
    ]

    viz.plot_table_dataset_list_from_single_source(
        client, dataset_list, 'failure_probability',
        seaside_building_polygon_id)

    assert True
Пример #18
0
def test_get_mapped_result_from_analysis(client):
    """End-to-end check of util.get_mapped_result_from_analysis output shape."""
    bldg_dataset_id = "5f9091df3e86721ed82f701d"
    archetype_id = "5fca915fb34b193f7a44059b"

    # legacy DS_name damage result; new DS_name id: 602d96e4b1db9c28aeeebdce
    bldg_dmg_dataset_id = "5f9868c00ace240b22a7f2a5"
    dmg_result_dataset = Dataset.from_data_service(bldg_dmg_dataset_id,
                                                   DataService(client))

    bldg_func_dataset_id = "60674c89c57ada48e492b97b"
    bldg_func_dataset = Dataset.from_data_service(bldg_func_dataset_id,
                                                  DataService(client))

    dmg_ret_json, func_ret_json, max_state_df = util.get_mapped_result_from_analysis(
        client, bldg_dataset_id, dmg_result_dataset, bldg_func_dataset,
        archetype_id)

    # both result JSONs carry cluster- and category-level groupings
    for result in (dmg_ret_json, func_ret_json):
        assert "by_cluster" in result and "by_category" in result

    assert "max_state" in max_state_df._info_axis.values
Пример #19
0
def test_create_network_data(datasvc):
    """NetworkDataset builds all parts; NetworkData rejects missing files.

    Fix: the original repeated the ("test-type", "test-file") FileNotFoundError
    check twice verbatim; the duplicate is removed.
    """
    dataset = Dataset.from_data_service("5cf696b05648c477129bfc21", datasvc)
    network_dataset = NetworkDataset(dataset)

    assert network_dataset.link is not None
    assert network_dataset.node is not None
    assert network_dataset.graph is not None

    # test that we can't create a network data object with an invalid file path
    with pytest.raises(FileNotFoundError):
        NetworkData(network_type="", file_path="")
    with pytest.raises(FileNotFoundError):
        NetworkData(network_type="test-type", file_path="test-file")
Пример #20
0
    def plot_table_dataset_list_from_single_source(client,
                                                   dataset_list=None,
                                                   column=None,
                                                   in_source_dataset_id=None):
        """Creates map window with a list of table dataset and source dataset

            Fixes: the original used the type objects ``list`` and ``str`` as
            default values (an apparent typo for annotations) — replaced with
            ``None``; the local ``map`` no longer shadows the builtin; the
            redundant pre-initialization of ``source_dataset_id`` is removed.

            Args:
                client (Client): pyincore service Client Object
                dataset_list (list): list of table dataset
                column (str): column name to be plot
                in_source_dataset_id (str): source dataset id, the default is None

            Returns:
                GeoUtil.map (ipyleaflet.Map): ipyleaflet Map object

            """
        # join the table datasets on the shared field; the source dataset id is
        # discovered from the datasets unless one was supplied explicitly
        if in_source_dataset_id is None:
            joined_df, dataset_id_list, source_dataset_id = \
                GeoUtil.merge_table_dataset_with_field(dataset_list, column)
        else:
            joined_df, dataset_id_list, source_dataset_id = \
                GeoUtil.merge_table_dataset_with_field(dataset_list, column, in_source_dataset_id)

        if source_dataset_id is None:
            raise Exception("There is no sourceDataset id.")

        source_dataset = Dataset.from_data_service(source_dataset_id,
                                                   DataService(client))
        inventory_df = PlotUtil.inventory_to_geodataframe(source_dataset)
        inventory_df = PlotUtil.remove_null_inventories(inventory_df, 'guid')

        # merge inventory dataframe and joined table dataframe
        inventory_df = inventory_df.merge(joined_df, on='guid')

        # keep only guid, geometry, and one column per dataset id; each
        # dataset_id doubles as the column name visualized on the map
        keep_list = ['guid', 'geometry'] + list(dataset_id_list)
        inventory_df = inventory_df[keep_list]

        # create base map
        result_map = table_list_map()
        result_map.create_basemap_ipylft(inventory_df, dataset_id_list)

        return result_map.map
Пример #21
0
    def plot_earthquake(earthquake_id, client):
        """Plot earthquake raster data.

        Args:
            earthquake_id (str): ID of the earthquake hazard
            client (Client): pyincore service Client Object

        """
        metadata = HazardService(client).get_earthquake_hazard_metadata(
            earthquake_id)
        raster_dataset_id = metadata['rasterDataset']['datasetId']

        raster_dataset = Dataset.from_data_service(raster_dataset_id,
                                                   DataService(client))
        # the dataset's local path is joined with the raster file name taken
        # from the first file descriptor in its metadata
        file_name = raster_dataset.metadata['fileDescriptors'][0]['filename']
        raster_file_path = Path(raster_dataset.local_file_path).joinpath(
            file_name)

        rasterio.plot.show(rasterio.open(raster_file_path))
Пример #22
0
    def plot_tornado(tornado_id, client, category=False, basemap=True):
        """Plot a tornado path

        Args:
            tornado_id (str):  ID of tornado hazard
            client (Client): pyincore service Client Object
            category (boolean): turn on/off category option
            basemap (boolean): turn on/off base map (e.g. openstreetmap)
        """
        # polygon plotting needs the descartes package
        # NOTE: fetching the tornado dataset should be part of Tornado Hazard code
        metadata = HazardService(client).get_tornado_hazard_metadata(
            tornado_id)
        tornado_dataset = Dataset.from_data_service(metadata['datasetId'],
                                                    DataService(client))
        tornado_gdf = gpd.read_file(tornado_dataset.local_file_path)

        GeoUtil.plot_gdf_map(tornado_gdf, 'ef_rating', category, basemap)
Пример #23
0
def test_plot_map_dataset_list(client):
    """Smoke test: plotting a mixed list of datasets should not raise.

    Fix: the original bound the result to a local named ``map`` (shadowing
    the builtin) and never used it; the assignment is dropped.
    """
    galveston_roadway_id = '5f0dd5ecb922f96f4e962caf'
    galvaston_wave_height_id = '5f11e503feef2d758c4df6db'
    shelvy_building_damage_id = '5a296b53c7d30d4af5378cd5'
    dataset_id_list = [
        galveston_roadway_id, galvaston_wave_height_id,
        shelvy_building_damage_id
    ]

    dataset_list = [
        Dataset.from_data_service(dataset_id, DataService(client))
        for dataset_id in dataset_id_list
    ]

    # table dataset plot map
    viz.plot_maps_dataset_list(dataset_list, client)

    assert True
Пример #24
0
    def join_table_dataset_with_source_dataset(dataset, client):
        """Creates geopandas geodataframe by joining table dataset and its source dataset

            Fixes: the broad ``try/except Exception`` around a dict-key lookup
            is replaced with ``dict.get``; the flag-variable control flow is
            flattened into guard clauses; the message typo "give" is corrected.

            Args:
                dataset (Dataset): pyincore dataset object
                client (Client): pyincore service client object

            Returns:
                gpd.GeoDataFrame: Geopandas geodataframe object, or None when
                    the dataset is not a table dataset or has no source dataset.

        """
        # only table-shaped datasets can be joined onto their source geometry
        if dataset.metadata['format'] not in ('table', 'csv'):
            print("The given dataset is not a table dataset")
            return None

        # a table dataset without a source dataset cannot be joined
        source_dataset_id = dataset.metadata.get('sourceDataset')
        if source_dataset_id is None:
            print("There is no source dataset for the given table dataset")
            return None

        # merge dataset and source dataset
        geodataset = Dataset.from_data_service(source_dataset_id,
                                               DataService(client))
        return DatasetUtil.join_datasets(geodataset, dataset)
Пример #25
0
import os
import pandas as pd
import geopandas as gpd
from pyincore import DataService, IncoreClient, Dataset

client = IncoreClient()
datasvc = DataService(client)

# Nathanael's building inventory (table dataset -> pandas DataFrame)
building_inventory_pop_id = '5d5433edb9219c0689b98344'
inventory_ds = Dataset.from_data_service(building_inventory_pop_id, datasvc)
df = inventory_ds.get_dataframe_from_csv()


# OSU's Seaside building shapefile -> geopandas GeoDataFrame
seaside_building_shapefile_id = '5df40388b9219c06cf8b0c80'
shapefile_ds = Dataset.from_data_service(seaside_building_shapefile_id, datasvc)
gdf = shapefile_ds.get_dataframe_from_shapefile()

# merge the inventory CSV's strctid onto the shapefile rows via guid
gdf_new = pd.merge(gdf, df[['strctid', 'guid']], how='left', left_on='guid', right_on='guid')
print(len(df), len(gdf), len(gdf_new))  # sanity-check row counts

# persist the merged result as a new shapefile in the working directory
file_new = os.path.join(os.getcwd(), 'seaside_bldg.shp')
gdf_new.to_file(file_new)
        # NOTE(review): fragment of a larger routine — `shp_loc`, IncoreClient,
        # DataService and SpaceService must be defined/imported by the
        # enclosing (not visible) scope.
        print("================================")
        client = IncoreClient()
        data_services = DataService(client)
        # space_services is created but not used below — TODO confirm it is needed
        space_services = SpaceService(client)

        # metadata describing the new dataset to register with the data service
        dataset_metadata = {
        "title":"MMSA All Building Inventory",
        "description": "Shelby building inventory containing strctid, longitude, latitude and block id",
        "dataType": "ergo:buildingInventoryVer5",
        "format": "shapefile"
        }

        # register the dataset record first; files are attached in a second step
        created_dataset = data_services.create_dataset(dataset_metadata)
        dataset_id = created_dataset['id']
        print('dataset is created with id ' + dataset_id)

        # upload all four shapefile components (assumes shp_loc is the common path prefix)
        files = [shp_loc+'.shp', shp_loc+'.shx', shp_loc+'.prj', shp_loc+'.dbf']
        full_dataset = data_services.add_files_to_dataset(dataset_id, files)
        
        print("Data upload complete - dataset summary \n")
        print(full_dataset)
        
        print("Checking to see if it worked by loading this from remote dataset... \n")
        
        # round-trip check: fetch the freshly uploaded dataset back from the service
        buildings = Dataset.from_data_service(dataset_id, data_services)
        print(buildings)