Code example #1
def upload_shapefile_to_services(client):
    # Upload to incore services and put under commresilience space
    # It assumes the shapefile is in the utils directory
    datasvc = DataService(client)
    dataset_prop = {
        "title": "Joplin Population Dislocation For Heatmap Plotting",
        "description":
        "Contains only dislocated numprec for Joplin playbook plotting usage",
        "contributors": [],
        "dataType": "incore:popdislocationShp",
        "storedUrl": "",
        "format": "shapefile"
    }
    response = datasvc.create_dataset(dataset_prop)
    dataset_id = response['id']
    files = [
        'joplin-pop-disl-numprec.shp', 'joplin-pop-disl-numprec.dbf',
        'joplin-pop-disl-numprec.shx', 'joplin-pop-disl-numprec.prj'
    ]
    datasvc.add_files_to_dataset(dataset_id, files)

    # add to space
    spacesvc = SpaceService(client)
    spacesvc.add_dataset_to_space("5f99ba8b0ace240b22a82e00",
                                  dataset_id=dataset_id)  # commresilience
    print(dataset_id +
          " successfully uploaded and moved to commresilience space!")
Code example #2
def test_insecure_client():
    """
    test insecure client
    """
    client = InsecureIncoreClient(username="******")
    data_svc = DataService(client)
    try:
        r = data_svc.get_datasets()
        assert len(r) != 0
    except requests.HTTPError:
        assert False
Code example #3
File: conftest.py Project: IN-CORE/pyincore
def pytest_sessionstart(session):
    """
    Called after the Session object has been created and
    before performing collection and entering the run test loop.
    """
    try:
        with open(
                os.path.join(os.path.dirname(__file__), "pyincore/.incorepw"),
                'r') as f:
            cred = f.read().splitlines()
    except EnvironmentError:
        assert False
    credentials = jwt.decode(cred[0], cred[1])

    monkeypatch = MonkeyPatch()
    monkeypatch.setattr("builtins.input", lambda x: credentials["username"])
    monkeypatch.setattr("getpass.getpass", lambda y: credentials["password"])
    client = IncoreClient(service_url=pyglobals.INCORE_API_DEV_URL,
                          token_file_name=".incrtesttoken")
    pytest.client = client
    pytest.datasvc = DataService(client)
    pytest.fragilitysvc = FragilityService(client)
    pytest.repairsvc = RepairService(client)
    pytest.restorationsvc = RestorationService(client)
    pytest.hazardsvc = HazardService(client)
    pytest.spacesvc = SpaceService(client)
    print(
        f"Successfully initialized Incore client and services. Using {pyglobals.INCORE_API_DEV_URL}"
    )
Code example #4
    def create_result_dataset(datasvc: DataService, parentid: str,
                              result_files: List[str], title: str,
                              output_metadata: Dict[str, str]):
        # Result metadata
        properties = output_metadata
        properties["title"] = title
        properties["sourceDataset"] = parentid

        # Create child dataset with parent dataset as sourceDataset
        result_dataset = datasvc.create_dataset(properties)
        result_dataset_id = result_dataset["id"]

        # Attach files to result dataset
        datasvc.add_files_to_dataset(result_dataset_id, result_files)

        return result_dataset_id
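A hedged usage sketch for this helper; the parent id, file name, and metadata values below are hypothetical placeholders, not values taken from the other examples:

datasvc = DataService(client)
result_id = create_result_dataset(
    datasvc,
    parentid="<parent-dataset-id>",         # hypothetical id
    result_files=["building_damage.csv"],   # hypothetical output file
    title="Building damage result",
    output_metadata={"dataType": "incore:exampleResult", "format": "csv"})  # hypothetical metadata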
Code example #5
File: dataprocessutil.py Project: IN-CORE/pyincore
    def get_mapped_result_from_dataset_id(client,
                                          inventory_id: str,
                                          dmg_result_id: str,
                                          bldg_func_id,
                                          archetype_mapping_id: str,
                                          groupby_col_name: str = "max_state"):
        """Use this if your damage results are already stored in the data service and you have their dataset ids.
        All the inputs (except groupby_col_name) are dataset ids.

        Args:
            client: Service client with authentication info
            inventory_id: Inventory dataset id
            dmg_result_id: Damage result dataset id
            bldg_func_id: Building functionality dataset id
            archetype_mapping_id: Archetype mapping dataset id
            groupby_col_name: Column name to group by; defaults to "max_state"

        Returns:
            dmg_ret_json: JSON of the damage state results ordered by cluster and category.
            func_ret_json: JSON of the building functionality results ordered by cluster and category.
            max_state_df: Dataframe of max damage state

        """
        bldg_inv = Dataset.from_data_service(inventory_id, DataService(client))
        inventory = pd.DataFrame(gpd.read_file(bldg_inv.local_file_path))

        dmg_result_dataset = Dataset.from_data_service(dmg_result_id,
                                                       DataService(client))
        dmg_result = dmg_result_dataset.get_dataframe_from_csv()

        bldg_func_dataset = Dataset.from_data_service(bldg_func_id,
                                                      DataService(client))
        bldg_func_df = bldg_func_dataset.get_dataframe_from_csv()
        bldg_func_df.rename(columns={'building_guid': 'guid'}, inplace=True)

        archetype_mapping_dataset = Dataset.from_data_service(
            archetype_mapping_id, DataService(client))
        arch_mapping = archetype_mapping_dataset.get_dataframe_from_csv()

        max_state_df = DataProcessUtil.get_max_damage_state(dmg_result)
        dmg_ret_json = DataProcessUtil.create_mapped_dmg_result(
            inventory, max_state_df, arch_mapping, groupby_col_name)
        func_ret_json = DataProcessUtil.create_mapped_func_result(
            inventory, bldg_func_df, arch_mapping)

        return dmg_ret_json, func_ret_json, max_state_df
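A usage sketch mirroring example #8, but with every input already stored in the data service; all four ids below are hypothetical placeholders:

dmg_ret_json, func_ret_json, max_state_df = DataProcessUtil.get_mapped_result_from_dataset_id(
    client,
    inventory_id="<building-inventory-id>",
    dmg_result_id="<damage-result-id>",
    bldg_func_id="<building-functionality-id>",
    archetype_mapping_id="<archetype-mapping-id>")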
Code example #6
File: dataprocessutil.py Project: IN-CORE/pyincore
    def get_mapped_result_from_analysis(client,
                                        inventory_id: str,
                                        dmg_result_dataset,
                                        bldg_func_dataset,
                                        archetype_mapping_id: str,
                                        groupby_col_name: str = "max_state"):
        """Use this if you want to load results directly from the output files of the analysis, than storing the results
        to data service and loading from there using ids.
        It takes the static inputs: inventory & archetypes as dataset ids. The result inputs are taken as
        Dataset class objects, which are created by reading the output result files.

        Args:
            client: Service client with authentication info
            inventory_id: Inventory dataset id
            dmg_result_dataset: Damage result as a Dataset object
            bldg_func_dataset: Building functionality result as a Dataset object
            archetype_mapping_id: Archetype mapping dataset id
            groupby_col_name: Column name to group by; defaults to "max_state"

        Returns:
            dmg_ret_json: JSON of the damage state results ordered by cluster and category.
            func_ret_json: JSON of the building functionality results ordered by cluster and category.
            max_state_df: Dataframe of max damage state

        """
        bldg_inv = Dataset.from_data_service(inventory_id, DataService(client))
        inventory = pd.DataFrame(gpd.read_file(bldg_inv.local_file_path))

        dmg_result = dmg_result_dataset.get_dataframe_from_csv()

        bldg_func_df = bldg_func_dataset.get_dataframe_from_csv()
        bldg_func_df.rename(columns={'building_guid': 'guid'}, inplace=True)

        arch_mapping = Dataset.from_data_service(
            archetype_mapping_id,
            DataService(client)).get_dataframe_from_csv()

        max_state_df = DataProcessUtil.get_max_damage_state(dmg_result)
        dmg_ret_json = DataProcessUtil.create_mapped_dmg_result(
            inventory, max_state_df, arch_mapping, groupby_col_name)
        func_ret_json = DataProcessUtil.create_mapped_func_result(
            inventory, bldg_func_df, arch_mapping)

        return dmg_ret_json, func_ret_json, max_state_df
Code example #7
    def cache_files(self, data_service: DataService):
        """Get the set of fragility data, curves.

        Args:
            data_service (obj): Data service.

        Returns:
            str: A path to the local file.

        """
        if self.local_file_path is not None:
            return self.local_file_path
        self.local_file_path = data_service.get_dataset_blob(self.id)
        return self.local_file_path
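Callers rarely invoke this directly: from_data_service (example #9) calls cache_files right after constructing the Dataset. For an already-constructed Dataset instance, a direct call is just:

local_path = dataset.cache_files(DataService(client))  # downloads on the first call, reuses the cached path afterwards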
Code example #8
def test_get_mapped_result_from_analysis(client):
    bldg_dataset_id = "5f9091df3e86721ed82f701d"

    bldg_dmg_dataset_id = "5f9868c00ace240b22a7f2a5"  # legacy DS_name
    # bldg_dmg_dataset_id = "602d96e4b1db9c28aeeebdce" # new DS_name
    dmg_result_dataset = Dataset.from_data_service(bldg_dmg_dataset_id,
                                                   DataService(client))

    archetype_id = "5fca915fb34b193f7a44059b"

    bldg_func_dataset_id = "60674c89c57ada48e492b97b"
    bldg_func_dataset = Dataset.from_data_service(bldg_func_dataset_id,
                                                  DataService(client))

    dmg_ret_json, func_ret_json, max_state_df = util.get_mapped_result_from_analysis(
        client, bldg_dataset_id, dmg_result_dataset, bldg_func_dataset,
        archetype_id)

    assert "by_cluster" in dmg_ret_json.keys(
    ) and "by_category" in dmg_ret_json.keys()
    assert "by_cluster" in func_ret_json.keys(
    ) and "by_category" in func_ret_json.keys()
    assert "max_state" in max_state_df._info_axis.values
Code example #9
    def from_data_service(cls, id: str, data_service: DataService):
        """Get Dataset from Data service, get metadata as well.

        Args:
            id (str): ID of the Dataset.
            data_service (obj): Data service.

        Returns:
            obj: Dataset from Data service.

        """
        metadata = data_service.get_dataset_metadata(id)
        instance = cls(metadata)
        instance.cache_files(data_service)
        return instance
Code example #10
File: baseanalysis.py Project: IN-CORE/pyincore
    def __init__(self, incore_client):
        self.spec = self.get_spec()
        self.client = incore_client
        self.data_service = DataService(self.client)

        # initialize parameters, input_datasets, output_datasets, etc
        self.parameters = {}
        for param in self.spec['input_parameters']:
            self.parameters[param['id']] = {'spec': param, 'value': None}

        self.input_datasets = {}
        for input_dataset in self.spec['input_datasets']:
            self.input_datasets[input_dataset['id']] = {'spec': input_dataset, 'value': None}

        self.output_datasets = {}
        for output_dataset in self.spec['output_datasets']:
            self.output_datasets[output_dataset['id']] = {'spec': output_dataset, 'value': None}
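The three loops assume get_spec() returns a dict with 'input_parameters', 'input_datasets', and 'output_datasets', each a list of dicts carrying at least an 'id' key. A minimal hypothetical spec showing just that shape (real specs carry more fields):

    def get_spec(self):
        # only the keys the __init__ loops actually read are shown
        return {
            'input_parameters': [{'id': 'result_name'}],
            'input_datasets': [{'id': 'buildings'}],
            'output_datasets': [{'id': 'result'}]
        }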
Code example #11
    def join_table_dataset_with_source_dataset(dataset, client):
        """Creates geopandas geodataframe by joining table dataset and its source dataset

            Args:
                dataset (Dataset): pyincore dataset object
                client (Client): pyincore service client object

            Returns:
                gpd.GeoDataFrame: Geopandas geodataframe object.

        """
        is_source_dataset = False
        source_dataset = None

        # check if the given dataset is table dataset
        if dataset.metadata['format'] != 'table' and dataset.metadata[
                'format'] != 'csv':
            print("The given dataset is not a table dataset")
            return None

        # check if source dataset exists
        try:
            source_dataset = dataset.metadata['sourceDataset']
            is_source_dataset = True
        except Exception:
            print("There is no source dataset for the give table dataset")

        if is_source_dataset:
            # merge dataset and source dataset
            geodataset = Dataset.from_data_service(source_dataset,
                                                   DataService(client))
            joined_gdf = DatasetUtil.join_datasets(geodataset, dataset)
        else:
            return None

        return joined_gdf
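A sketch of the join helper in use; the table dataset id is a hypothetical placeholder:

table_dataset = Dataset.from_data_service("<table-dataset-id>", DataService(client))
joined_gdf = DatasetUtil.join_table_dataset_with_source_dataset(table_dataset, client)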
Code example #12
import os
import pandas as pd
import geopandas as gpd
from pyincore import DataService, IncoreClient, Dataset

client = IncoreClient()
datasvc = DataService(client)

# reading in Nathanael's building inventory
building_inventory_pop_id = '5d5433edb9219c0689b98344'
dataset = Dataset.from_data_service(building_inventory_pop_id, datasvc)
df = dataset.get_dataframe_from_csv()


# reading in OSU's Seaside building shapefile as a geodataframe
seaside_building_shapefile_id = '5df40388b9219c06cf8b0c80'
dataset = Dataset.from_data_service(seaside_building_shapefile_id, datasvc)
gdf = dataset.get_dataframe_from_shapefile()

# merging Nathanael's building inventory CSV with OSU's shapefile
gdf_new = pd.merge(gdf, df[['strctid', 'guid']], how='left', left_on='guid', right_on='guid')
print(len(df), len(gdf), len(gdf_new))	# checking lengths

# writing to shapefile
file_new = os.path.join(os.getcwd(), 'seaside_bldg.shp')
gdf_new.to_file(file_new)
Code example #13
from pyincore import IncoreClient, DataService, SpaceService

shp_file = input("Is shape file created?[y/n]: ")

if shp_file == 'y':
    shp_loc = input("shapefile package location + filename without extension ")
    print("Including Files - \n ")
    print(shp_loc+'.shp')
    print(shp_loc+'.shx')
    print(shp_loc+'.prj')
    print(shp_loc+'.dbf')
    proceed = input("\n Ready to upload to incore. Proceed?[y/n] ")

    if proceed == 'y':
        print("\n Uploading dataset to incore")
        print("================================")
        client = IncoreClient()
        data_services = DataService(client)
        space_services = SpaceService(client)

        dataset_metadata = {
            "title": "MMSA All Building Inventory",
            "description": "Shelby building inventory containing strctid, longitude, latitude and block id",
            "dataType": "ergo:buildingInventoryVer5",
            "format": "shapefile"
        }

        created_dataset = data_services.create_dataset(dataset_metadata)
        dataset_id = created_dataset['id']
        print('dataset is created with id ' + dataset_id)

        files = [shp_loc+'.shp', shp_loc+'.shx', shp_loc+'.prj', shp_loc+'.dbf']
        full_dataset = data_services.add_files_to_dataset(dataset_id, files)
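The script instantiates space_services but never uses it; moving the new dataset into a space would follow example #1, e.g. (the space id below is a hypothetical placeholder):

        space_services.add_dataset_to_space("<space-id>", dataset_id=dataset_id)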
Code example #14
    def __init__(self, incore_client):
        self.hazardsvc = HazardService(incore_client)
        self.fragilitysvc = FragilityService(incore_client)
        self.datasetsvc = DataService(incore_client)
        self.fragility_tower_id = '5b201b41b1cf3e336de8fa67'
        self.fragility_pole_id = '5b201d91b1cf3e336de8fa68'

        # flag for deciding whether to use the indpnode field; leaving it off is safer for general datasets
        self.use_indpnode = False
        self.nnode = 0
        self.highest_node_num = 0
        self.EF = 0
        self.nint = []
        self.indpnode = []
        self.mcost = 1435  # mean repair cost for single distribution pole
        self.vcost = (0.1 * self.mcost)**2
        self.sigmad = math.sqrt(math.log(
            self.vcost / (self.mcost**2) +
            1))  # convert to gaussian Std Deviation to be used in logncdf
        self.mud = math.log(
            (self.mcost**2) / math.sqrt(self.vcost + self.mcost**2))

        self.mcost = 400000  # mean repair cost for single transmission pole
        self.vcost = (0.1 * self.mcost)**2
        self.sigmat = math.sqrt(math.log(
            self.vcost / (self.mcost**2) +
            1))  # convert to gaussian Std Deviation to be used in logncdf
        self.mut = math.log(
            (self.mcost**2) / math.sqrt(self.vcost + self.mcost**2))

        self.tmut = 72  # mean repair time for transmission tower in hrs
        self.tsigmat = 36  # std dev

        self.tmud = 5  # mean repair time for poles in hrs
        self.tsigmad = 2.5

        self.totalcost2repairpath = []
        self.totalpoles2repair = []

        self.tornado_sim_field_name = 'SIMULATION'
        self.tornado_ef_field_name = 'EF_RATING'

        # tornado number of simulation and ef_rate
        self.nmcs = 0
        self.tornado_ef_rate = 0

        self.pole_distance = 38.1

        # node variables
        self.nodenwid_fld_name = "NODENWID"
        self.indpnode_fld_name = "INDPNODE"
        self.guid_fldname = 'GUID'

        # link variables
        self.tonode_fld_name = "TONODE"
        self.fromnode_fld_name = "FROMNODE"
        self.linetype_fld_name = "LINETYPE"

        # line type variable
        self.line_transmission = "transmission"
        self.line_distribution = "distribution"

        super(TornadoEpnDamage, self).__init__(incore_client)
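The mud/sigmad (and mut/sigmat) assignments above moment-match a lognormal distribution: for a mean cost m and variance v, sigma = sqrt(ln(v/m^2 + 1)) and mu = ln(m^2 / sqrt(v + m^2)). A quick self-contained check of that conversion:

import math

m, v = 1435, (0.1 * 1435) ** 2  # mean and variance of the distribution-pole repair cost
sigma = math.sqrt(math.log(v / m**2 + 1))
mu = math.log(m**2 / math.sqrt(v + m**2))
# recovering the lognormal mean and variance returns m and v
assert math.isclose(math.exp(mu + sigma**2 / 2), m)
assert math.isclose((math.exp(sigma**2) - 1) * math.exp(2 * mu + sigma**2), v)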
Code example #15
def datasvc(monkeypatch):
    client = IncoreClient(service_url=pyglobals.INCORE_API_DEV_URL,
                          token_file_name=".incrtesttoken")
    return DataService(client)
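This is presumably registered as a pytest fixture (the decorator sits just outside the excerpt); a test would consume it by taking the fixture name as an argument, a sketch:

def test_get_datasets(datasvc):
    datasets = datasvc.get_datasets()
    assert len(datasets) != 0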