Example #1
 def return_fset(self):
     try:
         from arcgis.features import FeatureSet
         fset = FeatureSet(self.features)
         fset.fields = self.fields
         return fset
     except ImportError:
         raise Exception('You might not have the arcgis module')
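A minimal sketch of how a FeatureSet like the one above might be assembled by hand; the geometry, attribute names, and field definitions here are illustrative and are not taken from the original class:

from arcgis.features import Feature, FeatureSet

# Hypothetical features and field metadata, mirroring what return_fset() wraps.
features = [
    Feature(geometry={"x": -122.45, "y": 37.77,
                      "spatialReference": {"wkid": 4326}},
            attributes={"name": "Sample point", "value": 1}),
]
fields = [
    {"name": "name", "type": "esriFieldTypeString", "alias": "Name"},
    {"name": "value", "type": "esriFieldTypeInteger", "alias": "Value"},
]

fset = FeatureSet(features)
fset.fields = fields          # same assignment pattern as return_fset()
print(fset.features[0].attributes)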
Example #2
def add_features(feature_layer, features, use_global_ids=False):
    """ Adds features to the feature_layer.  The input features will be updated upon successful
        adding on the server, such that they contain the server-assigned object_ids and global_ids.
        :param feature_layer: An arcgis.features.FeatureLayer.
        :param features: list of arcgis.features.Features.
        :param use_global_ids: use global ids or not
        :returns: The added features.
        :raises ServerError: Indicates that the server rejected the new features.
    """
    if features:
        feature_set = FeatureSet(features)
        response = feature_layer.edit_features(adds=feature_set,
                                               use_global_ids=use_global_ids)
        add_results = response['addResults']
        errors = [
            result['error'] for result in add_results if not result['success']
        ]
        if errors:
            raise workforce.ServerError(errors)
        for feature, add_result in zip(features, add_results):
            feature.attributes[feature_layer.properties(
                'objectIdField')] = add_result['objectId']
            feature.attributes[feature_layer.properties(
                'globalIdField')] = add_result['globalId']
    return features
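A hedged usage sketch for add_features(); the credentials, layer URL, and attribute values below are placeholders, and the workforce module is assumed to be importable as in the original script:

from arcgis.gis import GIS
from arcgis.features import Feature, FeatureLayer

gis = GIS("https://www.arcgis.com", "username", "password")   # placeholder credentials
layer = FeatureLayer(
    "https://services.arcgis.com/example/arcgis/rest/services/Assignments/FeatureServer/0",
    gis)   # placeholder URL

new_feature = Feature(
    geometry={"x": -117.19, "y": 34.05, "spatialReference": {"wkid": 4326}},
    attributes={"status": 1})   # hypothetical field

# Raises workforce.ServerError if the server rejects the edit; on success the
# server-assigned objectId/globalId are written back onto each Feature.
added = add_features(layer, [new_feature])
print(added[0].attributes)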
Example #3
    def process_summaries(self,
                          processed_dir,
                          processed_file_details,
                          make_historical_csv=False):

        if self.verbose:
            print("Starting load of summary table...")

        summary_filename = self.agol.layers['summary_table'][
            'original_file_name']

        summary_df = pd.DataFrame()
        for f in processed_file_details:
            fname = f["processed_filename"]
            size = os.path.getsize(os.path.join(processed_dir, fname))
            if size > 0:
                df = load_csv_to_df(os.path.join(processed_dir, fname))
                table_row = create_summary_table_row(df, f["source_datetime"],
                                                     f["filename"])
                summary_df = summary_df.append(table_row, ignore_index=True)
            else:
                print(f"{fname} has a filesize of {size}, not processing.")

        if make_historical_csv:
            out_csv_file = os.path.join(processed_dir, summary_filename)
            summary_df.to_csv(out_csv_file, index=False, header=True)
            if self.verbose:
                print(
                    "Finished creation of historical summary table CSV, returning."
                )
            return

        layer_conf = self.agol.layers['summary_table']

        table = self.agol.gis.content.get(layer_conf['id'])
        t = table.tables[0]

        new_col_names = {}
        for name in t.properties.fields:
            new_col_names[name["alias"]] = name["name"]
        summary_df = summary_df.rename(columns=new_col_names)
        df_as_dict = summary_df.to_dict(orient='records')

        features = []
        for r in df_as_dict:
            ft = Feature(attributes=r)
            features.append(ft)
        # It's okay if features is empty; status will reflect arcgis telling us that,
        # but it won't stop the processing.
        fs = FeatureSet(features)
        if self.dry_run:
            if self.verbose:
                print("Dry run set, not editing features.")
        else:
            status = t.edit_features(adds=fs)
            if self.verbose:
                print(status)
        if self.verbose:
            print("Finished load of summary table")
Example #4
 def return_sdf(self, simplify=True):
     try:
         from arcgis.features import FeatureSet
         if simplify:
             out_fields = [field['name'] for field in self.fields if field['name'] not in self.exclude_fields]
         else:
             out_fields = [field['name'] for field in self.fields]
         return FeatureSet(self.features).sdf[out_fields]
     except ImportError:
         raise Exception('You might not have the arcgis module')
Example #5
def append_geo_json(geo_json):
    gis = GIS("https://www.arcgis.com", username="", password="")
    crime_properties = {
        'title': 'Crime data',
        'tags': 'crimes, open data, devlabs',
        'type': 'GeoJson'
    }

    search_result = gis.content.search(query="", item_type="Feature Layer")
    crime_data_item = search_result[0]
    crime_data_feature_layer = FeatureLayer.fromitem(crime_data_item,
                                                     layer_id=0)
    new_crime_set = FeatureSet.from_geojson(geo_json)
    crime_data_feature_layer.edit_features(adds=new_crime_set)
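A small illustrative payload for append_geo_json(); the attribute name is hypothetical, and FeatureSet.from_geojson performs the conversion inside the function:

sample_geojson = {
    "type": "FeatureCollection",
    "features": [{
        "type": "Feature",
        "geometry": {"type": "Point", "coordinates": [-87.65, 41.85]},
        "properties": {"offense": "Theft"}   # hypothetical attribute
    }]
}
append_geo_json(sample_geojson)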
Example #6
def get_geo_location_info_by_OD_matrix(lon, lat):
    # Coordinates are ordered "longitude, latitude" so that x/y map correctly below.
    origin_coords = [
        '30.5194453, 50.448069', '30.5116673, 50.448616',
        '34.7828343, 50.913788'
    ]
    # origin_coords = ['-117.187807, 33.939479', '-117.117401, 34.029346']

    origin_features = []

    for origin in origin_coords:
        reverse_geocode = geocoding.reverse_geocode({
            "x": origin.split(',')[0],
            "y": origin.split(',')[1]
        })

        origin_feature = Feature(geometry=reverse_geocode['location'],
                                 attributes=reverse_geocode['address'])
        origin_features.append(origin_feature)

    origin_fset = FeatureSet(origin_features,
                             geometry_type='esriGeometryPoint',
                             spatial_reference={'latestWkid': 4326})

    destinations_address = r"data/destinations_address.csv"
    destinations_df = pd.read_csv(destinations_address)
    destinations_sdf = pd.DataFrame.spatial.from_df(destinations_df, "Address")

    destinations_fset = destinations_sdf.spatial.to_featureset()

    try:
        results = generate_origin_destination_cost_matrix(
            origins=origin_fset,  # origins_fc_latlong,
            destinations=destinations_fset,  # destinations_fs_address,
            cutoff=200,
            origin_destination_line_shape='Straight Line')
        od_df = results.output_origin_destination_lines.sdf

        # filter only the required columns
        od_df2 = od_df[[
            'DestinationOID', 'OriginOID', 'Total_Distance', 'Total_Time'
        ]]

        # user pivot_table
        od_pivot = od_df2.pivot_table(index='OriginOID',
                                      columns='DestinationOID')
        return od_pivot
    except Exception as exp:
        print(exp)

    return None
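A brief, hypothetical call of the function above; the lon/lat arguments are currently unused because the origins are hard-coded and the destinations come from data/destinations_address.csv:

od_pivot = get_geo_location_info_by_OD_matrix(30.5194453, 50.448069)
if od_pivot is not None:
    print(od_pivot.head())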
Example #7
def update_features(feature_layer, features):
    """ Updates features in a feature_layer.
        :param feature_layer: An arcgis.features.FeatureLayer.
        :param features: list of arcgis.features.Features.  Each feature must have an object id.
        :raises ServerError: Indicates that the server rejected the updates.
    """
    if features:
        response = feature_layer.edit_features(updates=FeatureSet(features))
        errors = [
            result['error'] for result in response["updateResults"]
            if not result['success']
        ]
        if errors:
            raise workforce.ServerError(errors)
    return features
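A minimal sketch of a typical call to update_features(); the layer URL and edited field are placeholders, and the features are queried first so each one carries the object id the docstring requires:

from arcgis.gis import GIS
from arcgis.features import FeatureLayer

gis = GIS("https://www.arcgis.com", "username", "password")   # placeholder credentials
layer = FeatureLayer(
    "https://services.arcgis.com/example/arcgis/rest/services/Workers/FeatureServer/0",
    gis)   # placeholder URL

# Query existing rows so every Feature already has its object id populated.
existing = layer.query(where="1=1").features
for feature in existing:
    feature.attributes["status"] = 2   # hypothetical field and value

updated = update_features(layer, existing)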
Example #8
    def to_featureset(self):
        """
        Return a feature set
        """
        bin_features = []
        for bin_entry in self.bins():
            bin_feature = Feature(
                geometry=bin_entry['geometry'],
                attributes={'hitCount': bin_entry['hitCount']})
            bin_features.append(bin_feature)

        bin_fields = ['hitCount']
        return FeatureSet(bin_features,
                          bin_fields,
                          geometry_type='esriGeometryPolygon',
                          spatial_reference=self._wkid)
Example #9
def update_database(current, last):
    """ 
Update the big database from the little one if the little one has new data for the big one.

current = todays data
last = the last row from the big database

Returns True if the database was updated
    """

    # If these all match then the latest data
    # has already been written to the database

    if ((last.new_cases == current.new_cases)
            and (last.total_cases == current.total_cases)
            and (last.last_update == current.date)):
        print("Database is current.")

        # If you want to test the update, comment this out
        # and then go in and manually delete the extra row(s).
        return False

    print("Appending the new record.")

    #return False

    attributes = {
        "utc_date": datetime.utcnow(),
        "last_update": current.date,
        "editor": os.environ.get('USERNAME') or os.environ.get('USER'),
        "source": "CC",
        "new_cases" : current.new_cases,
        "total_cases" : current.total_cases,
        "new_deaths" : current.new_deaths,
        "total_deaths" : current.total_deaths,
        "name": "Clatsop"
    }

    gis = GIS(Config.PORTAL_URL, Config.PORTAL_USER, Config.PORTAL_PASSWORD)
    layer = FeatureLayer(Config.COVID_CASES_URL, gis)
    f = Feature.from_dict({"attributes": attributes})
    fs = FeatureSet(features=[f])
    results = layer.edit_features(adds=fs)

    return True
Example #10
def complex_fc_to_fset(fc_or_flyr, temp_dir = r'P:/temp'):
    '''converts a featureclass or feature layer to an AGOL-friendly FeatureSet object.
    This is good for complex geometry, as SDF-from_featureclass simplifies geometry.
    requires arcpy.
    
    inputs:
        fc_or_flyr:  feature class or feature layer.  Can be a selection.
        temp_dir:    local folder for saving a temporary json file
    '''
    from os import makedirs
    from os.path import join, exists
    from json import load
    from arcpy import FeaturesToJSON_conversion, Delete_management
    from arcgis.features import FeatureSet
    
    if not exists(temp_dir):
        makedirs(temp_dir)
    temp_json_file = FeaturesToJSON_conversion(fc_or_flyr,join(temp_dir,'temp_features.json'))[0]
    with open(temp_json_file) as jsonfile:
        data = load(jsonfile)
    Delete_management(temp_json_file)
    return FeatureSet.from_dict(data)
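A usage sketch under the docstring's assumptions (arcpy installed, a local feature class); the paths and the target layer are placeholders:

# Hypothetical paths; FeaturesToJSON_conversion requires an arcpy/ArcGIS Pro environment.
fset = complex_fc_to_fset(r'C:/data/parcels.gdb/parcels', temp_dir=r'C:/temp')
print(len(fset.features))

# The resulting FeatureSet can then be pushed to a hosted layer, for example:
# target_layer.edit_features(adds=fset)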
Example #11
            classes = visual_recognition.classify(images_file,
                                                  parameters=json.dumps({
                                                      'classifier_ids':
                                                      ['##########'],
                                                      'threshold':
                                                      0.876
                                                  }))

        #print(json.dumps(classes, indent=2))
        data = json.loads(json.dumps(classes, indent=2))
        if len(data['images'][0]['classifiers'][0]['classes']) != 0:

            print(json.dumps(classes, indent=2))

            f = open(
                "./images/" +
                filename.replace("photo", "coor").replace("jpg", "txt"), "r")
            d = float(f.read()) * 0.000000301

            gis = GIS(username="******", password="******")
            p = Point({"x": -73.471977 + d, "y": 40.703342})
            a = {"pothole_layer": "pothole"}
            f = Feature(p, a)
            fs = FeatureSet([f])
            lyr = FeatureLayer(
                "https://services8.arcgis.com/x660UqfqVJlbWB0Y/arcgis/rest/services/pothole_layers/FeatureServer/0",
                gis=gis)
            lyr.edit_features(adds=fs)

        #delete photo and txt
Example #12
 def to_featureset(self):
     return FeatureSet.from_dict(self.__feature_set__())
Example #13
def main(arguments):  # noqa: C901
    # Initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the workforce project
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)
    try:
        if project._is_v2_project:
            raise Exception("This is a v2 project. Please migrate v1 projects")
    except AttributeError:
        raise Exception(
            "Cannot find the attribute is v2 project. "
            "Are you sure you have the API version 1.8.3 or greater installed? Check with `arcgis.__version__` in your Python console"
        )
    logger.info(project)
    logger.info("Creating base v2 project...")

    # Create WF Project w given title
    if arguments.title:
        if arguments.title != project.title:
            title = arguments.title
        else:
            raise Exception(
                "Cannot name your project the same as the old one. Please provide a unique name"
            )
    else:
        title = project.title + " Updated"
    v2_project = workforce.create_project(title=title,
                                          summary=project.summary,
                                          major_version=2)

    # Update thumbnail
    with tempfile.TemporaryDirectory() as dirpath:
        try:
            thumbnail = item.download_thumbnail(save_folder=dirpath)
            v2_project._item.update(thumbnail=thumbnail)
            gis.content.get(
                v2_project.worker_web_map_id).update(thumbnail=thumbnail)
            gis.content.get(
                v2_project.dispatcher_web_map_id).update(thumbnail=thumbnail)
        except Exception:
            logger.info("Thumbnail not migrated successfully")

    # Migrate Assignment Types
    logger.info("Migrating assignment types...")
    existing_assignment_types = project.assignment_types.search()
    at_to_add = []

    for assignment_type in existing_assignment_types:
        if assignment_type.name:
            at_to_add.append(
                workforce.AssignmentType(project=v2_project,
                                         name=assignment_type.name))
        else:
            logger.info(
                "Assignment Type migration skipped - does not have a name")

    # Get Assignment Types in migrated project before you potentially add a bad worker / dispatcher
    v2_project.assignment_types.batch_add(at_to_add)
    new_assignment_types = v2_project.assignment_types.search()
    if len(existing_assignment_types) == len(new_assignment_types):
        logger.info("Assignment Types successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Assignment Types not successfully migrated. Cleaning up new project"
        )

    # Migrate Dispatchers
    if not arguments.skip_dispatchers:
        logger.info("Migrating dispatchers...")
        dispatcher_ghost = False

        # Get Existing Dispatchers
        existing_dispatchers = project.dispatchers.search()
        dispatchers_to_add = []
        layer = v2_project.dispatchers_layer

        # Get Custom Dispatcher Fields and Templates
        custom_fields = add_custom_fields(project.dispatchers_layer, layer)

        # Prepare Dispatchers to be added
        for dispatcher in existing_dispatchers:

            # Validate that there is a user id populated and that the user id isn't yourself (since that was added during project creation).
            # Otherwise, skip adding the dispatcher
            if dispatcher.user_id and dispatcher.user_id != arguments.username:

                # Validate a name exists, otherwise populate with an empty string
                dispatcher_name = dispatcher.user_id if dispatcher.name is None else dispatcher.name

                attributes = {
                    v2_project._dispatcher_schema.name: dispatcher_name,
                    v2_project._dispatcher_schema.contact_number:
                    dispatcher.contact_number,
                    v2_project._dispatcher_schema.user_id: dispatcher.user_id,
                    v2_project._dispatcher_schema.global_id:
                    dispatcher.global_id
                }

                # Add Custom Field Values
                for field in custom_fields:
                    attributes[field["name"]] = dispatcher._feature.attributes[
                        field["name"]]
                feature = Feature(geometry=dispatcher.geometry,
                                  attributes=attributes)
                dispatchers_to_add.append(feature)
            else:
                if not dispatcher.user_id:
                    logger.info(
                        "Dispatcher was skipped from migrating. The dispatcher does not a valid user_id in the layer, or 2. "
                        "The dispatcher was already added. Please check the original dispatchers layer."
                    )
                    dispatcher_ghost = True
                else:
                    # update info for owner dispatcher
                    v2_dispatcher = v2_project.dispatchers.search()[0]
                    v2_dispatcher.update(
                        contact_number=dispatcher.contact_number,
                        name=dispatcher.name)

        # Add Dispatchers
        layer.edit_features(adds=FeatureSet(dispatchers_to_add),
                            use_global_ids=True)
        # add dispatcher named users to the project's group.
        max_add_per_call = 25
        for i in range(0,
                       math.ceil(len(dispatchers_to_add) / max_add_per_call)):
            v2_project.group.add_users([
                d.attributes[v2_project._dispatcher_schema.user_id]
                for d in dispatchers_to_add[i * max_add_per_call:(
                    i * max_add_per_call) + max_add_per_call]
            ])
        new_dispatchers = v2_project.dispatchers_layer.query(
            "1=1", return_all_records=True).features
        if len(existing_dispatchers) == len(
                new_dispatchers) or dispatcher_ghost:
            logger.info("Dispatchers successfully migrated")
        else:
            raise Exception("Dispatchers not migrated successfully")

    # Migrate Workers
    logger.info("Migrating workers...")
    worker_ghost = False

    # Get Existing Workers
    existing_workers = project.workers_layer.query(
        "1=1", return_all_records=True).features
    workers_to_add = []
    layer = v2_project.workers_layer

    # Get Custom Worker Fields
    custom_fields = add_custom_fields(project.workers_layer, layer)
    # Prepare Workers to be added
    for worker in existing_workers:
        if worker.attributes[project._worker_schema.user_id]:
            worker_name = worker.attributes[project._worker_schema.user_id] if worker.attributes[project._worker_schema.name] is None else \
                worker.attributes[project._worker_schema.name]
            worker_status = 0 if worker.attributes[
                project._worker_schema.status] is None else worker.attributes[
                    project._worker_schema.status]
            attributes = {
                v2_project._worker_schema.name:
                worker_name,
                v2_project._worker_schema.contact_number:
                worker.attributes[project._worker_schema.contact_number],
                v2_project._worker_schema.notes:
                worker.attributes[project._worker_schema.notes],
                v2_project._worker_schema.status:
                worker_status,
                v2_project._worker_schema.title:
                worker.attributes[project._worker_schema.title],
                v2_project._worker_schema.user_id:
                worker.attributes[project._worker_schema.user_id],
                v2_project._worker_schema.global_id:
                worker.attributes[project._worker_schema.global_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = worker.attributes[field["name"]]
            feature = Feature(geometry=worker.geometry, attributes=attributes)
            workers_to_add.append(feature)
        else:
            worker_ghost = True
            logger.info("Worker migration skipped - does not have a user id")

    # Add Workers
    layer.edit_features(adds=FeatureSet(workers_to_add), use_global_ids=True)
    # add worker named users to the project's group.
    max_add_per_call = 25
    for i in range(0, math.ceil(len(workers_to_add) / max_add_per_call)):
        v2_project.group.add_users([
            w.attributes[v2_project._worker_schema.user_id]
            for w in workers_to_add[i *
                                    max_add_per_call:(i * max_add_per_call) +
                                    max_add_per_call]
        ])
    new_workers = v2_project.workers_layer.query(
        "1=1", return_all_records=True).features
    if (len(existing_workers) == len(new_workers)) or worker_ghost:
        logger.info("Workers successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Workers not migrated successfully. Cleaning up new project")

    # Migrate Assignments
    logger.info("Migrating assignments")
    assignment_ghost = False

    # Get Existing Assignments
    existing_assignments = project.assignments_layer.query(
        "1=1", return_all_records=True).features
    assignments_to_add = []
    layer = v2_project.assignments_layer

    # Set Custom Fields for Assignments and Templates
    custom_fields = add_custom_fields(project.assignments_layer, layer)

    # Prepare Assignments to be Added
    for assignment in existing_assignments:
        if assignment.attributes[project._assignment_schema.assignment_type]:

            # set attributes in case they are empty
            assignment_location = (str(assignment.geometry["x"]) + " " + str(assignment.geometry["y"])) if \
                assignment.attributes[project._assignment_schema.location] is None else assignment.attributes[project._assignment_schema.location]
            assignment_status = 0 if assignment.attributes[project._assignment_schema.status] is None else \
                assignment.attributes[project._assignment_schema.status]
            assignment_priority = 0 if assignment.attributes[project._assignment_schema.priority] is None else \
                assignment.attributes[project._assignment_schema.priority]

            assignment_type_name = ""
            for at in existing_assignment_types:
                if at.code == assignment.attributes[
                        project._assignment_schema.assignment_type]:
                    assignment_type_name = at.name
                    break
            attributes = {
                v2_project._assignment_schema.status:
                assignment_status,
                v2_project._assignment_schema.notes:
                assignment.attributes[project._assignment_schema.notes],
                v2_project._assignment_schema.priority:
                assignment_priority,
                v2_project._assignment_schema.assignment_type:
                get_assignment_type_global_id(new_assignment_types,
                                              assignment_type_name),
                v2_project._assignment_schema.work_order_id:
                assignment.attributes[
                    project._assignment_schema.work_order_id],
                v2_project._assignment_schema.due_date:
                assignment.attributes[project._assignment_schema.due_date],
                v2_project._assignment_schema.description:
                assignment.attributes[project._assignment_schema.description],
                v2_project._assignment_schema.worker_id:
                get_worker_global_id(
                    project.workers.search(), assignment.attributes[
                        project._assignment_schema.worker_id]),
                v2_project._assignment_schema.location:
                assignment_location,
                v2_project._assignment_schema.declined_comment:
                assignment.attributes[
                    project._assignment_schema.declined_comment],
                v2_project._assignment_schema.assigned_date:
                assignment.attributes[
                    project._assignment_schema.assigned_date],
                v2_project._assignment_schema.in_progress_date:
                assignment.attributes[
                    project._assignment_schema.in_progress_date],
                v2_project._assignment_schema.completed_date:
                assignment.attributes[
                    project._assignment_schema.completed_date],
                v2_project._assignment_schema.declined_date:
                assignment.attributes[
                    project._assignment_schema.declined_date],
                v2_project._assignment_schema.paused_date:
                assignment.attributes[project._assignment_schema.paused_date],
                v2_project._assignment_schema.dispatcher_id:
                get_dispatcher_global_id(
                    arguments.skip_dispatchers, project.dispatchers.search(),
                    assignment.attributes[
                        project._assignment_schema.dispatcher_id]),
                v2_project._assignment_schema.global_id:
                assignment.attributes[project._assignment_schema.global_id],
                v2_project._assignment_schema.object_id:
                assignment.attributes[project._assignment_schema.object_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = assignment.attributes[
                    field["name"]]
            feature = Feature(geometry=assignment.geometry,
                              attributes=attributes)
            assignments_to_add.append(feature)
        else:
            logger.info(
                "One assignment's migration skipped - does not have an assignment type"
            )
            assignment_ghost = True

    # Add Assignments
    layer.edit_features(adds=FeatureSet(assignments_to_add),
                        use_global_ids=True)
    new_assignments = v2_project.assignments_layer.query(
        "1=1", return_all_records=True).features
    if (len(new_assignments) == len(existing_assignments)) or assignment_ghost:
        logger.info("Assignments successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Assignments not migrated successfully. Cleaning up new project")

    # Migrate Attachments
    logger.info("Migrating Attachments")
    for assignment in existing_assignments:
        object_id = assignment.attributes[project._assignment_schema.object_id]
        new_assignment_object_id = v2_project.assignments.get(
            global_id=assignment.attributes[
                project._assignment_schema.global_id]).object_id
        if len(project.assignments_layer.attachments.get_list(object_id)) > 0:
            try:
                with tempfile.TemporaryDirectory() as dirpath:
                    paths = project.assignments_layer.attachments.download(
                        oid=object_id, save_path=dirpath)
                    for path in paths:
                        v2_project.assignments_layer.attachments.add(
                            oid=new_assignment_object_id, file_path=path)
            except Exception as e:
                logger.info(e)
                logger.info(
                    "Skipping migration of this attachment. It did not download successfully"
                )
    if len(project.assignments_layer.attachments.search("1=1")) == len(
            v2_project.assignments_layer.attachments.search("1=1")):
        logger.info("Attachments successfully migrated")
    else:
        logger.info(
            "Not all of your attachments migrated successfully. Continuing with migration"
        )

    # Migrate Integrations
    logger.info("Migrating Integrations")
    v2_project.integrations.batch_delete(
        [v2_project.integrations.get("arcgis-navigator")[0]])
    previous_integrations = project.integrations.search()

    # Replacing AT Code with GUID
    for integration in previous_integrations:
        if "assignmentTypes" in integration:
            types = integration["assignmentTypes"]
            key_list = list(sorted(types.keys()))
            for key in key_list:
                at_name = project.assignment_types.get(code=int(key)).name
                guid = get_assignment_type_global_id(new_assignment_types,
                                                     at_name)
                v2_project.integrations.add(
                    integration_id=integration["id"],
                    prompt=integration["prompt"],
                    url_template=types[key]["urlTemplate"],
                    assignment_types=guid)
        else:
            # default id changed
            if integration["id"] == "default-navigator":
                integration["id"] = "arcgis-navigator"
            v2_project.integrations.add(
                integration_id=integration["id"],
                prompt=integration["prompt"],
                url_template=integration["urlTemplate"])
    logger.info("Integrations migrated successfully")

    # Get rid of old URL patterns
    integrations = v2_project.integrations.search()
    generate_universal_links(integrations)

    # Migrate Webmaps - Retain non-WF layers
    logger.info("Migrating Webmaps")
    upgrade_webmaps(project.worker_webmap, v2_project.worker_webmap)
    upgrade_webmaps(project.dispatcher_webmap, v2_project.dispatcher_webmap)
    logger.info("Script Completed")
Example #14
def to_featureset(df):
    """converts a pd.DataFrame to a FeatureSet Object"""
    if hasattr(df, 'spatial'):
        fs = df.spatial.__feature_set__
        return FeatureSet.from_dict(fs)
    return None
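A short sketch of the kind of input this helper expects; GeoAccessor.from_xy is used here to build a spatially enabled DataFrame from plain columns, assuming a reasonably recent arcgis release:

import pandas as pd
from arcgis.features import GeoAccessor  # registers the DataFrame.spatial accessor

plain_df = pd.DataFrame({
    "name": ["A", "B"],
    "lon": [-122.45, -122.41],
    "lat": [37.77, 37.78],
})
sdf = GeoAccessor.from_xy(plain_df, x_column="lon", y_column="lat", sr=4326)

fs = to_featureset(sdf)   # returns an arcgis FeatureSet (or None for plain frames)
print(len(fs.features))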
Example #15
def get_dataframe(in_features, gis=None):
    """
    Get a spatially enabled dataframe from the input features provided.
    :param in_features: Spatially Enabled Dataframe | String path to Feature Class | pathlib.Path object to feature
        class | ArcGIS Layer object |String url to Feature Service | String Web GIS Item ID
        Resource to be evaluated and converted to a Spatially Enabled Dataframe.
    :param gis: Optional GIS object instance for connecting to resources.
    """
    # if a path object, convert to a string for following steps to work correctly
    in_features = str(in_features) if isinstance(in_features, pathlib.Path) else in_features

    # helper for determining if feature layer
    def _is_feature_layer(in_ftrs):
        if hasattr(in_ftrs, 'isFeatureLayer'):
            return in_ftrs.isFeatureLayer
        else:
            return False

    # if already a Spatially Enabled Dataframe, mostly just pass it straight through
    if isinstance(in_features, pd.DataFrame) and in_features.spatial.validate() is True:
        df = in_features

    # if a csv previously exported from a Spatially Enabled Dataframe, get it in
    elif isinstance(in_features, str) and os.path.exists(in_features) and in_features.endswith('.csv'):
        df = pd.read_csv(in_features)
        df['SHAPE'] = df['SHAPE'].apply(lambda geom: Geometry(eval(geom)))

        # this almost always is the index written to the csv, so taking care of this
        if df.columns[0] == 'Unnamed: 0':
            df = df.set_index('Unnamed: 0')
            del (df.index.name)

    # create a Spatially Enabled Dataframe from the direct url to the Feature Service
    elif isinstance(in_features, str) and in_features.startswith('http'):

        # submitted urls can be lacking a few essential pieces, so handle some contingencies with some regex matching
        regex = re.compile(r'((^https?://.*?)(/\d{1,3})?)\?')
        srch = regex.search(in_features)

        # if the layer index is included, still clean by dropping any possible trailing url parameters
        if srch.group(3):
            in_features = f'{srch.group(1)}'

        # ensure at least the first layer is being referenced if the index was forgotten
        else:
            in_features = f'{srch.group(2)}/0'

        # if the layer is unsecured, a gis is not needed, but we have to handle differently
        if gis is not None:
            df = FeatureLayer(in_features, gis).query(out_sr=4326, as_df=True)
        else:
            df = FeatureLayer(in_features).query(out_sr=4326, as_df=True)

    # create a Spatially Enabled Dataframe from a Web GIS Item ID
    elif isinstance(in_features, str) and len(in_features) == 32:

        # if publicly shared on ArcGIS Online this anonymous gis can be used to access the resource
        if gis is None:
            gis = GIS()
        itm = gis.content.get(in_features)
        df = itm.layers[0].query(out_sr=4326, as_df=True)

    elif isinstance(in_features, (str, pathlib.Path)):
        df = GeoAccessor.from_featureclass(in_features)

    # create a Spatially Enabled Dataframe from a Layer
    elif _is_feature_layer(in_features):
        df = FeatureSet.from_json(arcpy.FeatureSet(in_features).JSON).sdf

    # sometimes there is an issue with modified or sliced dataframes with the SHAPE column not being correctly
    #    recognized as a geometry column, so try to set it as the geometry...just in case
    elif isinstance(in_features, pd.DataFrame) and 'SHAPE' in in_features.columns:
        in_features.spatial.set_geometry('SHAPE')
        df = in_features

        if df.spatial.validate() is False:
            raise Exception('Could not process input features for get_dataframe function. Although the input_features '
                            'appear to be in a Pandas Dataframe, the SHAPE column appears to not contain valid '
                            'geometries. The Dataframe is not validating using the *.spatial.validate function.')

    else:
        raise Exception('Could not process input features for get_dataframe function.')

    # ensure the universal spatial column is correctly being recognized
    df.spatial.set_geometry('SHAPE')

    return df
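Two illustrative calls of get_dataframe(); the service URL and the 32-character item id are placeholders:

from arcgis.gis import GIS

gis = GIS()   # anonymous connection, sufficient for public content

# From a feature service URL (note the function's regex expects a query string).
df_from_url = get_dataframe(
    "https://services.arcgis.com/example/arcgis/rest/services/Parks/FeatureServer/0?f=json",
    gis=gis)

# From a Web GIS item id (placeholder value of the right length).
df_from_item = get_dataframe("0123456789abcdef0123456789abcdef", gis=gis)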
Example #16
def main(arguments):  # noqa: C901
    # Initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the old workforce project
    item = gis.content.get(arguments.classic_project_id)
    project = workforce.Project(item)
    try:
        if project._is_v2_project:
            raise Exception(
                "The first project provided is a v2 project. Please migrate assignment data from v1 projects"
            )
    except AttributeError:
        raise Exception(
            "Cannot find the attribute is v2 project. Are you sure you have the API version 1.8.3 or greater installed? "
            "Check with `arcgis.__version__` in your Python console")

    # Get new workforce project
    v2_project = workforce.Project(gis.content.get(arguments.new_project_id))
    if not v2_project._is_v2_project:
        raise Exception(
            "The second project provided is a v1 project. Please migrate assignment data to v2 projects"
        )

    # validate correct assignment types are present
    existing_assignment_types = project.assignment_types.search()
    for assignment_type in existing_assignment_types:
        if not v2_project.assignment_types.get(name=assignment_type.name):
            raise Exception(
                "One of your assignment types in your classic project is not in your offline project"
            )

    # validate correct workers are present
    for worker in project.workers.search():
        if not v2_project.workers.get(user_id=worker.user_id):
            raise Exception(
                "One of your workers in your classic project is not in your offline project"
            )

    # Migrate Assignments
    logger.info("Migrating assignments")
    assignment_ghost = False

    # Get Existing Assignments
    existing_assignments = project.assignments_layer.query(
        where=arguments.where, return_all_records=True).features
    assignments_to_add = []
    layer = v2_project.assignments_layer

    # Set Custom Fields for Assignments and Templates
    custom_fields = add_custom_fields(project.assignments_layer, layer)

    # Prepare Assignments to be Added
    for assignment in existing_assignments:
        if assignment.attributes[project._assignment_schema.assignment_type]:

            # set attributes in case they are empty
            assignment_location = (str(assignment.geometry["x"]) + " " + str(assignment.geometry["y"])) if \
                assignment.attributes[project._assignment_schema.location] is None else \
                assignment.attributes[project._assignment_schema.location]
            assignment_status = 0 if assignment.attributes[project._assignment_schema.status] is None else \
                assignment.attributes[project._assignment_schema.status]
            assignment_priority = 0 if assignment.attributes[project._assignment_schema.priority] is None else \
                assignment.attributes[project._assignment_schema.priority]

            # get AT name based on code stored
            assignment_type_name = ""
            for at in existing_assignment_types:
                if at.code == assignment.attributes[
                        project._assignment_schema.assignment_type]:
                    assignment_type_name = at.name
                    break

            # Set attributes
            attributes = {
                v2_project._assignment_schema.status:
                assignment_status,
                v2_project._assignment_schema.notes:
                assignment.attributes[project._assignment_schema.notes],
                v2_project._assignment_schema.priority:
                assignment_priority,
                v2_project._assignment_schema.assignment_type:
                get_assignment_type_global_id(
                    v2_project.assignment_types.search(),
                    assignment_type_name),
                v2_project._assignment_schema.work_order_id:
                assignment.attributes[
                    project._assignment_schema.work_order_id],
                v2_project._assignment_schema.due_date:
                assignment.attributes[project._assignment_schema.due_date],
                v2_project._assignment_schema.description:
                assignment.attributes[project._assignment_schema.description],
                v2_project._assignment_schema.worker_id:
                get_worker_global_id(
                    project.workers.search(), v2_project.workers,
                    assignment.attributes[
                        project._assignment_schema.worker_id]),
                v2_project._assignment_schema.location:
                assignment_location,
                v2_project._assignment_schema.declined_comment:
                assignment.attributes[
                    project._assignment_schema.declined_comment],
                v2_project._assignment_schema.assigned_date:
                assignment.attributes[
                    project._assignment_schema.assigned_date],
                v2_project._assignment_schema.in_progress_date:
                assignment.attributes[
                    project._assignment_schema.in_progress_date],
                v2_project._assignment_schema.completed_date:
                assignment.attributes[
                    project._assignment_schema.completed_date],
                v2_project._assignment_schema.declined_date:
                assignment.attributes[
                    project._assignment_schema.declined_date],
                v2_project._assignment_schema.paused_date:
                assignment.attributes[project._assignment_schema.paused_date],
                v2_project._assignment_schema.dispatcher_id:
                get_dispatcher_global_id(
                    project.dispatchers.search(), v2_project.dispatchers,
                    assignment.attributes[
                        project._assignment_schema.dispatcher_id]),
                v2_project._assignment_schema.global_id:
                assignment.attributes[project._assignment_schema.global_id],
                v2_project._assignment_schema.object_id:
                assignment.attributes[project._assignment_schema.object_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = assignment.attributes[
                    field["name"]]
            feature = Feature(geometry=assignment.geometry,
                              attributes=attributes)
            assignments_to_add.append(feature)
        else:
            logger.info(
                "One assignment's migration skipped - does not have an assignment type"
            )
            assignment_ghost = True

    # Add Assignments
    layer.edit_features(adds=FeatureSet(assignments_to_add),
                        use_global_ids=True)
    new_assignments = v2_project.assignments_layer.query(
        "1=1", return_all_records=True).features
    # skip validation if there's a ghost
    if (len(new_assignments) == len(existing_assignments)) or assignment_ghost:
        logger.info("Assignments successfully migrated")
    else:
        raise Exception("Assignments not migrated successfully. Unknown error")

    # Migrate Attachments
    logger.info("Migrating Attachments")
    for assignment in existing_assignments:
        object_id = assignment.attributes[project._assignment_schema.object_id]
        new_assignment_object_id = v2_project.assignments.get(
            global_id=assignment.attributes[
                project._assignment_schema.global_id]).object_id
        if len(project.assignments_layer.attachments.get_list(object_id)) > 0:
            try:
                with tempfile.TemporaryDirectory() as dirpath:
                    paths = project.assignments_layer.attachments.download(
                        oid=object_id, save_path=dirpath)
                    for path in paths:
                        v2_project.assignments_layer.attachments.add(
                            oid=new_assignment_object_id, file_path=path)
            except Exception as e:
                logger.info(e)
                logger.info(
                    "Skipping migration of this attachment. It did not download successfully"
                )
    if len(project.assignments_layer.attachments.search("1=1")) == len(
            v2_project.assignments_layer.attachments.search("1=1")):
        logger.info("Attachments successfully migrated")
    else:
        logger.info(
            "Not all of your attachments migrated successfully. Continuing with migration"
        )
    logger.info("Script Completed")
Example #17
def create_buffers(input_layer,
                   distance=1,
                   distance_unit="Miles",
                   field=None,
                   method="Planar",
                   dissolve_option="None",
                   dissolve_fields=None,
                   summary_fields=None,
                   multipart=False,
                   output_name=None,
                   context=None,
                   gis=None,
                   future=False):
    """

    .. image:: _static/images/create_buffers_geo/create_buffers_geo.png

    Buffers are typically used to create areas that can be further analyzed
    using other tools such as ``aggregate_points``. For example, ask the question,
    "What buildings are within one mile of the school?" The answer can be found
    by creating a one-mile buffer around the school and overlaying the buffer
    with the layer containing building footprints. The end result is a layer
    of those buildings within one mile of the school.

    ================================================    =========================================================
    **Parameter**                                       **Description**
    ------------------------------------------------    ---------------------------------------------------------
    input_layer                                         Required layer. The point, line, or polygon features to be buffered.
                                                        See :ref:`Feature Input<gaxFeatureInput>`.
    ------------------------------------------------    ---------------------------------------------------------
    distance (Required if field is not provided)        Optional float. A float value used to buffer the input features.
                                                        You must supply a value for either the distance or field parameter.
                                                        You can only enter a single distance value. The units of the
                                                        distance value are supplied by the ``distance_unit`` parameter.

                                                        The default value is 1.
    ------------------------------------------------    ---------------------------------------------------------
    distance_unit (Required if distance is used)        Optional string. The linear unit to be used with the value specified in distance.

                                                        Choice list:['Feet', 'Yards', 'Miles', 'Meters', 'Kilometers', 'NauticalMiles']

                                                        The default value is "Miles"
    ------------------------------------------------    ---------------------------------------------------------
    field (Required if distance not provided)           Optional string. A field on the ``input_layer`` containing a buffer distance or a field expression.
                                                        A buffer expression must begin with an equal sign (=). To learn more about buffer expressions
                                                        see: `Buffer Expressions <https://developers.arcgis.com/rest/services-reference/bufferexpressions.htm>`_
    ------------------------------------------------    ---------------------------------------------------------
    method                                              Optional string. The method used to apply the buffer with. There are two methods to choose from:

                                                        Choice list:['Geodesic', 'Planar']

                                                        * ``Planar`` - This method applies Euclidean buffers and is appropriate for local analysis on projected data. This is the default.
                                                        * ``Geodesic`` - This method is appropriate for large areas and any geographic coordinate system.
    ------------------------------------------------    ---------------------------------------------------------
    dissolve_option                                     Optional string. Determines how output polygon attributes are processed.

                                                        Choice list:['All', 'List', 'None']

                                                        +----------------------------------+---------------------------------------------------------------------------------------------------+
                                                        |Value                             | Description                                                                                       |
                                                        +----------------------------------+---------------------------------------------------------------------------------------------------+
                                                        | All - All features are dissolved | You can calculate summary statistics and determine if you want multipart or single part features. |
                                                        | into one feature.                |                                                                                                   |
                                                        +----------------------------------+---------------------------------------------------------------------------------------------------+
                                                        | List - Features with the same    | You can calculate summary statistics and determine if you want multipart or single part features. |
                                                        | value in the specified field     |                                                                                                   |
                                                        | will be dissolve together.       |                                                                                                   |
                                                        +----------------------------------+---------------------------------------------------------------------------------------------------+
                                                        | None - No features are dissolved.| There are no additional dissolve options.                                                         |
                                                        +----------------------------------+---------------------------------------------------------------------------------------------------+
    ------------------------------------------------    ---------------------------------------------------------
    dissolve_fields                                     Specifies the fields to dissolve on. Multiple fields may be provided.
    ------------------------------------------------    ---------------------------------------------------------
    summary_fields                                      Optional string. A list of field names and statistical summary types
                                                        that you want to calculate for resulting polygons. Summary statistics
                                                        are only available if dissolveOption = List or All. By default, all
                                                        statistics are returned.

                                                        Example: [{"statisticType": "statistic type", "onStatisticField": "field name"}, ..}]

                                                        fieldName is the name of the fields in the input point layer.

                                                        statisticType is one of the following for numeric fields:

                                                            * ``Count`` - Totals the number of values of all the points in each polygon.
                                                            * ``Sum`` - Adds the total value of all the points in each polygon.
                                                            * ``Mean`` - Calculates the average of all the points in each polygon.
                                                            * ``Min`` - Finds the smallest value of all the points in each polygon.
                                                            * ``Max`` - Finds the largest value of all the points in each polygon.
                                                            * ``Range`` - Finds the difference between the Min and Max values.
                                                            * ``Stddev`` - Finds the standard deviation of all the points in each polygon.
                                                            * ``Var`` - Finds the variance of all the points in each polygon.

                                                        statisticType is the following for string fields:

                                                            * ``Count`` - Totals the number of strings for all the points in each polygon.
                                                            * ``Any`` - Returns a sample string of a point in each polygon.

    ------------------------------------------------    ---------------------------------------------------------
    multipart                                           Optional boolean. Determines if output features are multipart or single part.
                                                        This option is only available if a ``dissolve_option`` is applied.
    ------------------------------------------------    ---------------------------------------------------------
    output_name                                         Optional string. The task will create a feature service of the results. You define the name of the service.
    ------------------------------------------------    ---------------------------------------------------------
    gis                                                 Optional, the GIS on which this tool runs. If not specified, the active GIS is used.
    ------------------------------------------------    ---------------------------------------------------------
    context                                             Optional dict. The context parameter contains additional settings that affect task execution. For this task, there are four settings:

                                                        #. Extent (``extent``) - A bounding box that defines the analysis area. Only those features that intersect the bounding box will be analyzed.
                                                        #. Processing spatial reference (``processSR``) - The features will be projected into this coordinate system for analysis.
                                                        #. Output spatial reference (``outSR``) - The features will be projected into this coordinate system after the analysis to be saved. The output spatial reference for the spatiotemporal big data store is always WGS84.
                                                        #. Data store (``dataStore``) - Results will be saved to the specified data store. The default is the spatiotemporal big data store.
    ------------------------------------------------    ---------------------------------------------------------
    future                                              Optional boolean. If 'True', the value is returned as a GPJob.

                                                        The default value is 'False'
    ================================================    =========================================================

    :returns: Output Features as a feature layer collection item

    .. code-block:: python

            # Usage Example: To create buffer based on distance field.

            buffer = create_buffers(input_layer=lyr,
                                    field='dist',
                                    method='Geodesic',
                                    dissolve_option='All',
                                    dissolve_fields='Date')
    """
    kwargs = locals()


    gis = _arcgis.env.active_gis if gis is None else gis
    url = gis.properties.helperServices.geoanalytics.url

    if isinstance(input_layer, FeatureCollection) and \
       'layers' in input_layer.properties and \
       len(input_layer.properties.layers) > 0:
        input_layer = _FeatureSet.from_dict(
            featureset_dict=input_layer._lazy_properties.layers[0].featureSet)

    params = {}
    for key, value in kwargs.items():
        if key != 'field':
            if value is not None:
                params[key] = value
        else:
            params['distance'] = None
            params['distance_unit'] = None

    if output_name is None:
        output_service_name = 'Create Buffers Analysis_' + _id_generator()
        output_name = output_service_name.replace(' ', '_')
    else:
        output_service_name = output_name.replace(' ', '_')

    output_service = _create_output_service(gis, output_name, output_service_name, 'Create Buffers')

    params['output_name'] = _json.dumps({
        "serviceProperties": {"name" : output_name, "serviceUrl" : output_service.url},
        "itemProperties": {"itemId" : output_service.itemid}})

    if context is not None:
        params["context"] = context
    else:
        _set_context(params)

    param_db = {
        "input_layer": (_FeatureSet, "inputLayer"),
        "distance": (float, "distance"),
        "distance_unit": (str, "distanceUnit"),
        "field": (str, "field"),
        "method": (str, "method"),
        "dissolve_option": (str, "dissolveOption"),
        "dissolve_fields": (str, "dissolveFields"),
        "summary_fields": (str, "summaryFields"),
        "multipart": (bool, "multipart"),
        "output_name": (str, "outputName"),
        "context": (str, "context"),
        "output": (_FeatureSet, "Output Features"),
    }
    return_values = [
        {"name": "output", "display_name": "Output Features", "type": _FeatureSet},
    ]

    try:
        if future:
            gpjob = _execute_gp_tool(gis, "CreateBuffers", params, param_db, return_values, _use_async, url, True, future=future)
            return GAJob(gpjob=gpjob, return_service=output_service)
        _execute_gp_tool(gis, "CreateBuffers", params, param_db, return_values, _use_async, url, True, future=future)
        return output_service
    except:
        output_service.delete()
        raise
Example No. 18
    def __getPointFeatureSet(self, points: List[Point]) -> FeatureSet:
        featureList = [Feature(point) for point in points]

        return FeatureSet(featureList,
                          geometry_type="esriGeometryMultipoint",
                          spatial_reference={'wkid': 4326})
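For comparison, here is a minimal standalone sketch of the same idea, assuming the arcgis package is available; the surrounding class is not shown, so the function name, coordinates, and variable names below are illustrative only. Note that a FeatureSet built from single Point geometries is normally tagged esriGeometryPoint; the esriGeometryMultipoint tag above is kept as the original author wrote it.

from typing import List

from arcgis.features import Feature, FeatureSet
from arcgis.geometry import Point

def get_point_feature_set(points: List[Point]) -> FeatureSet:
    # Wrap each Point geometry in a Feature, then collect the Features
    # into a WGS84 FeatureSet.
    features = [Feature(geometry=point) for point in points]
    return FeatureSet(features,
                      geometry_type="esriGeometryPoint",
                      spatial_reference={'wkid': 4326})

# Illustrative usage with two WGS84 points.
pts = [Point({'x': -122.4194, 'y': 37.7749, 'spatialReference': {'wkid': 4326}}),
       Point({'x': -73.9857, 'y': 40.7484, 'spatialReference': {'wkid': 4326}})]
fset = get_point_feature_set(pts)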
Example No. 19
def create_buffers(
    input_layer,
    distance = 1,
    distance_unit = "Miles",
    field = None,
    method = """Planar""",
    dissolve_option = """None""",
    dissolve_fields = None,
    summary_fields = None,
    multipart = False,
    output_name = None,
    context = None,
    gis = None):
    """

    A buffer is an area that covers a given distance from a point, line, or polygon feature.

    Buffers are typically used to create areas that can be further analyzed using other tools. For example, if the question is "What buildings are within 1 mile of the school?", the answer can be found by creating a 1-mile buffer around the school and overlaying the buffer with the layer containing building footprints. The end result is a layer of those buildings within 1 mile of the school (a usage sketch appears after this example).

    Further examples:

    * Using linear river features, buffer each river by 50 times the width of the river to determine a proposed riparian boundary.

    * Given areas representing countries, buffer each country by 200 nautical miles to determine the maritime boundary.



Parameters:

   input_layer: Input Features (_FeatureSet). Required parameter.

   distance: Buffer Distance (float). Optional parameter.

   distance_unit: Buffer Distance Unit (str). Optional parameter.
      Choice list: ['Feet', 'Yards', 'Miles', 'Meters', 'Kilometers', 'NauticalMiles']

   field: Buffer Distance Field (str). Optional parameter.

   method: Method (str). Required parameter.
      Choice list: ['Geodesic', 'Planar']

   dissolve_option: Dissolve Option (str). Optional parameter.
      Choice list: ['All', 'List', 'None']

   dissolve_fields: Dissolve Fields (str). Optional parameter.

   summary_fields: Summary Statistics (str). Optional parameter.

   multipart: Allow Multipart Geometries (bool). Optional parameter.

   output_name: Output Features Name (str). Optional parameter. If not supplied, a name is generated for the output service.

   context: Context (dict). Optional parameter. Additional settings, such as the processing extent and output spatial reference, that affect task execution.

   gis: Optional, the GIS on which this tool runs. If not specified, the active GIS is used.


Returns:
   output - Output Features as a feature layer collection item


    """
    kwargs = locals()


    gis = _arcgis.env.active_gis if gis is None else gis
    url = gis.properties.helperServices.geoanalytics.url

    if isinstance(input_layer, FeatureCollection) and \
       'layers' in input_layer.properties and \
       len(input_layer.properties.layers) > 0:
        input_layer = _FeatureSet.from_dict(
            featureset_dict=input_layer._lazy_properties.layers[0].featureSet)

    params = {}
    for key, value in kwargs.items():
        if value is not None:
            params[key] = value

    if output_name is None:
        output_service_name = 'Create Buffers Analysis_' + _id_generator()
        output_name = output_service_name.replace(' ', '_')
    else:
        output_service_name = output_name.replace(' ', '_')

    output_service = _create_output_service(gis, output_name, output_service_name, 'Create Buffers')

    params['output_name'] = _json.dumps({
        "serviceProperties": {"name" : output_name, "serviceUrl" : output_service.url},
        "itemProperties": {"itemId" : output_service.itemid}})

    _set_context(params)
    param_db = {
        "input_layer": (_FeatureSet, "inputLayer"),
        "distance": (float, "distance"),
        "distance_unit": (str, "distanceUnit"),
        "field": (str, "field"),
        "method": (str, "method"),
        "dissolve_option": (str, "dissolveOption"),
        "dissolve_fields": (str, "dissolveFields"),
        "summary_fields": (str, "summaryFields"),
        "multipart": (bool, "multipart"),
        "output_name": (str, "outputName"),
        "context": (str, "context"),
        "output": (_FeatureSet, "Output Features"),
    }
    return_values = [
        {"name": "output", "display_name": "Output Features", "type": _FeatureSet},
    ]

    try:
        _execute_gp_tool(gis, "CreateBuffers", params, param_db, return_values, _use_async, url, True)
        return output_service
    except:
        output_service.delete()
        raise
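As a concrete illustration of the parameter list above, a hedged usage sketch; the feature layer `schools_lyr` is an assumption for the sketch and is not part of this example:

# 1-mile planar buffers around each school, dissolved into a single area.
school_zones = create_buffers(input_layer=schools_lyr,
                              distance=1,
                              distance_unit='Miles',
                              method='Planar',
                              dissolve_option='All',
                              output_name='school_one_mile_buffers')

Overlaying the resulting layer with a building-footprint layer would then answer the "What buildings are within 1 mile of the school?" question from the description.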
Example No. 20
def _get_nearest_gis(origin_dataframe: pd.DataFrame,
                     destination_dataframe: pd.DataFrame,
                     source: [str, Country, GIS],
                     origin_id_column: str = 'LOCNUM',
                     destination_id_column: str = 'LOCNUM',
                     destination_count: int = 4) -> pd.DataFrame:
    """Web GIS implementation of get nearest solution."""

    # TODO: backport these to be optional input parameters
    return_geometry = True
    output_spatial_reference = 4326

    # build the spatial reference dict
    out_sr = {'wkid': output_spatial_reference}

    # if a country instance, get the GIS object from it
    if isinstance(source, Country):
        assert isinstance(source.source, GIS), 'The source Country must reference an ArcGIS Web GIS object ' \
                                               'instance to solve using a GIS.'
        source = source.source

    # run a couple of checks to make sure we do not encounter strange errors later
    assert isinstance(source, GIS), 'The source must be a GIS object instance.'
    assert utils.has_networkanalysis_gis(source.users.me), 'You must have the correct permissions in the Web GIS to ' \
                                                           'perform routing solves. It appears you do not.'

    # prep the datasets for routing
    origin_fs = _prep_sdf_for_nearest(
        origin_dataframe, origin_id_column).spatial.to_featureset().to_dict()
    dest_fs = _prep_sdf_for_nearest(
        destination_dataframe,
        destination_id_column).spatial.to_featureset().to_dict()

    # create the url for doing routing
    route_url = source.properties.helperServices.route.url
    solve_url = '/'.join(
        route_url.split('/')[:-1]) + '/ClosestFacility/solveClosestFacility'

    # construct the payload for the routing solve
    params = {
        'incidents': origin_fs,
        'facilities': dest_fs,
        'returnCFRoutes': True,
        'f': 'json',
        'defaultTargetFacilityCount': destination_count,
        'outputLines': 'esriNAOutputLineTrueShape'
        if return_geometry else 'esriNAOutputLineNone',
        'outSR': out_sr
    }

    # call the server for the solve
    res = source._con.post(solve_url, params)

    # unpack the results from the response
    route_df = FeatureSet.from_dict(res['routes']).sdf

    # drop any columns that are entirely empty (all values NA)
    all_na_srs = route_df.isna().all()
    drop_cols = all_na_srs[all_na_srs].index.values
    route_df.drop(columns=drop_cols, inplace=True)

    # populate the origin and destination id columns so the output will be as expected
    id_srs = route_df['Name'].str.split(' - ')
    route_df['IncidentID'] = id_srs.apply(lambda val: val[0])
    route_df['FacilityID'] = id_srs.apply(lambda val: val[1])

    return route_df
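A hedged illustration of how this private helper might be invoked; the spatially enabled DataFrames `customers_df` and `stores_df`, the credentials, and the 'LOCNUM' id columns are assumptions for the sketch, not part of the module:

from arcgis.gis import GIS

gis = GIS("https://www.arcgis.com", "username", "password")

# One route row per origin/destination pair, with IncidentID and FacilityID
# populated from the 'Name' field of the closest-facility solve.
routes_df = _get_nearest_gis(origin_dataframe=customers_df,
                             destination_dataframe=stores_df,
                             source=gis,
                             origin_id_column='LOCNUM',
                             destination_id_column='LOCNUM',
                             destination_count=4)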