Example #1
import arcpy

def portal_name():
    """Return the signed-in user's username from the active portal, or None."""

    portal_desc = arcpy.GetPortalDescription()

    if 'user' in portal_desc:
        if 'username' in portal_desc['user']:
            username = portal_desc['user']['username']
        else:
            username = None
    else:
        username = None

    return username
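A minimal usage sketch of ours, assuming ArcGIS Pro (or an arcpy session) is already signed in to a portal:

current_user = portal_name()
print(f"Signed in as: {current_user}")  # prints None if no user info is available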
Example #2
import arcpy

def portal_info():
    """Return a (username, portal_url) tuple for the active portal; either may be None."""

    portal_desc = arcpy.GetPortalDescription()

    if 'user' in portal_desc:
        if 'username' in portal_desc['user']:
            username = portal_desc['user']['username']
        else:
            username = None
    else:
        username = None

    if 'allSSL' in portal_desc and 'portalHostname' in portal_desc:
        if portal_desc['allSSL'] is True:
            portal_url = "https://" + portal_desc['portalHostname']
        else:
            portal_url = "http://" + portal_desc['portalHostname']
    else:
        portal_url = None

    return (username, portal_url)
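The same pattern in use, unpacking the returned tuple (again a sketch of ours, assuming an active sign-in):

user, url = portal_info()
if url is not None:
    print(f"{user} @ {url}")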
Example #3
import sys

import arcpy

# bad_geo / good_geo are geocoding-result DataFrames built earlier in the source script.
# Print out facilities that failed to geocode
if not bad_geo.empty:
    print(f'Number of facilities that failed to geocode:  {bad_geo.shape[0]}')
    print('Failed facilities:')
    for row in bad_geo[['UniqueID', 'Facility_Name', 'Address', 'City', 'ZIP_Code']].to_numpy():
        print(f'    {row[0]}:  {row[1]}, {row[2]}, {row[3]}, {row[4]}')
else:
    print(f'\n All facilities ({good_geo.shape[0]}) were successfully geocoded! \n')
 
# Prompt user to continue or abort
resp = input("Would you like to continue?    (y/n) \n")
if resp.lower() == 'n':
    sys.exit(0)
 
# Append successfully geocoded facilities to LTCF_Data feature layer
# Get AGOL username
desc = arcpy.GetPortalDescription()
username = desc['user']['username']

insert_fields = ['UniqueID', 'Facility_Name', 'Address',
                 'City', 'ZIP_Code', 'Facility_Type', 'LHD',
                 'Resolved_Y_N', 'Date_Resolved', 'Notification_Date', 'Positive_Patients',
                 'Deceased_Patients', 'Positive_HCWs', 'CreationDate', 'Creator',
                 'EditDate', 'Editor', 'SHAPE@XY']
def insert_row(row):
    xy = (row['x'], row['y'])
    values = [row['UniqueID'],
              row['Facility_Name'],
              row['Address'],
              row['City'],
              row['ZIP_Code'],
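The example is cut off above. A rough sketch of how a helper like this usually finishes, assuming the remaining values follow the same row[...] pattern and that rows are written with arcpy.da.InsertCursor (the target-layer variable ltcf_fc is our placeholder, not from the original):

# Illustrative completion only, not the original author's code.
# Assumes ltcf_fc points at the LTCF_Data feature layer and each row
# carries every field named in insert_fields ('SHAPE@XY' handled via xy).
def insert_row(row, ltcf_fc):
    xy = (row['x'], row['y'])
    values = [row[field] for field in insert_fields[:-1]] + [xy]
    with arcpy.da.InsertCursor(ltcf_fc, insert_fields) as cursor:
        cursor.insertRow(values)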
Example #4
import logging

import arcpy
import pyodbc

import ec_arcpy_util
import ec_sql_server_util

logging.basicConfig(format="%(asctime)s %(levelname)s:%(message)s",
                    filename="load_addresses.log",
                    filemode="w",
                    level=logging.DEBUG,
                    datefmt="%m/%d/%Y %I:%M:%S %p")

portal_url = arcpy.GetActivePortalURL()
portal_desc = arcpy.GetPortalDescription(portal_url)
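A short follow-on sketch of ours, reading only keys the examples here already rely on (user/username, isPortal, portalHostname, allSSL); which keys are present still depends on the portal and sign-in state:

if 'user' in portal_desc:
    logging.info("Signed in as %s", portal_desc['user'].get('username'))
logging.info("Enterprise portal: %s", portal_desc.get('isPortal'))
logging.info("Host: %s (allSSL=%s)",
             portal_desc.get('portalHostname'), portal_desc.get('allSSL'))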
Example #5
import itertools
import os
import shutil
from concurrent import futures

import arcpy

# ODCostMatrix, solve_od_cost_matrix, and logger are defined elsewhere in
# the source module this snippet was taken from.
def main(**inputs):  # pylint:disable = too-many-locals, too-many-statements, too-many-branches
    """Preprocess inputs, compute OD cost matrix and postprocess outputs."""
    # Create the output workspace
    out_gdb_name = "outputs"
    out_gdb = os.path.join(inputs["output_folder"], out_gdb_name + ".gdb")
    if not os.path.exists(out_gdb):
        arcpy.management.CreateFileGDB(inputs["output_folder"], out_gdb_name)

    # Preprocess inputs
    pp_origins = ODCostMatrix.preprocess_inputs(inputs["origins"], inputs["network_data_source"], inputs["travel_mode"],
                                                out_gdb)
    pp_destinations = ODCostMatrix.preprocess_inputs(inputs["destinations"], inputs["network_data_source"],
                                                     inputs["travel_mode"], out_gdb)

    inputs["origins"] = pp_origins
    inputs["destinations"] = pp_destinations

    # Store count of input origins and destinations
    origins_count = int(arcpy.management.GetCount(inputs["origins"]).getOutput(0))
    destinations_count = int(arcpy.management.GetCount(inputs["destinations"]).getOutput(0))

    # Determine if working with online or enterprise portal
    network_data_source = inputs["network_data_source"]
    is_agol = False
    portal_desc = {}
    if ODCostMatrix.is_nds_service(network_data_source):
        logger.debug("Getting information from the portal")
        portal_desc = arcpy.GetPortalDescription(network_data_source)
        inputs["portal_description"] = portal_desc
        is_agol = not portal_desc["isPortal"]

    # Get iterables for the inputs
    if is_agol:
        # Get the max origins and max destinations if working with AGOL
        tool_limits = ODCostMatrix.get_tool_limits(network_data_source)
        max_origins = int(tool_limits["maximumOrigins"])
        max_destinations = int(tool_limits["maximumDestinations"])
        # Chunk origin and destination OID ranges based on max origins and max destinations
        ranges = ODCostMatrix.get_oid_ranges_agol(origins_count, destinations_count, max_origins, max_destinations)
        # Adjust properties specific to working with AGOL service.
        inputs["workers"] = min(4, inputs["workers"])
        inputs["max_od_size"] = max_origins * max_destinations
    else:
        ranges = ODCostMatrix.get_oid_ranges(origins_count, destinations_count, inputs["max_od_size"])

    inputs_iter = itertools.repeat(inputs)

    # Compute OD cost matrix
    od_line_fcs = []
    job_folders_to_delete = []
    # Run on multiple processes when solving large ODs
    if origins_count * destinations_count > inputs["max_od_size"]:
        with futures.ProcessPoolExecutor(max_workers=inputs["workers"]) as executors:
            results = executors.map(solve_od_cost_matrix, inputs_iter, ranges)
            # Compute a list of od results from each iteration if the solve is successful.
            for result in results:
                if result["solveSucceeded"]:
                    od_line_fcs.append(result["outputLines"])
                    job_folders_to_delete.append(result["jobFolder"])
                else:
                    logger.warning("Solve failed for job id %s", result["jobId"])
                    logger.debug(result["solveMessages"])
    else:
        result = solve_od_cost_matrix(inputs, [(1, origins_count), (1, destinations_count)])
        if result["solveSucceeded"]:
            od_line_fcs.append(result["outputLines"])
            job_folders_to_delete.append(result["jobFolder"])
        else:
            logger.warning("Solve failed for job id %s", result["jobId"])
            logger.debug(result["solveMessages"])

    # Merge individual OD matrix feature classes into a single feature class
    if od_line_fcs:
        output_od_name = "od_" + pp_origins[-7:-2]
        output_fc = arcpy.CreateUniqueName(output_od_name, out_gdb)
        logger.info("Merging results to %s", output_fc)
        result = arcpy.management.Merge(od_line_fcs, output_fc)
        logger.debug(result.getMessages().split("\n")[-1])

    # Cleanup
    # Delete the job folders if the job succeeded
    for folder in job_folders_to_delete:
        logger.debug("Deleting %s", folder)
        shutil.rmtree(folder, ignore_errors=True)

    # Delete the preprocessed inputs
    arcpy.management.Delete(pp_origins)
    arcpy.management.Delete(pp_destinations)
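The chunking above delegates to ODCostMatrix.get_oid_ranges, which this snippet doesn't include. A standalone sketch of ours illustrating the idea (assuming ObjectIDs run contiguously from 1; the real method may differ):

def get_oid_ranges_sketch(origins_count, destinations_count, max_od_size):
    # Split the full origin x destination matrix into blocks whose size
    # stays at or below max_od_size, each expressed as
    # [(o_start, o_end), (d_start, d_end)] like the ranges used above.
    d_chunk = min(destinations_count, max_od_size)
    o_chunk = max(1, max_od_size // d_chunk)
    ranges = []
    for o_start in range(1, origins_count + 1, o_chunk):
        for d_start in range(1, destinations_count + 1, d_chunk):
            ranges.append([(o_start, min(o_start + o_chunk - 1, origins_count)),
                           (d_start, min(d_start + d_chunk - 1, destinations_count))])
    return ranges

For example, origins_count=5, destinations_count=4, max_od_size=8 yields three blocks of at most 2 x 4 origin-destination pairs.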