def execute(self, parameters, messages):
    """The source code of the tool.

    Splits the input feature set into chunks and appends each chunk to the
    target feature layer on its own worker thread.

    parameters[0]: URL of the target feature layer
    parameters[1]: input feature set whose features are appended
    parameters[2]: field-matching table (pairs of [target_field, source_field])
    parameters[3]: number of worker threads (chunk count)
    """
    # Fixed typos in the user-facing progress strings
    # ("Accesing to a destinational resouse" etc.).
    arcpy.SetProgressor("default", message="Accessing the destination resource")
    # Sign in to the active portal; constructing GIS establishes the session
    # the arcgis API uses, so it is kept even though not referenced below.
    token = arcpy.GetSigninToken()
    portal_url = arcpy.GetActivePortalURL()
    gis = GIS(portal_url, token=token['token'])

    arcpy.SetProgressorLabel("Preparing input data")
    # Serialize the input features to a plain dict so they can be chunked.
    feature_set = arcpy.FeatureSet(parameters[1].valueAsText)
    feature_set_dict = json.loads(feature_set.JSON)

    # Field-matching parameter (target/source field pairs).
    matching = parameters[2].value

    # Split the features into one chunk per requested thread.
    list_of_lists = chunkIt(feature_set_dict['features'], parameters[3].value)

    threads = []
    arcpy.SetProgressorLabel("Starting threads")
    for feature_list in list_of_lists:
        # Each worker re-fetches a fresh sign-in token for its own session.
        threads.append(
            Thread(target=create_and_append,
                   args=[feature_list,
                         arcpy.GetSigninToken(),
                         portal_url,
                         parameters[0].valueAsText,
                         matching]))
        threads[-1].start()

    # Block until every worker has finished appending.
    arcpy.SetProgressorLabel("Executing append")
    for thread in threads:
        thread.join()
    return
def checkServiceCapabilities(pth, checklist):
    """Verify that the feature service at *pth* grants every capability in
    *checklist* (a list such as ['Create', 'Delete']).

    Returns True when all capabilities are present, False for missing
    capabilities or bad input, and None when *pth* is not a service.
    """
    if pth == None:
        dla.addMessage('Error: No path available for layer')
        return False
    if not checkLayerIsService(pth):
        # Not a service at all — callers treat None as "nothing to check".
        return None
    url = pth
    if not isFeatureLayerUrl(url):
        dla.addMessage(
            pth +
            ' Does not appear to be a feature service layer, exiting. Check that you selected a layer not a service'
        )
        return False
    token = arcpy.GetSigninToken()['token']
    name = getServiceName(url)
    if name == None:
        dla.addMessage("Unable to retrieve Service name from the url")
        return False
    granted = hasCapabilities(url, token, checklist)
    if granted != True:
        dla.addMessage('WARNING: ' + name + ' does not have ' +
                       '+'.join(checklist) + ' privileges')
        dla.addMessage('Verify the service properties for: ' + url)
        dla.addMessage('This tool will not run until this is addressed')
    return granted
def create_and_append(feature_list=None,
                      token=None,
                      portal_url=None,
                      service_url=None,
                      matching=None):
    """Append *feature_list* to the feature layer at *service_url*.

    feature_list: list of feature dicts with 'attributes' and 'geometry'
    token: arcpy sign-in token dict; fetched from the active session if None
    portal_url: portal URL; fetched from the active session if None
    service_url: URL of the target feature layer
    matching: iterable of [target_field, source_field] pairs
    """
    # Fix: the original unconditionally overwrote the supplied token and
    # portal_url, making those parameters dead. Honor them, falling back to
    # the active ArcGIS session only when they are not provided.
    if token is None:
        token = arcpy.GetSigninToken()
    if portal_url is None:
        portal_url = arcpy.GetActivePortalURL()
    # GIS() establishes the authenticated session used by FeatureLayer.
    gis = GIS(portal_url, token=token['token'])
    layer = FeatureLayer(service_url)

    features_to_append = []
    for feature in feature_list:
        new_feature = {'attributes': {}, 'geometry': feature['geometry']}
        # Copy each source attribute onto its target field name.
        # Direct dict lookup replaces the original O(n) comprehension scan
        # per field (a missing source field now raises KeyError rather than
        # IndexError, which names the offending key).
        for target_field, source_field in matching:
            new_feature['attributes'][target_field] = \
                feature['attributes'][source_field]
        features_to_append.append(new_feature)
        # Fix: flush at 500 (the original checked "> 500" after appending,
        # so batches actually reached 501 features).
        if len(features_to_append) >= 500:
            layer.edit_features(adds=features_to_append)
            features_to_append = []
    # Flush whatever remains in the final partial batch.
    if features_to_append:
        layer.edit_features(adds=features_to_append)
def get_feature_service_list():
    """Return the layer URLs of up to 200 Feature Services on the active portal.

    Only layers are collected; tables are not exposed through ``.layers``.
    """
    # Authenticate against the active portal with arcpy's sign-in token.
    signin = arcpy.GetSigninToken()
    gis = GIS(arcpy.GetActivePortalURL(), token=signin['token'])
    # Query the portal's content for Feature Service items.
    found_items = gis.content.search(query='',
                                     item_type='Feature Service',
                                     max_items=200)
    urls = []
    for item in found_items:
        try:
            for lyr in item.layers:
                urls.append(lyr.url)
        except TypeError:
            # item.layers can be None for some items — skip those.
            pass
    return urls
def checkServiceCapabilities(sourcePath, required):
    ## Added May2016. Ensure that feature layer has been added and warn if capabilities are not available
    if sourcePath == None:
        addMessage('Error: No path available for layer')
        return False
    addMessage('Checking: ' + sourcePath)
    if not checkLayerIsService(sourcePath):
        # Non-service sources need no capability check.
        return True
    url = getLayerSourceUrl(sourcePath)
    if not isFeatureLayerUrl(url):
        addMessage('Service does not appear to be a feature layer')
        return False
    token = arcpy.GetSigninToken()['token']
    name = getServiceName(url)
    #print('Service',name)
    capable = hasCapabilities(url, token, ['Create', 'Delete'])
    # Explicit True/False comparisons are kept deliberately: when *required*
    # is neither boolean, no message is emitted (matching the original).
    if capable != True and required == True:
        addError('WARNING: ' + name +
                 ' does not have Create and Delete privileges')
        addMessage('Verify the service properties for: ' + url)
        addMessage('This tool will not run until this is addressed')
    elif capable != True and required == False:
        addMessage('WARNING: ' + name +
                   ' does not have Create and Delete privileges')
        addMessage('Verify the service properties for: ' + url)
        addMessage(
            'This tool might continue but other tools will not run until this is addressed'
        )
    return capable
def token_login(self):
    """Get a sign-in token generated from ArcPy.

    Arguments:
        None
    Returns:
        None
    """
    # NOTE side-effects: sets self.token / self.expiration / self.login_method
    # and refreshes base information on success.
    response = arcpy.GetSigninToken()
    if not (response and 'token' in response):
        arcpy.AddError("Unable to get signin token.")
        return
    self.token = response['token']
    # Shave one second off the reported expiry to stay on the safe side.
    expires_at = datetime.datetime.fromtimestamp(response['expires'])
    self.expiration = expires_at - datetime.timedelta(seconds=1)
    if self.debug:
        arcpy.AddMessage(
            'Received token starting with ' +
            '"{}", valid for {} minutes.'.format(self.token[0:10],
                                                 self.valid_for))
    # update base information with token
    self.information()
    self.login_method = 'token'
    return
def process(sdeConnection, prefix, featureServiceUrl, timezone,
            portalUrl=None, username=None, password=None):
    '''Synchronize a survey Feature Service into an SDE database.

    Operations:
    1) Query Feature Service endpoint for table names & IDs
    2) Check for existing tables
    3) If existing tables, get last synchronization time
    4) CreateReplica a FGDB
    5) Download the FGDB
    6) If new, create the tables
    7) Append
    '''
    now = getUTCTimestamp(timezone)
    # Tracks which steps need undoing if a later step fails; consumed by
    # cleanup() in the finally block.
    cleanupOperations = {}
    # 'section' labels the current phase so FAIL() can report where we died.
    section = 'Beginning'
    try:
        section = 'Logging in to Survey'
        # Prefer the active ArcGIS session token; fall back to an explicit
        # username/password login against the portal.
        tokenTest = arcpy.GetSigninToken()
        token = None
        if tokenTest == None:
            token = getToken(username, password, portalUrl)
        else:
            token = tokenTest['token']
        serviceInfo = getServiceDefinition(token, featureServiceUrl)
        # Replica download below requires the Sync capability on the service.
        if 'Sync' not in serviceInfo['capabilities']:
            arcpy.AddError('Sync Capabilities not enabled')
            raise Exception('Sync Capabilities not enabled')
        section = 'Checking Existing Data'
        existingTables = getSurveyTables(sdeConnection, prefix)
        lastSync = None
        if len(existingTables) > 0:
            # Incremental sync: only pull records newer than the last run.
            lastSync = getLastSynchronizationTime(sdeConnection,
                                                  existingTables)
        section = 'Downloading Survey'
        tempdir = tempfile.mkdtemp()
        # Temp-dir cleanup is intentionally disabled here; left for reference.
        #cleanupOperations['tempdir'] = tempdir
        surveyGDB = getReplica(token, featureServiceUrl, serviceInfo, now,
                               outDir=tempdir, lastSync=lastSync)
        section = 'Preprocess Surveys for transfer'
        filterRecords(surveyGDB, now, lastSync)
        addTimeStamp(surveyGDB, now)
        addKeyFields(surveyGDB)
        if len(existingTables) == 0:
            # First run against this database: build the destination tables.
            section = 'Making Tables'
            createTables(surveyGDB, sdeConnection, prefix)
            cleanupOperations['createTables'] = True
        section = 'Updating Tables'
        # Register the append so a mid-append failure can be rolled back,
        # then deregister both steps once the append completes successfully.
        cleanupOperations['append'] = existingTables
        appendTables(surveyGDB, sdeConnection, prefix)
        cleanupOperations.pop('append', None)
        cleanupOperations.pop('createTables', None)
    except Exception as e:
        FAIL(section, e)
    finally:
        #clean up
        cleanup(cleanupOperations, sdeConnection, prefix, now)
    return
def getSigninToken():
    """Return the active ArcGIS sign-in token string, or None if not signed in."""
    signin_data = arcpy.GetSigninToken()
    if signin_data is None:
        arcpy.AddMessage(
            "Error: No token - Please sign in to ArcGIS Online or your Portal to continue"
        )
        return None
    # Only the token itself is needed; 'expires' and 'referer' are ignored.
    return signin_data['token']
def sign_into_portal(self):
    """Acquire a portal token using the first credential scheme available in
    self.config['portal'], store it on self.token, and return it.

    Tried in order: username/password sign-in, OAuth2 refresh-token grant,
    OAuth2 client-credentials grant, then the active ArcGIS session token.
    Returns None when the chosen scheme fails or no session is available.
    """
    portal_config = self.config['portal']
    portal_url = arcpy.GetActivePortalURL()
    if 'username' in portal_config and 'password' in portal_config:
        # Scheme 1: classic username/password sign-in via arcpy.
        try:
            portal_info = arcpy.SignInToPortal(portal_url,
                                               portal_config['username'],
                                               portal_config['password'])
            token = portal_info['token']
        except (ValueError, KeyError):
            return None
    elif 'app_id' in portal_config and 'refresh_token' in portal_config:
        # Scheme 2: OAuth2 refresh-token grant.
        # NOTE(review): verify=False disables TLS certificate checks — confirm
        # this is intentional (e.g. self-signed portal certs).
        try:
            payload = {
                'client_id': portal_config['app_id'],
                'refresh_token': portal_config['refresh_token'],
                'grant_type': 'refresh_token'
            }
            req = requests.post(portal_url + '/sharing/rest/oauth2/token',
                                data=payload,
                                verify=False)
            req_json = req.json()
            token = req_json['access_token']
        except (ValueError, KeyError):
            return None
    elif 'app_id' in portal_config and 'app_secret' in portal_config:
        # Scheme 3: OAuth2 client-credentials grant (app login).
        try:
            payload = {
                'client_id': portal_config['app_id'],
                'client_secret': portal_config['app_secret'],
                'grant_type': 'client_credentials'
            }
            req = requests.post(portal_url + '/sharing/rest/oauth2/token',
                                data=payload,
                                verify=False)
            req_json = req.json()
            token = req_json['access_token']
        except (ValueError, KeyError):
            return None
    else:
        # Scheme 4: fall back to the token of the active ArcGIS session.
        infos = arcpy.GetSigninToken()
        if infos:
            token = infos['token']
        else:
            return None
    self.token = token
    return self.token
def main(argv=None):
    """Keep-alive / token-monitoring loop.

    Polls the active ArcGIS sign-in token 90 times, sleeping 300 seconds
    between iterations (~7.5 hours total). If no session token is found it
    falls back to requesting one from the generateToken endpoint.
    NOTE(review): the fallback uses placeholder "******" credentials — it
    cannot succeed as written; presumably real credentials were redacted.
    """
    token = None
    for i in range(0, 90):
        arcpy.AddMessage("i:" + str(i))
        data = arcpy.GetSigninToken()
        if data is not None:
            # A live session exists — reuse its token and report the expiry.
            token = data['token']
            expires = data['expires']
            arcpy.AddMessage("i:" + str(i) + " expires:" + str(expires))
            #referer = data['referer']
            arcpy.AddMessage("Using current token")
        else:
            arcpy.AddMessage(
                "Error: No token - Please sign in to ArcGIS Online or your Portal to continue"
            )
            username = "******"  # Should never need this section in ArcGIS Pro...
            password = "******"
            #portal = arcpy.GetActivePortalURL()
            #portal = portal.replace('http:','https:')
            portal = 'https://www.arcgis.com'
            tokenUrl = portal + '/sharing/rest/generateToken'
            params = {
                'f': 'pjson',
                'username': username,
                'password': password,
                'referer': portal
            }
            #save if user/pass needed...
            #params = {'f': 'pjson', 'referer': portal} # assume signed in ...
            response = openRequest(tokenUrl, params)
            result = response.read().decode('utf-8')
            data = json.loads(result)
            token = data['token']
        #return
        # Wait five minutes before polling again.
        step = 300
        arcpy.AddMessage("sleep " + str(step))
        time.sleep(step)
def get_token(base_url: str) -> Optional[str]:
    """Generate an authentication token for a hosted or AGS feature service.

    Reads the service type (params 0/1), portal URL (param 2) and credentials
    (params 7/8) from the tool parameters. Returns None when no credentials
    are supplied.

    Raises:
        Exception: when token generation fails (original cause is chained).
    """
    token: Optional[str] = None
    try:
        hosted_feature_service: bool = arcpy.GetParameter(0)
        ags_service: bool = arcpy.GetParameter(1)
        portal_url: str = arcpy.GetParameterAsText(2)
        username: str = arcpy.GetParameterAsText(7)
        password: str = arcpy.GetParameterAsText(8)
        if not (is_blank(username) or is_blank(password)):
            arcpy.AddMessage('\nGenerating Token\n')
            # Generate token for hosted feature service
            if hosted_feature_service:
                arcpy.SignInToPortal(portal_url, username, password)
                token = arcpy.GetSigninToken()['token']
            # Generate token for AGS feature service
            elif ags_service:
                token = generate_token(base_url, username, password)
            if token is None:
                raise ValueError('Error generate token')
    except Exception as exc:
        # Fix: the original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt, and discarded the root cause. Narrow the catch
        # and chain the original exception for diagnosability.
        raise Exception('Error generate token') from exc
    return token
parser.add_argument('-u',dest ='username', help="The username to authenticate with") parser.add_argument('-p',dest='password', help="The password to authenticate with") parser.add_argument('-url', dest='org_url', help="The url of the org/portal to use") args = parser.parse_args() if any([args.username, args.password, args.org_url]) and not all([args.username, args.password, args.org_url]): arcpy.AddError("Must specify username, password, and org_url, missing at least one parameter.") sys.exit(-1) try: arcpy.AddMessage("Getting Token...") if args.username: token = get_token(args.org_url, args.username, args.password) org_url = args.org_url else: token_info = arcpy.GetSigninToken() if token_info is None: arcpy.AddError("You are not signed into a Portal or AGOL Organization.") sys.exit(-1) else: token = token_info['token'] org_url = arcpy.GetActivePortalURL() except Exception as e: arcpy.AddError("Error Getting Token. Check username, password, and url.") arcpy.AddError(e) sys.exit(-1) arcpy.AddMessage("Updating service definitions...") if isinstance(args.item_ids,list): for item_id in args.item_ids: try:
def getToken(self):
    """Return the token string from the active ArcGIS sign-in session."""
    return arcpy.GetSigninToken()["token"]
def updateParameters(self, parameters):
    """Modify the values and properties of parameters before internal
    validation is performed. This method is called whenever a parameter
    has been changed.

    parameters[0]: target feature-layer URL
    parameters[1]: input feature class
    parameters[2]: field-matching value table (filters driven by 0 and 1)
    Cached values in ``current_params`` avoid re-querying the portal on
    every validation pass.
    """
    # NOTE(review): this condition tests parameters[0].altered three times —
    # presumably parameters[1] and parameters[2] were intended; confirm.
    if not (parameters[0].altered and parameters[0].altered
            and parameters[0].altered
            ):  # Refresh params if tool is reopened but not reloaded
        FeatureClassToFeatureLayerSyncClass.current_params = [
            None, None, None
        ]
    # --- Parameter 0 (target layer URL) changed ---
    if parameters[
            0].altered and FeatureClassToFeatureLayerSyncClass.current_params[
                0] != parameters[0].valueAsText:
        # Authenticate on the active portal using the ArcGIS Python API.
        token = arcpy.GetSigninToken()
        portal_url = arcpy.GetActivePortalURL()
        GIS(portal_url, token=token['token'])
        # Get the layer and its properties.
        layer = FeatureLayer(parameters[0].valueAsText)
        properties = layer.properties
        # Restrict the input feature-class parameter to the layer's geometry
        # type; the dict maps display names to esri geometry-type strings.
        list_of_types = {
            'Point': "esriGeometryPoint",
            'Multipoint': "esriGeometryMultipoint",
            'Polyline': 'esriGeometryPolyline',
            'Polygon': 'esriGeometryPolygon'
        }
        filter_type = [
            x[0] for x in list_of_types.items()
            if x[1] == properties.geometryType
        ]
        # Define the list filter.
        parameters[1].filter.list = filter_type
        # System fields (edit-tracking info plus GlobalID/ObjectID) are
        # populated automatically, so they are excluded from the matching
        # table below. Each property may be absent, hence the try blocks.
        system_fields = []
        try:
            system_fields.extend(
                [x[1] for x in properties.editFieldsInfo.items()])
        except AttributeError:
            pass
        try:
            system_fields.append(properties.objectIdField)
        except AttributeError:
            pass
        try:
            system_fields.append(properties.globalIdField)
        except AttributeError:
            pass
        # Pre-populate the matching table with the output fields (empty
        # source-field column) for the user to fill in.
        parameters[2].values = [[x.name, '']
                                for x in layer.properties.fields
                                if not x.name in system_fields]
        # Add a filter to create a dropdown list.
        parameters[2].filters[0].list = [
            x.name for x in layer.properties.fields
            if not x.name in system_fields
        ]
        # Stash the output field descriptions on the class for later actions.
        FeatureClassToFeatureLayerSyncClass.out_fields_desc = layer.properties
        # Show the matching table ONLY when both input and output are set.
        if parameters[1].value and parameters[0].value:
            parameters[2].enabled = True
        else:
            parameters[2].enabled = False
        FeatureClassToFeatureLayerSyncClass.current_params[0] = parameters[
            0].valueAsText
    # --- Parameter 1 (input feature class) changed ---
    if parameters[
            1].altered and FeatureClassToFeatureLayerSyncClass.current_params[
                1] != parameters[1].valueAsText:
        description = arcpy.Describe(parameters[1].valueAsText)
        field_count = description.fieldInfo.count
        field_list = []
        for i in range(field_count):
            field_list.append(description.fieldInfo.getFieldName(i))
        # Source-field dropdown in the matching table.
        parameters[2].filters[1].list = field_list
        # Show the matching table ONLY when both input and output are set.
        if parameters[1].value and parameters[0].value:
            parameters[2].enabled = True
        else:
            parameters[2].enabled = False
        # Cache the parameter value so this branch is skipped until it changes.
        parameters[2].filters[1].list = field_list if False else parameters[2].filters[1].list
        FeatureClassToFeatureLayerSyncClass.current_params[1] = parameters[
            1].valueAsText
        # Save descriptions for later actions.
        FeatureClassToFeatureLayerSyncClass.in_fields_desc = arcpy.ListFields(
            parameters[1].valueAsText)
        FeatureClassToFeatureLayerSyncClass.layer_desc = description
    # --- Parameter 2 (matching table) changed: just cache the new value ---
    if parameters[
            2].altered and FeatureClassToFeatureLayerSyncClass.current_params[
                2] != parameters[2].valueAsText:
        FeatureClassToFeatureLayerSyncClass.current_params[2] = parameters[
            2].valueAsText
    return
def get_token():
    """Refresh the module-level ``token`` with the active ArcGIS sign-in info.

    Note: stores the whole sign-in dict (token/expires/referer), not just
    the token string.
    """
    global token
    signin_info = arcpy.GetSigninToken()
    token = signin_info
def returnArcGISToken():
    """Return the token string for the current ArcGIS sign-in session."""
    return arcpy.GetSigninToken()['token']
# ----- Prepare OD service ----- try: arcpy.AddMessage("Obtaining credentials for and information about OD Cost Matrix service...") # Hard-wired OD variables TravelMode = "Walking Distance" PathShape = "None" OD_service_name = "World/OriginDestinationCostMatrix" Utility_service_name = "World/Utilities" # Get the credentials from the signed in user and import the service if username and password: ODservice = BBB_SharedFunctions.import_AGOLservice(OD_service_name, username=username, password=password) Utilityservice = BBB_SharedFunctions.import_AGOLservice(Utility_service_name, username=username, password=password) else: credentials = arcpy.GetSigninToken() if not credentials: arcpy.AddError("Please sign into ArcGIS Online or pass a username and password to the tool.") raise CustomError token = credentials["token"] referer = credentials["referer"] ODservice = BBB_SharedFunctions.import_AGOLservice(OD_service_name, token=token, referer=referer) Utilityservice = BBB_SharedFunctions.import_AGOLservice(Utility_service_name, token=token, referer=referer) # Get the service limits from the OD service (how many origins and destinations allowed) utilresult = Utilityservice.GetToolInfo("asyncODCostMatrix", "GenerateOriginDestinationCostMatrix") utilresultstring = utilresult.getOutput(0) utilresultjson = json.loads(utilresultstring) origin_limit = int(utilresultjson['serviceLimits']['maximumDestinations']) destination_limit = int(utilresultjson['serviceLimits']['maximumOrigins'])
def runTool(outFile, SQLDbase, inPointsLayer, inLocUniqueID, day, start_time,
            end_time, BufferSize, BufferUnits, DepOrArrChoice, username,
            password):
    """Count transit trips reachable from each input point within a buffer.

    Builds an OD cost matrix between the input points and GTFS stops using
    the AGOL OD Cost Matrix service (chunked to its origin/destination
    limits), counts trips at the reachable stops during the time window,
    and writes NumTrips / NumTripsPerHr / NumStopsInRange / MaxWaitTime
    fields onto a copy of the input points saved to *outFile*.
    """
    def runOD(Points, Stops):
        # Call the OD Cost Matrix service for this set of chunks
        result = ODservice.GenerateOriginDestinationCostMatrix(
            Points,
            Stops,
            TravelMode,
            Distance_Units=BufferUnits,
            Cutoff=BufferSize,
            Origin_Destination_Line_Shape=PathShape)
        # Check the status of the result object every 0.5 seconds
        # until it has a value of 4(succeeded) or greater
        while result.status < 4:
            time.sleep(0.5)
        # Print any warning or error messages returned from the tool
        result_severity = result.maxSeverity
        if result_severity == 2:
            errors = result.getMessages(2)
            if "No solution found." in errors:
                # No destinations were found for the origins, which probably
                # just means they were too far away.
                pass
            else:
                arcpy.AddError("An error occured when running the tool")
                arcpy.AddError(result.getMessages(2))
                raise BBB_SharedFunctions.CustomError
        elif result_severity == 1:
            arcpy.AddWarning("Warnings were returned when running the tool")
            arcpy.AddWarning(result.getMessages(1))
        # Get the resulting OD Lines and store the stops that are reachable
        # from points (fills the global PointsAndStops mapping).
        if result_severity != 2:
            linesSubLayer = result.getOutput(1)
            with arcpy.da.SearchCursor(
                    linesSubLayer, ["OriginOID", "DestinationOID"]) as ODCursor:
                for row in ODCursor:
                    UID = pointsOIDdict[row[0]]
                    SID = stopOIDdict[row[1]]
                    PointsAndStops.setdefault(str(UID), []).append(str(SID))

    try:
        # Source FC names are not prepended to field names.
        arcpy.env.qualifiedFieldNames = False
        # It's okay to overwrite in-memory stuff.
        OverwriteOutput = arcpy.env.overwriteOutput  # Get the orignal value so we can reset it.
        arcpy.env.overwriteOutput = True

        BBB_SharedFunctions.CheckArcVersion(min_version_pro="1.2")
        BBB_SharedFunctions.ConnectToSQLDatabase(SQLDbase)
        Specific, day = BBB_SharedFunctions.CheckSpecificDate(day)
        start_sec, end_sec = BBB_SharedFunctions.ConvertTimeWindowToSeconds(
            start_time, end_time)

        # Distance between stops and points — padded 20% so the spatial
        # pre-selection never excludes stops near the cutoff.
        BufferSize_padded = BufferSize + (.2 * BufferSize)
        BufferLinearUnit = str(BufferSize_padded) + " " + BufferUnits

        # Will we calculate the max wait time?
        CalcWaitTime = True

        # Output file designated by user
        outDir = os.path.dirname(outFile)
        outFilename = os.path.basename(outFile)
        # Personal gdb outputs need bracketed field delimiters in SQL below.
        ispgdb = "esriDataSourcesGDB.AccessWorkspaceFactory" in arcpy.Describe(
            outDir).workspaceFactoryProgID

        inLocUniqueID = BBB_SharedFunctions.HandleOIDUniqueID(
            inPointsLayer, inLocUniqueID)

        # ----- Prepare OD service -----
        try:
            arcpy.AddMessage(
                "Obtaining credentials for and information about OD Cost Matrix service..."
            )
            # Hard-wired OD variables
            TravelMode = "Walking Distance"
            PathShape = "None"
            OD_service_name = "World/OriginDestinationCostMatrix"
            Utility_service_name = "World/Utilities"
            # Get the credentials from the signed in user and import the service
            if username and password:
                ODservice = BBB_SharedFunctions.import_AGOLservice(
                    OD_service_name, username=username, password=password)
                Utilityservice = BBB_SharedFunctions.import_AGOLservice(
                    Utility_service_name, username=username, password=password)
            else:
                credentials = arcpy.GetSigninToken()
                if not credentials:
                    arcpy.AddError(
                        "Please sign into ArcGIS Online or pass a username and password to the tool."
                    )
                    raise BBB_SharedFunctions.CustomError
                token = credentials["token"]
                referer = credentials["referer"]
                ODservice = BBB_SharedFunctions.import_AGOLservice(
                    OD_service_name, token=token, referer=referer)
                Utilityservice = BBB_SharedFunctions.import_AGOLservice(
                    Utility_service_name, token=token, referer=referer)
            # Get the service limits from the OD service (how many origins and
            # destinations allowed).
            # NOTE(review): origin_limit is read from 'maximumDestinations' and
            # destination_limit from 'maximumOrigins' — looks swapped; confirm
            # against the service's JSON before changing.
            utilresult = Utilityservice.GetToolInfo(
                "asyncODCostMatrix", "GenerateOriginDestinationCostMatrix")
            utilresultstring = utilresult.getOutput(0)
            utilresultjson = json.loads(utilresultstring)
            origin_limit = int(
                utilresultjson['serviceLimits']['maximumDestinations'])
            destination_limit = int(
                utilresultjson['serviceLimits']['maximumOrigins'])
        except:
            arcpy.AddError(
                "Failed to obtain credentials for and information about OD Cost Matrix service."
            )
            raise

        # ----- Create a feature class of stops ------
        try:
            arcpy.AddMessage("Getting GTFS stops...")
            tempstopsname = "Temp_Stops"
            StopsLayer, StopList = BBB_SharedFunctions.MakeStopsFeatureClass(
                os.path.join(outDir, tempstopsname))
            # Select only the stops within a reasonable distance of points to
            # reduce problem size (inverted selection deletes the far ones).
            arcpy.management.MakeFeatureLayer(StopsLayer, "StopsToRemove")
            arcpy.management.SelectLayerByLocation(
                "StopsToRemove", "WITHIN_A_DISTANCE_GEODESIC", inPointsLayer,
                BufferLinearUnit, invert_spatial_relationship="INVERT")
            arcpy.management.DeleteRows("StopsToRemove")
            arcpy.management.Delete("StopsToRemove")
            # Make Feature Layer of stops to use later
            arcpy.management.MakeFeatureLayer(StopsLayer, "StopsLayer")
            stopsOID = arcpy.Describe("StopsLayer").OIDFieldName
        except:
            arcpy.AddError("Error creating feature class of GTFS stops.")
            raise

        # ----- Prepare input data -----
        try:
            arcpy.AddMessage("Preparing input points...")
            # Select only the points within a reasonable distance of stops to
            # reduce problem size.
            temppointsname = outFilename + "_Temp"
            relevantPoints = os.path.join(outDir, temppointsname)
            arcpy.management.MakeFeatureLayer(inPointsLayer, "PointsToKeep")
            arcpy.management.SelectLayerByLocation(
                "PointsToKeep", "WITHIN_A_DISTANCE_GEODESIC", StopsLayer,
                BufferLinearUnit)
            num_points = int(
                arcpy.management.GetCount("PointsToKeep").getOutput(0))
            # If the number of points is large, sort them spatially for smart
            # chunking (nearby points end up in the same service call).
            if num_points > origin_limit:
                shapeFieldName = arcpy.Describe("PointsToKeep").shapeFieldName
                arcpy.management.Sort("PointsToKeep", relevantPoints,
                                      shapeFieldName, "PEANO")
            # Otherwise, just copy them.
            else:
                arcpy.management.CopyFeatures("PointsToKeep", relevantPoints)
            arcpy.management.Delete("PointsToKeep")
            # Store OIDs in a dictionary for later joining
            pointsOIDdict = {}  # {OID: inLocUniqueID}
            with arcpy.da.SearchCursor(relevantPoints,
                                       ["OID@", inLocUniqueID]) as cur:
                for row in cur:
                    pointsOIDdict[row[0]] = row[1]
            relevantpointsOID = arcpy.Describe(relevantPoints).OIDFieldName
        except:
            arcpy.AddError("Error preparing input points for analysis.")
            raise

        #----- Create OD Matrix between stops and user's points -----
        try:
            arcpy.AddMessage("Creating OD matrix between points and stops...")
            arcpy.AddMessage(
                "(This step could take a while for large datasets or buffer sizes.)"
            )
            global PointsAndStops
            # PointsAndStops = {LocID: [stop_1, stop_2, ...]}
            PointsAndStops = {}
            # Chunk the points to fit the service limits and loop through chunks
            points_numchunks = int(math.ceil(float(num_points) / origin_limit))
            points_chunkstart = 0
            points_chunkend = origin_limit
            current_chunk = 0
            for x in range(0, points_numchunks):
                current_chunk += 1
                arcpy.AddMessage("Handling input points chunk %i of %i" %
                                 (current_chunk, points_numchunks))
                # Select only the points belonging to this chunk
                points_chunk = sorted(
                    pointsOIDdict.keys())[points_chunkstart:points_chunkend]
                points_chunkstart = points_chunkend
                points_chunkend = points_chunkstart + origin_limit
                # pgdb uses [field]; file gdb uses "field" delimiters.
                if ispgdb:
                    points_selection_query = '[{0}] IN ({1})'.format(
                        relevantpointsOID, ','.join(map(str, points_chunk)))
                else:
                    points_selection_query = '"{0}" IN ({1})'.format(
                        relevantpointsOID, ','.join(map(str, points_chunk)))
                arcpy.MakeFeatureLayer_management(relevantPoints, "PointsLayer",
                                                 points_selection_query)
                # Select only the stops within the safe buffer of these points
                arcpy.management.SelectLayerByLocation(
                    "StopsLayer", "WITHIN_A_DISTANCE_GEODESIC", "PointsLayer",
                    BufferLinearUnit)
                num_stops = int(
                    arcpy.GetCount_management("StopsLayer").getOutput(0))
                stopOIDdict = {}  # {OID: stop_id}
                with arcpy.da.SearchCursor("StopsLayer",
                                           ["OID@", "stop_id"]) as cur:
                    for row in cur:
                        stopOIDdict[row[0]] = row[1]
                # If the number of stops in range exceeds the destination
                # limit, we have to chunk these as well.
                if num_stops > destination_limit:
                    stops_numchunks = int(
                        math.ceil(float(num_stops) / destination_limit))
                    stops_chunkstart = 0
                    stops_chunkend = destination_limit
                    for x in range(0, stops_numchunks):
                        stops_chunk = sorted(
                            stopOIDdict.keys())[stops_chunkstart:stops_chunkend]
                        stops_chunkstart = stops_chunkend
                        stops_chunkend = stops_chunkstart + destination_limit
                        if ispgdb:
                            stops_selection_query = '[{0}] IN ({1})'.format(
                                stopsOID, ','.join(map(str, stops_chunk)))
                        else:
                            stops_selection_query = '"{0}" IN ({1})'.format(
                                stopsOID, ','.join(map(str, stops_chunk)))
                        arcpy.MakeFeatureLayer_management(
                            "StopsLayer", "StopsLayer_Chunk",
                            stops_selection_query)
                        runOD("PointsLayer", "StopsLayer_Chunk")
                        arcpy.management.Delete("StopsLayer_Chunk")
                # Otherwise, just run them all.
                else:
                    runOD("PointsLayer", "StopsLayer")
            # Clean up
            arcpy.management.Delete("StopsLayer")
            arcpy.management.Delete("PointsLayer")
            arcpy.management.Delete(StopsLayer)
            arcpy.management.Delete(relevantPoints)
        except:
            arcpy.AddError(
                "Error creating OD matrix between stops and input points.")
            raise

        #----- Query the GTFS data to count the trips at each stop -----
        try:
            arcpy.AddMessage(
                "Calculating the number of transit trips available during the time window..."
            )
            # Get a dictionary of stop times in our time window
            # {stop_id: [[trip_id, stop_time]]}
            stoptimedict = BBB_SharedFunctions.CountTripsAtStops(
                day, start_sec, end_sec,
                BBB_SharedFunctions.CleanUpDepOrArr(DepOrArrChoice), Specific)
        except:
            arcpy.AddError(
                "Error calculating the number of transit trips available during the time window."
            )
            raise

        # ----- Generate output data -----
        try:
            arcpy.AddMessage("Writing output data...")
            arcpy.management.CopyFeatures(inPointsLayer, outFile)
            # Add a field to the output file for number of trips and
            # num trips / hour.
            arcpy.management.AddField(outFile, "NumTrips", "SHORT")
            arcpy.management.AddField(outFile, "NumTripsPerHr", "DOUBLE")
            arcpy.management.AddField(outFile, "NumStopsInRange", "SHORT")
            arcpy.management.AddField(outFile, "MaxWaitTime", "SHORT")
            with arcpy.da.UpdateCursor(outFile, [
                    inLocUniqueID, "NumTrips", "NumTripsPerHr",
                    "NumStopsInRange", "MaxWaitTime"
            ]) as ucursor:
                for row in ucursor:
                    try:
                        ImportantStops = PointsAndStops[str(row[0])]
                    except KeyError:
                        # This point had no stops in range
                        ImportantStops = []
                    NumTrips, NumTripsPerHr, NumStopsInRange, MaxWaitTime =\
                        BBB_SharedFunctions.RetrieveStatsForSetOfStops(
                            ImportantStops, stoptimedict, CalcWaitTime,
                            start_sec, end_sec)
                    row[1] = NumTrips
                    row[2] = NumTripsPerHr
                    row[3] = NumStopsInRange
                    row[4] = MaxWaitTime
                    ucursor.updateRow(row)
        except:
            arcpy.AddError("Error writing output.")
            raise

        arcpy.AddMessage("Done!")
        arcpy.AddMessage("Output files written:")
        arcpy.AddMessage("- " + outFile)

    except BBB_SharedFunctions.CustomError:
        # CustomError paths already reported the specific problem above.
        arcpy.AddError("Error counting transit trips at input locations.")
        pass
    except:
        arcpy.AddError("Error counting transit trips at input locations.")
        raise
    finally:
        # Reset overwriteOutput to what it was originally.
        arcpy.env.overwriteOutput = OverwriteOutput
def sharePackage2(in_package, folder, username, password, maintain, summary,
                  tags, credits, everyone, org, groups):
    """Upload a package file to the active portal (or arcgis.com), optionally
    maintaining the metadata and sharing settings of an existing item of the
    same name, then set metadata and sharing on the uploaded item.
    """
    try:
        active_url = arcpy.GetActivePortalURL()
    except:
        active_url = 'https://www.arcgis.com/'
    agol_helper = ago.AGOLHelper(portal_url=active_url)
    # If not app-signed in, and have user/pass, sign in the old manual way
    if username and password and "Signed in through app" not in username:
        agol_helper.login(username, password)
    elif arcpy.GetSigninToken() is not None:
        # Sign in using info from the app
        agol_helper.token_login()
    else:
        arcpy.AddIDMessage("Error", 1561)
        return
    # Make sure file exists
    # NOTE(review): uploadSize is never used afterwards — os.stat serves only
    # as the existence check here.
    try:
        uploadSize = os.stat(in_package).st_size
    except FileNotFoundError:
        raise Exception("The file {0} was not found".format(in_package))
    fileName, fileExt = os.path.splitext(os.path.basename(in_package))
    try:
        uploadType = pkgTypes[fileExt.upper()]
    except KeyError:
        raise Exception(
            "Unknown/unsupported package type extension: {0}".format(fileExt))
    portalFolders = agol_helper.list_folders()
    # NOTE(review): "or None" is always falsy, so this condition is just
    # folder == "<root>"; a None folder is presumably meant to be caught too.
    if folder == "<root>" or None:
        folder = ''
    folderID = ""
    moveFolder = False
    if folder:
        if folder not in portalFolders.keys():
            # Create a new folder
            folderID = agol_helper.create_folder(folder)
            arcpy.AddMessage("Created: {}".format(folderID))
            #refresh the folder list
            portalFolders = agol_helper.list_folders()
    #previousPkgId = agol_helper.search(fileName, uploadType)
    previousPkgId = agol_helper.search(item_type=uploadType, name=fileName)
    if len(previousPkgId) == 0:
        # Pkg does not exist
        if maintain:
            # No pkg + maintain meta == quit
            raise Exception(
                "Existing package not found. Check to make sure it exists or disable maintain metadata."
            )
        if folder:
            if folder in portalFolders.keys():
                folderID = portalFolders[folder]
                moveFolder = True
    else:
        # Pkg exists
        newItemID = previousPkgId[0]
        itemInfo = agol_helper.item(newItemID)
        # original pkg lives here.
        pkgFolderID = itemInfo['ownerFolder'] if itemInfo['ownerFolder'] else ""
        if folder:
            if folder in portalFolders.keys():
                if maintain and portalFolders[folder] != pkgFolderID:
                    raise Exception(
                        "Existing package to update not found in folder {}. Check the folder or disable maintain metadata."
                        .format(folder))
                else:
                    # Existing pkg lives in supplied folder. It'll be updated.
                    folderID = portalFolders[folder]
                    if folderID != pkgFolderID:
                        # Package of same name exists but uploading to a
                        # different folder
                        moveFolder = True
            # no else here - this is case where folder needs to be created,
            # covered previously
        else:
            if maintain and pkgFolderID:
                # pkg lives in folder, but root was specified:
                raise Exception(
                    "Did not find package to update in <root> Does it exist in a folder?"
                )
            # no else here - covered previously with folderID variable
            # initialize
    # Set metadata by getting original metadata or adding new
    if not maintain:
        try:
            # Only available in Pro 1.2 or 10.4
            metaFromPkg = arcpy.GetPackageInfo(in_package)
            description = metaFromPkg['description']
            if not summary:
                summary = metaFromPkg['summary']
            if not tags:
                tags = metaFromPkg['tags']
            if not credits:
                credits = metaFromPkg['credits']
        except AttributeError:
            description = ''
        pkgMetadata = (summary, description, tags, credits, '')
    else:
        # Pull the existing item's metadata so the update preserves it.
        metadataURL = "{}/content/users/{}/{}/items/{}".format(
            agol_helper.base_url, agol_helper.username, folderID, newItemID)
        metadata = agol_helper.url_request(metadataURL, {
            'token': agol_helper.token,
            'f': 'json'
        })
        #re-set everyone if necessary from original share options
        everyone = True if metadata['sharing'][
            'access'] == 'public' else everyone
        org = True if everyone else True if metadata['sharing'][
            'access'] == 'org' else org
        groups = metadata['sharing']['groups'] if metadata['sharing'][
            'groups'] else groups
        snippet = metadata['item']['snippet'] if metadata['item'][
            'snippet'] else ''
        description = metadata['item']['description'] if metadata['item'][
            'description'] else ''
        tags = ','.join(metadata['item']['tags'])
        accessInfo = metadata['item']['accessInformation'] if metadata['item'][
            'accessInformation'] else ''
        licenseInfo = metadata['item']['licenseInfo'] if metadata['item'][
            'licenseInfo'] else ''
        pkgMetadata = (snippet, description, tags, accessInfo, licenseInfo)
        # Save original thumbnail to update with metadata
        try:
            thumbnailURL = "{}/content/items/{}/info/{}".format(
                agol_helper.base_url, newItemID, metadata['item']['thumbnail'])
            saveThumb = os.path.join(arcpy.env.scratchFolder, "thumbnail.png")
            agol_helper.save_file(thumbnailURL, saveThumb)
            pkgMetadata += (saveThumb, )
        except:
            arcpy.AddWarning("Problem getting thumbnail")
        arcpy.AddMessage("Using existing metadata")
    # Behavior is to always overwrite a package if it exists
    extraParams = {'overwrite': 'true'}
    # Upload the package
    arcpy.AddMessage("Beginning file upload")
    newItemIDres = agol_helper.add_item(in_package,
                                        agol_helper.username,
                                        folderID,
                                        uploadType,
                                        params=extraParams)
    if 'success' in newItemIDres:
        if newItemIDres['success']:
            newItemID = newItemIDres['id']
    else:
        raise Exception("(returned msg) {}".format(newItemIDres))
    # Commit the file
    arcpy.AddMessage("Committing the file on the portal")
    resCom = agol_helper.commit(newItemID, agol_helper.username)
    # Poll until the portal finishes processing the upload.
    status = 'processing'  # partial | processing | failed | completed
    while status == 'processing' or status == 'partial':
        status = agol_helper.item_status(newItemID,
                                         agol_helper.username)['status']
        time.sleep(1)
    if status == 'failed':
        raise Exception("Failed in processing the file on the portal")
    if moveFolder:
        #move new package into folder
        moveResp = agol_helper.move_items(folderID, [newItemID])
        if not moveResp['results'][0]['success']:
            arcpy.AddMessage(
                "Failed to move item to folder: '{}'. Item will be created in root"
                .format(folder))
            folderID = ""
    # Set or Update the metadata
    arcpy.AddMessage("Setting metadata and sharing settings")
    uresp = agol_helper.update_item(newItemID,
                                    pkgMetadata,
                                    folder_id=folderID,
                                    title=fileName)
    try:
        if not uresp['success']:
            arcpy.AddWarning("Could not set sharing properties")
    except:
        arcpy.AddWarning("Problem setting metadata values:")
        arcpy.AddError(" {0}".format(uresp['error']))
    # Clean up thumbnail
    try:
        os.remove(saveThumb)
    except (NameError, IOError):
        # saveThumb only exists on the maintain-metadata path.
        pass
    # Set Sharing options
    if not maintain:
        if everyone or groups or org:
            groupIDs = []
            if groups:
                # Resolve the selected group names to their portal IDs.
                userGroups = agol_helper.list_groups(agol_helper.username)
                for group in userGroups.keys():
                    arcpy.AddMessage(group)
                    for selectedgroup in groups:
                        if group == selectedgroup:
                            groupIDs.append(userGroups[group])
            gresp = agol_helper.share_items(groupIDs, everyone, org,
                                            [newItemID])
            try:
                if not gresp['results'][0]['success']:
                    arcpy.AddWarning("Could not set sharing properties")
                    arcpy.AddError(" {0}".format(
                        gresp['results'][0]['error']['message']))
            except:
                arcpy.AddWarning("Problem sharing item:")
                arcpy.AddError(" {0}".format(gresp))