def cameraStream():
    """Capture frames in a loop and upload any frame that contains a face."""
    camera = picamera.PiCamera()
    camera.vflip = True
    # Only fFace2 is used below; fFace1 and pFace are loaded but left unused.
    fFace1 = cv2.CascadeClassifier(
        '/home/pi/Downloads/opencv-3.0.0/data/haarcascades/haarcascade_frontalface_alt.xml')
    fFace2 = cv2.CascadeClassifier(
        '/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_frontalface_default.xml')
    pFace = cv2.CascadeClassifier(
        '/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_profileface.xml')
    x = 0
    while True:
        filename = 'image' + str(x) + '.jpg'
        camera.capture(filename)
        image = np.asarray(cv2.imread(filename))
        grayScale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        numFaces = len(fFace2.detectMultiScale(grayScale, 1.3, 4))
        if numFaces > 0:
            ftp.Upload(filename)
            GPIO.output(start_stop, GPIO.LOW)
        x += 1
        os.remove(filename)
    camera.close()  # Note: unreachable while the loop above runs forever
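# The camera snippets above and below lean on module-level setup that is not
# shown. A minimal sketch of that scaffolding follows; the `ftp` helper module
# and the `start_stop` pin number are assumptions inferred from how they are
# used here, not code from the original project.
import os
import picamera           # Raspberry Pi camera interface
import cv2                # OpenCV, used for the Haar-cascade face detection
import numpy as np
import RPi.GPIO as GPIO
import ftp                # hypothetical helper exposing ftp.Upload(filename)

start_stop = 18           # assumed BCM pin driving the start/stop signal
GPIO.setmode(GPIO.BCM)
GPIO.setup(start_stop, GPIO.OUT)
GPIO.output(start_stop, GPIO.HIGH)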
def faceDetector(filename):
    #fFace1 = cv2.CascadeClassifier('/home/pi/Downloads/opencv-3.0.0/data/haarcascades/haarcascade_frontalface_alt.xml')
    fFace2 = cv2.CascadeClassifier(
        '/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_frontalface_default.xml')
    #pFace = cv2.CascadeClassifier('/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_profileface.xml')
    image = np.asarray(cv2.imread(filename))
    grayScale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    numFaces = len(fFace2.detectMultiScale(grayScale, 1.75, 4))
    if numFaces > 0:
        print(numFaces)
        ftp.Upload(filename)
        GPIO.output(start_stop, GPIO.LOW)
    os.remove(filename)
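# Note on the detectMultiScale arguments: the second is the scale factor and
# the third is minNeighbors. This variant uses 1.75 where the others use 1.3;
# a larger scale factor scans fewer image scales, so it runs faster on the Pi
# but is more likely to miss faces, while a higher minNeighbors value
# suppresses false positives at the cost of recall.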
def faceDetector(filename):
    """Documentation for the faceDetector thread

    Opens an image file and searches for a face by means of haarcascades.
    If a face is found it uploads the picture to the server.
    It also changes the state of GPIO start_stop to LOW.
    """
    #fFace1 = cv2.CascadeClassifier('/home/pi/Downloads/opencv-3.0.0/data/haarcascades/haarcascade_frontalface_alt.xml')
    fFace2 = cv2.CascadeClassifier(
        '/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_frontalface_default.xml')
    #pFace = cv2.CascadeClassifier('/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_profileface.xml')
    image = np.asarray(cv2.imread(filename))
    grayScale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    numFaces = len(fFace2.detectMultiScale(grayScale, 1.3, 4))
    if numFaces > 0:  # if there is a face
        ftp.Upload(filename)
        GPIO.output(start_stop, GPIO.LOW)  # changes state of start_stop to LOW
    os.remove(filename)
def faceDetector(filename):
    """Documentation for the faceDetector thread

    Opens an image file and searches for a face by means of haarcascades.
    If a face is found it uploads the picture to the server.
    It also changes the state of GPIO start_stop to LOW.
    """
    global isTakingPictures
    fFace2 = cv2.CascadeClassifier(
        '/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_frontalface_default.xml')
    image = np.asarray(cv2.imread(filename))
    grayScale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    numFaces = len(fFace2.detectMultiScale(grayScale, 1.3, 4))
    if numFaces > 0:  # if there is a face
        Stop()
        ftp.Upload(filename)
        print('IMAGE')
        WriteToServer('IMAGE ' + filename)
        #isTakingPictures = False
    os.remove(filename)
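# The docstrings describe faceDetector as a thread, so the capture loop
# presumably hands each new frame off to it in the background. A hedged
# sketch of that hand-off follows; the loop itself is an assumption, and
# Stop() and WriteToServer() are the project's own helpers, not defined here.
import threading

def captureLoop(camera):
    x = 0
    while isTakingPictures:
        filename = 'image' + str(x) + '.jpg'
        camera.capture(filename)
        # Run detection off the capture thread so the camera keeps up
        threading.Thread(target=faceDetector, args=(filename,)).start()
        x += 1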
# Module-level capture loop: runs until the deadline `end` (a time.time()
# timestamp) passes. `camera`, `end`, `ftp` and the imports are assumed to be
# set up earlier in the script. Only fFace2 is used; the fFace1 and pFace
# detections are commented out.
fFace1 = cv2.CascadeClassifier(
    '/home/pi/Downloads/opencv-3.0.0/data/haarcascades/haarcascade_frontalface_alt.xml')
fFace2 = cv2.CascadeClassifier(
    '/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_frontalface_default.xml')
pFace = cv2.CascadeClassifier(
    '/home/pi/Downloads/opencv-3.0.0/data/haarcascades_cuda/haarcascade_profileface.xml')
x = 0
while time.time() < end:
    filename = 'image' + str(x) + '.jpg'
    camera.capture(filename)
    image = np.asarray(cv2.imread(filename))
    grayScale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    #numFaces = len(fFace1.detectMultiScale(grayScale, 1.2, 4))
    numFaces = len(fFace2.detectMultiScale(grayScale, 1.3, 4))
    #numFaces += len(pFace.detectMultiScale(grayScale, 1.1, 4))
    if numFaces > 0:
        ftp.Upload(filename)
    x += 1
    os.remove(filename)
    print(numFaces)
camera.close()
exit(0)
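# A minimal preamble the timed fragment above appears to assume; the
# 60-second duration is an illustrative guess, not from the original.
import time
import picamera

camera = picamera.PiCamera()
end = time.time() + 60  # capture until this deadline passes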
def mainFunction(sourceGeodatabase, configFile, ftpSite, ftpFolder, ftpUsername, ftpPassword):
    # Get parameters from the ArcGIS Desktop tool by separating by comma
    # e.g. (var1 is 1st parameter, var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Get a list of the feature datasets in the database
        arcpy.env.workspace = sourceGeodatabase
        # Set to copy datasets from config
        datasetsOption = "Config"
        # Set update mode to new
        updateMode = "New"

        # Setup the destination folder as a temporary folder
        destinationFolder = os.path.join(arcpy.env.scratchFolder, "CapacityServices")
        if not os.path.exists(destinationFolder):
            os.makedirs(destinationFolder)
        # Logging
        if (enableLogging == "true"):
            logger.info("Temporary destination folder - " + destinationFolder)
        arcpy.AddMessage("Temporary destination folder - " + destinationFolder)

        featureDatasetList = arcpy.ListDatasets("", "Feature")
        # EXTERNAL FUNCTION - Copy over these feature datasets
        GeodatabaseReplication.copyDatasets(sourceGeodatabase, destinationFolder, datasetsOption,
                                            updateMode, configFile, featureDatasetList,
                                            "Feature Dataset", "false")

        # Get a list of the feature classes in the database
        featureClassList = arcpy.ListFeatureClasses()
        # EXTERNAL FUNCTION - Copy over these feature classes
        GeodatabaseReplication.copyDatasets(sourceGeodatabase, destinationFolder, datasetsOption,
                                            updateMode, configFile, featureClassList,
                                            "Feature Class", "false")

        # Get a list of the tables in the database
        tableList = arcpy.ListTables()
        # EXTERNAL FUNCTION - Copy over these tables
        GeodatabaseReplication.copyDatasets(sourceGeodatabase, destinationFolder, datasetsOption,
                                            updateMode, configFile, tableList, "Table", "false")

        # Join tables onto feature classes
        # Set CSV delimiter
        csvDelimiter = ","
        # Open the CSV file (binary mode - Python 2 style CSV reading)
        with open(configFile, 'rb') as csvFile:
            # Read the CSV file
            rows = csv.reader(csvFile, delimiter=csvDelimiter)

            # For each row in the CSV
            count = 0
            for row in rows:
                # Ignore the first line containing headers
                if (count > 0):
                    sourceDataset = row[0]
                    destinationDataset = row[1]
                    version = row[2]
                    joinTable = row[3]
                    renameFields = row[4]
                    removeFields = row[5]

                    # Join the table to the feature class if there is a join table provided
                    if joinTable:
                        # Copy table to memory first
                        arcpy.CopyRows_management(os.path.join(sourceGeodatabase, joinTable),
                                                  r"in_memory\Tbl", "")
                        # If renaming fields in table
                        if renameFields:
                            renameFields = renameFields.split(";")
                            # For each rename field
                            for renameField in renameFields:
                                # Get the current field name and the new field name
                                fields = renameField.split(":")
                                # Logging
                                if (enableLogging == "true"):
                                    logger.info("Renaming field in " + joinTable + " from " + fields[0] + " to " + fields[1] + "...")
                                arcpy.AddMessage("Renaming field in " + joinTable + " from " + fields[0] + " to " + fields[1] + "...")
                                # Alter field name
                                arcpy.AlterField_management(r"in_memory\Tbl", fields[0], fields[1], fields[1])

                        # Logging
                        if (enableLogging == "true"):
                            logger.info("Joining dataset " + joinTable + " to dataset " + destinationDataset + "...")
                        arcpy.AddMessage("Joining dataset " + joinTable + " to dataset " + destinationDataset + "...")
                        # Join on table to feature class
                        arcpy.JoinField_management(os.path.join(destinationFolder, destinationDataset),
                                                   "Feature_ID", r"in_memory\Tbl", "asset_id")

                    # Remove unnecessary fields if provided
                    if removeFields:
                        # Logging
                        if (enableLogging == "true"):
                            logger.info("Removing fields " + removeFields + " from dataset " + destinationDataset + "...")
                        arcpy.AddMessage("Removing fields " + removeFields + " from dataset " + destinationDataset + "...")
                        # Remove unnecessary fields
                        arcpy.DeleteField_management(os.path.join(destinationFolder, destinationDataset), removeFields)
                count = count + 1

        # EXTERNAL FUNCTION - Send data to server
        FTPUpload.mainFunction(destinationFolder, ftpSite, ftpFolder, ftpUsername, ftpPassword)
        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        pass
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message (Python 2 unicode handling)
        for i in range(len(e.args)):
            if (i == 0):
                errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
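# Both geoprocessing scripts reference globals that the surrounding script
# template is expected to define: enableLogging, sendErrorEmail, output,
# logger/logMessage, and sendEmail. A minimal sketch of that scaffolding
# follows, assuming a standard Python 2 logging/smtplib setup; the log path,
# server, and addresses are illustrative, not from the original.
import logging
import smtplib
from email.mime.text import MIMEText

enableLogging = "true"            # the scripts compare against string flags
sendErrorEmail = "false"
output = None                     # set by the tool when there is a result
logFile = r"C:\Temp\process.log"  # assumed log location

logger = logging.getLogger("DataUpdate")
logger.setLevel(logging.INFO)
logMessage = logging.FileHandler(logFile)
logMessage.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))
logger.addHandler(logMessage)

def sendEmail(message):
    # Hypothetical error-notification helper; server and addresses assumed
    msg = MIMEText(message)
    msg['Subject'] = "Process error"
    server = smtplib.SMTP("smtp.example.com")
    server.sendmail("sender@example.com", ["admin@example.com"], msg.as_string())
    server.quit()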
def mainFunction(featureClasses, tables, csvFiles, csvXYFieldNames, ftpSite, ftpFolder, ftpUsername, ftpPassword, gpService):
    # Get parameters from the ArcGIS Desktop tool by separating by comma
    # e.g. (var1 is 1st parameter, var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Get the arcgis version
        arcgisVersion = arcpy.GetInstallInfo()['Version']

        # Setup scratch folder differently depending on ArcGIS version
        if (arcgisVersion == "10.0"):
            # Setup geodatabase to load data into in temporary workspace
            arcpy.env.scratchWorkspace = r"F:\Temp"
            tempFolder = arcpy.CreateFolder_management(arcpy.env.scratchWorkspace,
                                                       "WebData-" + str(uuid.uuid1()))
        else:
            # Setup geodatabase to load data into in temporary workspace
            tempFolder = arcpy.CreateFolder_management(arcpy.env.scratchFolder,
                                                       "WebData-" + str(uuid.uuid1()))
        arcpy.CreateFileGDB_management(tempFolder, "Data", "CURRENT")
        geodatabase = os.path.join(str(tempFolder), "Data.gdb")

        arcpy.AddMessage("Copying datasets...")
        # Load the feature classes and tables into a list if input values provided
        if (len(featureClasses) > 0):
            # Remove out apostrophes
            featureclassList = str(featureClasses).replace("'", "").split(";")
            # Loop through the feature classes
            for eachFeatureclass in featureclassList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachFeatureclass)
                # Copy feature class into geodatabase using the same dataset name
                arcpy.CopyFeatures_management(eachFeatureclass,
                                              os.path.join(geodatabase, describeDataset.name),
                                              "", "0", "0", "0")

        if (len(tables) > 0):
            tableList = str(tables).replace("'", "").split(";")
            # Loop through the tables
            for eachTable in tableList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachTable)
                # Copy table into geodatabase using the same dataset name
                arcpy.TableSelect_analysis(eachTable, os.path.join(geodatabase, describeDataset.name), "")

        # If CSV files provided
        if (len(csvFiles) > 0):
            csvList = str(csvFiles).replace("'", "").split(";")
            # Loop through the CSVs
            for eachCSV in csvList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachCSV)
                datasetName = describeDataset.name.split(".")
                # Change CSV name if it starts with a digit
                if datasetName[0].isdigit():
                    datasetName[0] = "Layer" + datasetName[0]
                # Create feature layer and convert to feature class
                csvFields = str(csvXYFieldNames).replace("'", "").split(",")
                # Make an XY event layer in NZGD 2000 New Zealand Transverse Mercator
                arcpy.MakeXYEventLayer_management(eachCSV, csvFields[0], csvFields[1], "Layer",
                    "PROJCS['NZGD_2000_New_Zealand_Transverse_Mercator',GEOGCS['GCS_NZGD_2000',DATUM['D_NZGD_2000',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',1600000.0],PARAMETER['False_Northing',10000000.0],PARAMETER['Central_Meridian',173.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]];-4020900 1900 10000;-100000 10000;-100000 10000;0.001;0.001;0.001;IsHighPrecision",
                    "")
                # Copy feature class into geodatabase using the same dataset name
                arcpy.CopyFeatures_management("Layer", os.path.join(geodatabase, datasetName[0]),
                                              "", "0", "0", "0")

        # Check input datasets are provided before zipping up
        if ((len(featureClasses) > 0) or (len(tables) > 0) or (len(csvFiles) > 0)):
            arcpy.AddMessage("Zipping data...")
            # Setup the zip file
            if (arcgisVersion == "10.0"):
                zipFile = os.path.join(arcpy.env.scratchWorkspace, "WebData-" + str(uuid.uuid1()) + ".zip")
            else:
                zipFile = os.path.join(arcpy.env.scratchFolder, "WebData-" + str(uuid.uuid1()) + ".zip")
            zippedFolder = zipfile.ZipFile(zipFile, "w", allowZip64=True)

            # Zip up the geodatabase
            root_len = len(os.path.abspath(str(tempFolder)))
            # For each of the directories in the folder
            for root, dirs, files in os.walk(str(tempFolder)):
                archive_root = os.path.abspath(root)[root_len:]
                # For each file
                for f in files:
                    fullpath = os.path.join(root, f)
                    archive_name = os.path.join(archive_root, f)
                    zippedFolder.write(fullpath, archive_name)
            # Close zip file
            zippedFolder.close()

            # EXTERNAL FUNCTION - Send data to server
            FTPUpload.mainFunction(zipFile, ftpSite, ftpFolder, ftpUsername, ftpPassword)
        else:
            # -------------------------------------------- Logging -------------------------------------------- #
            arcpy.AddError("Process stopped: No datasets provided")
            # Log error
            if (enableLogging == "true"):
                logger.error("Process stopped: No datasets provided")
            # ------------------------------------------------------------------------------------------------- #

        # Call geoprocessing service to update data on server
        arcpy.AddMessage("Updating data on server...")
        arcpy.ImportToolbox(gpService, "toolbox")
        arcpy.DataUpdateFromZip_toolbox("Existing")
        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        pass
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message (Python 2 unicode handling)
        for i in range(len(e.args)):
            if (i == 0):
                errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
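# These geoprocessing scripts are typically launched by ArcGIS passing tool
# parameters on the command line. A hedged sketch of the entry point that
# would drive mainFunction; the parameter order is assumed to match the
# function signature.
if __name__ == '__main__':
    # Gather the tool parameters in order and hand them to the function
    argv = tuple(arcpy.GetParameterAsText(i) for i in range(arcpy.GetArgumentCount()))
    mainFunction(*argv)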