import arcpy


def ARGIS_registerDB(connection_name, mapindexsde, con):
    print 'Start registering the database'
    # con = "C:/Users/ltcx/AppData/Roaming/ESRI/Desktop10.2/ArcCatalog/arcgis on localhost_6080 (系统管理员).ags"
    # server_conn = "c:/connections/MYSERVER.ags"
    # db_conn = "D:/PycharmProjects/ClipTask/mapindex20190220100911.sde"
    db_conn = mapindexsde
    print con
    print db_conn
    # arcpy.AddDataStoreItem(con, "DATABASE", "Wilma", db_conn, db_conn)
    # the third argument (the data store name) must be unique for each registration
    arcpy.AddDataStoreItem(con, "DATABASE", connection_name, db_conn, db_conn)
    print 'Database registered successfully'
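# Usage sketch (not from the original project): the two connection-file paths below are
# hypothetical placeholders; the data store name passed as the first argument must be
# unique for each registration, as noted in the function above.
ags_file = r"C:\connections\arcgis_on_localhost.ags"   # hypothetical ArcGIS Server connection file
sde_file = r"D:\connections\mapindex.sde"              # hypothetical enterprise geodatabase connection file
ARGIS_registerDB("mapindex_store", sde_file, ags_file)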
def add_data_store_item(data_store_name=None, data_path=None):
    """add_data_store_item(data_store_name=None, data_path=None)

    Checks whether the data store is already registered; if not, registers it.
    """
    if data_path not in [i[2] for i in arcpy.ListDataStoreItems(CONNECTIONFILE, 'FOLDER')]:
        print 'This path is not a registered data store:', data_path
        dsStatus = arcpy.AddDataStoreItem(CONNECTIONFILE, "FOLDER", data_store_name,
                                          data_path, data_path)
        print "Data store addition status : " + str(dsStatus)
        validity = arcpy.ValidateDataStoreItem(CONNECTIONFILE, "FOLDER", data_store_name)
        print("The data store item '{}' is {}".format(data_store_name, validity))
        return True
    else:
        print 'This path is already a registered data store:', data_path
        return False
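# Usage sketch (assumption): CONNECTIONFILE is the module-level ArcGIS Server connection
# (.ags) file referenced by the function above; the name and folder path here are
# hypothetical placeholders.
CONNECTIONFILE = r"C:\connections\myserver.ags"
if add_data_store_item(data_store_name="ProjectData", data_path=r"D:\data\project"):
    print "Folder was newly registered and validated"
else:
    print "Folder was already registered"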
def import_sde_connection_file(ags_connection_file, sde_connection_file):
    log.info(
        'Importing SDE connection file {} to ArcGIS Server connection file {}'
        .format(sde_connection_file, ags_connection_file))
    import arcpy
    data_store_name = os.path.splitext(os.path.basename(sde_connection_file))[0]
    try:
        arcpy.AddDataStoreItem(ags_connection_file, "DATABASE", data_store_name,
                               sde_connection_file, sde_connection_file)
    except StandardError as e:
        if e.message == 'Client database entry is already registered.':
            log.warn(e.message)
        else:
            log.exception(
                'An error occurred while importing SDE connection file {} to ArcGIS Server connection file {}'
                .format(sde_connection_file, ags_connection_file))
            raise
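# Usage sketch (assumption): `log` and `os` are imported at module level in the original
# code; both connection-file paths below are hypothetical placeholders.
import_sde_connection_file(r"C:\connections\myserver.ags",
                           r"C:\connections\production_gdb.sde")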
# run the extract data task and assign it to the 'result' variable
# only the cityhall layer was used as input, but the airport and firestation layers will be used in the service creation
print "Try buffering..."
# result = arcpy.Buffer_analysis(inp, res, "10 kilometers")
result = arcpy.Buffer_analysis(inp, res, dis)
# arcpy.ExtractDataTask_server("Counties", aoi, "File Geodatabase - GDB - .gdb", "ESRI GRID - GRID", os.path.join(arcpy.env.scratchFolder, "output.zip"))

# make sure the folder is registered with the server; if not, add it to the data store
if arcpy.env.workspace not in [i[2] for i in arcpy.ListDataStoreItems(connPath, 'FOLDER')]:
    # both the client and server paths are the same
    dsStatus = arcpy.AddDataStoreItem(connPath, "FOLDER", "CityData",
                                      arcpy.env.workspace, arcpy.env.workspace)
    print "Data store : " + str(dsStatus)

# create service definition draft
print "Creating sddraft file..."
arcpy.CreateGPSDDraft(result, sddraft, serviceName,
                      server_type="ARCGIS_SERVER",
                      connection_file_path=connPath,
                      copy_data_to_server=False,
                      folder_name=None,
                      summary="Buffer",
                      tags="Buffer data")
sdePath = os.path.join(outFolderPath, outName)
if os.path.exists(sdePath):
    os.remove(sdePath)
print(sdePath)

arcpy.CreateDatabaseConnection_management(
    out_folder_path=outFolderPath,
    out_name=outName,
    database_platform="ORACLE",
    instance="192.168.220.131/test",
    account_authentication="DATABASE_AUTH",
    username=sdeUserName,
    password=sdePw,
    save_user_pass="SAVE_USERNAME",  # credentials must be stored in the .sde so the server can use the connection
    database="#",
    schema="#",
    version_type="POINT_IN_TIME",
    version="#",
    date="#")

agsPath = os.path.join(outFolderPath, 'AGSConnection116.ags')
arcpy.AddDataStoreItem(connection_file=agsPath,
                       datastore_type="DATABASE",
                       connection_name="myDBConnectOracleSpatial",
                       server_path=sdePath)

# confirm that every registered database item is valid on the server
for i in arcpy.ListDataStoreItems(agsPath, "DATABASE"):
    validity = arcpy.ValidateDataStoreItem(agsPath, "DATABASE", i[0])
    print("The data item '{}' is {}".format(i[0], validity))
# import statements
import os
import glob
import arcpy
print("ArcPy imported")

# Set path and variables
map_path = r'E:\UC_demo\Publishing\batch_publishing\\'
svr_con = r'E:\UC_demo\Publishing\batch_publishing\local_server.ags'

# Find maps in folder
map_list = glob.glob(map_path + "*.mxd")
print("Total number of maps found: " + str(len(map_list)))

# Reference data using the data store
arcpy.AddDataStoreItem(svr_con, "FOLDER", "datastore_registered_with_Python", r"E:\UC_demo\Data")

# Loop through each map in the list
for current_map in map_list:
    print("----------------------------------")
    # Set variables
    mxd = arcpy.mapping.MapDocument(current_map)
    service_name = os.path.basename(current_map)[:-4]
    sddraft = map_path + service_name + ".sddraft"
    sd = map_path + service_name + ".sd"
    # Create service definition draft
    analysis = arcpy.mapping.CreateMapSDDraft(
        mxd, sddraft,
        os.path.join(ws, 'stowe_elev.tif'), 'outRlayer1')
    distance_method = 'Manhattan'
    outputdata = os.path.join('Scratch', 'outputPolygon')
    arcpy.ImportToolbox(intbx)
    history_item = arcpy.hotspotscript_hotspot(inputfc, inputRaster, distance_method)
    print("Tool runs successfully")
except:
    print("Making layer or running tool error:", sys.exc_info()[0])

# Add data store
try:
    arcpy.AddDataStoreItem(connection_file=connectionFile,
                           datastore_type="FOLDER",
                           connection_name="HSRef",
                           server_path=config.uncpath,
                           client_path=ws)
    print("Add data store")
except:
    print("Adding data store error:", sys.exc_info()[0])

# Publish a gp service
try:
    # Create a service definition draft
    draft_file = os.path.join(sc, 'gpservice.sddraft')
    draft_file_return = arcpy.CreateGPSDDraft(
        result=history_item,
        out_sddraft=draft_file,
        service_name='hotspotgpservice2',
        server_type="ARCGIS_SERVER",
import sys

import arcpy

conn = sys.argv[1]
name = sys.argv[2]
sde_file = sys.argv[3]

# check whether the SDE to register is already registered => if yes, delete the formerly registered SDE
for item in arcpy.ListDataStoreItems(conn, "DATABASE"):
    current_name = item[0]
    if current_name == name:
        arcpy.RemoveDataStoreItem(conn, "DATABASE", name)

arcpy.AddDataStoreItem(conn, "DATABASE", name, sde_file)
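# Optional follow-up (sketch, not part of the original script): confirm the new entry is
# usable by the server, mirroring the validation done in the other examples; `conn` and
# `name` are the command-line arguments parsed above.
validity = arcpy.ValidateDataStoreItem(conn, "DATABASE", name)
print("The data store item '{}' is {}".format(name, validity))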
def publish(self, product):
    try:
        if self.vp.get('geodatabase', 'storingConfig').lower() == 'file':
            logger.debug("using file geodatabase")
            # create gdb directory if it doesn't exist
            self.check_data_dir(self.vp.get('geodatabase', 'gdbpath'))
            # create file geodatabase if it doesn't exist
            _gdb_name = self.check_file_GDB(
                product.product_name,
                self.vp.get('geodatabase', 'gdbpath'))
            if product.publish_name is not None:
                _publish_name = product.publish_name
            else:
                _publish_name = product.product_name
            _mosaic_db = os.path.join(_gdb_name,
                                      '{0}'.format(product.publish_name))
            _ags_name = _publish_name
            _layer = _mosaic_db
        elif self.vp.get('geodatabase', 'storingConfig') == 'ent':
            logger.debug("Using enterprise geodatabase")
            _enterprise_gdb_name = '{0}_{1}'.format(
                self.vp.get('vampire', 'home_country').lower(),
                product.product_name)
            self.create_enterprise_GDB(
                _enterprise_gdb_name,
                self.vp.get('geodatabase', 'host'),
                self.vp.get('geodatabase', 'dba'),
                self.vp.get('geodatabase', 'dbapass'),
                self.vp.get('geodatabase', 'sdeuser'),
                self.vp.get('geodatabase', 'sdepass'),
                self.vp.get('geodatabase', 'keycode'))
            # gdb = "Database Connections/"+egdbname+".sde"
            _gdb_name = "Database Connections/{0}.sde".format(
                _enterprise_gdb_name)
            _mosaic_db = os.path.join(
                _gdb_name,
                '{0}.sde.{1}'.format(_enterprise_gdb_name,
                                     _enterprise_gdb_name))
            _ags_name = _enterprise_gdb_name
            _layer = os.path.join(
                os.path.join(self.vp.get('geodatabase', 'sdeCatalog'),
                             '{0}.sde'.format(_enterprise_gdb_name)),
                _enterprise_gdb_name)
            # countProduct = country.lower() + "_" + x
            # self.createagsfile(ws, countProduct, server_url, username, password)
            # sdeCon = sdeCatalog+countProduct+'.sde/'
            # fc = sdeCon+countProduct
        else:
            logger.error('Unrecognised geodatabase storing config')
            return
        # MDS = gdb + '/' + egdbname + ".sde." + egdbname
        # gdbname = directory+'\\'+x+'.gdb'
        # MDS = gdbname + '\\' + x

        # check if the mosaic dataset exists, and create it if necessary
        if arcpy.Exists(_mosaic_db):
            logger.debug(
                "Mosaic dataset {0} already exists".format(_mosaic_db))
        else:
            self.create_mosaic_dataset(_gdb_name, _mosaic_db)
            logger.debug("Mosaic Dataset {0} created".format(_mosaic_db))
        # add product raster to mosaic dataset
        self.add_raster_to_MDS(_mosaic_db, product.product_filename)
        # add start and end date fields to mosaic dataset if necessary
        self.add_date_field(_mosaic_db)
        # compute statistics
        self.update_mosaic_statistics(_mosaic_db)
        # set start and end date for product in mosaic dataset
        self.update_date_fields(_mosaic_db, product)
        _ws = self.vp.get('geodatabase', 'ws')
        self.create_ags_file(_ws, _ags_name,
                             self.vp.get('geodatabase', 'server_url'),
                             self.vp.get('geodatabase', 'username'),
                             self.vp.get('geodatabase', 'password'))
        # fc1 = os.path.join(directory, x+'.gdb')
        # fc = os.path.join(fc1, x)
        if arcpy.Exists(_layer):
            logger.debug("data {0} is available".format(_publish_name))
        else:
            logger.debug("data {0} is not available".format(_publish_name))
        _connection_file = os.path.join(_ws, '{0}.ags'.format(_publish_name))
        # register the geodatabase folder with the server and validate the entry
        arcpy.AddDataStoreItem(_connection_file, "FOLDER", _publish_name,
                               self.vp.get('geodatabase', 'gdbpath'),
                               self.vp.get('geodatabase', 'gdbpath'))
        arcpy.ValidateDataStoreItem(_connection_file, "FOLDER", _publish_name)
        _service_desc_draft = os.path.join(
            _ws, '{0}.sddraft'.format(_publish_name))
        _service_desc = os.path.join(_ws, '{0}.sd'.format(_publish_name))
        self.create_image_SD_draft(product, _layer, _service_desc_draft,
                                   _publish_name, _connection_file)
        self.insert_RTF_file(product, _service_desc_draft)
        if not os.path.exists(_service_desc):
            self.analyze_SD_draft(_service_desc_draft, _service_desc)
        else:
            logger.debug("file {0} already exists".format(_service_desc))
        if not os.path.exists(_connection_file):
            logger.debug("file {0} does not exist".format(_connection_file))
        else:
            logger.debug("file {0} already exists".format(_connection_file))
        arcpy.UploadServiceDefinition_server(_service_desc, _connection_file)
        logger.debug(_connection_file)
        # elif self.vp.get('geodatabase', 'storingConfig') == 'ent':
        #     logger.debug("Using enterprise geodatabase")
        #     self.createEntGDB(host, x, dba, dbapass, sdeuser, sdepass, keycode)
        #     egdbname = country.lower() + '_' + x
        #     gdb = "Database Connections/"+egdbname+".sde"
        #     MDS = gdb + '/' + egdbname + ".sde." + egdbname
        # if arcpy.Exists(MDS):
        #     print("Mosaic dataset "+MDS+" already exist")
        # else:
        #     self.createMosaicDataset(gdb,egdbname)
        #     logging.debug(date1 +": Mosaic Dataset "+egdbname+" is created")
        #
        # #print(MDS)
        # workspace = datafolder + "/" + x
        # self.addRastertoMDS(MDS, workspace)
        # self.update_mosaic_statistics(MDS)
        # self.addDateField(MDS)
        # self.updateDateField(MDS, x)
        #
        # countProduct = country.lower() + "_" + x
        # self.createagsfile(ws, countProduct, server_url, username, password)
        # sdeCon = sdeCatalog+countProduct+'.sde/'
        # fc = sdeCon+countProduct
        # print(fc)
        # if arcpy.Exists(fc):
        #     print("data "+x+" is available")
        # else:
        #     print("data "+x+" is not availabe")
        # con = os.path.join(ws, x + ".ags")
        # service = x
        # sddraft = os.path.join(ws, x + ".sddraft")
        # sd = os.path.join(ws, x + ".sd")
        # self.createImageSDDraft(fc, sddraft, service, con)
        # self.insertRTFFile(x, sddraft)
        # if not os.path.exists(sd):
        #     self.analyzeSDDraft(sddraft, sd)
        # else:
        #     print("file "+sd+" already exist")
        # if not os.path.exists(con):
        #     print("file "+con+" is not exist")
        # else:
        #     print("file "+con+" already exist")
        # arcpy.UploadServiceDefinition_server(sd, con)
        # print(con)
    except Exception as e:
        logger.debug(e.message)
else:
    lst_ds = arcpy.ListDataStoreItems(ags, "DATABASE")
    for ds in lst_ds:
        ds_name = ds[0]
        if ds_name == tmp_data_store:
            arcpy.RemoveDataStoreItem(ags, "DATABASE", tmp_data_store)
    # copy the .sde file to a uniquely named scratch file and register that copy temporarily
    temp_sde = arcpy.CreateScratchName(
        "DZ" + str(uuid.uuid4()).replace("-", ""), ".sde", None,
        arcpy.env.scratchFolder)
    with open(sde_file, 'rb') as f1:
        with open(temp_sde, 'wb') as f2:
            f2.write(f1.read())
    arcpy.AddDataStoreItem(ags, "DATABASE", tmp_data_store, temp_sde, temp_sde)
    # if the temporary data store shows up in the listing, the connection is usable; clean it up again
    validity = "unknown"
    lst_ds = arcpy.ListDataStoreItems(ags, "DATABASE")
    for ds in lst_ds:
        ds_name = ds[0]
        if ds_name == tmp_data_store:
            validity = "valid"
            arcpy.RemoveDataStoreItem(ags, "DATABASE", tmp_data_store)
    if validity == "valid":
        arcpy.AddMessage(" SUCCESS.")
    else:
        arcpy.AddMessage(
except arcpy.ExecuteError:
    e = sys.exc_info()[1]
    print(e.args[0])
    print arcpy.GetMessages() + "\n\n"
    sys.exit("Failed establishing server connection")

# If the Image Service does not exist, create service drafts to publish the service.
# Otherwise, add the new raster to the existing Image Service.

# When the Image Service does not exist yet, create a service definition draft and publish the service
if not os.path.exists(workspace_gdb + "\\" + collection_id + "Service.sd"):
    try:
        print "Try Creating SD draft"
        if data_store_path not in [i[2] for i in arcpy.ListDataStoreItems(con, 'FOLDER')]:
            # Register the folder with the ArcGIS Server site --> both the server path (out_folder_path 1.) and the client path (out_folder_path 2.) are the same
            dsStatus = arcpy.AddDataStoreItem(con, "FOLDER",
                                              "Workspace for " + collection_id + 'Service',
                                              data_store_path, data_store_path)
            print "Data store : " + str(dsStatus)
        Sddraft = os.path.join(workspace_gdb, collection_id + "Service" + ".sddraft")
        # Name = name of the image files/folder, i.e. of the sddraft
        # The second-to-last parameter of the CreateImageSDDraft call must later be filled with the metadata, as the description of the service
        arcpy.CreateImageSDDraft(os.path.join(workspace_gdb, collection_id + '.gdb\\Master_' + collection_id),
                                 Sddraft, collection_id + "Service", 'ARCGIS_SERVER', None, False,
                                 'WaCoDiS', str(json_metadata['productType']),
                                 str(json_metadata['productType']) + ",image service, WaCoDiS")
    except arcpy.ExecuteError:
        e = sys.exc_info()[1]
        print(e.args[0])
        print arcpy.GetMessages() + "\n\n"
        sys.exit("Failed in creating SD draft")

# Analyze the service definition draft
Sddraft = os.path.join(workspace_gdb, collection_id + "Service" + ".sddraft")
Sd = os.path.join(workspace_gdb, collection_id + "Service" + ".sd")
print("The following was returned during analysis of the image service:") for key in analysis.keys(): print("---{}---".format(key.upper())) for ((message, code), layerlist) in analysis[key].iteritems(): print(" {} (CODE {})".format(message, code)) print(" applies to: {}".format(" ".join( [layer.name for layer in layerlist]))) print("Start stage and upload the service") # Stage and upload the service if the sddraft analysis did not contain errors if analysis['errors'] == {}: try: print("Adding data path to data store to avoid copying data to server") arcpy.AddDataStoreItem(con, "FOLDER", "Images", MyWorkspace, MyWorkspace) print "Staging service to create service definition" arcpy.StageService_server(Sddraft, Sd) print "Uploading the service definition and publishing image service" arcpy.UploadServiceDefinition_server(Sd, con) print "Service successfully published" except arcpy.ExecuteError: # print(arcpy.GetMessages() + "\n\n") # sys.exit("Failed to stage and upload service") print("") except Exception as err: print("")
import sys

import arcpy

conn = sys.argv[1]
name = sys.argv[2]
server_path = sys.argv[3]

# check whether an additional publisher path has been provided
if len(sys.argv) > 4:
    publisher_path = sys.argv[4]
else:
    publisher_path = ""

# check whether the folder to register is already registered => if yes, delete the formerly registered folder
for item in arcpy.ListDataStoreItems(conn, "FOLDER"):
    current_name = item[0]
    if current_name == name:
        arcpy.RemoveDataStoreItem(conn, "FOLDER", name)

if publisher_path == "":
    arcpy.AddDataStoreItem(conn, "FOLDER", name, server_path)
else:
    arcpy.AddDataStoreItem(conn, "FOLDER", name, server_path, publisher_path)