Exemplo n.º 1
0
def CreateOutGdbXml(OutputGdb, SelectXml):
    """Create the output file geodatabase and import the chosen XML schema.

    OutputGdb -- backslash-delimited path of the file geodatabase to create
    SelectXml -- name of the schema variant to load (a key of the lookup below)
    """
    arcpy.AddMessage(
        "\nPreliminary Step: Creating output geodatabase, importing schema definition"
    )

    # Break the user-supplied path into its parent folder and the gdb name.
    path_parts = OutputGdb.split('\\')
    parent_folder = '\\'.join(path_parts[:-1])
    gdb_name = path_parts[-1]

    # Lookup table: schema variant -> relative path of its XML workspace doc.
    schema_docs = {
        'Standard':
        '..\\Parcel_Fabric_Publication_Premier_V3.1_BETA\\XML_CADNSDI_WITH_AREA_IN_1STDIV.xml',
        'Montana':
        '..\\Parcel_Fabric_Publication_Premier_V3.1_BETA\\XML_CADNSDI_MONTANA_HARN.xml',
        'Ohio':
        '..\\Parcel_Fabric_Publication_Premier_V3.1_BETA\\XML_Ohio.xml',
        'Wisconsin':
        '..\\Parcel_Fabric_Publication_Premier_V3.1_BETA\\XML_CADNSDI_WISCONSIN_HARN.xml',
        'California':
        '..\\Parcel_Fabric_Publication_Premier_V3.1_BETA\\XML_CADNSDI_CALIFORNIA_TEAL_ALBERS.xml',
        'Utah':
        '..\\Parcel_Fabric_Publication_Premier_V3.1_BETA\\PLSSFABRIC.XML'
    }
    selected_doc = schema_docs[SelectXml]

    # Build the empty gdb, then pull in just the schema (no data rows).
    arcpy.CreateFileGDB_management(parent_folder, gdb_name, 'CURRENT')
    arcpy.ImportXMLWorkspaceDocument_management(OutputGdb, selected_doc,
                                                'SCHEMA_ONLY')
Exemplo n.º 2
0
def prepare_test(test_type):
    """Prepare the geodatabase data for running tests on.

    Returns a ``(in_gdb, out_report_folder, json_results)`` tuple where
    ``in_gdb`` is a freshly built file gdb when arcpy is importable,
    otherwise the OGR geodatabase path from the test config.
    """
    cfg = TEST_CONFIG[test_type]
    test_type = cfg['name']
    out_report_folder = os.path.join(tempfile.gettempdir(), test_type)
    # makedirs(exist_ok=True) avoids the check-then-create race of the
    # os.path.exists() + os.mkdir() pair and keeps reruns idempotent.
    os.makedirs(out_report_folder, exist_ok=True)

    # importlib.util.find_spec replaces pkgutil.find_loader, which is
    # deprecated and removed in Python 3.14.
    import importlib.util
    if importlib.util.find_spec('arcpy') is not None:
        import arcpy
        arcpy.env.overwriteOutput = True
        xml_schema = cfg['xml_schema']
        in_gdb = arcpy.CreateFileGDB_management(
            out_folder_path=out_report_folder,
            out_name=test_type,
        ).getOutput(0)
        arcpy.ImportXMLWorkspaceDocument_management(
            target_geodatabase=in_gdb,
            in_file=xml_schema,
            import_type='SCHEMA_ONLY',
        )
    else:
        in_gdb = cfg['ogr_geodatabase']

    json_results = cfg['json_results']
    return (in_gdb, out_report_folder, json_results)
Exemplo n.º 3
0
def prepare_test(test_type):
    """Prepare the geodatabase data for running tests on.

    Returns ``(in_gdb, out_report_folder, json_results)``; ``in_gdb`` is a
    newly created file gdb when ``arcpy_found`` is true, otherwise the OGR
    geodatabase path from the test config.
    """
    cfg = TEST_CONFIG[test_type]
    test_type = cfg['name']
    out_report_folder = os.path.join(tempfile.gettempdir(), test_type)
    # makedirs(exist_ok=True) avoids the check-then-create race of the
    # os.path.exists() + os.mkdir() pair and keeps reruns idempotent.
    os.makedirs(out_report_folder, exist_ok=True)

    if arcpy_found:
        xml_schema = cfg['xml_schema']
        in_gdb = arcpy.CreateFileGDB_management(out_report_folder,
                                                test_type).getOutput(0)
        arcpy.ImportXMLWorkspaceDocument_management(in_gdb, xml_schema,
                                                    "SCHEMA_ONLY")
    else:
        in_gdb = cfg['ogr_geodatabase']

    json_results = cfg['json_results']

    return (in_gdb, out_report_folder, json_results)
Exemplo n.º 4
0
def main():
    """Export OSM data from MongoDB into a file geodatabase, optionally
    clipped to an input feature, and optionally rebuild an .aprx project
    (and vector tile package) pointing at the new geodatabase.

    Reads tool parameters 0-7 from arcpy and connection/path settings from
    settings.ini next to this script.
    """
    # --- Tool parameters -------------------------------------------------
    gdb_path = arcpy.GetParameterAsText(0)
    input_feature = arcpy.GetParameter(1)
    all_the_world = bool(arcpy.GetParameter(2))
    to_clip = bool(arcpy.GetParameter(3))
    osm_scheme = arcpy.GetParameterAsText(4)
    layer_config_file = arcpy.GetParameterAsText(5)
    aprx_model = arcpy.GetParameterAsText(6)
    create_vtpk = bool(arcpy.GetParameter(7))

    # settings.ini is expected to sit next to this script.
    pythonPath = os.path.dirname(os.path.realpath(sys.argv[0]))
    settings = ConfigParser()
    settings.read(pythonPath + "/settings.ini")

    db_server = CommonFunctions.readParameter(settings, "database",
                                              'db_server')
    db_port = CommonFunctions.readParameter(settings, "database", 'db_port')
    db_database = CommonFunctions.readParameter(settings, "database",
                                                'db_database')
    db_collection = CommonFunctions.readParameter(settings, "database",
                                                  'db_collection')
    done_path = CommonFunctions.readParameter(settings, "directories",
                                              'done_path')
    tiling_scheme = CommonFunctions.readParameter(settings, "models",
                                                  'tiling_scheme')
    # Batch size used elsewhere (e.g. readSingleLayer); kept global on purpose.
    global element_at_time
    element_at_time = int(
        CommonFunctions.readParameter(settings, "general", 'element_at_time'))

    # --- Mongo connection and indexes ------------------------------------
    client = MongoClient(db_server, int(db_port))
    db = client[db_database]
    collection = db[db_collection]
    collection.create_index([("id", ASCENDING)], background=True)
    collection.create_index([("osm_type", ASCENDING)], background=True)
    # create_index replaces the deprecated ensure_index (removed in pymongo 4)
    # and matches the surrounding index-creation calls.
    collection.create_index([("geometry", GEOSPHERE)], background=True)
    collection.create_index([("geometry.type", ASCENDING)], background=True)
    collection.create_index([("nd.ref", ASCENDING)], background=True)
    collection.create_index([("member.ref", ASCENDING)], background=True)
    collection.create_index([("osm_type", ASCENDING), ("geometry", ASCENDING)],
                            background=True)
    collection.create_index([("osm_type", ASCENDING), ("id", ASCENDING)],
                            background=True)

    # --- Collect clip geometries from the input feature -------------------
    geometries = []
    if not all_the_world:
        # Round-trip the input feature through a temporary GeoJSON file so we
        # can read its geometries as plain dicts.
        if (os.path.exists(os.path.join(done_path, "workjson.geojson"))):
            os.remove(os.path.join(done_path, "workjson.geojson"))
        arcpy.FeaturesToJSON_conversion(
            input_feature,
            os.path.join(done_path, "workjson.geojson").replace("\\", "/"),
            geoJSON="GEOJSON")

        time.sleep(1)
        content = ''
        with open(
                os.path.join(done_path, "workjson.geojson").replace("\\",
                                                                    "/")) as f:
            content = f.readlines()

        resultjson = ''
        for single in content:
            resultjson = resultjson + single.replace("\n", "")
        if (os.path.exists(os.path.join(done_path, "workjson.geojson"))):
            os.remove(os.path.join(done_path, "workjson.geojson"))
        d = json.loads(resultjson)
        features = d['features']
        for feature in features:
            geometries.append(feature['geometry'])

        if to_clip:
            if (arcpy.Exists("in_memory/polygon_selection")):
                arcpy.Delete_management("in_memory/polygon_selection")
            arcpy.management.CreateFeatureclass(
                "in_memory",
                "polygon_selection",
                "POLYGON",
                "",
                "DISABLED",
                "DISABLED",
                spatial_reference=
                "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 11258999068426.2;-100000 10000;-100000 10000;8.98315284119521E-09;0.001;0.001;IsHighPrecision"
            )

            # Open an InsertCursor and insert the new geometry
            cursor = arcpy.da.InsertCursor('in_memory/polygon_selection',
                                           ['SHAPE@'])
            for feature in features:
                if (feature['geometry']['type'] == "Polygon"):
                    # Outer ring only; holes are ignored for the clip mask.
                    geom = feature["geometry"]["coordinates"][0]
                    array = arcpy.Array()
                    for g in geom:
                        array.append(arcpy.Point(g[0], g[1]))
                    # BUGFIX: these two lines previously sat outside the
                    # ``if``, so non-Polygon features re-inserted the previous
                    # polygon (or raised NameError on the first feature).
                    polygon = arcpy.Polygon(array)
                    cursor.insertRow([polygon])
            # Delete cursor object
            del cursor

    # --- Create the output geodatabase and load each configured layer ----
    gdbname = gdb_path.replace("\\", "/")
    gdbname = gdbname.split("/")[len(gdbname.split("/")) - 1]
    database_path = gdb_path.replace(gdbname, "")
    arcpy.AddMessage("Create Geodatabase: " + gdbname + " using " +
                     osm_scheme + " in directory " + database_path)
    arcpy.CreateFileGDB_management(database_path, gdbname)
    arcpy.ImportXMLWorkspaceDocument_management(gdb_path, osm_scheme)

    arcpy.AddMessage("Read layer config file")
    with open(layer_config_file) as f:
        content = f.readlines()

    # Each config line is "<query>,<layer name>,...".
    for single in content:
        single = single.replace("\n", "")
        arcpy.AddMessage("Process " + single.split(",")[1] + ": " +
                         single.split(",")[0])
        readSingleLayer(collection, single, geometries,
                        os.path.join(database_path, gdbname), all_the_world,
                        to_clip)
    client.close()

    # --- Optionally rebuild the .aprx from a model project ----------------
    if aprx_model != "":
        arcpy.AddMessage('Rebuild aprx file from model')
        aprx = arcpy.mp.ArcGISProject(aprx_model)

        # Collect the distinct databases referenced by the model's layers so
        # each can be repointed at the freshly built gdb.
        dbs = []
        m = aprx.listMaps()[0]
        arcpy.AddMessage("Update Model databases")
        for lyr in m.listLayers():
            if (lyr.supports("connectionProperties") == True):
                if lyr.connectionProperties:
                    if lyr.connectionProperties['connection_info'][
                            'database'] not in dbs:
                        dbs.append(lyr.connectionProperties['connection_info']
                                   ['database'])

        for db in dbs:
            aprx.updateConnectionProperties(
                db, os.path.join(database_path, gdbname), True, False)

        absname = gdbname.split(".")[0]
        if (arcpy.Exists(os.path.join(database_path, absname + ".aprx"))):
            arcpy.Delete_management(
                os.path.join(database_path, absname + ".aprx"))
        aprx.saveACopy(os.path.join(database_path, absname + ".aprx"))

        if create_vtpk:
            for m in aprx.listMaps():
                arcpy.AddMessage("Tile index creation")
                if (arcpy.Exists(database_path + "/" + absname + "Index.gdb")):
                    arcpy.Delete_management(database_path + "/" + absname +
                                            "Index.gdb")
                arcpy.CreateFileGDB_management(database_path,
                                               absname + "Index.gdb")
                arcpy.management.CreateVectorTileIndex(
                    m, database_path + "/" + absname + "Index.gdb/osmIndex",
                    "EXISTING", tiling_scheme, 10000)

                arcpy.AddMessage("Vector tile map creation")
                if (arcpy.Exists(database_path + "/" + absname + ".vtpk")):
                    arcpy.Delete_management(database_path + "/" + absname +
                                            ".vtpk")
                arcpy.management.CreateVectorTilePackage(
                    m, database_path + "/" + absname + ".vtpk", "EXISTING",
                    tiling_scheme, "INDEXED", 73957190.9489637,
                    1128.49717634527,
                    database_path + "/" + absname + "Index.gdb/osmIndex",
                    "OSM", "World, Vector")
        del aprx

    arcpy.ClearWorkspaceCache_management()
Exemplo n.º 5
0
    print("****Finished tasks as the gdb admin user (sde) ****\n")

    # Create schema and apply permissions.
    # Create a connection as the data owner.
    print("\nCreating a connection to the geodatabase as the data owner (gdb)")
    ownerConn = arcpy.CreateDatabaseConnection_management(
        r'<path_to_save_connection_file>', '<file_name>.sde', platform,
        instance, '<authentication_type>', '<gdb_admin>',
        '<gdb_admin_password>', 'SAVE_USERNAME', database)

    # Import the data as the gdb user and specify the custom config keyword that the gdb admin has provided
    print(
        "\tImporting the data as the data owner (gdb) using a config keyword named 'custom'"
    )
    arcpy.ImportXMLWorkspaceDocument_management(
        ownerConn, r'<path_to_workspace_file>\<file_name>.xml', 'SCHEMA_ONLY',
        'CUSTOM')

    # Get a list of feature classes, tables and feature datasets
    # and apply appropriate permissions.
    print(
        "\tBuilding a list of feature classes, tables, and feature datasets in the geodatabase"
    )
    arcpy.env.workspace = ownerConn[
        0]  #note environments do not work with result objects.
    dataList = arcpy.ListTables() + arcpy.ListFeatureClasses(
    ) + arcpy.ListDatasets("", "Feature")

    # Use roles to apply permissions.
    print(
        "\tGranting appropriate privileges to the data for the 'viewers' and 'editors' roles"
    #print("Import a new geodatabase configuration keyword named 'custom'")
    #arcpy.ImportGeodatabaseConfigurationKeywords_management(gdbAdminConn,  r'C:\presentations\DevSummit2016\Demos\Demo3\CustomConfigKeyword')
    #print("Finished tasks as the gdb admin user (sde) \n")
    
    # Create schema and apply permissions.
    # Create a connection as the data owner.
    print("Creating a connection to the geodatabase as the data owner (onemapgdb_owner)")
    ownerConn = arcpy.CreateDatabaseConnection_management('D:/versioning_training',
                                                          '*****@*****.**', platform, instance,
                                                          'DATABASE_AUTH', 'onemapgdb_owner','onemapgdb_owner',
                                                          'SAVE_USERNAME', database)
    
    # Import the data as the gdb user and specify the custom config keyword that the gdb admin has provided
    print("Importing the data as the data owner (onemapgdb_owner) using a default config keyword named 'default'")
    arcpy.ImportXMLWorkspaceDocument_management(ownerConn,
                                                '//OMM-W20/CommonSharedFolder_TempOnly/Script_for_OMMtest_DB',
                                                'DATA')

    # Get a list of feature classes, tables and feature datasets
    # and apply appropriate permissions.
    print("Building a list of feature classes, tables, and feature datasets in the geodatabase")
    arcpy.env.workspace = ownerConn[0] #note environments do not work with result objects.
    dataList = arcpy.ListTables() + arcpy.ListFeatureClasses() + arcpy.ListDatasets("", "Feature")
    
    #List the data in the dataList
    print("Print the list of dataset")
    for dataset in dataList:
        print(dataset)

    # Use roles to apply permissions.
    print("Granting appropriate privileges to the data for the 'viewers' and 'editors' roles")
Exemplo n.º 7
0
# --- CIP shell creation (script fragment) ---------------------------------
timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
# BUGFIX: arcpy.GetMessage(index) *retrieves* a geoprocessing message and
# takes an integer; AddMessage is the call that writes status output.
arcpy.AddMessage("Start time: " + timestamp)

# Variables
inGDB = arcpy.GetParameterAsText(0)  # GDB to extract CIP from
name = arcpy.GetParameterAsText(1)  # Installation name - string, no spaces
path = arcpy.GetParameterAsText(2)  # Output location - folder directory
xmlWorkspace = arcpy.GetParameterAsText(
    3)  # XML Shell Document - file variable
name = str(name + ".gdb").replace(" ", "_")
cipLoc = os.path.join(path, name)  # full path of the gdb being built

# Create CIP shell with empty GDB and XML Workspace Document
arcpy.AddMessage("Creating empty FGDB")
arcpy.CreateFileGDB_management(path, name)
arcpy.AddMessage("Importing XML schema into FGDB")
# BUGFIX: import into the full output path; the bare gdb name resolved
# against the current working directory, not the chosen output folder.
arcpy.ImportXMLWorkspaceDocument_management(cipLoc, xmlWorkspace,
                                            "SCHEMA_ONLY", "DEFAULTS")

arcpy.env.workspace = inGDB  # Location of GDB to extract CIP from

# Feature datasets expected in the CIP schema.
CIP_Datasets = ('Auditory', 'Cadastre', 'environmentalCulturalResources',
                'environmentalNaturalResources', 'environmentalRestoration',
                'MilitaryRangeTraining', 'Pavements', 'Planning',
                'RealProperty', 'Recreation', 'Security', 'Transportation',
                'WaterWays')

# Feature classes expected in the CIP schema.
# NOTE(review): this tuple is truncated in this snippet — its closing
# parenthesis lies beyond the visible fragment.
CIP_Layers = ('NoiseZone_A', 'Installation_A', 'LandParcel_A', 'Outgrant_A',
              'Site_A', 'Site_P', 'HistoricDistrict_A', 'Wetland_A',
              'EnvRemediationSite_A', 'ImpactArea_A',
              'MilQuantityDistCombinedArc_A', 'MilRange_A', 'MilTrainingLoc_A',
              'PavementBranch_A', 'PavementSection_A', 'AirAccidentZone_A',
Exemplo n.º 8
0
# Python 2 script fragment: builds a deliverable gdb from a GDB2S schema XML
# and appends fiber-cable rows into it. Names such as sdeTableView, sdeTable,
# folder_path, today and num_of_rows2 are defined outside this fragment.
# Flag every updated row as an insert operation ('I').
arcpy.CalculateField_management("sdeTableView", "OPERATION_TYPE", "'I'",
                                "PYTHON_9.3")
print "... " + (arcpy.GetMessages())
print("\n")

print "..." + str(num_of_rows2) + " new rows updated"
print("\n")

##### Create Deliverable GDB with GDB2S Schema #####

# Date-stamped deliverable name, e.g. SEA_FiberCableSchedule_<today>.
gdbName2 = "SEA_FiberCableSchedule_" + str(today)

arcpy.CreateFileGDB_management(folder_path, gdbName2)

gdbPath2 = folder_path + "/" + gdbName2 + ".gdb"

print "... Deliverable GDB Created with GDB2S Schema"
print("\n")

xmlPath = r"C:/GIS/LLD_Schema/GDB2S_SCHEMA_LLD_RELEASED_03JAN_V1.xml"  ### The path to the schema XML

# Target table inside the new gdb that the import is expected to create.
delivPath = gdbPath2 + '/' + "Fibercable_Schedule"

arcpy.ImportXMLWorkspaceDocument_management(
    gdbPath2, xmlPath)  ### Import the XML Schema from GDB2

# NO_TEST skips field-map validation, so source/target schemas must match.
arcpy.Append_management(sdeTable, delivPath, 'NO_TEST')

print "... Data Appended - Process Complete"
Exemplo n.º 9
0
# Python 2 script fragment: creates a gdb, loads a schema XML, then copies
# Transmedia and Permit data into it. full_path, file_path and gdb_name are
# defined outside this fragment.
##### Creating the geodatabase if it does not exist #####

if arcpy.Exists(full_path):
    print "... " + "This geodatabase already exists"
    # Abort rather than overwrite an existing geodatabase.
    exit()

else:
    print "... " + "Geodatabase creation underway"
    arcpy.CreateFileGDB_management(file_path, gdb_name)
    print("\n")

##### Copy Transmedia into new GDB #####

transPath = "C:/GIS/DBconnections/GISADMIN Connections/Seattle as GISADMIN.sde/Seattle.DBO.Core_Delivered/Seattle.DBO.Transmedia_Delivered"

# Schema.xml is expected to create the Transmedia_Overall table targeted below.
arcpy.ImportXMLWorkspaceDocument_management(
    full_path, r'C:\GIS\_Seattle_Workspaces\Segment_Permit_Status\Schema.xml')

newTransmedia = full_path + "/" + "Transmedia_Overall"

# NO_TEST skips field-map validation, so source/target schemas must match.
arcpy.Append_management(transPath, newTransmedia, "NO_TEST")

print "... " + "Copied Transmedia_Overall into GDB"
print("\n")

##### Copy Permit Polygons into new GDB #####

permitPath = "C:/GIS/DBconnections/GISADMIN Connections/Seattle as GISADMIN.sde/Seattle.DBO.Permits"

arcpy.FeatureClassToFeatureClass_conversion(permitPath, full_path,
                                            "Permits_Overall")
# --- Enterprise geodatabase bootstrap (script fragment) -------------------
roleName = None
# Raw string so the backslash in the instance name is literal ("\S" is not a
# valid escape and raises a DeprecationWarning on newer Pythons).
dbInstance = r"ESRIBX0373\SQLEXPRESS"

arcpy.env.overwriteOutput = True
print("Creating database")
# create database
arcpy.CreateEnterpriseGeodatabase_management("SQL_Server", dbInstance, databasename, "OPERATING_SYSTEM_AUTH",
                                             "", "", "SDE_SCHEMA",
                                             "sde", "sde", "",
                                             r"D:\EGT16\Part 1 - Administration\keycodes")

# Create Connection
print("Creating connection")
arcpy.CreateDatabaseConnection_management(connectionFolderPath, databasename, "SQL_SERVER",
                                          dbInstance, "OPERATING_SYSTEM_AUTH", "", "", "SAVE_USERNAME", databasename)

## Create list of users
print("Creating users")
userList = ['jack', 'linda', 'bill']

## Create users and assign to editor role
for user in userList:
    print("Creating user: {}".format(user))
    # NOTE(review): connectionFilepath is not defined in this fragment (only
    # connectionFolderPath is) — confirm it comes from code outside this view.
    # The hard-coded password should come from configuration, not source.
    arcpy.CreateDatabaseUser_management(connectionFilepath, "DATABASE_USER", user, "SomePassword01")  # , roleName)

# import xml workspace document
print("Importing XML Workspace")
arcpy.ImportXMLWorkspaceDocument_management(connectionFilepath,
                                            r'D:\EGT16\Part 1 - Administration\WINDMILLS.XML',
                                            'DATA')

print("Script complete")
Exemplo n.º 11
0
    else:
        return ckt + ','


def isTiePoint(ckts):
    """Return 'Y' when the comma-separated circuit list names more than two
    circuits (the point ties multiple feeders); otherwise return None."""
    circuit_count = len(ckts.split(','))
    return 'Y' if circuit_count > 2 else None


# Python 2 script fragment: (re)fills the electric search table from every
# facility-ID feature class. searchTable, workspace, MM_ELECTRICSEARCH_XML,
# fcFacilityID, elecFldsDest, connectionRoot, fldsFacilityID, checkValue and
# changeAliasName are defined outside this fragment.
if arcpy.Exists(searchTable):
    # Reuse the existing table: wipe its rows, keep its schema.
    print 'Truncating...', searchTable
    arcpy.TruncateTable_management(searchTable)
else:
    # First run: materialize the table schema from the XML workspace doc.
    arcpy.ImportXMLWorkspaceDocument_management(workspace,
                                                MM_ELECTRICSEARCH_XML,
                                                "Schema_Only")

# Start Main
with arcpy.da.Editor(workspace) as edit:
    for fc in fcFacilityID:
        ic = arcpy.da.InsertCursor(searchTable, elecFldsDest)
        print 'Inserting...', changeAliasName(fc)
        with arcpy.da.SearchCursor(connectionRoot % (fc),
                                   fldsFacilityID) as sc:
            for scrow in sc:
                # Destination row: facility id, alias, value, two unused
                # slots, "<alias>-<facility id>" display key, geometry/extra.
                row = ((checkValue(scrow[0]), changeAliasName(fc),
                        checkValue(scrow[1]), None, None,
                        changeAliasName(fc) + '-' + checkValue(scrow[0]),
                        scrow[2]))
                ic.insertRow(row)
Exemplo n.º 12
0
    return len(rows)

def updateRelatedUnits(row, destFlds, guid):
    """Copy ``row`` onto the first related unit record whose
    eSupportStructure_GLOBALID equals ``guid``.

    row      -- tuple of values matching ``destFlds`` order
    destFlds -- field names the update cursor writes
    guid     -- GLOBALID of the parent support structure

    Only the first match is updated: the loop breaks after one updateRow so
    the same caller-supplied row is not stamped onto every matching record.
    """
    # NOTE(review): the where clause is built by string concatenation;
    # acceptable only because guid originates from the geodatabase itself.
    with arcpy.da.UpdateCursor(unitTable, destFlds, where_clause="eSupportStructure_GLOBALID = " + "'" + guid + "'") as uc:
        for ucrow in uc:
            # Replace the fetched row wholesale with the caller's values.
            ucrow = row
            uc.updateRow(ucrow)
            # break and return out of the for loop so we dont update the entire row with same cursor object
            break
        return
    # (an unreachable "del uc" after the return was removed; the with
    # statement already closes the cursor)
        
# prep unitTable
# Python 2 script fragment: unitTable, workspace, MM_POLEEQUIPMENT_XML and
# stormName are defined outside this fragment.
if not arcpy.Exists(unitTable):
    # First run: build the table schema from the XML workspace document,
    # then stamp every new record with the current storm name by default.
    arcpy.ImportXMLWorkspaceDocument_management(workspace, MM_POLEEQUIPMENT_XML, "Schema_Only")
    #print('Setting default value: %s) % (stormName)
    arcpy.AssignDefaultToField_management(unitTable,"StormName",stormName)
    #print('Setting default value: %i) % (sapNetwork)
    #arcpy.AssignDefaultToField_management(unitTable,"SAPNetwork",sapNetwork)
else:
    # Table already exists: wipe rows, keep schema and defaults.
    print 'Truncating...',unitTable
    arcpy.TruncateTable_management(unitTable)
# NOTE(review): the index check below was left disabled by the original
# author; kept as-is.
#print 'Checking for index eSupportStructure_GLOABLID...'
#indexes = arcpy.ListIndexes(unitTable)
#indexNames = []
#for index in indexes:
#    print index.name
#    indexNames.append(index.name)
#if "IDX_poleGUID" not in indexNames:
#    print'You need to add the Index' #arcpy.AddIndex_management (truncTable, "eSupportStructure_GLOBALID", "IDX_poleGUID", "UNIQUE", "ASCENDING")
Exemplo n.º 13
0
        return ''
    else:
        return ckt + ','


def isTiePoint(ckts):
    """Flag points served by more than two circuits.

    Returns 'Y' when the comma-separated list in ``ckts`` holds more than
    two entries; returns None otherwise (implicitly).
    """
    # N entries are separated by N-1 commas, so >2 entries means >=2 commas.
    if ckts.count(',') >= 2:
        return 'Y'


# Python 2 script fragment: (re)fills the device search table. searchTable,
# workspace, MM_DEVICESEARCH_XML, fcFeederID, elecFldsDest and
# changeAliasName are defined outside this fragment; the for-loop body below
# is truncated in this snippet.
if arcpy.Exists(searchTable):
    # Reuse the existing table: wipe its rows, keep its schema.
    print 'Truncating...', searchTable
    arcpy.TruncateTable_management(searchTable)
else:
    # First run: materialize the table schema from the XML workspace doc.
    arcpy.ImportXMLWorkspaceDocument_management(workspace, MM_DEVICESEARCH_XML,
                                                "Schema_Only")

# Start Main
with arcpy.da.Editor(workspace) as edit:
    # NOTE(review): the facility-ID pass was disabled by the original author;
    # kept as-is.
    # for fc in fcFacilityID:
    # ic = arcpy.da.InsertCursor(searchTable,elecFldsDest)
    # print 'Inserting...',changeAliasName(fc)
    # with arcpy.da.SearchCursor(connectionRoot%(fc),fldsFacilityID) as sc:
    # for scrow in sc:
    # row = ((checkValue(scrow[0]),changeAliasName(fc),checkValue(scrow[1]),None,None,scrow[2]))
    # ic.insertRow(row)
    # del sc
    # del ic
    for fc in fcFeederID:
        ic = arcpy.da.InsertCursor(searchTable, elecFldsDest)
        print 'Inserting...', changeAliasName(fc)
Exemplo n.º 14
0
# Python 2 script: prompts for a personal geodatabase (.mdb) with a Tk file
# dialog, then converts it to a file geodatabase by round-tripping through an
# XML workspace document.
# Use Tkinter to get file path for mdb
root = Tkinter.Tk()
root.withdraw()  # hide the empty Tk main window; only the dialog shows
root.attributes('-topmost', True)  # keep the dialog above other windows
getMDBLoc = tkFileDialog.askopenfilename(parent=root,
                                         initialdir=sys.path[0],
                                         title='Select the MDB to convert')
root.attributes('-topmost', False)
mdbPath = os.path.dirname(getMDBLoc)
mdbFile = os.path.basename(getMDBLoc)
mdbLoc = os.path.abspath(os.path.join(mdbPath, mdbFile))
# Output gdb/xml take the mdb's base name (no extension).
installationName = os.path.splitext((os.path.split(mdbLoc)[1]))[0]
# NOTE(review): if the dialog is cancelled getMDBLoc is '' and mdbLoc
# resolves to the current directory, so this guard never fails — confirm
# whether a cancelled dialog should abort instead.
if len(mdbLoc) > 0:
    print("MDB selected: %s" % os.path.abspath(mdbLoc))
root.destroy()

arcpy.env.workspace = mdbPath

print "Creating FGDB Shell with same name as input Personal GDB"
arcpy.CreateFileGDB_management(mdbPath, installationName)
print "Exporting XML with GDB structure and data from MDB"
arcpy.ExportXMLWorkspaceDocument_management(mdbFile, installationName + ".xml",
                                            "DATA", "BINARY", "METADATA")
print "Importing XML into FGDB Shell"
arcpy.ImportXMLWorkspaceDocument_management(installationName + ".gdb",
                                            installationName + ".xml", "DATA",
                                            "DEFAULTS")

timestamp_end = time.strftime("%Y%m%d %H:%M:%S", time.localtime())
print "Finished @ " + timestamp_end
Exemplo n.º 15
0
    #print("Import a new geodatabase configuration keyword named 'custom'")
    #arcpy.ImportGeodatabaseConfigurationKeywords_management(gdbAdminConn,  r'C:\presentations\DevSummit2016\Demos\Demo3\CustomConfigKeyword')
    #print("Finished tasks as the gdb admin user (sde) \n")
    
    # Create schema and apply permissions.
    # Create a connection as the data owner.
    print("Creating a connection to the geodatabase as the data owner (fdgdb_owner)")
    ownerConn = arcpy.CreateDatabaseConnection_management('D:/FD/yee',
                                                          'DevSumOwner.sde', platform, instance,
                                                          'DATABASE_AUTH', 'fdgdb_owner','fdgdb_owner',
                                                          'SAVE_USERNAME', database)
    
    # Import the data as the gdb user and specify the custom config keyword that the gdb admin has provided
    print("Importing the data as the data owner (fdgdb_owner) using a default config keyword named 'default'")
    arcpy.ImportXMLWorkspaceDocument_management(ownerConn,
                                                'E:/FD_Presentation/data/EXAMPLEDATAVERSIONING.XML',
                                                'DATA')

    # Get a list of feature classes, tables and feature datasets
    # and apply appropriate permissions.
    print("Building a list of feature classes, tables, and feature datasets in the geodatabase")
    arcpy.env.workspace = ownerConn[0] #note environments do not work with result objects.
    dataList = arcpy.ListTables() + arcpy.ListFeatureClasses() + arcpy.ListDatasets("", "Feature")
    
    #List the data in the dataList
    print("Print the list of dataset")
    for dataset in dataList:
        print(dataset)

    # Use roles to apply permissions.
    print("Granting appropriate privileges to the data for the 'viewers' and 'editors' roles")
    #print("Import a new geodatabase configuration keyword named 'custom'")
    #arcpy.ImportGeodatabaseConfigurationKeywords_management(gdbAdminConn,  r'C:\presentations\DevSummit2017\Demos\Demo3\CustomConfigKeyword')
    print("Finished tasks as the gdb admin user (sde) \n")
    
    # Create schema and apply permissions.
    # Create a connection as the data owner.
    print("Creating a connection to the geodatabase as the data owner (gdb)")
    ownerConn = arcpy.CreateDatabaseConnection_management(databaseFolderConnecton,
                                                          'DevSumOwner.sde', platform, instance,
                                                          'DATABASE_AUTH', 'gdb','gdb',
                                                          'SAVE_USERNAME', database)
    
    # Import the data as the gdb user and specify the custom config keyword that the gdb admin has provided
    print("Importing the data as the data owner (gdb) using a default config keyword ")
    arcpy.ImportXMLWorkspaceDocument_management(ownerConn,
                                                'D:\Presentations\data\XMLEXPORT_MYANMAR_DATA.xml',
                                                'DATA')

    # Get a list of feature classes, tables and feature datasets
    # and apply appropriate permissions.
    print("Building a list of feature classes, tables, and feature datasets in the geodatabase")
    arcpy.env.workspace = ownerConn[0] #note environments do not work with result objects.
    dataList = arcpy.ListTables() + arcpy.ListFeatureClasses() + arcpy.ListDatasets("", "Feature")

    # Use roles to apply permissions.
    print("Granting appropriate privileges to the data for the 'viewers' and 'editors' roles")
    arcpy.ChangePrivileges_management(dataList, 'viewers', 'GRANT')
    arcpy.ChangePrivileges_management(dataList, 'editors', 'GRANT', 'GRANT')

    # Register the data as versioned.
    print("Registering the data as versioned")