Exemple #1
0
    def loadtarget(self, keyword=None, register='Y'):
        """Rebuild the SDE target feature class from the shapefile source.

        :param keyword: optional configuration keyword forwarded to the
            FeatureClassToFeatureClass conversion (None = omit it)
        :param register: 'Y' to register the loaded data as versioned
        """
        arcpy.Delete_management(self.sdetarget)

        # drop OBJECTID up front so the load does not generate an
        # objectid_1 column in the target
        arcpy.DeleteField_management(self.shpsource, "OBJECTID")

        # pass config_keyword only when one was supplied
        convert_kwargs = {} if keyword is None else {'config_keyword': keyword}
        arcpy.FeatureClassToFeatureClass_conversion(
            self.shpsource, self.sdepath, self.name, **convert_kwargs)

        self.field_remapping()

        if register == 'Y':
            if self.featuredataset == "":
                arcpy.RegisterAsVersioned_management(self.sdetarget)
            else:
                # the first feature class loaded into a feature dataset
                # bucket sets the versioning for the whole dataset
                arcpy.RegisterAsVersioned_management(self.sdepath)
Exemple #2
0
def register_db_as_versioned(ds_list, exception_list):
    """
    Register each dataset in the list as versioned in the workspace.

    Datasets matching a rule in ``exception_list`` are skipped, as are
    datasets that arcpy reports as already versioned.

    :param ds_list: iterable of dataset names/paths to process
    :param exception_list: exclusion rules consumed by checkexception()
    :return: None
    """
    # the enumerate index from the original was never used
    for ds in ds_list:
        # checkexception() returning 1 means "go ahead and register"
        registerdataset = checkexception(ds, exception_list)
        if registerdataset != 1:
            message(
                ("--- exception-Rule! Do no register: " + "\"" + ds + "\""), 0)
        else:
            desc = arcpy.Describe(ds)
            if not desc.isVersioned:
                try:
                    message(("registering as versioned: " + "\"" + ds + "\""),
                            0)
                    arcpy.RegisterAsVersioned_management(
                        ds, "NO_EDITS_TO_BASE")
                except Exception as ex:
                    message(("problem with registering: " + "\"" + ds + "\"" +
                             '\n' + arcpy.GetMessages(2)), 2)
                    # ex.message does not exist on Python 3; str(ex) works
                    # on both interpreter lines
                    message(str(ex))
            else:
                message(("--- already registered as versioned: " + "\"" + ds +
                         "\""), 0)
Exemple #3
0
    def version(self):
        """Register self.featureclass as versioned (NO_EDITS_TO_BASE) and log
        the python27 command required to create the versioned views."""

        # https://pro.arcgis.com/en/pro-app/tool-reference/data-management/register-as-versioned.htm
        logging.info('versioning {0}'.format(self.name))

        arcpy.RegisterAsVersioned_management(self.featureclass,
                                             "NO_EDITS_TO_BASE")

        # https://support.esri.com/en/technical-article/000023226
        # An ArcGIS 10.8 / ArcGIS Pro 2.5 (or newer) client registering data
        # as versioned in a 10.7.1-or-earlier Enterprise geodatabase (Oracle)
        # does not get the versioned view created for the table/feature class.

        # Shelling out to the python27 interpreter is not wired up yet, so the
        # required command is only logged for a human to run; the test fails
        # until that is automated (honest, at least).
        versionedviews_script = (pathlib.Path(__file__).parent.parent
                                 / 'py27'
                                 / 'create_versionedviews.py')

        # see gdb class for this path, perhaps 'C:\Python27\ArcGIS10.6'
        shell_command = r'{0} {1} {2}'.format(self.gdb.arcpy2path,
                                              versionedviews_script,
                                              self.name)
        logging.info(
            'YOU MUST CREATE versioned views from py27 using {0}'.format(
                shell_command))
        logging.info('YOU YES YOU MUST call this: {0}'.format(shell_command))
def init_msp_recordset(rs_info):
    """Report tracking/versioning/archiving state for one recordset and
    enable versioning and archiving where possible (enterprise GDB only)."""
    print("- " + rs_info.name + ":")

    # Editor tracking is set automatically while importing DbSchema.xml,
    # so only the missing case is worth reporting.
    if not rs_info.editorTrackingEnabled:
        print("  WARNING: Editor tracking is not enabled")

    # file/personal geodatabases support neither feature
    if not is_egdb:
        print("  WARNING: Versioning is not supported")
        print("  WARNING: Archiving is not supported")
        return

    if rs_info.isVersioned:
        print("  (Already registered as versioned)")
    elif rs_info.isArchived:
        print("  WARNING: Versioning cannot be set on archived " +
              rs_info.datasetType)
    else:
        arcpy.RegisterAsVersioned_management(rs_info.name)
        print("  Registered as versioned")

    if not rs_info.isArchived:
        arcpy.EnableArchiving_management(rs_info.name)
        print("  Archiving enabled")
    else:
        print("  (Archiving already enabled)")

    print("")
Exemple #5
0
def snapshotfunc(source_sde, target_sde):

    # all the deletes here
    prepare_target(target_sde)

    source_fd = os.path.join(source_sde, feature_dataset)

    target_fd = os.path.join(target_sde, feature_dataset)

    print "copying {0} to {1}".format(source_fd, target_fd)

    # all but the data tables are caught in this feature dataset web
    arcpy.Copy_management(source_fd, target_fd)

    for dt in data_tables:

        source_dt = os.path.join(source_sde, dt)

        target_dt = os.path.join(target_sde, dt)

        print "copying {0} to {1}".format(source_dt, target_dt)

        arcpy.Copy_management(source_dt, target_dt)

        # if our pal ESRI renames COUNT to COUNT_
        renamecol(target_dt, 'COUNT_', 'COUNT')

    print "registering as versioned {0}".format(feature_dataset)

    arcpy.RegisterAsVersioned_management(
        os.path.join(target_sde, feature_dataset))

    allgrantableobjects = feature_classes + entwined_tables + data_tables

    for copiedobject in allgrantableobjects:

        print "granting privs on {0}".format(copiedobject)

        arcpy.ChangePrivileges_management(
            os.path.join(target_sde, copiedobject), "DOITT_DTM_VIEWER",
            "GRANT", "")
        arcpy.ChangePrivileges_management(
            os.path.join(target_sde, copiedobject), "TAXMAP_VIEWER", "GRANT",
            "")
        arcpy.ChangePrivileges_management(
            os.path.join(target_sde, copiedobject), "DOF_READONLY", "GRANT",
            "")
        arcpy.ChangePrivileges_management(
            os.path.join(target_sde, copiedobject), "DOF_TAXMAP_EDITOR",
            "GRANT", "GRANT")
        arcpy.ChangePrivileges_management(
            os.path.join(target_sde, copiedobject), "DCP_MAP_EDITOR", "GRANT",
            "GRANT")

    exitcode = qa_target(source_sde, target_sde)

    return exitcode
Exemple #6
0
def update_fields():
    """Register the Streets dataset as versioned (if needed) and add the
    PubWorks/address columns to dt_streets when they are missing."""
    try:
        workspace = ec_arcpy_util.sde_workspace_via_host()

        arcpy.env.workspace = workspace
        ds = ec_arcpy_util.find_dataset("*Streets")
        fc = ec_arcpy_util.find_feature_class("*dt_streets", "Streets")

        # arcgis stuff for multi-users: kick everyone off before versioning
        arcpy.AcceptConnections(workspace, False)
        arcpy.DisconnectUser(workspace, "ALL")
        if not arcpy.Describe(ds).isVersioned:
            arcpy.RegisterAsVersioned_management(ds, "EDITS_TO_BASE")

        # presence of the "pwid" column marks the schema as already updated
        is_schema_updated = any(
            field.name == "pwid" for field in arcpy.ListFields(fc))

        if not is_schema_updated:
            # (name, type, text length or None, alias) for each new column
            new_columns = [
                ("pwid", "TEXT", 32, "PubWorks ID"),
                ("pwname", "TEXT", 64, "PubWorks Name"),
                ("st_fullname", "TEXT", 50, "Street Full Name"),
                ("from_addr_l", "LONG", None, "From/Left #"),
                ("to_addr_l", "LONG", None, "To/Left #"),
                ("from_addr_r", "LONG", None, "From/Right #"),
                ("to_addr_r", "LONG", None, "To/Right #"),
            ]
            for col_name, col_type, col_len, col_alias in new_columns:
                add_kwargs = {"field_alias": col_alias,
                              "field_is_nullable": "NULLABLE"}
                if col_len is not None:
                    add_kwargs["field_length"] = col_len
                arcpy.AddField_management("dt_streets", col_name, col_type,
                                          **add_kwargs)

    except Exception as e:
        print(e)
        logging.error("update_fields():{}".format(e))

    finally:
        logging.info("done.")
Exemple #7
0
def VersionManagement(DbItem):
    '''
    Takes a database table and unregisters it as versioned and then
    re-registers it as versioned without the option to move edits
    to base.  This compresses edits to base. Note: workspace must
    be specified prior to running this function.
    '''
    arcpy.UnregisterAsVersioned_management(DbItem, "NO_KEEP_EDIT",
                                           "COMPRESS_DEFAULT")
    arcpy.RegisterAsVersioned_management(DbItem, "NO_EDITS_TO_BASE")
    print "Edits from" + DbItem + " have been compressed to base"
def TruncateProductionTables(Tables):
    '''
    Remove every row from the given production tables
    while keeping their schemas intact.
    '''
    env.workspace = r"Database Connections/IAMUW-FS_CEO.sde"
    for production_table in Tables:
        # a versioned table cannot be truncated, so drop versioning first...
        arcpy.UnregisterAsVersioned_management(production_table,
                                               "NO_KEEP_EDIT",
                                               "COMPRESS_DEFAULT")
        arcpy.TruncateTable_management(production_table)
        # ...then restore versioning once the truncate has succeeded
        arcpy.RegisterAsVersioned_management(production_table,
                                             "NO_EDITS_TO_BASE")
Exemple #9
0
def EnableEditing(PubDomainDatabase):
    '''
    The workflow for updating domains revolves around editing the domains
    in the PUB Domain database tables and then republishing those tables
    back to the domains in the production databases.  To edit these tables
    versioning needs to be enabled.

    :param PubDomainDatabase: workspace whose tables should be versioned
    '''
    arcpy.env.workspace = PubDomainDatabase
    for table in arcpy.ListTables():
        # was: `if isVersioned == True: pass / else: register` --
        # inverted to a direct check, dropping the dead branch
        if not arcpy.Describe(table).isVersioned:
            arcpy.RegisterAsVersioned_management(table, "EDITS_TO_BASE")
def RegisterAsVersioned(nome):
    try:
        description = arcpy.Describe(nome)

        if description.isVersioned:
            raise VersionamentoError(
                u"A tabela {} ja eh versionada.".format(nome))

        if not description.canVersion:
            raise VersionamentoError(
                u"A tabela {} nao pode ser versionada.".format(nome))

        arcpy.RegisterAsVersioned_management(arcpy.env.workspace + "\\" + nome,
                                             "NO_EDITS_TO_BASE")
        print u"Entidade {} registrada como versionada".format(nome)
    except Exception as ex:
        print u"ERRO:", nome, ex.message
	else:
		pass

	# Reproject the photo points into the parcel layer's spatial reference
	SR = arcpy.Describe(Parcels)
	SRHelper = SR.spatialReference
	PhotoFeatureClass2 = Geodatabase + "\\PointAttachments"

	arcpy.Project_management(PhotoFeatureClass, PhotoFeatureClass2, SRHelper)
	# drop geometrically duplicate points, then discard the unprojected copy
	arcpy.DeleteIdentical_management(PhotoFeatureClass2, "Shape")
	arcpy.Delete_management(PhotoFeatureClass)

	EntGDB = arcpy.Describe(Geodatabase)
	# NOTE(review): this bare attribute access has no effect; presumably the
	# result was meant to be kept or compared below
	EntGDB.workspaceType

	# NOTE(review): `EntGDB is 'RemoteDatabase'` compares a Describe object to
	# a string with identity, so it is always False and the register call
	# below never runs; this likely should be
	# `EntGDB.workspaceType == 'RemoteDatabase'` -- confirm with the author
	if EntGDB is 'RemoteDatabase':
		arcpy.RegisterAsVersioned_management(PhotoFeatureClass2)
	else:
		pass

	arcpy.AddMessage("Step 2:  Converting Photos to points")

	# Load up the parcel dataset for the property association (and make a copy)

	# NOTE(review): the trailing "" is an adjacent empty string literal that
	# concatenates to nothing; the path is just Geodatabase + "\\Parcels"
	ParcelsFeatureClass = Geodatabase + "\\Parcels"""
	arcpy.CopyFeatures_management(Parcels, ParcelsFeatureClass)

	arcpy.AddMessage("Step 3:  Copying Parcels to staging geodatabase")

	# Snap Passenger Photos to nearest parcel edge (30ft. default)

	shape = arcpy.Describe(PhotoFeatureClass2).ShapeFieldName
# Script-tool entry: register the dataset parameter as versioned, optionally
# compressing the geodatabase, while holding off user connections.
try:
    data_set = arcpy.GetParameterAsText(0)

    # GetParameterAsText returns a *string*; any non-empty value -- including
    # the text "false" -- is truthy, so parse the flag explicitly instead of
    # using the raw text as a boolean (this was a bug).
    is_compress_db = False
    if arcpy.GetArgumentCount() > 1:
        is_compress_db = arcpy.GetParameterAsText(1).strip().lower() in (
            "true", "1", "yes")

    work_space = arcpy.env["scriptWorkspace"]
    # assert is stripped under `python -O`; validate explicitly instead
    if not work_space:
        raise RuntimeError("scriptWorkspace is not set")

    data_set_desc = arcpy.Describe(data_set)
    if not data_set_desc.isVersioned:
        arcpy.AddMessage("Disallowing new connections.")
        arcpy.AcceptConnections(work_space, False)
        arcpy.AddMessage("Disconnecting all Users.")
        arcpy.DisconnectUser(work_space, "ALL")
        arcpy.AddMessage(f"Registering {data_set} as Versioned.")
        arcpy.RegisterAsVersioned_management(data_set, "EDITS_TO_BASE")
        if is_compress_db:
            arcpy.AddMessage(
                "Compressing geodatabase, this could take some time.")
            arcpy.Compress_management(work_space)
        arcpy.AddMessage("Accepting User Connections.")
        arcpy.AcceptConnections(work_space, True)
    else:
        arcpy.AddMessage(
            f"Nothing to version, {data_set} is already registered as versioned."
        )

except Exception as e:
    arcpy.AddError(f"Problem in the script.\n {e}")
Exemple #13
0
def main():
    """Build/refresh the FIDA geodatabase, then (for SDE rebuilds) register
    the feature dataset and the standalone tables as versioned and enable
    replica tracking on each."""
    try:
        if sde_fida is False and del_exists is True:
            create_fgdb()
        load_json_file()
        if sde_fida is True:
            get_connected_user()
            block_connections()
            disconnect_user()
            accept_connections()
        set_workspace()
        if del_exists is True:
            create_fds()
            del_all_domains()
            create_domins()
            create_fcs()
        fill_domains()
        add_field()
        create_rel_classes()
        if sde_fida is True and del_exists is True:
            dataset_path = os.path.join(get_workspace(), featuredataset)
            arcpy.RegisterAsVersioned_management(in_dataset=dataset_path)
            print("Registered as versioned")  # was misspelled "Regisert"

            # # Datenbank ist auf Branch gestellt --> Archiving is already enabled on the dataset.

            # Enable Replica Tracking on the feature dataset
            arcpy.EnableReplicaTracking_management(dataset_path)
            # Execute ChangePrivileges
            arcpy.ChangePrivileges_management(
                dataset_path, "ARCSDE_EDITOR", "GRANT", "GRANT"
            )
            arcpy.ChangePrivileges_management(dataset_path, "ARCSDE_READER", "GRANT")

            # Register + enable replica tracking for each standalone table
            # (wichtig fuer Branchversioning). Replaces four copy-pasted
            # blocks from the original.
            for table_name in (
                "FIDA_UEBERSETZUNG",
                "FIDA_ARBEITSKUERZELLFP",
                "FIDA_MONITOR",
                "FIDA_INMUTATION",
            ):
                table_path = os.path.join(get_workspace(), table_name)
                arcpy.RegisterAsVersioned_management(in_dataset=table_path)
                arcpy.EnableReplicaTracking_management(table_path)
                print("Table: " + table_path + " enableReplica")

    except Exception:
        print("!!! Exception !!!")
        e = sys.exc_info()[1]
        print(e.args[0] if e.args else e)
        # make sure the SDE accepts connections again even after a failure
        if sde_fida is True:
            accept_connections()
Exemple #14
0
def createTables(surveyGDB, outWorkspace, prefix):
    '''Creates the domains, tables and relationships of the survey in the
    target workspace, prefixing every new table name with `prefix`.'''
    arcpy.AddMessage('\t-Creating Tables')
    arcpy.env.workspace = surveyGDB
    allTables = getSurveyTables(surveyGDB)

    dscW = arcpy.Describe(arcpy.env.workspace)
    # migrate the coded-value domains (named cvd*)
    arcpy.AddMessage('\t\t-Creating Domains')
    for domainName in dscW.domains:
        if domainName.startswith('cvd'):
            # fixed: the format string had no {0} placeholder, so the domain
            # name was never shown in the message
            arcpy.AddMessage('\t\t\t-{0}'.format(domainName))
            tempTable = 'in_memory\\{0}'.format(domainName)
            # round-trip through a temp table to copy the domain across
            arcpy.DomainToTable_management(surveyGDB, domainName, tempTable,
                                           'CODE', 'DESC')
            arcpy.TableToDomain_management(tempTable, 'CODE', 'DESC',
                                           outWorkspace, domainName,
                                           update_option='REPLACE')
            arcpy.Delete_management(tempTable)

    arcpy.AddMessage("\t\t-Creating Feature Classes & Tables")
    for table in allTables:
        dsc = arcpy.Describe(table)
        newTableName = "{0}_{1}".format(prefix, table)
        # fixed: was a stray chained assignment (`templateTable = template=...`)
        templateTable = os.path.join(surveyGDB, table)

        if dsc.datatype == u'FeatureClass':
            newTable = arcpy.CreateFeatureclass_management(
                outWorkspace, newTableName, "POINT", template=templateTable,
                spatial_reference=dsc.spatialReference)
        else:
            newTable = arcpy.CreateTable_management(
                outWorkspace, newTableName, template=templateTable)
        arcpy.AddMessage("\t\t\t-Created {0}".format(newTableName))

        # Attach domains to fields
        tableFields = arcpy.ListFields(table)
        for field in tableFields:
            if field.domain != '':
                arcpy.AssignDomainToField_management(newTable, field.name,
                                                     field.domain)
        # enterprise geodatabases need the new table registered as versioned
        if dscW.workspaceType == "RemoteDatabase":
            arcpy.RegisterAsVersioned_management(newTable)

    arcpy.AddMessage('\t\t-Creating Relationships')
    # Reconnect Relationship classes, checking for attachments
    CARDINALITIES = {
        'OneToOne': "ONE_TO_ONE",
        'OneToMany': "ONE_TO_MANY",
        'ManyToMany': "MANY_TO_MANY"
    }

    for child in [(c.name, c.datatype) for c in dscW.children
                  if c.datatype == u'RelationshipClass']:
        dscRC = arcpy.Describe(child[0])
        RCOriginTable = dscRC.originClassNames[0]
        RCDestTable = dscRC.destinationClassNames[0]
        newOriginTable = "{0}_{1}".format(prefix, RCOriginTable)
        newOriginPath = os.path.join(outWorkspace, newOriginTable)
        if dscRC.isAttachmentRelationship:
            # Simple case - attachments have a dedicated tool
            arcpy.EnableAttachments_management(newOriginPath)
        else:
            newDestTable = "{0}_{1}".format(prefix, RCDestTable)
            newDestPath = os.path.join(outWorkspace, newDestTable)
            newRC = os.path.join(outWorkspace,
                                 "{0}_{1}".format(prefix, child[0]))
            relationshipType = "COMPOSITE" if dscRC.isComposite else "SIMPLE"
            fwd_label = dscRC.forwardPathLabel if dscRC.forwardPathLabel != '' else 'Repeat'
            bck_label = dscRC.backwardPathLabel if dscRC.backwardPathLabel != '' else 'MainForm'
            msg_dir = dscRC.notification.upper()
            cardinality = CARDINALITIES[dscRC.cardinality]
            attributed = "ATTRIBUTED" if dscRC.isAttributed else "NONE"
            # originClassKeys is a list of (field, role) pairs; index by role
            originclassKeys_dict = {key[1]: key[0]
                                    for key in dscRC.originClassKeys}
            originPrimaryKey = originclassKeys_dict[u'OriginPrimary']
            originForeignKey = originclassKeys_dict[u'OriginForeign']
            arcpy.CreateRelationshipClass_management(
                newOriginPath, newDestPath, newRC, relationshipType,
                fwd_label, bck_label, msg_dir, cardinality, attributed,
                originPrimaryKey, originForeignKey)
Exemple #15
0
def main():
    # uncomment two lines below when debugging in Pycharm
    import pydevd_pycharm
    pydevd_pycharm.settrace('localhost',
                            port=12345,
                            stdoutToServer=True,
                            stderrToServer=True)

    csDict = {}
    try:
        with open('tables/2021-spcs-counties.csv', mode='r') as csFile:
            reader = csv.reader(csFile)
            csDict = {row[1]: (row[2], row[3], row[5]) for row in reader}
    except:
        arcpy.AddError("ERROR: Unable to find Coordinate System CSV, exit")
        sys.exit("ERROR: Unable to find Coordinate System CSV, exit")

    # Parameters
    num_cl_shp = arcpy.GetParameterAsText(
        0)  # Number of CoreLogic shapefiles to be processed
    cl_parcel_shp1 = arcpy.GetParameterAsText(1)  # CoreLogic shapefile 1
    cl_parcel_shp2 = arcpy.GetParameterAsText(2)  # CoreLogic shapefile 2
    cl_parcel_shp3 = arcpy.GetParameterAsText(3)  # CoreLogic shapefile 3
    cl_parcel_shp4 = arcpy.GetParameterAsText(4)  # CoreLogic shapefile 4
    workspace = arcpy.GetParameterAsText(5)  # Workspace
    env.workspace = workspace
    project_fgdb = arcpy.GetParameterAsText(6)  # Project FGDB
    project_Number = arcpy.GetParameterAsText(7)  # Output parcel FC
    firstSaleID = arcpy.GetParameterAsText(8)
    secondSaleID = arcpy.GetParameterAsText(9)
    subjectProperty = arcpy.GetParameterAsText(10)  # Subject property FC
    calculateDistance = arcpy.GetParameterAsText(
        11)  # Method to measure distance
    sale_year_query = arcpy.GetParameterAsText(12)  # Sale Year Query
    impValQuery = arcpy.GetParameterAsText(13) or "1000"
    create_parcel_fc = arcpy.GetParameterAsText(14)
    create_salefc = arcpy.GetParameterAsText(
        15)  # Boolean for creation of sale feature class
    create_comp_insp_pt_fc = arcpy.GetParameterAsText(16)
    naming_convention = arcpy.GetParameterAsText(17)

    countyFIPS = os.path.splitext(
        os.path.basename(cl_parcel_shp1))[0].split('_')[1]
    if not countyFIPS.isdigit():
        arcpy.AddError("ERROR: Unable to parse FIPS from file name, exit")
        sys.exit("ERROR: Unable to parse FIPS from file name, exit")

    location = csDict.get(countyFIPS)[:2]
    if not location:
        arcpy.AddError("ERROR: Unable to identify county from FIPS code, exit")
        sys.exit("ERROR: Unable to identify county from FIPS code, exit")
    countyName, stateAbbrev = location
    countyName = re.sub('[^A-Za-z0-9_\s]+', '', countyName)

    parcel_output_fc = "_".join(
        [project_Number, "parcelcl", countyName,
         stateAbbrev]).replace(' ', '_')
    full_parcel_output_fc = os.path.join(project_fgdb, parcel_output_fc)

    name_components = [
        project_Number, "compsale", countyName, stateAbbrev
    ] if naming_convention == 'County/State' else [project_Number, "compsale"]
    sale_output_fc = "_".join(name_components).replace(' ', '_')
    full_sale_parcel_output_fc = os.path.join(project_fgdb, sale_output_fc)
    update_sale_output_fc_permissions = True

    name_components = [
        project_Number, "comp_insp_pt", countyName, stateAbbrev
    ] if naming_convention == 'County/State' else [
        project_Number, "comp_insp_pt"
    ]
    comp_insp_pt_output_fc = "_".join(name_components).replace(' ', '_')
    full_comp_insp_pt_output_fc = os.path.join(project_fgdb,
                                               comp_insp_pt_output_fc)
    update_comp_insp_pt_fc_permissions = True

    # 1 - Check for existence of temporary feature classes & tables and delete if exist
    arcpy.AddMessage(
        "START (1/21): Check for temporary features and tables and delete")
    delete_temp_files()
    arcpy.AddMessage(
        "COMPLETE (1/21): Check for temporary features and tables and delete")

    # 2 Create Output Feature Classes
    arcpy.AddMessage("START (2/21): Create Output Feature Classes")
    if create_parcel_fc.lower() == "true":
        if arcpy.Exists(full_parcel_output_fc):
            arcpy.AddError(
                "ERROR: Output parcel feature class already exists, exit")
            sys.exit("ERROR: Output parcel feature class already exists, exit")
        arcpy.Copy_management(r"templates\templates.gdb\parcelcl",
                              full_parcel_output_fc)

    if create_salefc.lower() == "true":
        if arcpy.Exists(full_sale_parcel_output_fc):
            arcpy.AddMessage("Compsale output already exists, skipping")
            update_sale_output_fc_permissions = False
        else:
            arcpy.Copy_management(r"templates\templates.gdb\compsale",
                                  full_sale_parcel_output_fc)

    if create_comp_insp_pt_fc.lower() == "true":
        if arcpy.Exists(full_comp_insp_pt_output_fc):
            arcpy.AddMessage("Comp_insp_pt output already exists, skipping")
            update_comp_insp_pt_fc_permissions = False
        else:
            arcpy.Copy_management(r"templates\templates.gdb\comp_insp_pt",
                                  full_comp_insp_pt_output_fc)

    arcpy.AddMessage("COMPLETE (2/21): Create Output Feature Classes")

    if create_parcel_fc.lower() == "true":
        # 3 - Project CL parcel shapefile 1 to local StatePlane 2011 and output to temp FGDB
        epsg = int(csDict.get(countyFIPS)[2])
        if not epsg:
            arcpy.AddError("ERROR: Unable to identify Coordinate System, exit")
            sys.exit("ERROR: Unable to identify Coordinate System, exit")

        state_coordinate_system = arcpy.SpatialReference(epsg)
        arcpy.AddMessage(
            "START (3/21): Project Parcel data 1 to local State Plane")
        arcpy.Project_management(cl_parcel_shp1, "temp1",
                                 state_coordinate_system)
        arcpy.AddMessage(
            "COMPLETE (3/21): Project Parcel data 1 to local State Plane")

        # 4a - Project CL parcel shapefile 2 to local StatePlane 2011 and append to temp1
        if num_cl_shp == "2":
            arcpy.AddMessage(
                "START (4a-1/21): Project Parcel data 2 to local State Plane")
            arcpy.Project_management(cl_parcel_shp2, "temp1a",
                                     state_coordinate_system)
            arcpy.AddMessage(
                "COMPLETE (4a-1/21): Project Parcel data to local State Plane")
            arcpy.AddMessage(
                "START (4a-2/21): Project Parcel data 2 append to temp1")
            arcpy.Append_management("temp1a", "temp1", "TEST")
            arcpy.AddMessage(
                "COMPLETE (4a-2/21): Project Parcel data 2 append to temp1")
        elif num_cl_shp == "3":
            # 4b - Project CL parcel shapefiles 2 and 3 to local StatePlane 2011 and append to temp1
            arcpy.AddMessage(
                "START (4b-1/21): Project Parcel data 2 to local State Plane")
            arcpy.Project_management(cl_parcel_shp2, "temp1a",
                                     state_coordinate_system)
            arcpy.AddMessage(
                "COMPLETE (4b-1/21): Project Parcel data 2 to local State Plane"
            )
            arcpy.AddMessage(
                "START (4b-2/21): Project Parcel data 3 to local State Plane")
            arcpy.Project_management(cl_parcel_shp3, "temp1b",
                                     state_coordinate_system)
            arcpy.AddMessage(
                "COMPLETE (4b-2/21): Project Parcel data 3 to local State Plane"
            )
            arcpy.AddMessage(
                "START (4b-3/21): Project Parcel data 2 and 3 append to temp1")
            arcpy.Append_management(["temp1a", "temp1b"], "temp1", "TEST")
            arcpy.AddMessage(
                "COMPLETE (4b-3/21): Project Parcel data 2 and 3 append to temp1"
            )
        elif num_cl_shp == "4":
            # 4c - Project CL parcel shapefiles 2, 3, and 4 to local StatePlane 2011 and append to temp1
            arcpy.AddMessage(
                "START (4c-1/21): Project Parcel data 2 to local State Plane")
            arcpy.Project_management(cl_parcel_shp2, "temp1a",
                                     state_coordinate_system)
            arcpy.AddMessage(
                "COMPLETE (4c-1/21): Project Parcel data 2 to local State Plane"
            )
            arcpy.AddMessage(
                "START (4c-2/21): Project Parcel data 3 to local State Plane")
            arcpy.Project_management(cl_parcel_shp3, "temp1b",
                                     state_coordinate_system)
            arcpy.AddMessage(
                "COMPLETE (4c-2/21): Project Parcel data 3 to local State Plane"
            )
            arcpy.AddMessage(
                "START (4c-3/21): Project Parcel data 4 to local State Plane")
            arcpy.Project_management(cl_parcel_shp4, "temp1c",
                                     state_coordinate_system)
            arcpy.AddMessage(
                "COMPLETE (4c-3/21): Project Parcel data 4 to local State Plane"
            )
            arcpy.AddMessage(
                "START (4c-4/21): Project Parcel data 2, 3, and 4 append to temp1"
            )
            arcpy.Append_management(["temp1a", "temp1b", "temp1c"], "temp1",
                                    "TEST")
            arcpy.AddMessage(
                "COMPLETE (4c-4/21): Project Parcel data 2, 3, and 4 append to temp1"
            )
        else:
            # No additional shapefiles to project and append
            arcpy.AddMessage("No additional shapefiles to project and append")

        # 5 - Add fields
        arcpy.AddMessage("START (5/21): Add additional attribute fields")
        fields = [
            ("SALE", "TEXT", "20"),
            ("BOOK_PAGE", "TEXT", "20"),
            ("SALE_DOC_NBR", "TEXT", "20"),
            ("COUNTY", "TEXT", "50"),
            ("M_ACRES", "DOUBLE", ""),
            ("M_SQFT", "DOUBLE", ""),
            ("SALE_CNT", "SHORT", ""),
            ("SALE_TOT_VAL", "DOUBLE", ""),
            ("SALE_LND_VAL", "DOUBLE", ""),
            ("SALE_IMP_VAL", "DOUBLE", ""),
            ("SALE_LAND_ACRES", "DOUBLE", ""),
            ("SALE_LAND_SQFT", "LONG", ""),
            ("SALE_UBLD_SQFT", "LONG", ""),
            ("SALE_YR_BLT_MIN", "SHORT", ""),
            ("SALE_YR_BLT_MAX", "SHORT", ""),
            ("SALE_M_ACRES", "DOUBLE", ""),
            ("SALE_M_SQFT", "DOUBLE", ""),
            ("SALE_PR_ACRE", "DOUBLE", ""),
            ("SALE_PR_SQFT", "DOUBLE", ""),
            ("MI_TO_SUBJECT", "DOUBLE", ""),
            ("SALE_YEAR", "SHORT", ""),
            ("DEED_DATE", "DATE", ""),
            ("GRANTOR", "TEXT", "255"),
            ("GRANTEE", "TEXT", "255"),
            ("RMI_USE", "TEXT", "50"),
            ("SALE_NOTES", "TEXT", "255"),
            ("VRFD_ACRES", "DOUBLE", ""),
            ("VRFD_SQFT", "DOUBLE", ""),
            ("VRFD_SIZE_SOURCE", "SHORT", ""),
            ("DEED_TAX", "DOUBLE", ""),
            ("DEED_SALE_PRICE", "DOUBLE", ""),
            ("DEED_NOTES", "TEXT", "255"),
            ("PARCEL_INCL_SALE", "SHORT", ""),
            ("VERIFIED_BY", "TEXT", "10"),
            ("DEED", "TEXT", "3"),
            ("DEED_VRFD", "TEXT", "3"),
            ("DEED_PKG", "TEXT", "3"),
            ("MAPPED", "TEXT", "3"),
            ("INSPECTED", "TEXT", "3"),
            ("SELECTION", "SHORT", ""),
            ("STATUS", "SHORT", ""),
            ("SUBSTATUS", "SHORT", ""),
            ("STATUS_NOTES", "TEXT", "255"),
            ("SALE_GRP1", "SHORT", ""),
            ("SALE_GRP2", "SHORT", ""),
            ("SALE_GRP3", "SHORT", ""),
            ("SALE_GRP4", "SHORT", ""),
            ("SALE_GRP5", "SHORT", ""),
            ("SALE_PRIM_SEC", "SHORT", ""),
            ("SALE_QUERY", "SHORT", ""),
            ("CUSTOM1", "TEXT", "50"),
            ("CUSTOM2", "TEXT", "50"),
            ("CUSTOM3", "TEXT", "50"),
            ("CUSTOM4", "TEXT", "50"),
            ("CUSTOM5", "TEXT", "50"),
            ("SALE_APNS", "TEXT", "255"),
        ]

        # Create the fields using the above parameters
        for field in fields:
            arcpy.AddField_management("temp1", field[0], field[1], "", "",
                                      field[2], "", "NULLABLE", "NON_REQUIRED",
                                      "")
        arcpy.AddMessage("COMPLETE (5/21): Add additional attribute fields")

        # 6 - Calculate Field M_ACRES and M_SQFT
        arcpy.AddMessage("START (6/21): Calculate parcel measured size")
        arcpy.CalculateField_management("temp1", "M_ACRES",
                                        "!Shape.area@acres!", "PYTHON3")
        # arcpy.CalculateGeometryAttributes_management("temp1", geometry_property=[["M_ACRES", "AREA"]], area_unit="ACRES", coordinate_system=state_coordinate_system)

        arcpy.CalculateField_management("temp1", "M_SQFT",
                                        "!Shape.area@squarefeet!", "PYTHON3")
        # arcpy.CalculateGeometryAttributes_management("temp1", geometry_property=[["M_SQFT", "AREA"]], area_unit="SQUARE_FEET_US", coordinate_system=state_coordinate_system)

        arcpy.AddMessage("COMPLETE (6/21): Calculate parcel measured size")

        # 7 - Calculate Sale Book/Page
        arcpy.AddMessage("START (7/21): Calculate Book/Page")
        expression_BP = "getBookPage(!SALE_BK_PG!)"
        # Formats BOOK_PAGE as "<book>-<page>": first 6 characters are the
        # book, the remainder the page, each with leading zeros stripped.
        # Empty or single-space input yields NULL. Assumes SALE_BK_PG is a
        # fixed-width string -- TODO confirm against the source schema.
        codeblock_BP = """def getBookPage(SALE_BK_PG):
            if SALE_BK_PG == '':
                return None
            elif SALE_BK_PG == ' ':
                return None
            else:
                return (SALE_BK_PG[0:6].lstrip("0") + '-'
                + SALE_BK_PG[6:].lstrip("0"))"""
        arcpy.CalculateField_management("temp1", "BOOK_PAGE", expression_BP,
                                        "PYTHON3", codeblock_BP)
        arcpy.AddMessage("COMPLETE (7/21): Calculate Book/Page")

        # 8 - Calculate Sale Document Number
        arcpy.AddMessage("START (8/21): Calculate Document Number")
        expression_DN = "getDocNbr(!REC_DT!,!DOC_NBR!)"
        # SALE_DOC_NBR becomes "<record year>-<doc number>"; the first four
        # characters of REC_DT are presumably a year (YYYYMMDD-style value)
        # -- TODO confirm. REC_DT of 0 or a blank DOC_NBR yields NULL.
        codeblock_DN = """def getDocNbr(REC_DT,DOC_NBR):
            if REC_DT == 0:
                return None
            elif DOC_NBR == '':
                return None
            elif DOC_NBR == ' ':
                return None
            else:
                return str(REC_DT)[0:4] + '-' + str(DOC_NBR)"""
        arcpy.CalculateField_management("temp1", "SALE_DOC_NBR", expression_DN,
                                        "PYTHON3", codeblock_DN)
        arcpy.AddMessage("COMPLETE (8/21): Calculate Document Number")

        # 9 - Calculate Sale Year
        arcpy.AddMessage("START (9/21): Calculate Sale Year")
        expression_SY = "getSaleYear(!SALE_DT!)"
        # SALE_YEAR is the leading 4 digits of SALE_DT; 0 means no sale date.
        codeblock_SY = """def getSaleYear(SALE_DT):
            if SALE_DT == 0:
                return None
            else:
                SALE_YR = str(SALE_DT)[0:4]
                return int(SALE_YR)"""
        arcpy.CalculateField_management("temp1", "SALE_YEAR", expression_SY,
                                        "PYTHON3", codeblock_SY)
        arcpy.AddMessage("COMPLETE (9/21): Calculate Sale Year")

        # 10 - Calculate Additional Attributes (County & Boolean)
        # COUNTY gets the constant county name for this extract; the five
        # review-tracking flag fields (TEXT "YES"/"NO" style) default to "NO".
        no_value = "NO"
        # Fixed: message previously read "START 10/21):" (missing the opening
        # parenthesis used by every other step's progress message).
        arcpy.AddMessage(
            "START (10/21): Calculate Additional Attributes (County & Boolean)")
        arcpy.CalculateField_management("temp1", "COUNTY",
                                        "'" + countyName + "'", "PYTHON3")
        # Same constant value for each flag field, so loop instead of
        # repeating the identical call five times.
        for flag_field in ("DEED", "DEED_VRFD", "DEED_PKG", "MAPPED",
                           "INSPECTED"):
            arcpy.CalculateField_management("temp1", flag_field,
                                            "'" + no_value + "'", "PYTHON3")
        arcpy.AddMessage(
            "COMPLETE (10/21): Calculate Additional Attributes (County & Boolean)"
        )

        # 11 - Calculate SALE_ID (Select whether Book_Page or SALE_DOC_NBR is appropriate)
        # Four alternative calculator code blocks cover the valid first/second
        # Sale-ID preference combinations; exactly one is applied below based
        # on the firstSaleID / secondSaleID tool parameters.
        arcpy.AddMessage("START (11/21): Calculate SaleID")
        expression = "getSaleID(!SALE_DOC_NBR!,!BOOK_PAGE!)"
        # Document Number only (no fallback).
        codeblock1 = """def getSaleID(SALE_DOC_NBR,BOOK_PAGE):
            if SALE_DOC_NBR is not None:
                return SALE_DOC_NBR
            else:
                return None"""
        # Document Number first, Book/Page fallback.
        codeblock2 = """def getSaleID(SALE_DOC_NBR,BOOK_PAGE):
            if SALE_DOC_NBR is not None:
                return SALE_DOC_NBR
            elif SALE_DOC_NBR is None and BOOK_PAGE is not None:
                return BOOK_PAGE
            else:
                return None"""
        # Book/Page only (no fallback).
        codeblock3 = """def getSaleID(SALE_DOC_NBR,BOOK_PAGE):
            if BOOK_PAGE is not None:
                return BOOK_PAGE
            else:
                return None"""
        # Book/Page first, Document Number fallback.
        codeblock4 = """def getSaleID(SALE_DOC_NBR,BOOK_PAGE):
            if BOOK_PAGE is not None:
                return BOOK_PAGE
            elif BOOK_PAGE is None and SALE_DOC_NBR is not None:
                return SALE_DOC_NBR
            else:
                return None"""
        # Execute Calculate Field
        # NOTE(review): the result is written to a field named "SALE", which
        # is not in the field list visible above -- presumably created
        # earlier in this script; verify.
        if firstSaleID == "Document Number" and secondSaleID == "None":
            arcpy.CalculateField_management("temp1", "SALE", expression,
                                            "PYTHON3", codeblock1)
        elif firstSaleID == "Document Number" and secondSaleID == "Book/Page":
            arcpy.CalculateField_management("temp1", "SALE", expression,
                                            "PYTHON3", codeblock2)
        elif firstSaleID == "Book/Page" and secondSaleID == "None":
            arcpy.CalculateField_management("temp1", "SALE", expression,
                                            "PYTHON3", codeblock3)
        elif firstSaleID == "Book/Page" and secondSaleID == "Document Number":
            arcpy.CalculateField_management("temp1", "SALE", expression,
                                            "PYTHON3", codeblock4)
        elif firstSaleID == "Document Number" and secondSaleID == "Document Number":
            print(
                "ERROR: Cannot select same field for First and Second Sale ID."
            )
        elif firstSaleID == "Book/Page" and secondSaleID == "Book/Page":
            print(
                "ERROR: Cannot select same field for First and Second Sale ID."
            )
        else:
            print(
                "ERROR: Must select First Sale ID & OPTIONALLY select Second Sale \n"
                "ID.")
        arcpy.AddMessage("COMPLETE (11/21): Calculate SaleID")

        # 12 - Select non-null SaleIDs, first create feature layer
        arcpy.AddMessage(
            "START (12/21): Non-null SaleID Selection and Calculation \n"
            "of Summary Statistics")
        arcpy.MakeFeatureLayer_management("temp1", "temp1_lyr")
        arcpy.SelectLayerByAttribute_management("temp1_lyr", "NEW_SELECTION",
                                                "SALE IS NOT NULL")
        # Calculate Summary Statistics of selected Sales
        # One temp_sum row per SALE id: summed values/sizes plus min/max year
        # built across every parcel that shares the sale.
        arcpy.Statistics_analysis(
            "temp1_lyr", "temp_sum",
            [["TOT_VAL", "SUM"], ["LAN_VAL", "SUM"], ["IMP_VAL", "SUM"],
             ["LAND_ACRES", "SUM"], ["LAND_SQ_FT", "SUM"],
             ["UBLD_SQ_FT", "SUM"], ["YR_BLT", "MIN"], ["YR_BLT", "MAX"],
             ["M_ACRES", "SUM"], ["M_SQFT", "SUM"]], "SALE")
        arcpy.AddMessage("COMPLETE (12/21): Non-null SaleID Selection and \n"
                         "Calculation of Summary Statistics")

        # 13 - Join summary statistics to parcel fc
        arcpy.AddMessage(
            "START (13/21): Summary Statistics Join and Transfer of \n"
            "Values")
        arcpy.MakeTableView_management("temp_sum", "temp_sum_view")
        arcpy.SelectLayerByAttribute_management("temp1_lyr", "CLEAR_SELECTION")
        arcpy.AddJoin_management("temp1_lyr", "SALE", "temp_sum_view", "SALE")

        # Calculate fields from joined table to feature class
        # While the join is active, fields must be table-qualified
        # ("temp1.X" / "temp_sum.Y").
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_CNT",
                                        "!temp_sum.FREQUENCY!", "PYTHON3")
        arcpy.AddMessage("PROCESS (1/11): SALE_CNT Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_TOT_VAL",
                                        "!temp_sum.SUM_TOT_VAL!", "PYTHON3")
        arcpy.AddMessage("PROCESS (2/11): SALE_TOT_VAL Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_LND_VAL",
                                        "!temp_sum.SUM_LAN_VAL!", "PYTHON3")
        arcpy.AddMessage("PROCESS (3/11): SALE_LND_VAL Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_IMP_VAL",
                                        "!temp_sum.SUM_IMP_VAL!", "PYTHON3")
        arcpy.AddMessage("PROCESS (4/11): SALE_IMP_VAL Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_LAND_ACRES",
                                        "!temp_sum.SUM_LAND_ACRES!", "PYTHON3")
        arcpy.AddMessage("PROCESS (5/11): SALE_LAND_ACRES Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_LAND_SQFT",
                                        "!temp_sum.SUM_LAND_SQ_FT!", "PYTHON3")
        arcpy.AddMessage("PROCESS (6/11): SALE_LAND_SQFT Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_UBLD_SQFT",
                                        "!temp_sum.SUM_UBLD_SQ_FT!", "PYTHON3")
        arcpy.AddMessage("PROCESS (7/11): SALE_UBLD_SQFT Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_YR_BLT_MIN",
                                        "!temp_sum.MIN_YR_BLT!", "PYTHON3")
        arcpy.AddMessage("PROCESS (8/11): SALE_YR_BLT_MIN Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_YR_BLT_MAX",
                                        "!temp_sum.MAX_YR_BLT!", "PYTHON3")
        arcpy.AddMessage("PROCESS (9/11): SALE_YR_BLT_MAX Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_M_ACRES",
                                        "!temp_sum.SUM_M_ACRES!", "PYTHON3")
        arcpy.AddMessage("PROCESS (10/11): SALE_M_ACRES Complete")
        arcpy.CalculateField_management("temp1_lyr", "temp1.SALE_M_SQFT",
                                        "!temp_sum.SUM_M_SQFT!", "PYTHON3")
        arcpy.AddMessage("PROCESS (11/11): SALE_M_SQFT Complete")

        # Remove temp_sum Join
        arcpy.RemoveJoin_management("temp1_lyr", "temp_sum")
        arcpy.AddMessage(
            "COMPLETE (13/21): Summary Statistics Join and Transfer \n"
            "of Values")

        # 14 - Concatenate Sale APNs: group every APN that shares a SALE id,
        # then write the concatenated list back to each parcel's SALE_APNS.
        # Fixed: the opening message previously said "COMPLETE (14/21)";
        # every other step logs START first.
        arcpy.AddMessage("START (14/21): Concatenate APNs")
        apnDict = {}

        def isNotEmpty(val):
            # True when val is neither None nor blank/whitespace-only.
            # (The expression is already boolean; the redundant bool() wrapper
            # was removed.)
            return val is not None and len(str(val).strip()) > 0

        # Pass 1: collect the APNs belonging to each SALE id.
        # (Removed leftover per-row debug print(count), which wrote one line
        # per parcel to stdout.)
        with arcpy.da.SearchCursor('temp1', ['SALE', 'APN']) as cursor:
            for row in cursor:
                if isNotEmpty(row[0]) and isNotEmpty(row[1]):
                    apnDict.setdefault(row[0], []).append(str(row[1]).strip())

        # Pass 2: write the formatted APN list back to every matching row.
        with arcpy.da.UpdateCursor('temp1', ['SALE', 'SALE_APNS']) as cursor:
            for row in cursor:
                if row[0] in apnDict:
                    row[1] = get_APN_string_list(apnDict[row[0]])
                    cursor.updateRow(row)

        arcpy.AddMessage("COMPLETE (14/21): Concatenate APNs")

        # 15 - Calculate Sale Unit Values
        # Calculate SALE_PR_ACRE
        arcpy.AddMessage("START (15/21): Calculate Sale Unit values")
        expression_SPA = "getPrAcre(!SALE_PRICE!, !SALE_M_ACRES!)"
        # Price per measured acre, only when there is a positive price and a
        # non-null measured size.
        # NOTE(review): SALE_M_ACRES of exactly 0 (not None) would raise
        # ZeroDivisionError inside the field calculator -- confirm that
        # cannot occur for rows with SALE_PRICE > 0.
        codeblock_SPA = """def getPrAcre(SALE_PRICE,SALE_M_ACRES):
            if SALE_PRICE > 0 and SALE_M_ACRES is not None:
                return SALE_PRICE/SALE_M_ACRES
            else:
                return None"""
        arcpy.CalculateField_management("temp1", "SALE_PR_ACRE",
                                        expression_SPA, "PYTHON3",
                                        codeblock_SPA)
        arcpy.AddMessage("PROCESS (1/2): SALE_PR_ACRE Complete")

        # Calculate SALE_PR_SQFT
        # Same pattern as above, per measured square foot.
        expression_SPSF = "getPrSqft(!SALE_PRICE!, !SALE_M_SQFT!)"
        codeblock_SPSF = """def getPrSqft(SALE_PRICE,SALE_M_SQFT):
            if SALE_PRICE > 0 and SALE_M_SQFT is not None:
                return SALE_PRICE/SALE_M_SQFT
            else:
                return None"""
        arcpy.CalculateField_management("temp1", "SALE_PR_SQFT",
                                        expression_SPSF, "PYTHON3",
                                        codeblock_SPSF)

        arcpy.AddMessage("PROCESS (2/2): SALE_PR_SQFT Complete")
        arcpy.AddMessage("COMPLETE (15/21): Calculate Sale Unit values")

        # 16 - Query Sales for SALE_QUERY Attribute
        # 1 = SALE_DT >= "'" + sale_year_query + "0101'" & SALE_PRICE > 100
        # 2 =
        # Queries 2-4 each add conditions to query 1 and the selections are
        # recalculated in order, so each row ends up holding the number of
        # the last (most restrictive) query it satisfies.
        arcpy.AddMessage("START (16/21): Sale Query")
        select_exp1 = str("SALE_DT >= " + sale_year_query +
                          "0101 AND SALE_PRICE > 100")
        select_exp2 = str("SALE_DT >= " + sale_year_query +
                          "0101 AND SALE_PRICE > 100 AND SALE_IMP_VAL <= " +
                          impValQuery)
        select_exp3 = str(
            "SALE_DT >= " + sale_year_query +
            "0101 AND SALE_PRICE > 100 AND SALE_YR_BLT_MAX >= SALE_YEAR")
        select_exp4 = str("SALE_DT >= " + sale_year_query +
                          "0101 AND SALE_PRICE > 100 AND SALE_IMP_VAL <= " +
                          impValQuery + " AND SALE_YR_BLT_MAX >= SALE_YEAR")
        arcpy.MakeFeatureLayer_management("temp1", "temp1_lyr2")
        arcpy.SelectLayerByAttribute_management("temp1_lyr2", "NEW_SELECTION",
                                                select_exp1)
        arcpy.CalculateField_management("temp1_lyr2", "SALE_QUERY", "1",
                                        "PYTHON3")
        arcpy.SelectLayerByAttribute_management("temp1_lyr2", "NEW_SELECTION",
                                                select_exp2)
        arcpy.CalculateField_management("temp1_lyr2", "SALE_QUERY", "2",
                                        "PYTHON3")
        arcpy.SelectLayerByAttribute_management("temp1_lyr2", "NEW_SELECTION",
                                                select_exp3)
        arcpy.CalculateField_management("temp1_lyr2", "SALE_QUERY", "3",
                                        "PYTHON3")
        arcpy.SelectLayerByAttribute_management("temp1_lyr2", "NEW_SELECTION",
                                                select_exp4)
        arcpy.CalculateField_management("temp1_lyr2", "SALE_QUERY", "4",
                                        "PYTHON3")
        arcpy.AddMessage("COMPLETE (16/21): Sale Query")

        # 17 - Delete fields
        # Drop the source (CoreLogic-style) attributes that are not needed
        # in the output parcel feature class.
        arcpy.AddMessage("START (17/21): Delete Unused Attributes")
        del_fields = [
            "PARCEL_ID", "STATE_CODE", "CNTY_CODE", "ZIP", "PLUS", "STD_ADDR",
            "STD_CITY", "STD_STATE", "STD_ZIP", "STD_PLUS", "FIPS_CODE",
            "UNFRM_APN", "APN_SEQ_NO", "ORIG_APN", "ACCT_NO", "TH_BRO_MAP",
            "MAP_REF1", "MAP_REF2", "CENSUS_TR", "M_HOME_IND", "SUB_TR_NUM",
            "SUB_PLT_BK", "SUB_PLT_PG", "OWN_CP_IND", "OWN2_LAST", "OWN2_FRST",
            "MAIL_NBRPX", "MAIL_NBR", "MAIL_NBR2", "MAIL_NBRSX", "MAIL_DIR",
            "MAIL_STR", "MAIL_MODE", "MAIL_QDRT", "MAIL_UNIT", "MAIL_CITY",
            "MAIL_STATE", "MAIL_ZIP", "MAIL_CC", "MAIL_OPT", "TOT_VAL_CD",
            "LAN_VAL_CD", "TAX_AMT", "TAX_YR", "ASSD_YR", "TAX_AREA",
            "TITL_CO_CD", "TITL_CO_NM", "RES_MOD_CD", "MTG_AMT", "MTG_DT",
            "MTG_LOAN", "MTG_DEED", "MTG_TRM_CD", "MTG_TRM", "MTG_DUE_DT",
            "MTG_ASSUM", "MTG_LEN_CD", "MTG_LEN_NM", "MTG2_AMT", "MTG2_LOAN",
            "MTG2_DEED", "FRONT_FT", "DEPTH_FT", "LOT_AREA", "BLD_SQ_IND",
            "BLD_SQ_FT", "LIV_SQ_FT", "GF_SQ_FT", "GR_SQ_FT", "ADJ_SQ_FT",
            "BSMT_SQ_FT", "PKG_SQ_FT", "BEDROOMS", "ROOMS", "BATHS_CAL",
            "BATHS", "FULL_BATHS", "HALF_BATHS", "QTR1_BATHS", "QTR3_BATHS",
            "BATH_FIX", "AIR_COND", "BSMT_FNSH", "BLD_CODE", "BLD_IMP_CD",
            "CONDITION", "CONSTR_TYP", "EXT_WALLS", "FRPLCE_IND", "FRPLCE_NBR",
            "FRPLCE_TYP", "FOUNDATION", "FLOOR", "FRAME", "GARAGE", "HEATING",
            "PKG_SPACES", "PKG_TYP", "POOL", "POOL_CD", "QUALITY",
            "ROOF_COVER", "ROOF_TYP", "STORY_CD", "STORY_NBR", "BLD_VIEW",
            "LOC_INF", "BLD_UNITS", "UNITS_NBR", "ENERGY_USE", "FUEL", "SEWER",
            "WATER"
        ]
        arcpy.DeleteField_management("temp1", del_fields)
        arcpy.AddMessage("COMPLETE (17/21): Delete Unused Attributes")

        # 18/20 - Calculate Distance to Subject
        # Cutoff = diagonal of the dataset extent, i.e. the largest possible
        # in-extent distance, so Near_analysis never excludes a parcel.
        temp1_desc = arcpy.Describe("temp1")
        yMax = temp1_desc.extent.YMax
        yMin = temp1_desc.extent.YMin
        xMax = temp1_desc.extent.XMax
        xMin = temp1_desc.extent.XMin
        cutoffDistance = math.sqrt(((yMax - yMin)**2) + ((xMax - xMin)**2))

        arcpy.AddMessage("YMax: " + str(yMax))
        arcpy.AddMessage("YMin: " + str(yMin))
        arcpy.AddMessage("XMax: " + str(xMax))
        arcpy.AddMessage("XMin: " + str(xMin))
        arcpy.AddMessage("Cutoff Distance: " + str(cutoffDistance))

        # calculateDistance is a "true"/"false" string flag (tool parameter).
        if calculateDistance.lower() == "true":
            # Near - ArcGIS Pro Advanced (Home)
            arcpy.AddMessage(
                "START (18/21): Calculation of Parcel distance to \n"
                "Subject Property - ArcGIS Adv")

            arcpy.Near_analysis("temp1", subjectProperty, cutoffDistance)
            arcpy.AddMessage("PROCESS (1/3): NEAR FEATURE Complete")

            # Calculate MI_TO_SUBJECT (Home)
            # NEAR_DIST / 5280 assumes the coordinate system's linear unit is
            # US feet -- TODO confirm against the state coordinate system.
            arcpy.CalculateField_management("temp1", "MI_TO_SUBJECT",
                                            "!NEAR_DIST!/5280", "PYTHON3")
            arcpy.AddMessage(
                "PROCESS (2/3): MI_TO_SUBJECT Calculation Complete")

            # Delete ET_Closest & ET_Dist (Home)
            arcpy.DeleteField_management("temp1", ["NEAR_FID", "NEAR_DIST"])
            arcpy.AddMessage("PROCESS (3/3): ET Attributes Delete Complete")
            arcpy.AddMessage(
                "COMPLETE (18/21): Calculation of Parcel distance to \n"
                "Subject Property")

        else:
            # No subject distance calculation
            arcpy.AddMessage("Skipping calculation of distance of \n"
                             "parcels to subject property")

        # Copy parcel feature class to Project FGDB (Home)
        # NO_TEST: the target schema is trusted to match; Append does not
        # validate fields in this mode.
        arcpy.AddMessage(
            "START (19/21): Copy of Completed Parcels to Project \n"
            "FGDB")
        arcpy.Append_management("temp1",
                                os.path.join(project_fgdb, parcel_output_fc),
                                "NO_TEST")
        arcpy.AddMessage("COMPLETE (19/21): Copy of Completed Parcels to \n"
                         "Project FGDB")

    # 26 - Check for existence of temporary feature classes & tables and delete if exist
    # (NOTE(review): the "26"/"27" numbering in these comments is stale; the
    # progress messages use the 20/21 scheme.)
    arcpy.AddMessage(
        "START (20/21): Check for temporary features and tables and delete")
    delete_temp_files()
    arcpy.AddMessage(
        "COMPLETE (20/21): Check for temporary features and tables and delete")
    # 27 - Check for existence of temporary feature classes & tables and delete if exist

    # * - Check for existence of temporary feature classes & tables and delete if exist
    # Enterprise (.sde) outputs only: grant editor/viewer privileges on each
    # created feature class and register it as versioned (NO_EDITS_TO_BASE).
    # The update_*_permissions flags are defined elsewhere in this script.
    arcpy.AddMessage("START (21/21): Add Privileges and Register as Versioned")
    if project_fgdb.lower().endswith(".sde"):
        if create_parcel_fc.lower() == "true":
            arcpy.ChangePrivileges_management(full_parcel_output_fc,
                                              "rmi_editors", "GRANT", "GRANT")
            arcpy.ChangePrivileges_management(full_parcel_output_fc,
                                              "rmi_viewers", "GRANT", "AS_IS")
            arcpy.RegisterAsVersioned_management(full_parcel_output_fc,
                                                 "NO_EDITS_TO_BASE")

        if create_salefc.lower(
        ) == "true" and update_sale_output_fc_permissions:
            arcpy.ChangePrivileges_management(full_sale_parcel_output_fc,
                                              "rmi_editors", "GRANT", "GRANT")
            arcpy.ChangePrivileges_management(full_sale_parcel_output_fc,
                                              "rmi_viewers", "GRANT", "AS_IS")
            arcpy.RegisterAsVersioned_management(full_sale_parcel_output_fc,
                                                 "NO_EDITS_TO_BASE")

        if create_comp_insp_pt_fc.lower(
        ) == "true" and update_comp_insp_pt_fc_permissions:
            arcpy.ChangePrivileges_management(full_comp_insp_pt_output_fc,
                                              "rmi_editors", "GRANT", "GRANT")
            arcpy.ChangePrivileges_management(full_comp_insp_pt_output_fc,
                                              "rmi_viewers", "GRANT", "AS_IS")
            arcpy.RegisterAsVersioned_management(full_comp_insp_pt_output_fc,
                                                 "NO_EDITS_TO_BASE")

    arcpy.AddMessage(
        "COMPLETE (21/21): Add Privileges and Register as Versioned")

    arcpy.AddMessage("COMPLETE PROCESSING CORELOGIC PARCEL DATA")
    # Build the event definition used by DissolveRouteEvents:
    # "<route id field> LINE <from measure field> <to measure field>".
    EventDissolveString = RoutIDField + " LINE " + FromMeasureField + " " + ToMeasureField
    arcpy.AddMessage(FieldList)

    # For each field: dissolve the route events on that field, add missing
    # date fields, build a coded-value domain from its unique values, and
    # register the output table as versioned.
    for fieldName in FieldList:
        # NOTE(review): InputField is looked up but never used afterwards.
        InputField = UniqueValToDomain.FindField(Table, str(fieldName))
        outTable = os.path.join(Workspace, TablePrefix + fieldName)
        # print statements converted to the function form so this also runs
        # under Python 3 (the parenthesized single-argument call behaves
        # identically on Python 2).
        print("Dissolving events for " + outTable + " using " +
              EventDissolveString + " by field " + fieldName)
        arcpy.AddMessage("Dissolving events for " + outTable + " using " +
                         EventDissolveString + " by field " + fieldName)
        arcpy.DissolveRouteEvents_lr(Table, EventDissolveString, fieldName,
                                     outTable, EventDissolveString, "DISSOLVE",
                                     "INDEX")
        print("Adding date fields")
        arcpy.AddMessage("Adding date fields")
        # Add from and to date fields when they do not already exist
        # (identity comparison with None instead of ==).
        if UniqueValToDomain.FindField(outTable, "FromDate") is None:
            arcpy.AddField_management(outTable, "FromDate", "DATE")
        if UniqueValToDomain.FindField(outTable, "ToDate") is None:
            arcpy.AddField_management(outTable, "ToDate", "DATE")
        print("Creating domain")
        arcpy.AddMessage("Creating domain")
        UniqueValToDomain.UniqueValueToDomain(Workspace, outTable, fieldName,
                                              "d" + fieldName)
        print("Registering as versioned")
        arcpy.AddMessage("Registering as versioned")
        try:
            arcpy.RegisterAsVersioned_management(outTable, "NO_EDITS_TO_BASE")
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate; registration failure remains non-fatal.
            arcpy.AddMessage("Unable to register table " + outTable +
                             " as versioned")
def copyDatasets(sourceGeodatabase, destinationGeodatabase, datasetsOption,
                 updateMode, configFile, datasetList, dataType, includeViews):
    # Loop through the datasets
    for dataset in datasetList:
        # If feature datasets
        if (dataType == "Feature Dataset"):
            # Get a list of the feature classes in the feature dataset
            dataset2List = arcpy.ListFeatureClasses("", "", dataset)
        # Feature classes and tables
        else:
            # Reassign list
            dataset2List = datasetList
            datasetList = []

        # Change dataset name to be just name (remove user and schema if SDE database) - Used just for source dataset
        splitDataset = dataset.split('.')
        newDataset = splitDataset[-1]

        # Store current dataset working on
        currentDataset = newDataset

        # Loop through the datasets
        for dataset2 in dataset2List:
            # Set default variables
            datasetInConfig = "false"
            versionDataset = "false"
            # If feature datasets
            if (dataType == "Feature Dataset"):
                needFeatureDataset = "true"
            # Feature classes and tables
            else:
                needFeatureDataset = "false"

            # Change feature class name to be just name (remove user and schema if SDE database) - Used just for source dataset
            splitDataset = dataset2.split('.')
            newDataset2 = splitDataset[-1]

            # If feature datasets
            if (dataType == "Feature Dataset"):
                # Setup the source and destination paths - Source needs to have full name including schema and user
                sourceDatasetPath = os.path.join(
                    sourceGeodatabase + "\\" + dataset, dataset2)
                destinationDatasetPath = os.path.join(
                    destinationGeodatabase + "\\" + newDataset, newDataset2)

            # Feature classes and tables
            else:
                # Setup the source and destination paths - Source needs to have full name including schema and user
                sourceDatasetPath = os.path.join(sourceGeodatabase, dataset2)
                destinationDatasetPath = os.path.join(destinationGeodatabase,
                                                      newDataset2)

            # If configuration provided
            if (configFile):
                # Set CSV delimiter
                csvDelimiter = ","
                # Look through configuration file to see if source dataset is in there
                # Open the CSV file
                with open(configFile, 'rb') as csvFile:
                    # Read the CSV file
                    rows = csv.reader(csvFile, delimiter=csvDelimiter)

                    # For each row in the CSV
                    count = 0
                    for row in rows:
                        # Ignore the first line containing headers
                        if (count > 0):
                            sourceDataset = row[0]
                            destinationDataset = row[1]
                            version = row[2]

                            # If feature datasets
                            if (dataType == "Feature Dataset"):
                                selectDataset = newDataset + "\\" + newDataset2
                            # Feature classes and tables
                            else:
                                selectDataset = newDataset2

                            # If dataset is in config file
                            if ((selectDataset) == sourceDataset):
                                datasetInConfig = "true"
                                # Change the destination path
                                destinationDatasetPath = os.path.join(
                                    destinationGeodatabase, destinationDataset)
                                # Logging
                                if (enableLogging == "true"):
                                    logger.info("Changing dataset name from " +
                                                sourceDatasetPath + " to " +
                                                destinationDatasetPath + "...")
                                arcpy.AddMessage(
                                    "Changing dataset name from " +
                                    sourceDatasetPath + " to " +
                                    destinationDatasetPath + "...")

                                # Check for a backslash in dataset name
                                splitDataset = destinationDataset.split('\\')
                                # If split has occured, dataset is necessary in destination database
                                if (len(splitDataset) > 1):
                                    newDataset = splitDataset[0]
                                    needFeatureDataset = "true"
                                else:
                                    needFeatureDataset = "false"

                                # If versioning the dataset
                                if (version == "yes"):
                                    versionDataset = "true"

                        count = count + 1

            # If feature dataset already exists in destination database
            if arcpy.Exists(os.path.join(destinationGeodatabase, newDataset)):
                # Copy over dataset if necessary
                if ((datasetsOption == "All") or (datasetInConfig == "true")):
                    # Get count of the source dataset
                    datasetCount = arcpy.GetCount_management(sourceDatasetPath)
                    # Check Dataset record count is more than 0
                    if (long(str(datasetCount)) > 0):
                        # Don't include _H - archive table
                        if (newDataset2[-2:].lower() != "_h"):
                            # Don't include views if specified
                            if (("VW" not in newDataset2) and
                                ("vw" not in newDataset2)) or (
                                    (("VW" in newDataset2) or
                                     ("vw" in newDataset2)) and
                                    (includeViews == "true")):
                                # If dataset already exists when doing a data copy
                                if ((arcpy.Exists(destinationDatasetPath))
                                        and (updateMode == "New")):
                                    # Delete the dataset first
                                    arcpy.Delete_management(
                                        destinationDatasetPath, "FeatureClass")

                                # If creating new dataset - updateMode is New
                                if (updateMode == "New"):
                                    # If table
                                    if (dataType == "Table"):
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Copying over table - " +
                                                destinationDatasetPath + "...")
                                        arcpy.AddMessage(
                                            "Copying over table - " +
                                            destinationDatasetPath + "...")
                                        # Copy over table
                                        arcpy.CopyRows_management(
                                            sourceDatasetPath,
                                            destinationDatasetPath, "")
                                        arcpy.AddMessage(
                                            "Dataset record count - " +
                                            str(datasetCount))
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Dataset record count - " +
                                                str(datasetCount))

                                        # Set the archive dataset paths
                                        sourceArchiveDatasetPath = os.path.join(
                                            sourceGeodatabase,
                                            newDataset2 + "_H")
                                        destinationArchiveDatasetPath = os.path.join(
                                            destinationGeodatabase,
                                            newDataset2 + "_H")

                                        # Check if dataset is archived
                                        datasetArchived = arcpy.Exists(
                                            sourceArchiveDatasetPath)

                                        if (datasetArchived == 1):
                                            # Logging
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Copying over archive table - "
                                                    +
                                                    destinationArchiveDatasetPath
                                                    + "...")
                                            arcpy.AddMessage(
                                                "Copying over archive table - "
                                                +
                                                destinationArchiveDatasetPath +
                                                "...")
                                            # Copy over archive dataset (_H) too
                                            arcpy.CopyRows_management(
                                                sourceArchiveDatasetPath,
                                                destinationArchiveDatasetPath,
                                                "")
                                            arcpy.AddMessage(
                                                "Dataset record count - " +
                                                str(datasetCount))
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Dataset record count - " +
                                                    str(datasetCount))

                                    # Feature classes
                                    else:
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Copying over feature class - "
                                                + destinationDatasetPath +
                                                "...")
                                        arcpy.AddMessage(
                                            "Copying over feature class - " +
                                            destinationDatasetPath + "...")
                                        # Copy over feature class
                                        arcpy.CopyFeatures_management(
                                            sourceDatasetPath,
                                            destinationDatasetPath, "", "0",
                                            "0", "0")
                                        arcpy.AddMessage(
                                            "Dataset record count - " +
                                            str(datasetCount))
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Dataset record count - " +
                                                str(datasetCount))

                                        # Set the archive dataset paths
                                        sourceArchiveDatasetPath = os.path.join(
                                            sourceGeodatabase,
                                            newDataset2 + "_H")
                                        destinationArchiveDatasetPath = os.path.join(
                                            destinationGeodatabase,
                                            newDataset2 + "_H")

                                        # Check if dataset is archived
                                        datasetArchived = arcpy.Exists(
                                            sourceArchiveDatasetPath)

                                        if (datasetArchived == 1):
                                            # Logging
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Copying over archive feature class - "
                                                    +
                                                    destinationArchiveDatasetPath
                                                    + "...")
                                            arcpy.AddMessage(
                                                "Copying over archive feature class - "
                                                +
                                                destinationArchiveDatasetPath +
                                                "...")
                                            # Copy over archive dataset (_H) too
                                            arcpy.CopyFeatures_management(
                                                sourceArchiveDatasetPath,
                                                destinationArchiveDatasetPath,
                                                "", "0", "0", "0")
                                            arcpy.AddMessage(
                                                "Dataset record count - " +
                                                str(datasetCount))
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Dataset record count - " +
                                                    str(datasetCount))

                                # Else refreshing existing dataset - updateMode is Existing
                                else:
                                    # If table
                                    if (dataType == "Table"):
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Loading in records for table - "
                                                + destinationDatasetPath +
                                                "...")
                                        arcpy.AddMessage(
                                            "Loading in records for table - " +
                                            destinationDatasetPath + "...")
                                        # Refreshing table
                                        arcpy.DeleteRows_management(
                                            destinationDatasetPath)
                                        # Try append in data - Catch error if there are any and continue
                                        # NO_TEST: append without schema/field-map validation, so the
                                        # load succeeds even if source and destination schemas differ
                                        try:
                                            arcpy.Append_management(
                                                sourceDatasetPath,
                                                destinationDatasetPath,
                                                "NO_TEST", "", "")
                                            # Report the source record count captured earlier for this dataset
                                            arcpy.AddMessage(
                                                "Dataset record count - " +
                                                str(datasetCount))
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Dataset record count - " +
                                                    str(datasetCount))

                                        # If python error
                                        except Exception as e:
                                            # Concatenate every exception argument into one message
                                            errorMessage = ""
                                            # Build and show the error message
                                            for i in range(len(e.args)):
                                                if (i == 0):
                                                    errorMessage = str(
                                                        e.args[i])
                                                else:
                                                    errorMessage = errorMessage + " " + str(
                                                        e.args[i])
                                            # Surfaced as a tool error but logged only as a warning;
                                            # execution deliberately continues with the next dataset
                                            arcpy.AddError(errorMessage)
                                            logger.warning(errorMessage)

                                    # Feature classes
                                    else:
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Loading in records for feature class - "
                                                + destinationDatasetPath +
                                                "...")
                                        arcpy.AddMessage(
                                            "Loading in records for feature class - "
                                            + destinationDatasetPath + "...")
                                        # Refreshing feature class
                                        arcpy.DeleteFeatures_management(
                                            destinationDatasetPath)
                                        # Try append in data - Catch error if there are any and continue
                                        # NO_TEST: append without schema/field-map validation, so the
                                        # load succeeds even if source and destination schemas differ
                                        try:
                                            arcpy.Append_management(
                                                sourceDatasetPath,
                                                destinationDatasetPath,
                                                "NO_TEST", "", "")
                                            # Report the source record count captured earlier for this dataset
                                            arcpy.AddMessage(
                                                "Dataset record count - " +
                                                str(datasetCount))
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Dataset record count - " +
                                                    str(datasetCount))

                                        # If python error
                                        except Exception as e:
                                            # Concatenate every exception argument into one message
                                            errorMessage = ""
                                            # Build and show the error message
                                            for i in range(len(e.args)):
                                                if (i == 0):
                                                    errorMessage = str(
                                                        e.args[i])
                                                else:
                                                    errorMessage = errorMessage + " " + str(
                                                        e.args[i])
                                            # Surfaced as a tool error but logged only as a warning;
                                            # execution deliberately continues with the next dataset
                                            arcpy.AddError(errorMessage)
                                            logger.warning(errorMessage)

                                if (versionDataset == "true"):
                                    # If dataset is not versioned already and update mode is new - Feature dataset
                                    # NOTE(review): isVersioned is read from `dataset`, but the
                                    # register call below targets `newDataset` - confirm both names
                                    # refer to the same destination feature dataset
                                    datasetVersioned = arcpy.Describe(
                                        os.path.join(destinationGeodatabase,
                                                     dataset)).isVersioned
                                    if ((datasetVersioned == 0)
                                            and (updateMode == "New")):
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Versioning dataset - " +
                                                os.path.join(
                                                    destinationGeodatabase,
                                                    newDataset) + "...")
                                        arcpy.AddMessage(
                                            "Versioning dataset - " +
                                            os.path.join(
                                                destinationGeodatabase,
                                                newDataset) + "...")
                                        # Register so that edits are stored in delta tables rather
                                        # than written to the base tables (NO_EDITS_TO_BASE)
                                        arcpy.RegisterAsVersioned_management(
                                            os.path.join(
                                                destinationGeodatabase,
                                                newDataset),
                                            "NO_EDITS_TO_BASE")
                    else:
                        # Source has zero records - skip the copy entirely
                        # (the archive "_H" handling above is skipped too)
                        arcpy.AddWarning("Dataset " + destinationDatasetPath +
                                         " is empty and won't be copied...")
                        # Logging
                        if (enableLogging == "true"):
                            logger.warning("Dataset " +
                                           destinationDatasetPath +
                                           " is empty and won't be copied...")

            # Otherwise
            else:
                # Copy over dataset if necessary
                if ((datasetsOption == "All") or (datasetInConfig == "true")):
                    # Get count of the source dataset
                    datasetCount = arcpy.GetCount_management(sourceDatasetPath)
                    # Check Dataset record count is more than 0
                    if (long(str(datasetCount)) > 0):
                        # Don't include _H - archive table
                        if (newDataset2[-2:].lower() != "_h"):
                            # Don't include views if specified
                            if (("VW" not in newDataset2) and
                                ("vw" not in newDataset2)) or (
                                    (("VW" in newDataset2) or
                                     ("vw" in newDataset2)) and
                                    (includeViews == "true")):
                                # If feature dataset is necessary in destination database
                                if (needFeatureDataset == "true"):
                                    # Create feature dataset
                                    arcpy.CreateFeatureDataset_management(
                                        destinationGeodatabase, newDataset,
                                        sourceDatasetPath)

                                # If creating new dataset - updateMode is New
                                if (updateMode == "New"):
                                    # If table
                                    if (dataType == "Table"):
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Copying over table - " +
                                                destinationDatasetPath + "...")
                                        arcpy.AddMessage(
                                            "Copying over table - " +
                                            destinationDatasetPath + "...")
                                        # Copy over table
                                        arcpy.CopyRows_management(
                                            sourceDatasetPath,
                                            destinationDatasetPath, "")
                                        arcpy.AddMessage(
                                            "Dataset record count - " +
                                            str(datasetCount))
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Dataset record count - " +
                                                str(datasetCount))

                                        # Set the archive dataset paths
                                        sourceArchiveDatasetPath = os.path.join(
                                            sourceGeodatabase,
                                            newDataset2 + "_H")
                                        destinationArchiveDatasetPath = os.path.join(
                                            destinationGeodatabase,
                                            newDataset2 + "_H")

                                        # Check if dataset is archived
                                        datasetArchived = arcpy.Exists(
                                            sourceArchiveDatasetPath)

                                        if (datasetArchived == 1):
                                            # Logging
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Copying over archive table - "
                                                    +
                                                    destinationArchiveDatasetPath
                                                    + "...")
                                            arcpy.AddMessage(
                                                "Copying over archive table - "
                                                +
                                                destinationArchiveDatasetPath +
                                                "...")
                                            # Copy over archive dataset (_H) too
                                            arcpy.CopyRows_management(
                                                sourceArchiveDatasetPath,
                                                destinationArchiveDatasetPath,
                                                "")
                                            arcpy.AddMessage(
                                                "Dataset record count - " +
                                                str(datasetCount))
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Dataset record count - " +
                                                    str(datasetCount))

                                    # Feature classes
                                    else:
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Copying over feature class - "
                                                + destinationDatasetPath +
                                                "...")
                                        arcpy.AddMessage(
                                            "Copying over feature class - " +
                                            destinationDatasetPath + "...")
                                        # Copy over feature class
                                        arcpy.CopyFeatures_management(
                                            sourceDatasetPath,
                                            destinationDatasetPath, "", "0",
                                            "0", "0")
                                        arcpy.AddMessage(
                                            "Dataset record count - " +
                                            str(datasetCount))
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Dataset record count - " +
                                                str(datasetCount))

                                        # Set the archive dataset paths
                                        sourceArchiveDatasetPath = os.path.join(
                                            sourceGeodatabase,
                                            newDataset2 + "_H")
                                        destinationArchiveDatasetPath = os.path.join(
                                            destinationGeodatabase,
                                            newDataset2 + "_H")

                                        # Check if dataset is archived
                                        datasetArchived = arcpy.Exists(
                                            sourceArchiveDatasetPath)

                                        if (datasetArchived == 1):
                                            # Logging
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Copying over archive feature class - "
                                                    +
                                                    destinationArchiveDatasetPath
                                                    + "...")
                                            arcpy.AddMessage(
                                                "Copying over archive feature class - "
                                                +
                                                destinationArchiveDatasetPath +
                                                "...")
                                            # Copy over archive dataset (_H) too
                                            arcpy.CopyFeatures_management(
                                                sourceArchiveDatasetPath,
                                                destinationArchiveDatasetPath,
                                                "", "0", "0", "0")
                                            arcpy.AddMessage(
                                                "Dataset record count - " +
                                                str(datasetCount))
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Dataset record count - " +
                                                    str(datasetCount))

                                # Else refreshing existing dataset - updateMode is Existing
                                else:
                                    # If table
                                    if (dataType == "Table"):
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Loading in records for table - "
                                                + destinationDatasetPath +
                                                "...")
                                        arcpy.AddMessage(
                                            "Loading in records for table - " +
                                            destinationDatasetPath + "...")
                                        # Refreshing table
                                        arcpy.DeleteRows_management(
                                            destinationDatasetPath)
                                        # Try append in data - Catch error if there are any and continue
                                        # NO_TEST: append without schema/field-map validation, so the
                                        # load succeeds even if source and destination schemas differ
                                        try:
                                            arcpy.Append_management(
                                                sourceDatasetPath,
                                                destinationDatasetPath,
                                                "NO_TEST", "", "")
                                            # Report the source record count captured earlier for this dataset
                                            arcpy.AddMessage(
                                                "Dataset record count - " +
                                                str(datasetCount))
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Dataset record count - " +
                                                    str(datasetCount))

                                        # If python error
                                        except Exception as e:
                                            # Concatenate every exception argument into one message
                                            errorMessage = ""
                                            # Build and show the error message
                                            for i in range(len(e.args)):
                                                if (i == 0):
                                                    errorMessage = str(
                                                        e.args[i])
                                                else:
                                                    errorMessage = errorMessage + " " + str(
                                                        e.args[i])
                                            # Surfaced as a tool error but logged only as a warning;
                                            # execution deliberately continues with the next dataset
                                            arcpy.AddError(errorMessage)
                                            logger.warning(errorMessage)

                                    # Feature classes
                                    else:
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Loading in records for feature class - "
                                                + destinationDatasetPath +
                                                "...")
                                        arcpy.AddMessage(
                                            "Loading in records for feature class - "
                                            + destinationDatasetPath + "...")
                                        # Refreshing feature class
                                        arcpy.DeleteFeatures_management(
                                            destinationDatasetPath)
                                        # Try append in data - Catch error if there are any and continue
                                        # NO_TEST: append without schema/field-map validation, so the
                                        # load succeeds even if source and destination schemas differ
                                        try:
                                            arcpy.Append_management(
                                                sourceDatasetPath,
                                                destinationDatasetPath,
                                                "NO_TEST", "", "")
                                            # Report the source record count captured earlier for this dataset
                                            arcpy.AddMessage(
                                                "Dataset record count - " +
                                                str(datasetCount))
                                            if (enableLogging == "true"):
                                                logger.info(
                                                    "Dataset record count - " +
                                                    str(datasetCount))

                                        # If python error
                                        except Exception as e:
                                            # Concatenate every exception argument into one message
                                            errorMessage = ""
                                            # Build and show the error message
                                            for i in range(len(e.args)):
                                                if (i == 0):
                                                    errorMessage = str(
                                                        e.args[i])
                                                else:
                                                    errorMessage = errorMessage + " " + str(
                                                        e.args[i])
                                            # Surfaced as a tool error but logged only as a warning;
                                            # execution deliberately continues with the next dataset
                                            arcpy.AddError(errorMessage)
                                            logger.warning(errorMessage)

                                if (versionDataset == "true"):
                                    # If feature dataset has been created - Set path to that
                                    if (needFeatureDataset == "true"):
                                        datasetPath = os.path.join(
                                            destinationGeodatabase, newDataset)
                                    # Otherwise - Set path to feature class
                                    else:
                                        datasetPath = destinationDatasetPath

                                    # If dataset is not versioned already and update mode is new
                                    # (refresh-only runs leave versioning state untouched)
                                    datasetVersioned = arcpy.Describe(
                                        datasetPath).isVersioned
                                    if ((datasetVersioned == 0)
                                            and (updateMode == "New")):
                                        # Logging
                                        if (enableLogging == "true"):
                                            logger.info(
                                                "Versioning dataset - " +
                                                datasetPath + "...")
                                        arcpy.AddMessage(
                                            "Versioning dataset - " +
                                            datasetPath + "...")
                                        # Register so that edits are stored in delta tables rather
                                        # than written to the base tables (NO_EDITS_TO_BASE)
                                        arcpy.RegisterAsVersioned_management(
                                            datasetPath, "NO_EDITS_TO_BASE")
                    else:
                        # Source has zero records - skip the copy entirely
                        # (the archive "_H" handling above is skipped too)
                        arcpy.AddWarning("Dataset " + destinationDatasetPath +
                                         " is empty and won't be copied...")
                        # Logging
                        if (enableLogging == "true"):
                            logger.warning("Dataset " +
                                           destinationDatasetPath +
                                           " is empty and won't be copied...")

            # Change dataset name back to current dataset
            # NOTE(review): presumably a per-iteration state reset for the
            # enclosing dataset loop (loop header is above this chunk) - confirm
            newDataset = currentDataset
            versionDataset = "false"
Exemple #18
0
def createregiontask(regiontask_id, regiontask_filepath):
    """Unpack a region-task archive, build its SDE databases and publish a service.

    Python 2 script. Steps:
      1. Extract the uploaded .zip (shelled out to WinRAR) or .rar (rarfile module).
      2. Create "mapindex" / "rgs" geodatabases from the extracted folders.
      3. Register both datasets as versioned (EDITS_TO_BASE).
      4. Add a status field to the map-index database and publish the map service.
      5. On success, record the result in PostgreSQL.

    Returns True on success, False if the archive type is unsupported or
    publishing fails.
    """
    service_name = 'mmanage' + str(regiontask_id)
    file_dir = os.path.dirname(regiontask_filepath)         # directory the archive extracts into
    file_name = os.path.basename(regiontask_filepath)       # bare file name (progress messages)
    z = zipfile.is_zipfile(regiontask_filepath)
    r = rarfile.is_rarfile(regiontask_filepath)
    if z:
        # -ibck: run WinRAR in the background; -o+: overwrite existing files.
        # NOTE(review): paths are not quoted in the command - verify inputs
        # never contain spaces, or quote them.
        rar_command = '"C:\Program Files\WinRAR\WinRAR.exe" x %s %s -ibck -o+' % (regiontask_filepath, file_dir)
        rar_command = rar_command.encode('gbk')
        os.system(rar_command)
        print rar_command.decode('gbk')
        print u"解压 {0} 成功".format(file_name)
    elif r:
        fz = rarfile.RarFile(regiontask_filepath, 'r')
        for file in fz.namelist():
            fz.extract(file, file_dir)
        print u"解压 {0} 成功".format(file_name)
    else:
        print(u'This is not zip or rar')
        return False

    # Timestamp used to give this run's databases unique names.
    time_ymdhms = datetime.now().strftime(u"%Y%m%d%H%M%S")
    dir_list = os.listdir(file_dir)

    # Create the spatial (SDE) databases from the extracted folders.
    print u"开始创建空间库"
    for dir in dir_list:
        dir_abspath = os.path.join(file_dir, dir)
        if os.path.isdir(dir_abspath):
            for subdir in os.listdir(dir_abspath):
                # Folders whose name starts with the map-index marker hold map sheets.
                if subdir.startswith(u"接图表"):
                    gdbpath = os.path.join(dir_abspath, subdir)
                    datatype = u"mapindex"
                    ARCGIS_create_database(gdbpath, time_ymdhms, datatype)

                elif subdir.startswith(u"RGS"):
                    gdbpath = os.path.join(dir_abspath, subdir)
                    datatype = u"rgs"
                    ARCGIS_create_database(gdbpath, time_ymdhms, datatype)

        # Same matching applied to top-level folders directly under file_dir.
        if dir.startswith(u"接图表"):
            gdbpath = os.path.join(file_dir, dir)
            datatype = u"mapindex"
            ARCGIS_create_database(gdbpath, time_ymdhms, datatype)
        elif dir.startswith(u"RGS"):
            gdbpath = os.path.join(file_dir, dir)
            datatype = u"rgs"
            ARCGIS_create_database(gdbpath, time_ymdhms, datatype)

    print u'创建空间库成功'

    mapindexsde = "mapindex" + time_ymdhms + ".sde"
    rgssde = "rgs" + time_ymdhms + ".sde"
    # The mapindex .sde that mmanage.mxd points at; must live in the current directory.
    # old_mapindexsde = "mapindex20181137133843.sde"
    old_mapindexsde = "mapindex20181206202724.sde"

    # Register as versioned; the first argument is the dataset to register.
    print u'开始注册版本'
    mapindexsde_dataset = os.path.join(os.path.dirname(os.path.abspath(__file__)), mapindexsde,
                                       mapindexsde + u".DLG_50000")
    rgssde_dataset = os.path.join(os.path.dirname(os.path.abspath(__file__)), rgssde, rgssde + u".DLG_K050")
    arcpy.RegisterAsVersioned_management(mapindexsde_dataset, u'EDITS_TO_BASE')
    arcpy.RegisterAsVersioned_management(rgssde_dataset, u'EDITS_TO_BASE')
    print u'注册版本成功'

    # Add the status field used to drive map colouring.
    ARCGIS_add_field(mapindexsde)

    # Publish the map service.
    is_successfull = ARCGIS_publishService(service_name, old_mapindexsde, mapindexsde)

    if is_successfull:
        # Record the service/database names against the region task in PostgreSQL.
        Posrgres_change_regiontask(regiontask_id, service_name, mapindexsde, rgssde)
        return True
    else:
        return False
Exemple #19
0
"""Register the KDOT rail feature class as versioned.

Created on Mar 12, 2013

@author: kyleg
"""
# Import system modules
import arcpy

# Enterprise-geodatabase feature class to register.
RAIL_DATASET = r"Database Connections/SDEPROD_GIS.sde/GIS.KDOT_RAIL"

# Register as versioned; edits are kept in the delta tables
# rather than being moved to the base table.
arcpy.RegisterAsVersioned_management(RAIL_DATASET, "NO_EDITS_TO_BASE")
Exemple #20
0
    # NOTE(review): this chunk starts mid-function; ownerConn, editors,
    # platform, instance and database are defined above this view.
    arcpy.env.workspace = ownerConn[
        0]  #note environments do not work with result objects.
    # Everything visible to the owner: tables, feature classes, feature datasets.
    dataList = arcpy.ListTables() + arcpy.ListFeatureClasses(
    ) + arcpy.ListDatasets("", "Feature")

    # Use roles to apply permissions.
    print(
        "\tGranting appropriate privileges to the data for the 'viewers' and 'editors' roles"
    )
    # viewers: SELECT only; editors: SELECT plus UPDATE/INSERT/DELETE.
    arcpy.ChangePrivileges_management(dataList, 'viewers', 'GRANT')
    arcpy.ChangePrivileges_management(dataList, 'editors', 'GRANT', 'GRANT')

    # Register the data as versioned.
    print("\tRegistering the data as versioned")
    for dataset in dataList:
        arcpy.RegisterAsVersioned_management(dataset)

    # Finally, create a version for each editor.
    print("\tCreating a private version for each user in the editor role")
    for user3 in editors:
        # Connect as the editor so the private version is owned by that user.
        verCreateConn = arcpy.CreateDatabaseConnection_management(
            r'<path_to_save_connection_file>', '<file_name>.sde', platform,
            instance, 'DATABASE_AUTH', user3, '<your_password_choice>',
            'SAVE_USERNAME', database)
        arcpy.CreateVersion_management(verCreateConn, 'sde.Default',
                                       '<your_choice_of_version_name>',
                                       'PRIVATE')
        arcpy.ClearWorkspaceCache_management()
    print(
        '\n****Done Creating Demo3 Geodatabase with roles, users, data and privileges.'
    )
Exemple #21
0
def mainFunction(
    configFile
):  # Get parameters from ArcGIS Desktop tool by seperating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    """Add Global IDs and register datasets as versioned from a CSV config.

    For every dataset listed in the CSV configuration file (first column,
    header row skipped) this tool:
      1. Derives the .sde connection file from the dataset path and
         disconnects all users from that database.
      2. Adds Global IDs to the dataset.
      3. Registers the dataset as versioned (NO_EDITS_TO_BASE) if it is
         not versioned already.

    Args:
        configFile: Path to the CSV configuration file; the first column
            of each data row holds the full dataset path (including the
            .sde connection file).

    Relies on module-level globals: enableLogging, logFile, setLogging,
    sendErrorEmail, sendEmail. Errors are logged/emailed, not re-raised.
    """
    try:
        # Tool output. Initialised up front so the "return output" logic at
        # the end cannot raise NameError when there is nothing to return
        # (previously `output` was never assigned, so the success path
        # always raised and was mis-reported as an error).
        output = None

        # Logging
        if (enableLogging == "true"):
            # Setup logging
            logger, logMessage = setLogging(logFile)
            # Log start of process
            logger.info("Process started.")

        # --------------------------------------- Start of code --------------------------------------- #

        # If configuration provided
        if (configFile):
            # Set CSV delimiter
            csvDelimiter = ","
            # Open the CSV file ('rb' because this targets Python 2's csv module)
            with open(configFile, 'rb') as csvFile:
                # Read the CSV file
                rows = csv.reader(csvFile, delimiter=csvDelimiter)

                # For each row in the CSV
                count = 0
                for row in rows:
                    # Ignore the first line containing headers
                    if (count > 0):
                        # Get the full dataset name
                        dataset = row[0]

                        # Derive the database connection file from the dataset path
                        splitDataset = dataset.split('.sde')
                        database = splitDataset[0] + ".sde"

                        # Disconnect users from the database so schema
                        # changes below do not fail on locks
                        arcpy.DisconnectUser(database, "ALL")

                        # Get dataset properties
                        datasetDesc = arcpy.Describe(dataset)

                        # Add Global IDs
                        # Logging
                        if (enableLogging == "true"):
                            logger.info("Adding Global IDs for " + dataset +
                                        "...")
                        arcpy.AddMessage("Adding Global IDs for " + dataset +
                                         "...")
                        arcpy.AddGlobalIDs_management(dataset)

                        # If dataset isn't versioned
                        if (datasetDesc.isVersioned == 0):
                            # Register As Versioned
                            # Logging
                            if (enableLogging == "true"):
                                logger.info(
                                    "Registering dataset as versioned - " +
                                    dataset + "...")
                            arcpy.AddMessage(
                                "Registering dataset as versioned - " +
                                dataset + "...")
                            arcpy.RegisterAsVersioned_management(
                                dataset, "NO_EDITS_TO_BASE")
                        else:
                            # Logging
                            if (enableLogging == "true"):
                                logger.info("Dataset already versioned - " +
                                            dataset + "...")
                            arcpy.AddMessage("Dataset already versioned - " +
                                             dataset + "...")
                    count = count + 1
        # No configuration provided
        else:
            arcpy.AddError("No configuration file provided...")
            # Logging
            if (enableLogging == "true"):
                # Log error
                logger.error("No configuration file provided...")
                # Remove file handler and close log file
                logging.FileHandler.close(logMessage)
                logger.removeHandler(logMessage)
            if (sendErrorEmail == "true"):
                # Send email
                sendEmail("No configuration file provided...")

        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        # Join all exception arguments into a single message
        errorMessage = ""
        for i in range(len(e.args)):
            if (i == 0):
                errorMessage = str(e.args[i])
            else:
                errorMessage = errorMessage + " " + str(e.args[i])
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
Exemple #22
0
    # NOTE(review): this chunk starts mid-try; edit, parcelCollect, bmpLU,
    # fieldJoinCalc and featureDataset are defined above this view.
    # start an edit session
    edit.startEditing()
    print("edit started")
    edit.startOperation()
    print("operation started")
    # Perform edits
    ## transfer bmp land use values to parcel collect
    fieldJoinCalc(parcelCollect, ['APN', 'TRPA_LANDUSE_DESCRIPTION'], bmpLU,
                  ['APN', 'LANDUSE'], "BMPStatus = 'BMP'")
    print(
        "The 'TRPA_LANDUSE_DESCRIPTION' field in the parcel collection data has been updated"
    )
    # stop edits
    edit.stopOperation()
    print("operation stopped")
    edit.stopEditing(
        True)  ## Stop the edit session with True to save the changes
    print("edit stopped")
    # register feature dataset as versioned using SDE credentials
    arcpy.RegisterAsVersioned_management(featureDataset, "EDITS_TO_BASE")
except Exception as err:
    print(err)
    # Roll back: close any open operation/session so database locks are released.
    if edit.isEditing:
        edit.stopOperation()
        print("operation stopped in except")
        edit.stopEditing(
            False)  ## Stop the edit session with False to abandon the changes
        print("edit stopped in except")
finally:
    # Cleanup
    arcpy.ClearWorkspaceCache_management()
Exemple #23
0
"""
Registers datasets, feature classes, and tables as versioned

Input: sde connection file as string
returns: no output

todo: error handling around already versioned objects
    output for success/failure
"""

import os

import arcpy

# Set variables
connection_file = raw_input('Enter SDE connection filename: ')

# Set the workspace
arcpy.env.workspace = 'Database Connections/' + connection_file

# Enumerate Database elements
version_items = arcpy.ListDatasets() + arcpy.ListFeatureClasses(
) + arcpy.ListTables()

for item in version_items:
    arcpy.RegisterAsVersioned_management(in_dataset=item,
                                         edit_to_base="NO_EDITS_TO_BASE")