Example #1
def AttribFields(fc, tbl, layer_name, table_name, workspace):
    # Updates the crossing type attribute values in the GIS database from the CANSYS table; believed to work but needs more testing.
    try:
        MakeFeatureLayer_management(fc, layer_name)
        MakeQueryTable_management(tbl, table_name, "USE_KEY_FIELDS",
                                  "CIIMS.CIIMS_VWCROSSINGGIS3.CROSSINGID", "#",
                                  "#")
        AddJoin_management(layer_name, "CROSSINGID", table_name, "CROSSINGID",
                           "KEEP_ALL")
        SelectLayerByAttribute_management(
            layer_name, "NEW_SELECTION",
            "CIIMS.Static_Crossings.CROSSINGTYPE <> vwcrossings3.CROSSINGTYPE")
        with da.Editor(
                workspace) as edit:  # @UnusedVariable @UndefinedVariable
            CalculateField_management(layer_name,
                                      'CIIMS.Static_Crossings.CROSSINGTYPE',
                                      '!vwcrossings3.CROSSINGTYPE!',
                                      'PYTHON_9.3')
            CalculateField_management(layer_name,
                                      "CIIMS.Static_Crossings.LOADDATE",
                                      "datetime.datetime.now( )", "PYTHON_9.3",
                                      "#")
        del layer_name, fc, table_name, tbl
        print "attrib fields updated for crossing type"
    except ExecuteError:
        print(GetMessages(2))
        endingTime = datetime.datetime.now()
        ScriptStatusLogging('POINT_UPDATE_PROD.py', 'CIIMS.Static_Crossings',
                            scriptFailure, startingTime, endingTime,
                            GetMessages(2))
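For reference, a minimal driver sketch follows showing how AttribFields might be wired up. The SDE connection path, layer and table-view names, and the startingTime/ScriptStatusLogging/scriptFailure helpers referenced in the except block are assumptions based on the surrounding snippets, not taken verbatim from any one script.

# Hedged driver sketch (paths and names are illustrative assumptions).
import datetime
from arcpy import (MakeFeatureLayer_management, MakeQueryTable_management,
                   AddJoin_management, SelectLayerByAttribute_management,
                   CalculateField_management, ExecuteError, GetMessages)
from arcpy import da

workspace = r"Database Connections/sdedev_ciims.sde"    # hypothetical SDE connection
fc = workspace + "/CIIMS.Static_Crossings"              # GIS feature class to update
tbl = workspace + "/CIIMS.CIIMS_VWCROSSINGGIS3"         # CANSYS source view

startingTime = datetime.datetime.now()
AttribFields(fc, tbl, "Crossings_Layer", "vwcrossings3", workspace)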
Example #2
def LatLongFields(fc, tbl, layer_name, table_name, workspace):
    # Updates the XY attribute values in the GIS database from the CANSYS table
    try:
        MakeFeatureLayer_management(fc, layer_name)
        MakeQueryTable_management(tbl, table_name, "USE_KEY_FIELDS",
                                  "CIIMS.CIIMS_VWCROSSINGGIS3.CROSSINGID", "#",
                                  "#")
        AddJoin_management(layer_name, "CROSSINGID", table_name, "CROSSINGID",
                           "KEEP_ALL")
        #select the rows where the CIIMS position has been changed
        SelectLayerByAttribute_management(
            layer_name, "NEW_SELECTION",
            "CIIMS.Static_Crossings.CROSSINGLATITUDE <> vwcrossings3.CROSSINGLATITUDE OR CIIMS.Static_Crossings.CROSSINGLONGITUDE <> vwcrossings3.CROSSINGLONGITUDE"
        )
        with da.Editor(
                workspace) as edit:  # @UnusedVariable @UndefinedVariable
            CalculateField_management(
                layer_name, 'CIIMS.Static_Crossings.CROSSINGLATITUDE',
                '!vwcrossings3.CROSSINGLATITUDE!', 'PYTHON_9.3')
            CalculateField_management(
                layer_name, 'CIIMS.Static_Crossings.CROSSINGLONGITUDE',
                '!vwcrossings3.CROSSINGLONGITUDE!', 'PYTHON_9.3')
            CalculateField_management(layer_name,
                                      "CIIMS.Static_Crossings.LOADDATE",
                                      "datetime.datetime.now( )", "PYTHON_9.3",
                                      "#")
        del layer_name, fc, table_name, tbl
    except ExecuteError:
        print(GetMessages(2))
        endingTime = datetime.datetime.now()
        ScriptStatusLogging('POINT_UPDATE_PROD.py', 'CIIMS.Static_Crossings',
                            scriptFailure, startingTime, endingTime,
                            GetMessages(2))
Example #3
def LatLongFields(fc, tbl, layer_name, table_name, workspace):
    # Updates the XY attribute values in the GIS database from the CANSYS table
    try:
        MakeFeatureLayer_management(fc, layer_name)
        MakeTableView_management(tbl, table_name)
        AddJoin_management(layer_name, "CROSSINGID", table_name, "CROSSINGID",
                           "KEEP_ALL")
        SelectLayerByAttribute_management(
            layer_name, "NEW_SELECTION",
            "CIIMS.Static_Crossings.CROSSINGLATITUDE <> CIIMS.CIIMS_VWCROSSINGGIS3.CROSSINGLATITUDE OR CIIMS.Static_Crossings.CROSSINGLONGITUDE <> CIIMS.CIIMS_VWCROSSINGGIS3.CROSSINGLONGITUDE"
        )
        with da.Editor(workspace) as edit:
            CalculateField_management(
                layer_name, 'CIIMS.Static_Crossings.CROSSINGLATITUDE',
                '!CIIMS.CIIMS_VWCROSSINGGIS3.CROSSINGLATITUDE!', 'PYTHON_9.3')
            CalculateField_management(
                layer_name, 'CIIMS.Static_Crossings.CROSSINGLONGITUDE',
                '!CIIMS.CIIMS_VWCROSSINGGIS3.CROSSINGLONGITUDE!', 'PYTHON_9.3')
            CalculateField_management(layer_name,
                                      "CIIMS.Static_Crossings.LOADDATE",
                                      "datetime.datetime.now( )", "PYTHON_9.3",
                                      "#")
        del layer_name, fc, table_name, tbl
    except ExecuteError:
        print(GetMessages(2))
Example #4
def AddInsert(fc, layer_name, newtbl, workspace):
    MakeTableView_management(newtbl, "NEWROWS_View", "#", "#", "#")
    addcount = int(GetCount_management("NEWROWS_View").getOutput(0))
    if addcount == 0:
        print("no new records")
    else:
        MakeFeatureLayer_management(fc, layer_name)
        MakeXYEventLayer_management(
            "NEWROWS_View", "CROSSINGLONGITUDE", "CROSSINGLATITUDE",
            "NEWROWS_Layer",
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],VERTCS['NAVD_1988',VDATUM['North_American_Vertical_Datum_1988'],PARAMETER['Vertical_Shift',0.0],PARAMETER['Direction',1.0],UNIT['Meter',1.0]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
            "#")
        FeatureClassToFeatureClass_conversion(
            "NEWROWS_Layer", "D:/Temp", "LOADTHIS1.shp", "#",
            """CROSSINGID "CROSSINGID" true false false 30 Text 0 0 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGID,-1,-1;CROSSINGLA "CROSSINGLA" true true false 8 Double 10 38 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGLATITUDE,-1,-1;CROSSINGLO "CROSSINGLO" true true false 8 Double 10 38 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGLONGITUDE,-1,-1;CROSSINGTY "CROSSINGTY" true true false 2 Text 0 0 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGTYPE,-1,-1""",
            "#")
        Append_management(
            "D:/Temp/LOADTHIS1.shp", layer_name, "NO_TEST",
            """CROSSINGID "CROSSINGID" true false false 30 Text 0 0 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGID,-1,-1;CROSSINGLATITUDE "CROSSINGLATITUDE" true true false 8 Double 10 38 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGLA,-1,-1;CROSSINGLONGITUDE "CROSSINGLONGITUDE" true true false 8 Double 10 38 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGLO,-1,-1;CROSSINGTYPE "CROSSINGTYPE" true true false 2 Text 0 0 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGTY,-1,-1;LOADDATE "LOADDATE" true true false 36 Date 0 0 ,First,#""",
            "#")
        Delete_management("D:/Temp/LOADTHIS1.shp", "#")
        updatelyr = layer_name + "new"
        MakeFeatureLayer_management(layer_name, updatelyr, "LOADDATE IS NULL")
        with da.Editor(workspace) as edit:
            CalculateField_management(updatelyr, "LOADDATE",
                                      "datetime.datetime.now( )", "PYTHON_9.3",
                                      "#")
        del fc, layer_name, newtbl, workspace, updatelyr
        print "new rows inserted into Static_Crossings"
Example #5
def PointGEOM(fc, tbl, workspace, layer_name, fields):
    # Updates the geometry point location based on the XY attributes in the GIS table; run this after the XY attributes have been updated
    try:
        MakeFeatureLayer_management(fc, layer_name)
        #the tolerance is how close a lat/long field value must match the coordinate position
        Tolerance = 0.000001
        #start the edit operation using the DA cursor
        edit = da.Editor(workspace)  # @UndefinedVariable
        edit.startEditing()
        edit.startOperation()
        with da.UpdateCursor(fc, fields) as ucursor:  # @UndefinedVariable
            for row in ucursor:
                #rows 0 and 1 are the lat long fields in the table
                point = Point(row[0], row[1])
                #row 2 is the geometry lat long tuple, and needs to be split in to lat/long parts
                rowx, rowy = (row[2])
                rowvalues = (row[0], row[1], point, datetime.datetime.now())
                #compare the lat long table values to the point location
                if (type(rowx) == float):
                    intolX = abs(row[0] - rowx)
                    intolY = abs(row[1] - rowy)
                    if intolX < Tolerance and intolY < Tolerance:
                        pass
                    else:
                        #if the shape needs to be adjusted, this will update the coordinate position from the field info
                        point = Point(row[0], row[1])
                        rowvalues = (row[0], row[1], point,
                                     datetime.datetime.now())
                        print "these rows are outside the position tolerance:"
                        print(rowvalues)
                        ucursor.updateRow(rowvalues)
                    #print (rowvalues)
                else:
                    point = Point(row[0], row[1])
                    rowvalues = (row[0], row[1], point,
                                 datetime.datetime.now())
                    print "these rows need to be calculated:"
                    print(rowvalues)
                    ucursor.updateRow(rowvalues)
        edit.stopOperation()
        edit.stopEditing(True)
        del layer_name, fc, fields, workspace
        print "point geometry updated"
    except ExecuteError:
        print(GetMessages(2))
        endingTime = datetime.datetime.now()
        ScriptStatusLogging('POINT_UPDATE_PROD.py', 'CIIMS.Static_Crossings',
                            scriptFailure, startingTime, endingTime,
                            GetMessages(2))
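PointGEOM never names its fields, but the cursor logic implies their order: row[0] feeds the Point's X (longitude), row[1] its Y (latitude), row[2] is unpacked as the SHAPE@XY tuple, and the fourth value written is the load date. A hedged call sketch, with field names assumed from the other examples:

# Sketch only: field order must match the UpdateCursor logic above.
fields = ["CROSSINGLONGITUDE", "CROSSINGLATITUDE", "SHAPE@XY", "LOADDATE"]
PointGEOM(fc, tbl, workspace, "Crossings_Layer", fields)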
Example #6
def PointGEOM(fc, tbl, workspace, layer_name, fields):
    # Updates the geometry point location based on the XY attributes in the GIS table; run this after the XY attributes have been updated
    try:
        MakeFeatureLayer_management(fc, layer_name)
        Tolerance = 0.0000001
        #start the edit operation
        edit = da.Editor(workspace)
        edit.startEditing()
        edit.startOperation()
        with da.UpdateCursor(fc, fields) as ucursor:
            for row in ucursor:
                point = Point(row[0], row[1])
                rowx, rowy = (row[2])
                rowvalues = (row[0], row[1], point, datetime.datetime.now())
                if (type(rowx) == float):
                    intolX = abs(row[0] - rowx)
                    intolY = abs(row[1] - rowy)
                    if intolX < Tolerance and intolY < Tolerance:
                        pass
                    else:
                        point = Point(row[0], row[1])
                        rowvalues = (row[0], row[1], point,
                                     datetime.datetime.now())
                        print(rowvalues)
                        ucursor.updateRow(rowvalues)
                    #print (rowvalues)
                else:
                    point = Point(row[0], row[1])
                    rowvalues = (row[0], row[1], point,
                                 datetime.datetime.now())
                    print "these rows are outside the position tolerance:"
                    print(rowvalues)
                    ucursor.updateRow(rowvalues)
        edit.stopOperation()
        edit.stopEditing(True)
        del layer_name, fc, fields, workspace
        print "point geometry updated"
    except ExecuteError:
        print(GetMessages(2))
Example #7
def AttribFields(fc, tbl, layer_name, table_name, workspace):
    # Updates the crossing type attribute values in the GIS database from the CANSYS table; believed to work but needs more testing.
    try:
        MakeFeatureLayer_management(fc, layer_name)
        MakeTableView_management(tbl, table_name)
        AddJoin_management(layer_name, "CROSSINGID", table_name, "CROSSINGID",
                           "KEEP_ALL")
        SelectLayerByAttribute_management(
            layer_name, "NEW_SELECTION",
            "CIIMS.Static_Crossings.CROSSINGTYPE <> CIIMS.CIIMS_VWCROSSINGGIS3.CROSSINGTYPE"
        )
        with da.Editor(workspace) as edit:
            CalculateField_management(
                layer_name, 'CIIMS.Static_Crossings.CROSSINGTYPE',
                '!CIIMS.CIIMS_VWCROSSINGGIS3.CROSSINGTYPE!', 'PYTHON_9.3')
            CalculateField_management(layer_name,
                                      "CIIMS.Static_Crossings.LOADDATE",
                                      "datetime.datetime.now( )", "PYTHON_9.3",
                                      "#")
        del layer_name, fc, table_name, tbl
        print "attrib fields updated for crossing type"
    except ExecuteError:
        print(GetMessages(2))
Example #8
}
logs.info('Creating Tax Parcel Lot Dictionary - Complete')
logs.info('Creating Customer Index / Assigning newCustomer Index Cursor')
customers = [x for x in arcpy.da.SearchCursor(B, idFieldB)]
newCustomer = arcpy.da.InsertCursor(B, [
    'PARCELID', 'Block', 'AddressNo', 'Address', 'Zip', 'City', 'State',
    'GDistID', 'GZone', 'G', 'Garbage3', 'Recycle', 'Recycle_2', 'YardWaste',
    'Yard_Waste_2', 'Symbology', 'GNotes', 'ActStatus', 'LotStatus',
    'GTaxStatus', 'GTaxUnits', 'GTaxDate', 'Multiple_Units', 'DISABLED_SRVC',
    'AddressPre', 'AddressRoad', 'AddressSuf', 'GCollection', 'YW',
    'GDistName', 'SHAPE@'
])
logs.info(
    'Creating Customer Index / Assigning newCustomer Index Cursor - Complete')

edit = da.Editor(engSDE)
edit.startEditing(True, True)
edit.startOperation()

file = 'TRASH - DISTRICT MASTER LIST.xlsx'
districtList = createTrashDefList(path, file)

logs.info('Inserting New Customers')
newCounter = []
newCounterLots = []
existingCounter = []
existingCounterLots = []
if noNewCustomers:
    logs.error('No new or "waiting to be verified" customers!')
else:
    for x in newCust:
Example #9
def main(*argv):
    """ main driver of program """
    try:
        #   User Inputs
        #
        #inputFC = argv[0]
        inputFD = argv[0]  #os.path.split(inputFC)[0]
        #inputFD = argv[0]#
        #Spreadsheet
        filename = argv[1]  #
        #Check Type
        tabname = argv[2]  #
        #Output GDB
        output_fcs = argv[3]

        #alias_table = get_field_alias(inputFC)
        #arcpy.AddMessage(alias_table)
        fc_domain_dict = get_fc_domains(os.path.dirname(inputFD))
        arcpy.AddMessage(os.path.dirname(inputFD))
        arcpy.AddMessage(fc_domain_dict)

        fcs = os.path.basename(output_fcs)
        outputGDB = os.path.dirname(output_fcs)  #argv[3]#
        #  Local Variables
        #
        error_fcs = {}
        empty = (-999999, '', None, 'noInformation', 'None', 'Null', 'NULL',
                 -999999.0)
        #  Logic
        #
        now = datetime.datetime.now()
        if outputGDB is None or \
           outputGDB == "" or \
           outputGDB == "#":
            outputGDB = env.scratchGDB
        if not arcpy.Exists(outputGDB):
            arcpy.CreateFileGDB_management(
                out_folder_path=os.path.dirname(outputGDB),
                out_name=os.path.basename(outputGDB))
        arcpy.AddMessage("Beginning null attribute check.")
        env.workspace = inputFD
        specificAttributeDict, attrCheck = create_attr_dict(filename, tabname)
        if "Crv" in fcs:
            pnt_fc = fcs.replace("Crv", "Pnt")
            srf_fc = fcs.replace("Crv", "Srf")
        else:
            pnt_fc = fcs + "Pnt"
            srf_fc = fcs + "Srf"
        errorFCs = [
            [os.path.join(outputGDB, pnt_fc), "POINT"
             ],  # "FindTdsErrorPnt_"+attrCheck.replace('-','_')), "POINT"],
            [os.path.join(outputGDB, fcs), "POLYLINE"
             ],  #"FindTdsErrorCrv_"+attrCheck.replace('-','_')), "POLYLINE"],
            [os.path.join(outputGDB, srf_fc), "POLYGON"]
        ]  #"FindTdsErrorSrf_"+attrCheck.replace('-','_')), "POLYGON"]]
        desc = arcpy.Describe(inputFD)
        if desc.dataType.lower() == "FeatureDataset".lower():
            sr = arcpy.Describe(inputFD).spatialReference
        else:
            sr = None
        for fc in errorFCs:
            error_fcs[fc[1]] = create_error_fc(outFC=fc[0],
                                               geometryType=fc[1],
                                               sr=sr)
            del fc
        del errorFCs
        del sr
        edit = da.Editor(outputGDB)
        edit.startEditing(False, True)
        edit.startOperation()
        pntInsert = da.InsertCursor(error_fcs['POINT'], [
            "SHAPE@", "DEFICIENCY", "FEATURE_CLASS", "SUBTYPE", "ORIG_OID",
            "DEFICIENCY_CNT"
        ])
        crvInsert = da.InsertCursor(error_fcs['POLYLINE'], [
            "SHAPE@", "DEFICIENCY", "FEATURE_CLASS", "SUBTYPE", "ORIG_OID",
            "DEFICIENCY_CNT"
        ])
        srfInsert = da.InsertCursor(error_fcs['POLYGON'], [
            "SHAPE@", "DEFICIENCY", "FEATURE_CLASS", "SUBTYPE", "ORIG_OID",
            "DEFICIENCY_CNT"
        ])
        for fc in arcpy.ListFeatureClasses():  #[os.path.split(inputFC)[1]]: #
            arcpy.AddMessage("Looking at: %s" % fc)
            alias_table = get_field_alias(fc)
            arcpy.AddMessage(alias_table)
            stList = unique_values(os.path.join(inputFD, fc), "F_CODE")
            errorCount = 0
            if len(stList) > 0:
                field_names_lookup = {field.name : field.type \
                                      for field in arcpy.ListFields(os.path.join(inputFD, fc)) \
                                      if field.type not in ['Blob', 'Geometry', 'OID', 'Raster']}
                field_names_lookup['SHAPE@'] = 'Geometry'
                field_names_lookup['OID@'] = 'OID'
                for s in stList:
                    if s in specificAttributeDict:
                        sub_sql = " or ".join([assemble_sql(field_name=f,
                                                field_type=field_names_lookup[f]) \
                                               for f in specificAttributeDict[s] ])
                        sql = "F_CODE = '{fcode}' and ({subsql})".format(
                            fcode=s, subsql=sub_sql)
                        with da.SearchCursor(os.path.join(inputFD, fc),
                                             field_names_lookup.keys(),
                                             where_clause=sql) as rows:
                            index_lookup = None
                            for row in rows:
                                if index_lookup is None:
                                    index_lookup = {key:rows.fields.index(key) \
                                                    for key in rows.fields}
                                vals = [alias_table[i] for i in specificAttributeDict[s] \
                                        if row[index_lookup[i]] in empty]
                                if len(vals) > 0:
                                    fs = ",".join(vals)
                                    oid = row[index_lookup["OID@"]]
                                    #arcpy.AddMessage(fc_domain_dict[s])
                                    ERROR = str(fc) + r" | " + str(
                                        fc_domain_dict[s]) + r" | OID: " + str(
                                            oid) + r" | " + fs
                                    irow = [
                                        row[index_lookup['SHAPE@']], ERROR, fc,
                                        fc_domain_dict[s], oid,
                                        len(vals)
                                    ]
                                    if fc[-3:].lower() == "pnt":
                                        pntInsert.insertRow(irow)
                                    elif fc[-3:].lower() == "crv":
                                        crvInsert.insertRow(irow)
                                    elif fc[-3:].lower() == "srf":
                                        srfInsert.insertRow(irow)
                                    errorCount += 1
                                    del irow
                                    del oid
                                    del ERROR
                                    del fs
                                del vals
                                del row

                        not_sub_sql = " and ".join([assemble_sql(field_name=f,
                                                field_type=field_names_lookup[f],
                                                not_in=True) \
                                               for f in specificAttributeDict[s] ])

                        not_sql = "F_CODE = '{fcode}' and ({subsql})".format(
                            fcode=s, subsql=not_sub_sql)
                        with da.SearchCursor(os.path.join(inputFD, fc),
                                             field_names_lookup.keys(),
                                             where_clause=not_sql) as rows:
                            index_lookup = None
                            for row in rows:
                                if index_lookup is None:
                                    index_lookup = {key:rows.fields.index(key) \
                                                    for key in rows.fields}
                                vals = [i for i in specificAttributeDict[s] \
                                        if row[index_lookup[i]] in empty]
                                fs = "N/A"
                                oid = row[index_lookup["OID@"]]
                                ERROR = str(fc) + r" | " + str(
                                    fc_domain_dict[s]) + r" | OID: " + str(
                                        oid) + r" | " + fs
                                irow = [
                                    row[index_lookup['SHAPE@']], ERROR, fc,
                                    fc_domain_dict[s], oid, 0
                                ]
                                if fc[-3:].lower() == "pnt":
                                    pntInsert.insertRow(irow)
                                elif fc[-3:].lower() == "crv":
                                    crvInsert.insertRow(irow)
                                elif fc[-3:].lower() == "srf":
                                    srfInsert.insertRow(irow)
                                errorCount += 1
                                del irow
                                del oid
                                del ERROR
                                del fs
                                del vals
                                del row
                            del index_lookup
                    del s
                del field_names_lookup
            if errorCount > 0:
                arcpy.AddMessage("       Errors in " + fc + ": " +
                                 str(errorCount))
            del stList
        edit.stopOperation()
        edit.stopEditing(True)
        del pntInsert, crvInsert, srfInsert
        del edit
        arcpy.AddMessage("Total Processing time: %s" %
                         str(datetime.datetime.now() - now))
        arcpy.SetParameterAsText(4, ";".join(error_fcs.values()))
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
def main(*argv):
    """ main driver of program """
    try:
        #   User Inputs
        #
        inputFC = argv[0]
        #inputFD = os.path.split(inputFC)[0]
        desc = arcpy.Describe(inputFC)
        sr = desc.spatialReference
        try:
            inputFD = desc.featureClass.path #argv[1]#
            fc = desc.featureClass.name

        except:
            inputFD = desc.path #argv[1]#
            fc = desc.name

        #Spreadsheet
        filename = argv[1]#
        #Check Type
        tabname = argv[2]#
        #Output GDB
        output_fcs = argv[3]


        alias_table = get_field_alias(inputFC)
        arcpy.AddMessage(alias_table)
        arcpy.AddMessage(os.path.dirname(inputFD))
        try:
            fc_domain_dict = get_fc_domains(os.path.dirname(inputFD))
        except:
            #fc_domain_dict = get_fc_domains(r'C:\PROJECTS\STATE_OF_THE_DATA\DATA\TDS\TDS_6_1_MNG_FGCM_sub1.gdb')
            arcpy.AddMessage("Attribution Assessment not configured to pull "
                + "domain dictionary from service. Please add a path to a TDS"
                + " feature class here.")
            exit(0)
        arcpy.AddMessage(os.path.dirname(inputFD))
        arcpy.AddMessage(fc_domain_dict)

        outputGDB = os.path.dirname(output_fcs) #argv[3]#
        #  Local Variables
        #
        error_fcs = {}
        empty = (-999999, '', None, 'noInformation',
                 'None', 'Null', 'NULL', -999999.0)
        #  Logic
        #
        now = datetime.datetime.now()
        if outputGDB is None or \
           outputGDB == "" or \
           outputGDB == "#":
            outputGDB = env.scratchGDB
        if not arcpy.Exists(outputGDB):
            arcpy.CreateFileGDB_management(out_folder_path=os.path.dirname(outputGDB),
                                          out_name=os.path.basename(outputGDB))
        arcpy.AddMessage("Beginning null attribute check.")
        env.workspace = inputFD
        specificAttributeDict, attrCheck = create_attr_dict(filename, tabname)
        #desc = arcpy.Describe(inputFD)
##        if desc.dataType.lower() == "FeatureDataset".lower():
##            sr = arcpy.Describe(inputFD).spatialReference
##        else:
##            sr = None
        error_fc = output_fcs
        error_fc = create_error_fc(output_fcs,'POLYLINE',sr=sr)
        del sr
        edit = da.Editor(outputGDB)
        edit.startEditing(False, True)
        edit.startOperation()
        crvInsert = da.InsertCursor(error_fc,
                                    ["SHAPE@", "DEFICIENCY", "FEATURE_CLASS",
                                     "SUBTYPE", "ORIG_OID", "DEFICIENCY_CNT"])

##-----------------
        arcpy.AddMessage("Looking at: %s" % output_fcs)
        stList = unique_values(inputFC,"F_CODE")
        errorCount = 0
        if len(stList) > 0:
            field_names_lookup = {field.name : field.type \
                                  for field in arcpy.ListFields(inputFC) \
                                  if field.type not in ['Blob', 'Geometry', 'OID', 'Raster']}
            field_names_lookup['SHAPE@'] = 'Geometry'
            field_names_lookup['OID@'] = 'OID'
            for s in stList:
                if s in specificAttributeDict:
                    sub_sql = " or ".join([assemble_sql(field_name=f,
                                            field_type=field_names_lookup[f]) \
                                           for f in specificAttributeDict[s] ])
                    sql = "F_CODE = '{fcode}' and ({subsql})".format(fcode=s, subsql=sub_sql)
                    with da.SearchCursor(inputFC,
                                         field_names_lookup.keys(),
                                         where_clause=sql) as rows:
                        index_lookup = None
                        for row in rows:
                            if index_lookup is None:
                                index_lookup = {key:rows.fields.index(key) \
                                                for key in rows.fields}
                            vals = [alias_table[i] for i in specificAttributeDict[s] \
                                    if row[index_lookup[i]] in empty]
                            if len(vals) > 0:
                                fs = ",".join(vals)
                                oid = row[index_lookup["OID@"]]
                                #arcpy.AddMessage(fc_domain_dict[s])
                                ERROR = str(fc) + r" | " + str(fc_domain_dict[s]) + r" | OID: " + str(oid) + r" | " + fs
                                irow = [row[index_lookup['SHAPE@']],
                                        ERROR,
                                        fc,
                                        fc_domain_dict[s],
                                        oid,
                                        len(vals)
                                        ]

                                crvInsert.insertRow(irow)

                                errorCount += 1
                                del irow
                                del oid
                                del ERROR
                                del fs
                            del vals
                            del row

                    not_sub_sql = " and ".join([assemble_sql(field_name=f,
                                            field_type=field_names_lookup[f],
                                            not_in=True) \
                                           for f in specificAttributeDict[s] ])

                    not_sql = "F_CODE = '{fcode}' and ({subsql})".format(fcode=s, subsql=not_sub_sql)
                    with da.SearchCursor(inputFC,
                                         field_names_lookup.keys(),
                                         where_clause=not_sql) as rows:
                        index_lookup = None
                        for row in rows:
                            if index_lookup is None:
                                index_lookup = {key:rows.fields.index(key) \
                                                for key in rows.fields}
                            vals = [i for i in specificAttributeDict[s] \
                                    if row[index_lookup[i]] in empty]
                            fs = "N/A"
                            oid = row[index_lookup["OID@"]]
                            ERROR = str(fc) + r" | " + str(fc_domain_dict[s]) + r" | OID: " + str(oid) + r" | " + fs
                            irow = [row[index_lookup['SHAPE@']],
                                    ERROR,
                                    fc,
                                    fc_domain_dict[s],
                                    oid,
                                    0
                                    ]

                            crvInsert.insertRow(irow)

                            errorCount += 1
                            del irow
                            del oid
                            del ERROR
                            del fs
                            del vals
                            del row
                        del index_lookup
                del s
            del field_names_lookup
        if errorCount > 0:
            arcpy.AddMessage("       Errors in " + fc + ": " + str(errorCount))
        del stList

##------------------------------------------------------------------------------
        edit.stopOperation()
        edit.stopEditing(True)
        del crvInsert
        del edit
        arcpy.AddMessage("Total Processing time: %s" % str(datetime.datetime.now() - now))
        #arcpy.SetParameterAsText(4, ";".join(error_fcs.values()))
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)