def main(config_file, *args):
    """
    Import incidents into a feature class,
    filtering out duplicates if necessary,
    assign geometry using addresses or XY values,
    and publish the results using AGOL or ArcGIS for Server.
    Output is an updated feature class, processing reports,
    and optionally a service.
    """

    # Current date and time for file names
    fileNow = dt.strftime(dt.now(), prefix)

    if isfile(config_file):
        cfg = ConfigParser.ConfigParser()
        cfg.read(config_file)
    else:
        raise Exception(e1.format("Configuration file", config_file, ""))

    # Get general configuration values
    incidents = cfg.get('GENERAL', 'spreadsheet')
    inc_features = cfg.get('GENERAL', 'incident_features')
    id_field = cfg.get('GENERAL', 'incident_id')
    report_date_field = cfg.get('GENERAL', 'report_date_field')
    reports = cfg.get('GENERAL', 'reports')
    loc_type = cfg.get('GENERAL', 'loc_type')
    summary_field = cfg.get('GENERAL', 'summary_field')
    transform_method = cfg.get('GENERAL', 'transform_method')
    pub_status = cfg.get('GENERAL', 'pub_status')
    delete_duplicates = cfg.get('GENERAL', 'delete_duplicates')

    if delete_duplicates.lower() == 'true':
        delete_duplicates = True
        if report_date_field == "":
            raise Exception(e16)
    else:
        delete_duplicates = False

    # Log file
    if exists(reports):
        rptLog = join(reports, "{0}_{1}.log".format(fileNow, log_name))
    else:
        raise Exception(e1.format("Report location", reports, w5))

    # Scratch workspace
    tempgdb = arcpy.env.scratchGDB

    with open(rptLog, "w") as log:
        try:
            # Log file header
            log.write(l1.format(fileNow))
            log.write(l2.format(getpass.getuser()))
            log.write(l3.format(incidents))
            log.write(l4.format(inc_features))
            if loc_type == "ADDRESSES":
                log.write(l5.format(cfg.get('ADDRESSES', 'locator')))

            # Validate output feature class geometry type
            desc = arcpy.Describe(inc_features)
            if not desc.shapeType == "Point":
                raise Exception(e6.format(inc_features))

            # Identify field names in both fc and csv
            if arcpy.Exists(incidents):
                csvfieldnames = [f.name for f in arcpy.ListFields(incidents)]
            else:
                raise Exception(e1.format("Spreadsheet", incidents, ""))

            if arcpy.Exists(inc_features):
                incfieldnames = [f.name for f in arcpy.ListFields(inc_features)]
            else:
                raise Exception(e1.format("Feature Class", inc_features, ""))

            matchfieldnames = [name for name in csvfieldnames
                               if name in incfieldnames]

            # If data is to be geocoded
            if loc_type == "ADDRESSES":

                # Get geocoding parameters
                address_field = cfg.get('ADDRESSES', 'address_field')
                city_field = cfg.get('ADDRESSES', 'city_field')
                state_field = cfg.get('ADDRESSES', 'state_field')
                zip_field = cfg.get('ADDRESSES', 'zip_field')
                locator = cfg.get('ADDRESSES', 'locator')

                # Geocoding field names
                reqFields = [address_field, id_field]  # , report_date_field
                opFields = [city_field, state_field, zip_field,
                            summary_field, report_date_field]

                if locator == "":
                    raise Exception(e13)

                # Test geolocator fields
                loc_address_fields = [loc_address_field, loc_city_field, loc_zip_field, loc_state_field]
                for a in loc_address_fields:
                    if a != "" and a not in all_locator_fields:
                        raise Exception(e14)

            # If data has coordinate values
            else:

                # Get coordinate parameters
                lg_field = cfg.get('COORDINATES', 'long_field')
                lt_field = cfg.get('COORDINATES', 'lat_field')
                coord_system = cfg.get('COORDINATES', 'coord_system')
                remove_zeros = cfg.get('COORDINATES', 'ignore_zeros')
                remove_zeros = remove_zeros.lower() == 'true'

                # Coordinate field names
                reqFields = [id_field, lg_field, lt_field]  # , report_date_field
                opFields = [summary_field, report_date_field]

            # Validate required field names
            field_test(incidents, reqFields, csvfieldnames, True)
            field_test(inc_features, reqFields, incfieldnames, True)

            # Validate optional field names
            field_test(incidents, opFields, csvfieldnames)
            field_test(inc_features, opFields, incfieldnames)

            # Validate basic publishing parameters
            if not pub_status == "":

                # Get general publishing parameters
                mxd = cfg.get('PUBLISHING', 'mxd')
                username = cfg.get('PUBLISHING', 'user_name')
                password = cfg.get('PUBLISHING', 'password')

                # Test for required inputs
                if not arcpy.Exists(mxd):
                    raise Exception(e1.format("Map document", mxd, ""))

                if splitext(mxd)[1] != ".mxd":
                    raise Exception(e3)

                # AGOL publishing requires stored credentials
                if username == "" or password == "":
                    if pub_status == "ARCGIS_ONLINE":
                        raise Exception(e8)

            # Get address fields for geocoding
            if loc_type == "ADDRESSES":

                addresses = ""
                loc_fields = []
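                # Build the GeocodeAddresses field map: each locator field is
                # paired with the matching spreadsheet column, or "<None>"
                # when no column is mapped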
                adr_string = "{0} {1} VISIBLE NONE;"

                for loc_field in all_locator_fields:
                    if loc_field == loc_address_field:
                        addresses += adr_string.format(loc_field, address_field)
                        loc_fields.append(address_field)

                    elif loc_field == loc_city_field and city_field != "":
                        addresses += adr_string.format(loc_field, city_field)
                        loc_fields.append(city_field)

                    elif loc_field == loc_state_field and state_field != "":
                        addresses += adr_string.format(loc_field, state_field)
                        loc_fields.append(state_field)

                    elif loc_field == loc_zip_field and zip_field != "":
                        addresses += adr_string.format(loc_field, zip_field)
                        loc_fields.append(zip_field)

                    else:
                        addresses += adr_string.format(loc_field, "<None>")

            # Get coordinate fields
            else:
                loc_fields = [lg_field, lt_field]

            total_records = len(field_vals(incidents, id_field))

            messages(m17.format(total_records, incidents), log)

            if not summary_field == "":
                SumVals = field_vals(incidents, summary_field)
                listSumVals = [val for val in SumVals if val != None]

                if not len(SumVals) == len(listSumVals):
                    print m19.format(len(SumVals)-len(listSumVals))
                    log.write(m19.format(len(SumVals)-len(listSumVals)))
                listSumVals.sort()

                log.write(l10.format(summary_field))
                dateCount = 1
                i = 0
                n = len(listSumVals)

                while i < n:
                    try:
                        if listSumVals[i] == listSumVals[i + 1]:
                            dateCount += 1
                        else:
                            log.write(l11.format(listSumVals[i], dateCount))
                            dateCount = 1
                    except IndexError:
                        # Last value in the list: write out the final count
                        log.write(l11.format(listSumVals[i], dateCount))
                    i += 1

                log.write("\n")

            # Remove duplicate incidents
            if delete_duplicates:

                timeNow = dt.strftime(dt.now(), time_format)
                messages(m13.format(timeNow), log)

                incidents, req_nulls, countUpdate, countDelete = remove_dups(tempgdb,
                                                                             incidents,
                                                                             inc_features,
                                                                             matchfieldnames,
                                                                             id_field,
                                                                             report_date_field,
                                                                             loc_fields)

                if not req_nulls == "":
                    req_nulls = "{}\n".format(req_nulls)
                    messages(w3.format(req_nulls), log, 1)

                if not countUpdate == 0:
                    messages(m14.format(countUpdate,inc_features), log)

                if countDelete > 0:
                    messages(m15.format(countDelete,inc_features), log)

            # Create features
            tempFC = join(tempgdb, "tempDataLE")

            # Create point features from spreadsheet

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m1.format(timeNow), log)

            if loc_type == "ADDRESSES":

                timeNow = dt.strftime(dt.now(), time_format)
                messages(m3.format(timeNow), log)

                # Geocode the incidents
                arcpy.GeocodeAddresses_geocoding(incidents,
                                                 locator,
                                                 addresses,
                                                 tempFC,
                                                 "STATIC")

                # Initiate geocoding report counts
                countMatch = 0
                countTrueMatch = 0
                countUnmatch = 0

                # Create geocoding reports
                rptUnmatch = join(reports,
                                  "{0}_{1}.csv".format(fileNow, unmatch_name))

                fieldnames = [f.name for f in arcpy.ListFields(tempFC)]

                # Sort incidents based on match status
                statusIndex = fieldnames.index(status)
                locIndex = fieldnames.index(addr_type)

                # Write incidents that were not well geocoded to file and
                #       delete from temp directory
                with open (rptUnmatch, "wb") as umatchFile:
                    unmatchwriter = csv.writer(umatchFile)
                    unmatchwriter.writerow(fieldnames)

                    # Delete incidents that were not Matched
                    countUnmatch = sort_records(tempFC, unmatchwriter,
                                                statusIndex, match_value,
                                                False, True)

                    if countUnmatch > 0:
                        messages(w6.format(countUnmatch, rptUnmatch), log, 1)

                    # Incidents that were not matched to an acceptable accuracy
                    countMatch = sort_records(tempFC, unmatchwriter,
                                              locIndex, addrOK, False, True)

                    if countMatch > 0:
                        messages(w7.format(countMatch, addrOK, rptUnmatch), log, 1)

                    countTrueMatch = len(field_vals(tempFC, "OBJECTID"))

                    messages(m16.format(countTrueMatch, inc_features), log)

            else:
                # Create temporary output storage
                tempFL = arcpy.MakeXYEventLayer_management(incidents,
                                                           lg_field,
                                                           lt_field,
                                                           "tempLayerLE",
                                                           coord_system)

                # Convert the feature layer to a feature class to prevent
                #   field name changes

                arcpy.CopyFeatures_management(tempFL, tempFC)
                arcpy.Delete_management(tempFL)

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m4.format(timeNow, inc_features), log)

            # Fields that will be copied from geocode results to final fc
            copyfieldnames = []
            copyfieldnames.extend(matchfieldnames)
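            # "SHAPE@XY" is the arcpy data access cursor token for the
            # feature's centroid as an (x, y) tuple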
            copyfieldnames.append("SHAPE@XY")

            # Fields for error reporting
            errorfieldnames = []
            errorfieldnames.extend(matchfieldnames)
            errorfieldnames.insert(0, errorfield)
            errorfieldnames += [long_field, lat_field]

            # Reproject the features
            sr_input = arcpy.Describe(tempFC).spatialReference
            sr_output = arcpy.Describe(inc_features).spatialReference

            if sr_input != sr_output:
                proj_out = "{}_proj".format(tempFC)

                arcpy.Project_management(tempFC,
                                         proj_out,
                                         sr_output,
                                         transform_method)
                tempFC = proj_out

            # Append geocode results to fc
            rptNoAppend = join(reports, "{0}_{1}.csv".format(fileNow, noappend_name))

            with arcpy.da.SearchCursor(tempFC, copyfieldnames) as csvrows:
                with arcpy.da.InsertCursor(inc_features, copyfieldnames) as incrows:
                    # Open csv for un-appended records
                    with open(rptNoAppend, "wb") as appendFile:

                        appendwriter = csv.writer(appendFile)
                        appendwriter.writerow(errorfieldnames)

                        # Index of field with incident ID
                        record = errorfieldnames.index(id_field)

                        # Initiate count of successfully appended records
                        countAppend = 0

                        # List of ids of records not successfully appended
                        errorRecords = []

                        for csvrow in csvrows:
                            try:
                                if loc_type == "COORDINATES":
                                    if remove_zeros:
                                        lt_index = copyfieldnames.index(lt_field)
                                        lg_index = copyfieldnames.index(lg_field)

                                        ltVal = csvrow[lt_index]
                                        lgVal = csvrow[lg_index]

                                        if ltVal == 0 and lgVal == 0:
                                            raise Exception("invalid_coordinates")

                                # If the row can be appended
                                incrows.insertRow(csvrow)
                                countAppend += 1

                            except Exception as reason:
                                # e.g. 'The value type is incompatible with the
                                #       field type. [INCIDENTDAT]'
                                # Alternatively, the exception
                                #      'invalid_coordinates' raised by the
                                #       remove_zeros test above

                                # Get the name of the problem field
                                badfield = reason[0].split(" ")[-1]
                                badfield = badfield.strip(" []")

                                # Append field name to start of record
                                csvrow = list(csvrow)
                                csvrow.insert(0, badfield)

                                # Split the coordinate tuple into X and Y
                                lng, lat = list(csvrow[-1])
                                csvrow[-1] = lng
                                csvrow.append(lat)
                                csvrow = tuple(csvrow)

                                # Write the record out to csv
                                appendwriter.writerow(csvrow)

                                # Add id and field to issue list
                                errorRecords.append(w4.format(csvrow[record], badfield))

            # If issues were reported, print them
            if len(errorRecords) != 0:
                messages(w1.format(len(errorRecords), inc_features, rptNoAppend), log, 1)

            messages(m18.format(countAppend, inc_features), log)

            del incrows, csvrows

            # Convert times to UTC if publishing to AGOL
            if pub_status == "ARCGIS_ONLINE":

                # Get date fields
                date_fields = [f.name for f in arcpy.ListFields(inc_features)
                               if f.type == "Date" and f.name in matchfieldnames]

                # Convert from system timezone to UTC
                convert_to_utc(inc_features, date_fields)

            # Publish incidents
            if not pub_status == "":

                timeNow = dt.strftime(dt.now(), time_format)
                messages(m5.format(timeNow), log)

                errors = serviceutils.publish_service(cfg, pub_status, mxd, username, password)

                # Print analysis errors
                if errors:
                    raise Exception(e4.format(errors))

            # Convert times from UTC to system timezone
            if pub_status == "ARCGIS_ONLINE":
                convert_from_utc(inc_features, date_fields)

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m8.format(timeNow), log)

        except arcpy.ExecuteError:
            print("{}\n{}\n".format(gp_error, arcpy.GetMessages(2)))
            timeNow = dt.strftime(dt.now(), "{} {}".format(
                                                date_format, time_format))
            arcpy.AddError("{} {}:\n".format(timeNow, gp_error))
            arcpy.AddError("{}\n".format(arcpy.GetMessages(2)))

            log.write("{} ({}):\n".format(gp_error, timeNow))
            log.write("{}\n".format(arcpy.GetMessages(2)))

            for msg in range(arcpy.GetMessageCount()):
                if arcpy.GetSeverity(msg) == 2:
                    code = arcpy.GetReturnCode(msg)
                    print("Code: {}".format(code))
                    print("Message: {}".format(arcpy.GetMessage(msg)))

        except Exception as ex:
            print("{}: {}\n".format(py_error, ex))
            timeNow = dt.strftime(dt.now(), "{}".format(time_format))

            arcpy.AddError("{} {}:\n".format(timeNow, py_error))
            arcpy.AddError("{}\n".format(ex))

            log.write("{} {}:\n".format(timeNow, py_error))
            log.write("{}\n".format(ex))

        finally:
            # Clean up
            try:
                arcpy.Delete_management(tempgdb)
            except:
                pass
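
# A minimal sketch of the INI file this script expects, inferred from the
# cfg.get() calls above. Section and option names match the code; the sample
# values are hypothetical.
#
# [GENERAL]
# spreadsheet = C:\data\incidents.csv
# incident_features = C:\data\incidents.gdb\IncidentPoints
# incident_id = INCIDENTID
# report_date_field = REPORTDATE
# reports = C:\data\reports
# loc_type = ADDRESSES
# summary_field = CATEGORY
# transform_method =
# pub_status =
# delete_duplicates = true
#
# [ADDRESSES]
# address_field = ADDRESS
# city_field = CITY
# state_field = STATE
# zip_field = ZIP
# locator = C:\data\Locators\AddressLocator
#
# [COORDINATES]
# long_field = LONGITUDE
# lat_field = LATITUDE
# coord_system = WGS 1984
# ignore_zeros = true
#
# [PUBLISHING]
# mxd = C:\data\maps\Incidents.mxd
# user_name =
# password =
#
# Invocation (hypothetical): main(r'C:\data\config.ini')
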
def main(indicator_value, *argv):
    """
    Main function to record the number of incidents for a selected performance
    indicator value since the date/time the previous count of this value was
    performed, or within a specified number of days. It also maintains a record
    of the previous 4 counts. The density of these new incidents is mapped
    using the Kernel Density tool.

    Required arguments:
            indicator_value -- Performance Indicator value (e.g. Violent Crime)

    """

    # Open log file for reporting.
    with open(os.path.join(os.path.dirname(__file__), 'ed_log.log'), 'a') as log_file:
        try:
            # Check out the Spatial Analyst extension.
            if arcpy.CheckExtension('Spatial') == 'Available':
                arcpy.CheckOutExtension('Spatial')
            else:
                raise Exception(e1)

            # Set overwrite output option to True.
            arcpy.env.overwriteOutput = True

            # Set performance indicators (tuples) based on the indicator value.
            try:
                indicator_values = ic.indicators[indicator_value]
            except KeyError:
                raise KeyError(e2.format(indicator_value))

            # Select from perform_ind_features where PERFORMIND == indicator_value.
            dsc = arcpy.Describe(ic.perform_ind_features)
            cp = os.path.dirname(dsc.catalogPath)
            if cp.endswith('.gdb'):
                domains = arcpy.da.ListDomains(cp)
            else:
                domains = arcpy.da.ListDomains(os.path.dirname(cp))
            for domain in domains:
                if domain.name == 'PerformanceIndicators':
                    c = find_key(domain.codedValues, indicator_value.lower())
                    if not c:
                        raise Exception(e3.format(indicator_value))
                    else:
                        break
            # Create an in-memory layer instead of copying the data with Select.
            perform_ind_lyr = arcpy.management.MakeFeatureLayer(
                ic.perform_ind_features, "perform_ind_lyr",
                """{0} = {1}""".format(performind, c))

            # Update historical count and date fields.
            row_cnt = arcpy.management.GetCount(perform_ind_lyr)
            if int(row_cnt[0]) > 0:
                update_observ_fields(perform_ind_lyr)
            else:
                raise Exception(e4.format(indicator_value, ic.perform_ind_features, indicator_value))

            inc_time_field = indicator_values["inc_time_field"]
            # Select all incident features where:

            # 1. If number of days parameter is None, do this. Else, grab last # of days from now
            if indicator_values["number_of_days"] == '':
                # a. calltime is more recent than the ENDDATE value from
                #    PerformanceIndicator where PERFORMIND == domain value
                with arcpy.da.SearchCursor(
                        perform_ind_lyr, end_date,
                        sql_clause=(None,
                                    "ORDER BY {0} DESC".format(end_date))) as dates:
                    last_update_value = dates.next()[0]
                    d = last_update_value
                if last_update_value is not None:
                    arcpy.management.MakeFeatureLayer(
                        indicator_values["inc_features"], 'inc_lyr')
                    incident_lyr = arcpy.management.SelectLayerByAttribute(
                        'inc_lyr', "NEW_SELECTION",
                        """{0} > date '{1}'""".format(
                            inc_time_field,
                            str(last_update_value.replace(microsecond=0))))
                else:
                    raise Exception(e5)
            else:
                # b. inc_time_field is >= the current date minus the number
                #    of days specified in number_of_days.
                d = datetime.datetime.now() - datetime.timedelta(
                    days=int(indicator_values["number_of_days"]))
                arcpy.management.MakeFeatureLayer(
                    indicator_values["inc_features"], 'inc_lyr')
                incident_lyr = arcpy.management.SelectLayerByAttribute(
                    'inc_lyr', "NEW_SELECTION",
                    """{0} >= date '{1}'""".format(
                        inc_time_field, str(d.replace(microsecond=0))))

            # 2. value of inc_type_field is in the list of performance indicators

            inc_type_field = indicator_values["inc_type_field"]
            if not inc_type_field.upper() == "NONE":
                perform_indicators = indicator_values["perform_indicators"]
                if type(perform_indicators) == tuple:
                    arcpy.management.SelectLayerByAttribute(incident_lyr, "SUBSET_SELECTION",
                                    """{0} IN {1}""".format(inc_type_field, perform_indicators))
                else:
                    arcpy.management.SelectLayerByAttribute(incident_lyr, "SUBSET_SELECTION",
                                    """{0} = '{1}'""".format(inc_type_field, perform_indicators))

            # Populate OBSERVCURR with a count of the selected incident features.
            inc_lyr_count = arcpy.management.GetCount(incident_lyr)
            arcpy.management.CalculateField(perform_ind_lyr, observcurr,
                                            int(inc_lyr_count[0]), "PYTHON")

            # Populate DATECURR with the current date.
            arcpy.management.CalculateField(perform_ind_lyr, datecurr,
                                            """datetime.datetime.now()""",
                                            "PYTHON")

            # Update LASTUPDATE and LASTEDITOR with the current date and username.
            arcpy.management.CalculateField(perform_ind_lyr, last_editor,
                                            "'{0}'".format(getpass.getuser()),
                                            "PYTHON")
            arcpy.management.CalculateField(perform_ind_lyr, last_update,
                                            """datetime.datetime.now()""",
                                            "PYTHON")

            # Populate STARTDATE and ENDDATE with the date range of the data used.
            arcpy.management.CalculateField(perform_ind_lyr, start_date,
                                            repr(d), "PYTHON")
            arcpy.management.CalculateField(perform_ind_lyr, end_date,
                                            repr(datetime.datetime.now()),
                                            "PYTHON")

            # Get the average distance to the specified Nth nearest neighbor
            # for the selected incident features.
            if int(inc_lyr_count[0]) == 0:
                raise Exception(e6.format(indicator_value))

            distances = arcpy.stats.CalculateDistanceBand(
                incident_lyr, indicator_values["neighbors_value"],
                "EUCLIDEAN_DISTANCE")
            avg_distance = float(distances[1])

            # Calculate the density of incident features in a neighborhood
            # using the average distance from above.
            output_density = indicator_values["output_density"]
            arcpy.gp.KernelDensity_sa(incident_lyr, "NONE", output_density, "",
                                      avg_distance, "SQUARE_MILES")

            # Retrieve the mean and standard deviation of the raster pixel values.
            mean = arcpy.management.GetRasterProperties(output_density, "MEAN")
            std = arcpy.management.GetRasterProperties(output_density, "STD")

            # Exclude from rendering all values from 0 to
            # (mean + stdev * exclusion_value).
            exclusion_value = indicator_values["exclusion_value"]
            if exclusion_value != "0":
                mean = float(mean[0])
                std = float(std[0])
                exclude_values = "0 - {0}".format(mean + std * int(exclusion_value))
            else:
                exclude_values = "0"

            mxd = arcpy.mapping.MapDocument(indicator_values["map_document"])
            raster_layer = arcpy.mapping.ListLayers(mxd)[0]
            raster_layer.symbology.excludedValues = exclude_values
            mxd.save()


            # Publish the services.
            dt = datetime.datetime.strftime(datetime.datetime.now(),
                                            "%Y-%m-%d %H:%M:%S")
            log_file.write(m1.format(dt, indicator_values["service_name"]))
            print(m1.format(dt, indicator_values["service_name"]))
            serviceutils.publish_service(indicator_values["service_name"],
                                         mxd, "Map")

            dt = datetime.datetime.strftime(datetime.datetime.now(),
                                            "%Y-%m-%d %H:%M:%S")
            log_file.write(m1.format(dt, ic.stats_service_name))
            print(m1.format(dt, ic.stats_service_name))
            serviceutils.publish_service(ic.stats_service_name, ic.stats_mxd,
                                         "Feature")

            # Log the results.
            dt = datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d %H:%M:%S")
            log_file.write(m2.format(dt, indicator_value))

        except arcpy.ExecuteError:
            print("{}\n{}\n".format(gp_error, arcpy.GetMessages(2)))
            dt = datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d %H:%M:%S")
            log_file.write("{} ({}):\n".format(gp_error, dt))
            log_file.write("{}\n".format(arcpy.GetMessages(2)))
        except KeyError as ke:
            print("{} {}\n".format(py_error, ke[0]))
            dt = datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d %H:%M:%S")
            log_file.write("{} ({}):\n".format(py_error, dt))
            log_file.write('{}\n'.format(ke[0]))
        except serviceutils.AGSConnectionError:
            print("{}: {}\n".format(py_error, e7))
            dt = datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d %H:%M:%S")
            log_file.write("{} ({}):\n".format(py_error, dt))
            log_file.write("{}\n".format(e7))
        except Exception as ex:
            print("{}: {}\n".format(py_error, ex[0]))
            dt = datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d %H:%M:%S")
            log_file.write("{} ({}):\n".format(py_error, dt))
            log_file.write("{}\n".format(ex[0]))
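
# A hedged usage sketch; the indicator value comes from the docstring's
# example, and the supporting modules (ic, serviceutils) and message
# constants (m1, m2, e1-e7) are assumed to be defined elsewhere.
#
#     main('Violent Crime')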