Code example #1
def main():

    args = cli_args()

    app_name = args.app_name

    CONFIG = cfg["dms"]
    KNACK_CREDS = KNACK_CREDENTIALS[app_name]

    kits_query = """
        SELECT DMSID as KITS_ID
        ,Multistring as DMS_MESSAGE
        ,LastUpdated as MESSAGE_TIME
        FROM [KITS].[DMS_RealtimeData]
        """

    kits_data = kitsutil.data_as_dict(KITS_CREDENTIALS, kits_query)

    for record in kits_data:
        #  convert message time to epoch milliseconds (arrow >= 1.0 exposes
        #  .timestamp() as a method)
        new_date = arrow.get(record["MESSAGE_TIME"])
        record["MESSAGE_TIME"] = int(new_date.timestamp() * 1000)

    kn = knackpy.Knack(
        scene=CONFIG["scene"],
        view=CONFIG["view"],
        ref_obj=CONFIG["ref_obj"],
        app_id=KNACK_CREDS["app_id"],
        api_key=KNACK_CREDS["api_key"],
    )

    #  hack to avoid ref_obj field meta replacing primary obj modified date
    #  this is a knackpy issue
    #  TODO: fix knackpy field meta handling
    kn.field_map[
        CONFIG["modified_date_field"]] = CONFIG["modified_date_field_id"]

    knack_data = kn.data

    if not kits_data:
        #  nothing to merge; no records to update
        return 0

    new_data = datautil.merge_dicts(knack_data, kits_data, "KITS_ID",
                                    ["DMS_MESSAGE", "MESSAGE_TIME"])

    for record in new_data:
        #  remove DMS formatting artifacts
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[np]", "\n")
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[nl]", " ")

        for artifact in ("[pt40o0]", "[pt30o0]", "[fo13]", "[fo2]", "[jl3]",
                         "[pt30]"):
            record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace(artifact, "")

        record[CONFIG["modified_date_field"]] = datautil.local_timestamp()

    new_data = datautil.reduce_to_keys(
        new_data,
        ["id", "MESSAGE_TIME", "DMS_MESSAGE", CONFIG["modified_date_field"]])

    new_data = datautil.replace_keys(new_data, kn.field_map)

    for count, record in enumerate(new_data, start=1):
        print("updating record {} of {}".format(count, len(new_data)))

        res = knackpy.record(
            record,
            obj_key=CONFIG["ref_obj"][0],
            app_id=KNACK_CREDS["app_id"],
            api_key=KNACK_CREDS["api_key"],
            method="update",
        )

    return len(new_data)
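
`datautil.merge_dicts` is called above but not shown. A minimal sketch of what the call appears to expect -- merging matching KITS records into the Knack records on a shared key and copying only the listed fields -- might look like this (an assumption about the helper, not its actual source):

def merge_dicts(target_dicts, source_dicts, join_key, copy_keys):
    #  hypothetical sketch: index source records by the join key, then copy
    #  the requested keys onto each target record that has a match
    source_by_key = {source[join_key]: source for source in source_dicts}

    for target in target_dicts:
        source = source_by_key.get(target.get(join_key))
        if source:
            for key in copy_keys:
                target[key] = source[key]

    return target_dicts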
Code example #2
def get_status(device, timeout=3):
    #  NOTE: timeout (seconds) was presumably a module-level value in the
    #  original context; it is surfaced here as a parameter so the function
    #  is self-contained
    #  get old IP status, setting it to NO COMMUNICATION if not present
    state_previous = device.setdefault("IP_COMM_STATUS", "NO COMMUNICATION")

    ip_field = device["ip_field"]
    ip = device.get(ip_field)
    device_type = device["device_type"]

    if ip:
        if device_type != "gridsmart":
            state_new = ping_ip(ip, timeout)

        else:
            #  Gridsmart default port is 8902
            state_new = open_socket(ip, timeout)

    else:
        #  set to NO COMMUNICATION if no IP address
        state_new = "NO COMMUNICATION"

    if state_previous != state_new:

        device["IP_COMM_STATUS"] = state_new
        #  timestamps into and out of knack are naive
        #  so we create a naive local timestamp by replacing
        #  a localized timestamp's timezone info with UTC
        device["COMM_STATUS_DATETIME_UTC"] = datautil.local_timestamp()

        return device

    else:
        return None
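
The comment above describes the behavior of `datautil.local_timestamp()`: a "naive" local timestamp produced by re-labeling local time as UTC. A hedged sketch of such a helper, assuming arrow >= 1.0 (not the library's actual implementation):

import arrow

def local_timestamp():
    #  hypothetical sketch: take the current local time, re-label it as UTC
    #  so the value is effectively naive, and return epoch milliseconds
    return int(arrow.now().replace(tzinfo="UTC").timestamp() * 1000)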
Code example #3
def apply_modified_date(dicts, key="MODIFIED_DATE", offset=600000):
    #  set the record modified date as a "local" timestamp (knack-friendly)
    #  also apply a forward offset to ensure modified records are picked up
    #  by the publishing scripts, which check for recently modified data
    for record in dicts:
        record[key] = datautil.local_timestamp() + offset

    return dicts
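
A quick usage sketch (the record ids and the resulting timestamp are illustrative only):

records = [{"id": "abc123"}, {"id": "def456"}]
records = apply_modified_date(records)

#  each record now carries a MODIFIED_DATE roughly 10 minutes (600000 ms)
#  ahead of "now", e.g. {"id": "abc123", "MODIFIED_DATE": 1700000600000}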
Code example #4
def prepare_signals_payloads(payloads, signals_records):
    """Summary
    Prepare signal payloads by change data type and map new signal information to
    the signal object. 
    Args:
        payloads (TYPE): the raw payload in list of dictionaries format 
    
    Returns:
        TYPE: a list of dictionary
    """

    signals_records_df = pd.DataFrame.from_dict(signals_records.data)

    payloads = pd.DataFrame.from_dict(payloads)

    signals_records_df["SIGNAL_ID"] = signals_records_df["SIGNAL_ID"].astype(
        "str")

    payloads["SIGNAL_ID"] = payloads["SIGNAL_ID"].astype("str")

    signals_payloads = payloads.merge(right=signals_records_df,
                                      left_on="SIGNAL_ID",
                                      right_on="SIGNAL_ID",
                                      how="left")

    signals_payloads["MODIFIED_DATE"] = datautil.local_timestamp()

    signals_payloads = signals_payloads[["SIGNAL_ID", "MODIFIED_DATE", "id"]]

    signals_payloads = signals_payloads.to_dict(orient="records")

    return signals_payloads
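
The `astype("str")` casts above guard against a key-dtype mismatch between the two frames. A standalone illustration (not from the source) of why the cast matters:

import pandas as pd

left = pd.DataFrame({"SIGNAL_ID": [1, 2]})                         # int64 key
right = pd.DataFrame({"SIGNAL_ID": ["1", "2"], "id": ["a", "b"]})  # object key

#  depending on the pandas version, merging on mismatched key dtypes either
#  raises a ValueError or silently matches nothing, so cast both sides first
left["SIGNAL_ID"] = left["SIGNAL_ID"].astype("str")

print(left.merge(right, on="SIGNAL_ID", how="left"))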
Code example #5
def main():

    args = cli_args()
    app_name = args.app_name
    last_run_date = args.last_run_date

    knack_creds = KNACK_CREDENTIALS[app_name]

    if not last_run_date:
        # replace the dataset by setting the last run date to a long, long
        # time ago; the arrow package needs a specific date and time format
        last_run_date = "1970-01-01"

    filters = knackutil.date_filter_on_or_after(
        last_run_date, config["modified_date_field_id"])
    """
    We include a filter in our API call to limit to records which have
    been modified on or after the date the last time this job ran
    successfully. The Knack API supports filter requests by date only
    (not time), so we must apply an additional filter on the data after
    we receive it.
    """
    kn = knackpy.Knack(
        scene=config["scene"],
        view=config["view"],
        ref_obj=config["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        filters=filters,
    )

    if kn.data:
        # Filter data for records that have been modified after the last
        # job run (see comment above)
        last_run_timestamp = int(arrow.get(last_run_date).timestamp() * 1000)
        kn.data = filter_by_date(kn.data, config["modified_date_field"],
                                 last_run_timestamp)

    payload = []
    unmatched_segments = []

    if not kn.data:
        # no records to update
        return 0

    for street_segment in kn.data:

        token = agolutil.get_token(AGOL_CREDENTIALS)
        features = agolutil.query_atx_street(
            street_segment[config["primary_key"]], token)

        if features.get("features"):
            if len(features["features"]) > 0:
                segment_data = features["features"][0]["attributes"]
            else:
                unmatched_segments.append(
                    street_segment[config["primary_key"]])
                continue
        else:
            unmatched_segments.append(street_segment[config["primary_key"]])
            continue

        #  we don't want to compare modified dates
        #  because we don't keep that value in sync with the source data on AGOL
        #  because we use our own modified date set in the data tracker
        segment_data.pop(config["modified_date_field"])
        street_segment.pop(config["modified_date_field"])

        #  compare new data (segment data) against old (street_segment)
        #  we only want to upload values that have changed
        if not are_equal(street_segment, segment_data):
            segment_data["id"] = street_segment["id"]
            segment_data[
                config["modified_date_field"]] = datautil.local_timestamp()
            payload.append(segment_data)

    payload = datautil.reduce_to_keys(payload, kn.fieldnames)
    payload = datautil.replace_keys(payload, kn.field_map)

    update_response = []
    count = 0

    for record in payload:
        count += 1

        print("updating record {} of {}".format(count, len(payload)))

        #  remove whitespace from janky Esri attributes
        for field in record:
            if isinstance(record[field], str):
                record[field] = record[field].strip()

        res = knackpy.record(
            record,
            obj_key=config["ref_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

        update_response.append(res)

    if len(unmatched_segments) > 0:
        error_text = "Unmatched street segments: {}".format(", ".join(
            str(x) for x in unmatched_segments))

        raise Exception(error_text)

    return count
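
`filter_by_date` is referenced but not shown. Given the comment in the code, a plausible sketch (an assumption, not the actual helper) keeps only records modified at or after the cutoff:

def filter_by_date(data, date_field, compare_timestamp):
    #  hypothetical sketch: both values are epoch milliseconds
    return [
        record for record in data if record[date_field] >= compare_timestamp
    ]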
Code example #6
def main():

    args = cli_args()

    app_name = args.app_name

    eval_type = args.eval_type

    obj = cfg["eval_types"][eval_type]

    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        obj=obj,
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    data = datautil.filter_by_val(kn.data, cfg["status_key"],
                                  cfg["status_vals"])

    #  new records will not have a score key. add it here.
    data = datautil.add_missing_keys(data, {cfg["score_key"]: 0})

    #  create a ranking month_year field
    data = datautil.concat_key_values(data, cfg["concat_keys"],
                                      cfg["group_key"], "_")

    knack_data_exclude = [
        record for record in data if record["EXCLUDE_FROM_RANKING"] == True
    ]
    knack_data_include = [
        record for record in data if record["EXCLUDE_FROM_RANKING"] == False
    ]

    #  create list of scores grouped by group key
    score_dict = {}

    for row in knack_data_include:
        key = row[cfg["group_key"]]
        score = int(row[cfg["score_key"]])
        score_dict.setdefault(key, []).append(score)

    #  sort each group's scores in descending order
    for key in score_dict:
        score_dict[key].sort(reverse=True)

    #  get score rank and append record to payload
    payload = []

    for record in knack_data_include:
        score = int(record[cfg["score_key"]])
        key = record[cfg["group_key"]]
        #  add one because list indices start at 0
        rank = datautil.min_index(score_dict[key], score) + 1

        if cfg["rank_key"] in record:
            if record[cfg["rank_key"]] != rank:
                record[cfg["rank_key"]] = rank
                record[cfg["modified_date_key"]] = datautil.local_timestamp()
                payload.append(record)

        else:
            record[cfg["rank_key"]] = rank

    #  assign null ranks to records flagged as exclude from ranking
    for record in knack_data_exclude:

        if cfg["rank_key"] in record:
            #  update excluded records if rank found
            if record[cfg["rank_key"]] != "":
                record[cfg["rank_key"]] = ""
                record[cfg["modified_date_key"]] = datautil.local_timestamp()
                payload.append(record)

    if payload:
        payload = datautil.reduce_to_keys(
            payload, [cfg["rank_key"], "id", cfg["modified_date_key"]])

        payload = datautil.replace_keys(payload, kn.field_map)

        update_response = []

        for count, record in enumerate(payload, start=1):
            print("Updating record {} of {}".format(count, len(payload)))

            res = knackpy.record(
                record,
                obj_key=obj,
                app_id=knack_creds["app_id"],
                api_key=knack_creds["api_key"],
                method="update",
            )

            update_response.append(res)

        return len(payload)

    else:
        return 0
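
`datautil.min_index` drives the ranking above: each group's scores are sorted descending, so a score's rank is the index of its first occurrence in that list. A minimal sketch under that assumption:

def min_index(sorted_scores, score):
    #  hypothetical sketch: index of the first occurrence, so tied scores
    #  share the same rank
    return sorted_scores.index(score)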
Code example #7
def main():

    args = cli_args()

    app_name = args.app_name

    update_fields = [
        field for layer in cfg["layers"] for field in layer["updateFields"]
    ]

    kn = knackpy.Knack(
        obj=cfg["obj"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
        filters=cfg["filters"],
        timeout=30,
    )

    unmatched_locations = []

    if not kn.data:
        return 0

    """
    Remove "update fields" from each record. These are re-appended via
    spatial lookup, so the fieldnames must match those of the source
    dataset or be mapped in the field map config dict.
    """
    keep_fields = [
        field for field in kn.fieldnames if field not in update_fields
    ]
    kn.data = datautil.reduce_to_keys(kn.data, keep_fields)

    for location in kn.data:

        point = [location["LOCATION_longitude"], location["LOCATION_latitude"]]

        for layer in cfg["layers"]:
            layer["geometry"] = point
            field_map = cfg["field_maps"].get(layer["service_name"])
            params = get_params(layer)

            try:
                res = agolutil.point_in_poly(layer["service_name"],
                                             layer["layer_id"], params)

                if res.get("error"):
                    raise Exception(str(res))

                if res.get("features"):
                    location = join_features_to_record(res["features"], layer,
                                                       location)

                    if field_map:
                        location = map_fields(location, field_map)

                    continue

                if "service_name_secondary" in layer:
                    res = agolutil.point_in_poly(
                        layer["service_name_secondary"], layer["layer_id"],
                        params)

                    if len(res["features"]) > 0:
                        location = join_features_to_record(
                            res["features"], layer, location)

                        if field_map:
                            location = map_fields(location, field_map)
                            continue

                #  no intersecting features found: null the corresponding
                #  fields on the location record to overwrite existing data
                for field in layer["updateFields"]:
                    location[field] = ""

                continue

            except Exception:
                unmatched_locations.append(location)
                continue

        location["UPDATE_PROCESSED"] = True

        location["MODIFIED_DATE"] = datautil.local_timestamp()

        location = datautil.reduce_to_keys(
            [location],
            update_fields + ["id", "UPDATE_PROCESSED", "MODIFIED_DATE"])
        location = datautil.replace_keys(location, kn.field_map)

        res = knackpy.record(
            location[0],
            obj_key=cfg["obj"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            method="update",
        )

    if len(unmatched_locations) > 0:
        error_text = "Location Point/Poly Match Failure(s): {}".format(
            ", ".join(str(x) for x in unmatched_locations))
        raise Exception(error_text)

    else:
        return len(kn.data)
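
`map_fields` is not shown; from its usage it appears to rename the joined attribute keys according to the layer's field map. A hedged sketch:

def map_fields(record, field_map):
    #  hypothetical sketch: rename keys per {source_field: target_field},
    #  passing through any key that has no mapping
    return {field_map.get(key, key): value for key, value in record.items()}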
Code example #8
def replace_pm_records(postgre_records, knack_pm_records, signal_records,
                       knack_technicians, app_name):
    """Summary
    
    Args:
        postgre_records (TYPE): Description
        knack_pm_records (TYPE): Description
        signal_records (TYPE): Description
    
    Returns:
        TYPE: Description
    """
    postgre_records_df = pd.DataFrame.from_dict(postgre_records)
    knack_pm_records_df = pd.DataFrame.from_dict(knack_pm_records.data)

    pm_insert_payloads = postgre_records_df[
        ~postgre_records_df["fulcrum_id"].
        isin(knack_pm_records_df["FULCRUM_ID"])].copy()

    pm_update_payloads = postgre_records_df[
        postgre_records_df["fulcrum_id"].isin(
            knack_pm_records_df["FULCRUM_ID"])].copy()

    pm_insert_payloads["MODIFIED_DATE"] = datautil.local_timestamp()
    pm_update_payloads["MODIFIED_DATE"] = datautil.local_timestamp()

    pm_insert_payloads = map_knack_id_signal_id(signal_records,
                                                pm_insert_payloads)
    pm_update_payloads = map_knack_id_signal_id(signal_records,
                                                pm_update_payloads)

    knack_pm_records_id_df = knack_pm_records_df[["FULCRUM_ID", "id"]]
    pm_update_payloads = pm_update_payloads.merge(
        right=knack_pm_records_id_df,
        left_on="fulcrum_id",
        right_on="FULCRUM_ID",
        how="left",
    )

    pm_insert_payloads["PM_STATUS"] = "COMPLETED"
    pm_update_payloads["PM_STATUS"] = "COMPLETED"

    pm_insert_payloads.columns = map(str.upper, pm_insert_payloads.columns)
    pm_update_payloads.columns = map(str.upper, pm_update_payloads.columns)

    pm_update_payloads = pm_update_payloads.rename(columns={"ID": "id"})

    pm_insert_payloads = pm_insert_payloads.to_dict(orient="records")
    pm_update_payloads = pm_update_payloads.to_dict(orient="records")

    if pm_insert_payloads:
        pm_insert_payloads = map_technicians_id_pm_payloads(
            pm_insert_payloads, knack_technicians)

    pm_update_payloads = map_technicians_id_pm_payloads(
        pm_update_payloads, knack_technicians)

    # update signal modified time in replace method

    #  deep-copy so stripping "id" below does not mutate the insert/update
    #  payloads, which still need their record ids for the API calls
    pm_replace_payloads = copy.deepcopy(pm_update_payloads + pm_insert_payloads)

    for d in pm_replace_payloads:
        d.pop("id", None)

    signals_payloads = prepare_signals_payloads(pm_replace_payloads,
                                                signal_records)
    signals_payloads = datautil.replace_keys(signals_payloads,
                                             signal_records.field_map)
    signal_results = update_signals_modified_time(signals_payloads, app_name)

    # end update signal modified time in replace method

    pm_insert_payloads = datautil.replace_keys(pm_insert_payloads,
                                               knack_pm_records.field_map)

    pm_update_payloads = datautil.replace_keys(pm_update_payloads,
                                               knack_pm_records.field_map)

    for payload in pm_insert_payloads:
        print("inserting", payload)

        insert_res = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="create",
        )

    for payload in pm_update_payloads:
        print("updating", payload)

        update_res = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="update",
        )

    return len(pm_insert_payloads) + len(pm_update_payloads)
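
A design note on the `copy.deepcopy` above: the combined list is deep-copied before "id" is stripped because the update payloads are posted to Knack later in the function and still need their record ids. A standalone illustration of the aliasing hazard:

import copy

payloads = [{"id": "abc", "SIGNAL_ID": "1"}]

shallow = list(payloads)        # new list, but the same dict objects
shallow[0].pop("id")
print(payloads)                 # [{'SIGNAL_ID': '1'}] -- original mutated

payloads = [{"id": "abc", "SIGNAL_ID": "1"}]
deep = copy.deepcopy(payloads)  # fully independent copies
deep[0].pop("id")
print(payloads)                 # [{'id': 'abc', 'SIGNAL_ID': '1'}] -- intact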