def main():
    """Summary
    
    Returns:
        TYPE: Description
    """

    args = cli_args()
    app_name = args.app_name

    pgrest_records = get_postgre_records()
    knack_records = get_knack_pm_records(app_name)
    signals_records = get_signals_records(app_name)
    knack_technicians_records = get_technicians_records(app_name)

    last_run_date = get_last_run(args, knack_records)

    if args.replace:
        signal_results = replace_pm_records(
            pgrest_records,
            knack_records,
            signals_records,
            knack_technicians_records,
            app_name,
        )

    else:
        pm_payloads = prepare_pm_payloads(
            last_run_date,
            pgrest_records,
            signals_records,
            knack_records,
            knack_technicians_records,
        )

        if not pm_payloads:
            return 0
        else:
            signal_payloads = prepare_signals_payloads(pm_payloads,
                                                       signals_records)

            pm_payloads = datautil.replace_keys(pm_payloads,
                                                knack_records.field_map)

            signal_payloads = datautil.replace_keys(signal_payloads,
                                                    signals_records.field_map)

            signal_results = update_signals_modified_time(
                signal_payloads, app_name)

            results = insert_pms(pm_payloads, app_name)

            results = len(results)

    return signal_results
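Most of these entry points lean on a shared cli_args() helper that is not shown here. A minimal argparse sketch follows; the flag names are inferred from the attributes the examples read (args.app_name, args.replace, args.last_run_date), and each script presumably defines its own variant.

import argparse

def cli_args():
    # hypothetical reconstruction of the helper these scripts import
    parser = argparse.ArgumentParser()
    parser.add_argument("app_name", help="name of the Knack application")
    parser.add_argument("--replace", action="store_true",
                        help="replace all records instead of processing deltas")
    parser.add_argument("--last-run-date", dest="last_run_date", default=None,
                        help="date (YYYY-MM-DD) of the last successful run")
    return parser.parse_args()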
Example #2
def main():

    args = cli_args()
    app_name = args.app_name

    creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        scene=config["scene"],
        view=config["view"],
        ref_obj=[config["obj"]],
        app_id=creds["app_id"],
        api_key=creds["api_key"],
    )

    calendar = get_calendar()

    kn.data = handle_records(kn.data, config["start_key"], config["end_key"],
                             config["elapsed_key"], calendar)

    # logger.info( '{} Records to Update'.format(len(kn.data) ))

    if kn.data:
        kn.data = datautil.reduce_to_keys(kn.data, config["update_fields"])
        kn.data = datautil.replace_keys(kn.data, kn.field_map)

        for i, record in enumerate(kn.data, 1):
            print("Update record {} of {}".format(i, len(kn.data)))
            update_record(record, config["obj"], creds)
    return len(kn.data)
def main():

    args = cli_args()
    app_name = args.app_name

    CONFIG = cfg["task_orders"]
    KNACK_CREDS = KNACK_CREDENTIALS[app_name]

    html = get_html(TASK_ORDERS_ENDPOINT)
    data = handle_html(html)
    rows = handle_rows(data)

    kn = knackpy.Knack(
        scene=CONFIG["scene"],
        view=CONFIG["view"],
        ref_obj=CONFIG["ref_obj"],
        app_id=KNACK_CREDS["app_id"],
        api_key=KNACK_CREDS["api_key"],
    )

    new_rows = compare(rows, kn.data)

    new_rows = datautil.replace_keys(new_rows, kn.field_map)

    for record in new_rows:

        res = knackpy.record(
            record,
            obj_key=CONFIG["ref_obj"][0],
            app_id=KNACK_CREDS["app_id"],
            api_key=KNACK_CREDS["api_key"],
            method="create",
        )

    return len(new_rows)
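compare() is not shown; from its usage it presumably keeps only the scraped rows that do not already exist in Knack. A sketch, assuming a hypothetical TASK_ORDER key present in both datasets:

def compare(rows, existing_data, key="TASK_ORDER"):
    # hypothetical helper: keep scraped rows whose key value is not
    # already present in the existing Knack records
    existing_ids = {record.get(key) for record in existing_data}
    return [row for row in rows if row.get(key) not in existing_ids]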
Example #4
def main():
    """Summary
    
    Args:
        jobs (TYPE): Description
        **kwargs: Description
    
    Returns:
        TYPE: Descriptio
    """
    script_name = os.path.basename(__file__).replace(".py", "")

    # job_agol = jobutil.Job(
    #     name=f"{script_name}_agol",
    #     url=JOB_DB_API_URL,
    #     source="dropbox",
    #     destination="agol",
    #     auth=JOB_DB_API_TOKEN,
    # )

    # job_agol.start()

    # job_socrata = jobutil.Job(
    #     name=f"{script_name}_socrata",
    #     url=JOB_DB_API_URL,
    #     source="dropbox",
    #     destination="socrata",
    #     auth=JOB_DB_API_TOKEN,
    # )

    # job_socrata.start()

    data = get_data(dropbox_path, DROPBOX_BCYCLE_TOKEN)
    data = handle_data(data)
    data = datautil.upper_case_keys(data)

    data = datautil.replace_keys(data, {"STATUS": "KIOSK_STATUS"})

    layer = agolutil.get_item(auth=AGOL_CREDENTIALS, service_id=service_id)

    res = layer.manager.truncate()
    agolutil.handle_response(res)

    adds = agolutil.feature_collection(data)

    res = layer.edit_features(adds=adds)
    agolutil.handle_response(res)

    socratautil.Soda(
        auth=SOCRATA_CREDENTIALS,
        records=data,
        resource=socrata_resource_id,
        lat_field="latitude",
        lon_field="longitude",
        location_field="location",
        replace=True,
    )

    return len(data)
Example #5
def main():
    """Summary
    
    Args:
        job (TYPE): Description
        **kwargs: Description
    
    Returns:
        TYPE: Description
    """

    args = cli_args()

    device_type = args.device_type
    app_name = args.app_name

    primary_key = cfg[device_type]["primary_key"]
    status_field = cfg[device_type]["status_field"]
    status_filters = cfg[device_type]["status_filter_comm_status"]

    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        obj=cfg[device_type]["obj"],
        scene=cfg[device_type]["scene"],
        view=cfg[device_type]["view"],
        ref_obj=cfg[device_type]["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    kn_log = get_log_data(knack_creds)

    stats = defaultdict(int)

    stats["DEVICE_TYPE"] = device_type

    for device in kn.data:
        #  count stats only for devices that are TURNED_ON
        if device[status_field] in status_filters:
            status = device["IP_COMM_STATUS"]
            stats[status] += 1

    payload = build_payload(stats, args.device_type)
    payload = datautil.replace_keys([payload], kn_log.field_map)

    res = knackpy.record(
        payload[0],
        obj_key=LOG_OBJ,
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        method="create",
    )

    return len(payload)
def main():

    args = cli_args()

    device_type = args.device_type
    app_name = args.app_name

    primary_key = cfg[device_type]["primary_key"]
    ip_field = cfg[device_type]["ip_field"]

    global timeout
    timeout = cfg[device_type].get("timeout") or 3

    knack_creds = KNACK_CREDENTIALS[app_name]

    out_fields_upload = [
        "id",
        ip_field,
        "IP_COMM_STATUS",
        "COMM_STATUS_DATETIME_UTC",
        "MODIFIED_DATE",
        "MODIFIED_BY",
    ]

    #  get device data from Knack application
    kn = knackpy.Knack(
        obj=cfg[device_type]["obj"],
        scene=cfg[device_type]["scene"],
        view=cfg[device_type]["view"],
        ref_obj=cfg[device_type]["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    #  append config data to each item to be processed
    #  this is a hacky way to pass args to each thread
    for i in kn.data:
        i["ip_field"] = ip_field
        i["device_type"] = device_type

    pool = ThreadPool(8)

    results = pool.map(get_status, kn.data)

    for result in results:
        """
        Result is None if status has not changed. Otherwise result
        is device record dict
        """
        if result:
            #  format for upload to Knack
            result = [result]
            result = apply_modified_date(result)
            result = apply_modified_by(result)
            result = datautil.reduce_to_keys(result, out_fields_upload)
            result = datautil.replace_keys(result, kn.field_map)

            res = knackpy.record(
                result[0],
                #  assumes the record object is included in config ref_obj
                #  and is the first element of the array
                obj_key=cfg[device_type]["ref_obj"][0],
                app_id=knack_creds["app_id"],
                api_key=knack_creds["api_key"],
                method="update",
            )

    # close the pool and wait for the work to finish
    pool.close()
    pool.join()

    return len([record for record in results if record])
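The config-injection loop above is flagged as hacky because it mutates every record just to smuggle ip_field and device_type into get_status. A cleaner pattern, assuming get_status could be refactored to accept those values as keyword arguments, is to bind them once with functools.partial:

from functools import partial

# bind the per-run config once instead of copying it onto every record;
# assumes get_status(device, ip_field=..., device_type=...) after refactoring
worker = partial(get_status, ip_field=ip_field, device_type=device_type)
results = pool.map(worker, kn.data)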
def main():
    args = cli_args()

    app_name = args.app_name

    records_processed = 0

    record_types = ["objects", "fields"]

    app_data = get_app_data(KNACK_CREDENTIALS[app_name]["app_id"])

    for record_type in record_types:

        data_new = app_data["objects"]

        if record_type == "fields":
            #  we get latest object data within this for loop
            #  because it may change when objects are processed
            data_existing_objects = get_existing_data(
                cfg["objects"]["obj"],
                KNACK_CREDENTIALS[app_name]["app_id"],
                KNACK_CREDENTIALS[app_name]["api_key"],
            )

            obj_row_id_lookup = get_object_row_ids(
                data_existing_objects.data_raw, cfg["objects"]["id_field_key"])

            data_new = parse_fields(
                data_new, cfg[record_type]["object_connection_field"],
                obj_row_id_lookup)

        data_existing = get_existing_data(
            cfg[record_type]["obj"],
            KNACK_CREDENTIALS[app_name]["app_id"],
            KNACK_CREDENTIALS[app_name]["api_key"],
        )

        payload = evaluate_ids(data_new, data_existing.data_raw,
                               cfg[record_type]["id_field_key"])

        for method in payload.keys():

            if record_type == "fields":

                data_existing.data_raw = format_connections(
                    data_existing.data_raw,
                    cfg[record_type]["object_connection_field"])

            payload[method] = convert_bools_nones_arrays(payload[method])
            data_existing.data_raw = convert_bools_nones_arrays(
                data_existing.data_raw)

            payload[method] = datautil.stringify_key_values(
                payload[method], cfg[record_type]["stringify_keys"])

            payload[method] = datautil.replace_keys(payload[method],
                                                    data_existing.field_map)

            if method == "update":
                # verify if data has changed
                changed = []

                for rec_new in payload[method]:
                    rec_old = [
                        record for record in data_existing.data_raw
                        if record["id"] == rec_new["id"]
                    ][0]

                    # identify fields whose contents don't match
                    diff = [
                        k for k in rec_new.keys() if rec_old[k] != rec_new[k]
                    ]

                    if diff:
                        changed.append(rec_new)

                payload[method] = changed

            update_records(payload[method], cfg[record_type]["obj"], method,
                           app_name)

        records_processed += sum([
            len(payload["create"]),
            len(payload["update"]),
            len(payload["delete"])
        ])

    return records_processed
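evaluate_ids() is not shown; from its usage it appears to diff the fresh metadata against existing records and bucket them into create/update/delete payloads by a shared id field. A hypothetical reconstruction, assuming both datasets expose the id under the same key:

def evaluate_ids(data_new, data_existing, id_key):
    # hypothetical: bucket records by comparing the shared id field
    existing_by_id = {str(rec[id_key]): rec for rec in data_existing}
    new_ids = {str(rec[id_key]) for rec in data_new}

    payload = {"create": [], "update": [], "delete": []}

    for rec in data_new:
        match = existing_by_id.get(str(rec[id_key]))
        if match:
            rec["id"] = match["id"]  # carry the Knack row id for the update
            payload["update"].append(rec)
        else:
            payload["create"].append(rec)

    payload["delete"] = [
        rec for key, rec in existing_by_id.items() if key not in new_ids
    ]
    return payload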
def replace_pm_records(postgre_records, knack_pm_records, signal_records,
                       knack_technicians, app_name):
    """Summary
    
    Args:
        postgre_records (TYPE): Description
        knack_pm_records (TYPE): Description
        signal_records (TYPE): Description
    
    Returns:
        TYPE: Description
    """

    postgre_records_df = pd.DataFrame.from_dict(postgre_records)
    knack_pm_records_df = pd.DataFrame.from_dict(knack_pm_records.data)

    pm_insert_payloads = postgre_records_df[
        ~postgre_records_df["fulcrum_id"].
        isin(knack_pm_records_df["FULCRUM_ID"])].copy()

    pm_update_payloads = postgre_records_df[
        postgre_records_df["fulcrum_id"].isin(
            knack_pm_records_df["FULCRUM_ID"])].copy()

    pm_insert_payloads["MODIFIED_DATE"] = datautil.local_timestamp()
    pm_update_payloads["MODIFIED_DATE"] = datautil.local_timestamp()

    pm_insert_payloads = map_knack_id_signal_id(signal_records,
                                                pm_insert_payloads)
    pm_update_payloads = map_knack_id_signal_id(signal_records,
                                                pm_update_payloads)

    knack_pm_records_id_df = knack_pm_records_df[["FULCRUM_ID", "id"]]
    pm_update_payloads = pm_update_payloads.merge(
        right=knack_pm_records_id_df,
        left_on="fulcrum_id",
        right_on="FULCRUM_ID",
        how="left",
    )

    pm_insert_payloads["PM_STATUS"] = "COMPLETED"
    pm_update_payloads["PM_STATUS"] = "COMPLETED"

    pm_insert_payloads.columns = map(str.upper, pm_insert_payloads.columns)
    pm_update_payloads.columns = map(str.upper, pm_update_payloads.columns)

    pm_update_payloads = pm_update_payloads.rename(columns={"ID": "id"})

    pm_insert_payloads = pm_insert_payloads.to_dict(orient="records")
    pm_update_payloads = pm_update_payloads.to_dict(orient="records")

    if len(pm_insert_payloads) != 0:
        pm_insert_payloads = map_technicians_id_pm_payloads(
            pm_insert_payloads, knack_technicians)

    pm_update_payloads = map_technicians_id_pm_payloads(
        pm_update_payloads, knack_technicians)

    # update signal modified time in replace method

    pm_replace_payloads_shallow = pm_update_payloads + pm_insert_payloads
    pm_replace_payloads = copy.deepcopy(pm_replace_payloads_shallow)

    for d in pm_replace_payloads:
        if "id" in d:
            del d["id"]

    signal_payloads = prepare_signals_payloads(pm_replace_payloads,
                                               signal_records)
    signals_payloads = datautil.replace_keys(signal_payloads,
                                             signal_records.field_map)
    signal_results = update_signals_modified_time(signals_payloads, app_name)

    # end update signal modified time in replace method

    pm_insert_payloads = datautil.replace_keys(pm_insert_payloads,
                                               knack_pm_records.field_map)

    pm_update_payloads = datautil.replace_keys(pm_update_payloads,
                                               knack_pm_records.field_map)

    for payload in pm_insert_payloads:
        print("inserting", payload)

        insert_res = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="create",
        )

    for payload in pm_update_payloads:
        print("updating", payload)

        update_res = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="update",
        )

    return len(pm_insert_payloads) + len(pm_update_payloads)
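The deepcopy above is load-bearing: pm_update_payloads + pm_insert_payloads builds a new list, but that list still references the same dicts, so deleting "id" without the copy would also strip the ids the update loop needs. In miniature:

import copy

updates = [{"id": "abc", "PM_STATUS": "COMPLETED"}]
shallow = updates + []           # new list, but the same dict objects
del shallow[0]["id"]             # mutates the dict inside updates too
assert "id" not in updates[0]

updates = [{"id": "abc", "PM_STATUS": "COMPLETED"}]
deep = copy.deepcopy(updates)    # independent copies of each dict
del deep[0]["id"]
assert "id" in updates[0]        # the originals keep their ids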
def main():

    args = cli_args()
    app_name = args.app_name

    knack_creds = KNACK_CREDENTIALS[app_name]

    knack_data_pm = knackpy.Knack(
        view=cfg["params_pm"]["view"],
        scene=cfg["params_pm"]["scene"],
        ref_obj=cfg["params_pm"]["field_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    data_pm = []

    if not knack_data_pm.data:
        return 0

    for pm in knack_data_pm.data:
        #  verify there is data that needs to be processed
        #  the source view is also filtered by these conditions
        #  so this is a redundant check, to be safe
        if (not pm["COPIED_TO_SECONDARY"] and pm["PM_STATUS"] == "COMPLETED"
                and int(pm["SECONDARY_SIGNALS_COUNT"]) > 0):

            data_pm.append(pm)

    if not data_pm:
        return 0

    #  get signal data
    #  TODO: filter for signals and their secondaries based on PM data
    #  this would reduce size of request
    knack_data_signals = knackpy.Knack(
        view=cfg["params_signal"]["view"],
        scene=cfg["params_signal"]["scene"],
        ref_obj=cfg["params_signal"]["field_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    primary_signals_with_children = get_prim_signals(knack_data_signals.data)

    pm_payload_insert = []
    pm_payload_update = []
    signals_update = []

    for pm in data_pm:
        """
        Check all preventative maintenance records at signals with secondary signals
        Copy pm record to secondary signal if needed
        """
        if "SIGNAL" in pm:

            primary_signal_id = pm["SIGNAL"][0]["id"]

            if primary_signal_id in primary_signals_with_children:
                #  update original pm record with copied to secondary = True
                pm_payload_update.append({
                    "id": pm["id"],
                    "COPIED_TO_SECONDARY": True
                })

                for secondary in primary_signals_with_children[
                        primary_signal_id]:
                    #  create new pm record for secondary signal(s)
                    new_record = copy_pm_record(secondary["id"], pm,
                                                cfg["copy_fields"])

                    signals_update.append({"id": secondary["id"]})
                    pm_payload_insert.append(new_record)

    # update modified date of secondary signals which have a new PM
    signals_payload_update = apply_modified_date(signals_update)

    signals_payload_update = datautil.replace_keys(
        signals_payload_update, knack_data_signals.field_map)

    pm_payload_update = datautil.replace_keys(pm_payload_update,
                                              knack_data_pm.field_map)

    pm_payload_insert = datautil.replace_keys(pm_payload_insert,
                                              knack_data_pm.field_map)

    for record in signals_payload_update:
        res = knackpy.record(
            record,
            obj_key=cfg["params_signal"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    for record in pm_payload_update:
        res = knackpy.record(
            record,
            obj_key=cfg["params_pm"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    for record in pm_payload_insert:
        res = knackpy.record(
            record,
            obj_key=cfg["params_pm"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="create",
        )

    return len(pm_payload_insert) + len(pm_payload_update) + len(
        signals_payload_update)
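get_prim_signals() is not shown; from its usage it maps each primary signal's record id to its connected secondary signals. A sketch, assuming a hypothetical SECONDARY_SIGNALS connection field holding raw {"id": ...} dicts (raw_connections=True is set on the request):

def get_prim_signals(signals):
    # hypothetical: primary signal record id -> list of secondary signals
    lookup = {}
    for signal in signals:
        secondaries = signal.get("SECONDARY_SIGNALS")
        if secondaries:
            lookup[signal["id"]] = secondaries
    return lookup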
def main():
    """Summary
    
    Args:
        job (TYPE): Description
        **kwargs: Description
    
    Returns:
        TYPE: Description
    """
    kn = knackpy.Knack(
        scene=KITS_CONFIG.get("knack_scene"),
        view=KITS_CONFIG.get("knack_view"),
        ref_obj=["object_53", "object_11"],
        app_id=KNACK_CREDENTIALS[KITS_CONFIG.get("app_name")]["app_id"],
        api_key=KNACK_CREDENTIALS[KITS_CONFIG.get("app_name")]["api_key"],
    )

    field_names = kn.fieldnames
    kn.data = datautil.filter_by_key_exists(
        kn.data, KITS_CONFIG.get("primary_key_knack"))
    fieldmap_knack_kits = {
        fieldmap[x]["knack_id"]: x
        for x in fieldmap.keys() if fieldmap[x]["knack_id"] is not None
    }

    knack_data_filtered = kn.data

    for key in KITS_CONFIG["filters"].keys():
        #  chain the filters so every key is applied, not just the last one
        knack_data_filtered = datautil.filter_by_key_exists(
            knack_data_filtered, key)
        knack_data_filtered = datautil.filter_by_val(
            knack_data_filtered, key, KITS_CONFIG["filters"][key])

    knack_data_repl = datautil.replace_keys(knack_data_filtered,
                                            fieldmap_knack_kits)

    knack_data_repl = datautil.reduce_to_keys(knack_data_repl,
                                              fieldmap_knack_kits.values())

    knack_data_repl = setDefaults(knack_data_repl, fieldmap)
    knack_data_repl = create_cam_comment(knack_data_repl)

    camera_query = create_camera_query(KITS_CONFIG.get("kits_table_camera"))
    kits_data = kitsutil.data_as_dict(KITS_CREDENTIALS, camera_query)

    kits_data_conv = convert_data(kits_data, fieldmap)

    compare_keys = [
        key for key in fieldmap.keys() if fieldmap[key]["detect_changes"]
    ]
    data_cd = datautil.detect_changes(kits_data_conv,
                                      knack_data_repl,
                                      "CAMNUMBER",
                                      keys=compare_keys)

    if data_cd["new"]:
        # logger.info('new: {}'.format( len(data_cd['new']) ))

        max_cam_id = get_max_id(KITS_CONFIG.get("kits_table_camera"), "CAMID")
        data_cd["new"] = map_bools(data_cd["new"])

        for record in data_cd["new"]:
            time.sleep(
                1
            )  #  connection will fail if queries are pushed too frequently

            max_cam_id += 1
            record["CAMID"] = max_cam_id
            query_camera = create_insert_query(
                KITS_CONFIG.get("kits_table_camera"), record)

            record_geom = {}
            geometry = "geometry::Point({}, {}, 4326)".format(
                record["LONGITUDE"], record["LATITUDE"])
            record_geom["GeometryItem"] = geometry
            record_geom["CamID"] = max_cam_id
            query_geom = create_insert_query(
                KITS_CONFIG.get("kits_table_geom"), record_geom)
            query_geom = query_geom.replace(
                "'", "")  #  strip single quotes from geometry value

            record_web = {}
            record_web["WebType"] = 2
            record_web["WebComments"] = ""
            record_web["WebID"] = max_cam_id
            record_web["WebURL"] = "http://{}".format(record["VIDEOIP"])
            query_web = create_insert_query(KITS_CONFIG.get("kits_table_web"),
                                            record_web)

            insert_results = kitsutil.insert_multi_table(
                KITS_CREDENTIALS, [query_camera, query_geom, query_web])

    if data_cd["change"]:

        data_cd["change"] = map_bools(data_cd["change"])

        # logger.info('change: {}'.format( len(data_cd['change']) ))

        for record in data_cd["change"]:
            time.sleep(
                1
            )  #  connection will fail if queried are pushed too frequently
            # fetch camid field, which relates camera, geometry, and webconfig table records
            match_query = create_match_query(
                KITS_CONFIG.get("kits_table_camera"),
                "CAMID",
                "CAMNUMBER",
                record["CAMNUMBER"],
            )
            match_id = kitsutil.data_as_dict(KITS_CREDENTIALS, match_query)
            match_id = int(match_id[0]["CAMID"])

            query_camera = create_update_query(
                KITS_CONFIG.get("kits_table_camera"), record, "CAMNUMBER")

            record_geom = {}
            geometry = "geometry::Point({}, {}, 4326)".format(
                record["LONGITUDE"], record["LATITUDE"])
            record_geom["GeometryItem"] = geometry
            record_geom["CamID"] = match_id
            query_geom = create_update_query(
                KITS_CONFIG.get("kits_table_geom"), record_geom, "CamID")

            record_web = {}
            record_web["WebType"] = 2
            record_web["WebID"] = match_id
            record_web["WebURL"] = "http://{}".format(record["VIDEOIP"])
            query_web = create_update_query(KITS_CONFIG.get("kits_table_web"),
                                            record_web, "WebID")

            insert_results = kitsutil.insert_multi_table(
                KITS_CREDENTIALS, [query_camera, query_geom, query_web])

    if data_cd["delete"]:

        # logger.info('delete: {}'.format( len(data_cd['delete']) ))

        for record in data_cd["delete"]:
            time.sleep(
                1
            )  #  connection will fail if queried are pushed too frequently
            # fetch camid field, which relates camera, geometry, and webconfig table records
            match_query = create_match_query(
                KITS_CONFIG.get("kits_table_camera"),
                "CAMID",
                "CAMNUMBER",
                record["CAMNUMBER"],
            )
            match_id = kitsutil.data_as_dict(KITS_CREDENTIALS, match_query)
            match_id = int(match_id[0]["CAMID"])

            query_camera = create_delete_query(
                KITS_CONFIG.get("kits_table_camera"), "CAMID", match_id)

            query_geo = create_delete_query(KITS_CONFIG.get("kits_table_geom"),
                                            "CamID", match_id)

            query_web = create_delete_query(KITS_CONFIG.get("kits_table_web"),
                                            "WebID", match_id)

            insert_results = kitsutil.insert_multi_table(
                KITS_CREDENTIALS, [query_camera, query_geo, query_web])

    # if data_cd['no_change']:
    # logger.info('no_change: {}'.format( len(data_cd['no_change']) ))

    # logger.info('END AT {}'.format( arrow.now().format() ))

    results = {"total": 0}

    for result in ["new", "change", "no_change", "delete"]:
        results["total"] += len(data_cd[result])
        results[result] = len(data_cd[result])

    return results.get("change")
Example #11
def main():

    args = cli_args()

    app_name = args.app_name

    update_fields = [
        field for layer in cfg["layers"] for field in layer["updateFields"]
    ]

    kn = knackpy.Knack(
        obj=cfg["obj"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
        filters=cfg["filters"],
        timeout=30,
    )

    unmatched_locations = []

    if not kn.data:
        return 0
    """
    Remove "update fields" from record. these are re-appended via
    spatial lookup and thus the fieldnames must match those of the source
    dataset or be mapped in the field map config dict.
    """
    keep_fields = [
        field for field in kn.fieldnames if field not in update_fields
    ]
    kn.data = datautil.reduce_to_keys(kn.data, keep_fields)

    for location in kn.data:

        point = [location["LOCATION_longitude"], location["LOCATION_latitude"]]

        for layer in cfg["layers"]:
            layer["geometry"] = point
            field_map = cfg["field_maps"].get(layer["service_name"])
            params = get_params(layer)

            try:
                res = agolutil.point_in_poly(layer["service_name"],
                                             layer["layer_id"], params)

                if res.get("error"):
                    raise Exception(str(res))

                if res.get("features"):
                    location = join_features_to_record(res["features"], layer,
                                                       location)

                    if field_map:
                        location = map_fields(location, field_map)

                    continue

                if "service_name_secondary" in layer:
                    res = agolutil.point_in_poly(
                        layer["service_name_secondary"], layer["layer_id"],
                        params)

                    if len(res["features"]) > 0:
                        location = join_features_to_record(
                            res["features"], layer, location)

                        if field_map:
                            location = map_fields(location, field_map)
                            continue

                #  no intersecting features found
                for field in layer["updateFields"]:
                    """
                    set corresponding fields on location record to null to
                    overwrite any existing data
                    """
                    location[field] = ""

                continue

            except Exception:
                unmatched_locations.append(location)
                continue

        location["UPDATE_PROCESSED"] = True

        location["MODIFIED_DATE"] = datautil.local_timestamp()

        location = datautil.reduce_to_keys(
            [location],
            update_fields + ["id", "UPDATE_PROCESSED", "MODIFIED_DATE"])
        location = datautil.replace_keys(location, kn.field_map)

        res = knackpy.record(
            location[0],
            obj_key=cfg["obj"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            method="update",
        )

    if len(unmatched_locations) > 0:
        error_text = "Location Point/Poly Match Failure(s): {}".format(
            ", ".join(str(x) for x in unmatched_locations))
        raise Exception(error_text)

    else:
        return len(kn.data)
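get_params() is not shown; a plausible sketch of the ArcGIS REST query parameters for this point-in-polygon lookup, assuming WGS84 coordinates and the layer dict assembled above:

def get_params(layer):
    # hypothetical reconstruction of an ArcGIS REST API spatial query
    return {
        "geometry": "{},{}".format(*layer["geometry"]),
        "geometryType": "esriGeometryPoint",
        "inSR": 4326,
        "spatialRel": "esriSpatialRelIntersects",
        "outFields": ",".join(layer["updateFields"]),
        "returnGeometry": "false",
        "f": "json",
    }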
Example #12
def main():

    app_name = "finance_admin_prod" #TODO: add to argutil

    if "finance" not in app_name:
        raise Exception('Unsupported application specified. Must be finance_admin_prod or finance_admin_test.')
    
    knack_creds = KNACK_CREDENTIALS[app_name]

    '''
    We start by making a "free" call to the API endpoint to check for records.
    This calls an endpoint that is not behind login, and we do not provide a
    reference object, which avoids making a call for field data.

    This way we do not accrue API usage when checking for records to process.
    '''
    free_creds = {
        'app_id' : knack_creds['app_id'],
        'api_key' : None
    }

    free_cfg = dict(cfg["purchase_requests"])

    free_cfg.pop('ref_obj')

    free_prs = knackpy_wrapper(
        free_cfg,
        knack_creds, 
        raw_connections=True,
    )
    
    if not free_prs.data_raw:
        return 0

    '''
    There is data to be processed, so make a standard request for the record
    and field data.
    '''
    prs = knackpy_wrapper(
        cfg["purchase_requests"],
        knack_creds, 
        raw_connections=True,
    )

    for record in prs.data:
        # this grabs the auto-increment field value, which is then dropped
        pr_filter_id = record.get(cfg["purchase_requests"]['unique_id_field_name'])

        old_record_id = record.pop('id')

        record = handle_fields(record, prs.fields, prs.field_map)

        record = datautil.replace_keys(
            [record],
            prs.field_map
        )[0]

        record = assign_requester(cfg["purchase_requests"], record)

        #  Set the "copy" field to No
        record[cfg["purchase_requests"]['copy_field_id']] = False

        copied_record = knackpy.record(
            record,
            obj_key=cfg["purchase_requests"]['ref_obj'][0],
            app_id=knack_creds['app_id'],
            api_key=knack_creds['api_key'],
            method='create'
        )

        #  update the older record with need_to_copy=false
        old_record_payload = {
            'id' : old_record_id,
            cfg["purchase_requests"]['copy_field_id'] : False
        }

        old_record_update = knackpy.record(
            old_record_payload,
            obj_key=cfg["purchase_requests"]['ref_obj'][0],
            app_id=knack_creds['app_id'],
            api_key=knack_creds['api_key'],
            method='update'
        )

        # fetch item records related to the copied purchase request, and copy
        # them to the new purchase request
        item_filter = get_filter(cfg["items"]['pr_field_id'], pr_filter_id)
        items = get_items(cfg["items"], item_filter, knack_creds)

        for item in items.data:
            item = handle_fields(item, items.fields, items.field_map)
            
            # set item connection to copied purchase request record
            item[cfg["items"]['pr_connection_field_name']] = [copied_record['id']]

            item.pop('id')
            
            item = datautil.replace_keys(
                [item],
                items.field_map
            )[0]

            new_item = knackpy.record(
                item,
                obj_key=cfg["items"]['obj'],
                app_id=knack_creds['app_id'],
                api_key=knack_creds['api_key'],
                method='create'
            )

    return len(prs.data)
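get_filter() is not shown; the Knack API expects filters as a match/rules object, so a sketch might look like:

def get_filter(field_id, value):
    # hypothetical: Knack filter matching item records whose connection
    # field points at the given purchase request
    return {
        "match": "and",
        "rules": [{"field": field_id, "operator": "is", "value": value}],
    }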
def main():
    """Summary
    
    Parameters
    ----------
    None
    
    Returns
    -------
    count_sig
        number of signal records that were updated
    """
    args = cli_args()
    app_name = args.app_name

    api_key = KNACK_CREDENTIALS[app_name]["api_key"]
    app_id = KNACK_CREDENTIALS[app_name]["app_id"]

    detectors = knackpy.Knack(
        scene=cfg["CONFIG_DETECTORS"]["scene"],
        view=cfg["CONFIG_DETECTORS"]["view"],
        ref_obj=cfg["CONFIG_DETECTORS"]["objects"],
        api_key=api_key,
        app_id=app_id,
        timeout=30,
    )

    signals = knackpy.Knack(
        scene=cfg["CONFIG_SIGNALS"]["scene"],
        view=cfg["CONFIG_SIGNALS"]["view"],
        ref_obj=cfg["CONFIG_SIGNALS"]["objects"],
        api_key=api_key,
        app_id=app_id,
        timeout=30,
    )

    signals.data = datautil.filter_by_key_exists(signals.data, "SIGNAL_STATUS")
    signals.data = datautil.filter_by_val(signals.data, "SIGNAL_STATUS", ["TURNED_ON"])

    lookup = groupBySignal(detectors.data)

    count_sig = 0
    count_status = 0

    for sig in signals.data:

        old_status = None
        new_status = getStatus(sig, lookup)
        new_status_date = getMaxDate(sig, lookup)

        if cfg["SIG_STATUS_LABEL"] in sig:
            old_status = sig[cfg["SIG_STATUS_LABEL"]]

            if old_status == new_status:
                continue

        payload_signals = {
            "id": sig["id"],
            cfg["SIG_STATUS_LABEL"]: new_status,
            cfg["SIG_DATE_LABEL"]: getMaxDate(sig, lookup),
        }

        payload_signals = datautil.replace_keys([payload_signals], signals.field_map)

        #  update signal record with detection status and date
        res = knackpy.record(
            payload_signals[0],
            obj_key=cfg["CONFIG_SIGNALS"]["objects"][0],
            app_id=app_id,
            api_key=api_key,
            method="update",
        )

        count_sig += 1

    return count_sig
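groupBySignal() is not shown; from its usage it groups detector records under their parent signal's record id so getStatus() and getMaxDate() can aggregate per signal. A sketch, assuming a hypothetical SIGNAL connection of raw {"id": ...} dicts on each detector:

from collections import defaultdict

def groupBySignal(detectors):
    # hypothetical: signal record id -> list of its detector records
    lookup = defaultdict(list)
    for detector in detectors:
        for signal in detector.get("SIGNAL", []):
            lookup[signal["id"]].append(detector)
    return lookup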
def main():

    args = cli_args()

    app_name = args.app_name

    CONFIG = cfg["dms"]
    KNACK_CREDS = KNACK_CREDENTIALS[app_name]

    kits_query = """
        SELECT DMSID as KITS_ID
        ,Multistring as DMS_MESSAGE
        ,LastUpdated as MESSAGE_TIME
        FROM [KITS].[DMS_RealtimeData]
        """

    kits_data = kitsutil.data_as_dict(KITS_CREDENTIALS, kits_query)

    for record in kits_data:
        new_date = arrow.get(record["MESSAGE_TIME"])
        record["MESSAGE_TIME"] = new_date.timestamp * 1000

    kn = knackpy.Knack(
        scene=CONFIG["scene"],
        view=CONFIG["view"],
        ref_obj=CONFIG["ref_obj"],
        app_id=KNACK_CREDS["app_id"],
        api_key=KNACK_CREDS["api_key"],
    )

    #  hack to avoid ref_obj field meta replacing primary obj modified date
    #  this is a knackpy issue
    #  TODO: fix knackpy field meta handling
    kn.field_map[
        CONFIG["modified_date_field"]] = CONFIG["modified_date_field_id"]

    knack_data = kn.data

    new_data = []

    if kits_data:
        new_data = datautil.merge_dicts(knack_data, kits_data, "KITS_ID",
                                        ["DMS_MESSAGE", "MESSAGE_TIME"])

    for record in new_data:
        #  remove DMS formatting artifacts
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[np]", "\n")
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[nl]", " ")
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[pt40o0]", "")
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[pt30o0]", "")
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[fo13]", "")
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[fo2]", "")
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[jl3]", "")
        record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace("[pt30]", "")

        record[CONFIG["modified_date_field"]] = datautil.local_timestamp()

    new_data = datautil.reduce_to_keys(
        new_data,
        ["id", "MESSAGE_TIME", "DMS_MESSAGE", CONFIG["modified_date_field"]])

    new_data = datautil.replace_keys(new_data, kn.field_map)

    count = 0

    for record in new_data:
        count += 1
        print("updating record {} of {}".format(count, len(new_data)))

        res = knackpy.record(
            record,
            obj_key=CONFIG["ref_obj"][0],
            app_id=KNACK_CREDS["app_id"],
            api_key=KNACK_CREDS["api_key"],
            method="update",
        )

    return len(new_data)
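The chained .replace() calls above only cover the formatting tags observed so far. A single regex that normalizes the two layout tags and then strips any remaining bracketed code would be more robust; a sketch:

import re

def clean_dms_message(message):
    # treat [np] as a newline and [nl] as a space, then drop any other [tag]
    message = message.replace("[np]", "\n").replace("[nl]", " ")
    return re.sub(r"\[[^\]]*\]", "", message)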
Example #15
def main():

    args = cli_args()
    app_name = args.app_name
    last_run_date = args.last_run_date

    knack_creds = KNACK_CREDENTIALS[app_name]

    if not last_run_date:
        # replace the dataset by setting the last run date to the epoch;
        # the arrow package needs a specific date and time format
        last_run_date = "1970-01-01"

    filters = knackutil.date_filter_on_or_after(
        last_run_date, config["modified_date_field_id"])
    """
    We include a filter in our API call to limit to records which have
    been modified on or after the date the last time this job ran
    successfully. The Knack API supports filter requests by date only
    (not time), so we must apply an additional filter on the data after
    we receive it.
    """
    kn = knackpy.Knack(
        scene=config["scene"],
        view=config["view"],
        ref_obj=config["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        filters=filters,
    )

    if kn.data:
        # Filter data for records that have been modified after the last
        # job run (see comment above)
        last_run_timestamp = arrow.get(last_run_date).timestamp * 1000
        kn.data = filter_by_date(kn.data, config["modified_date_field"],
                                 last_run_timestamp)

    payload = []
    unmatched_segments = []

    if not kn.data:
        # logger.info('No records to update.')
        return 0

    for street_segment in kn.data:

        token = agolutil.get_token(AGOL_CREDENTIALS)
        features = agolutil.query_atx_street(
            street_segment[config["primary_key"]], token)

        if features.get("features"):
            if len(features["features"]) > 0:
                segment_data = features["features"][0]["attributes"]
            else:
                unmatched_segments.append(
                    street_segment[config["primary_key"]])
                continue
        else:
            unmatched_segments.append(street_segment[config["primary_key"]])
            continue

        #  we don't want to compare modified dates: we don't keep that value
        #  in sync with the source data on AGOL, since we set our own
        #  modified date in the data tracker
        segment_data.pop(config["modified_date_field"])
        street_segment.pop(config["modified_date_field"])

        #  compare new data (segment data) against old (street_segment)
        #  we only want to upload values that have changed
        if not are_equal(street_segment, segment_data):
            segment_data["id"] = street_segment["id"]
            segment_data[
                config["modified_date_field"]] = datautil.local_timestamp()
            payload.append(segment_data)

    payload = datautil.reduce_to_keys(payload, kn.fieldnames)
    payload = datautil.replace_keys(payload, kn.field_map)

    update_response = []
    count = 0

    for record in payload:
        count += 1

        print("updating record {} of {}".format(count, len(payload)))

        #  remove whitespace from janky Esri attributes
        for field in record:
            if isinstance(record[field], str):
                record[field] = record[field].strip()

        res = knackpy.record(
            record,
            obj_key=config["ref_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

        update_response.append(res)

    if len(unmatched_segments) > 0:
        error_text = "Unmatched street segments: {}".format(", ".join(
            str(x) for x in unmatched_segments))

        raise Exception(error_text)

    return count
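are_equal() is not shown; from context it should compare only the fields the two records share, since the AGOL feature carries attributes the Knack record lacks (both modified-date fields are popped beforehand). A sketch:

def are_equal(record_old, record_new):
    # hypothetical: compare only the keys the two records have in common
    shared = set(record_old) & set(record_new)
    return all(record_old[k] == record_new[k] for k in shared)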
Example #16
def main():

    args = cli_args()

    app_name = args.app_name

    eval_type = args.eval_type

    obj = cfg["eval_types"][eval_type]

    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        obj=cfg["eval_types"][eval_type],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    data = datautil.filter_by_val(kn.data, cfg["status_key"],
                                  cfg["status_vals"])

    #  new records will not have a score key. add it here.
    data = datautil.add_missing_keys(data, {cfg["score_key"]: 0})

    #  create a ranking month_year field
    data = datautil.concat_key_values(data, cfg["concat_keys"],
                                      cfg["group_key"], "_")

    knack_data_exclude = [
        record for record in data if record["EXCLUDE_FROM_RANKING"]
    ]
    knack_data_include = [
        record for record in data if not record["EXCLUDE_FROM_RANKING"]
    ]

    #  create list of scores grouped by group key
    score_dict = {}

    for row in knack_data_include:
        key = row[cfg["group_key"]]
        score = int(row[cfg["score_key"]])

        if key not in score_dict:
            score_dict[key] = []

        score_dict[key].append(score)

    for key in score_dict:
        score_dict[key].sort()
        score_dict[key].reverse()

    #  get score rank and append record to payload
    payload = []

    for record in knack_data_include:
        score = int(record[cfg["score_key"]])
        key = record[cfg["group_key"]]
        #  add one because list indices start at 0
        rank = datautil.min_index(score_dict[key], score) + 1

        if cfg["rank_key"] in record:
            if record[cfg["rank_key"]] != rank:
                record[cfg["rank_key"]] = rank
                record[cfg["modified_date_key"]] = datautil.local_timestamp()
                payload.append(record)

        else:
            record[cfg["rank_key"]] = rank

    #  assign null ranks to records flagged as exclude from ranking
    for record in knack_data_exclude:

        if cfg["rank_key"] in record:
            #  update excluded records if rank found
            if record[cfg["rank_key"]] != "":
                record[cfg["rank_key"]] = ""
                record[cfg["modified_date_key"]] = datautil.local_timestamp()
                payload.append(record)

    if payload:
        payload = datautil.reduce_to_keys(
            payload, [cfg["rank_key"], "id", cfg["modified_date_key"]])

        payload = datautil.replace_keys(payload, kn.field_map)

        update_response = []

        count = 0
        for record in payload:
            count += 1

            print("Updating record {} of {}".format(count, len(payload)))

            res = knackpy.record(
                record,
                obj_key=obj,
                app_id=knack_creds["app_id"],
                api_key=knack_creds["api_key"],
                method="update",
            )

            update_response.append(res)

        return len(payload)

    else:
        return 0
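datautil.min_index() is not shown; given the descending sort above, it presumably returns the position of the first occurrence of a score, so tied scores share the best rank. A sketch:

def min_index(scores_desc, score):
    # hypothetical: lowest index of score in a descending-sorted list;
    # list.index already returns the first (minimum) match
    return scores_desc.index(score)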