Example #1
def get_records_knack(app_name, config, endpoint_type="private"):
    """Summary
    
    Args:
        app_name (TYPE): Description
        config (TYPE): Description
        endpoint_type (str, optional): Description
    
    Returns:
        TYPE: Description
    """
    api_key = KNACK_CREDENTIALS[app_name]["api_key"]
    app_id = KNACK_CREDENTIALS[app_name]["app_id"]

    if endpoint_type == "public":
        return knackpy.Knack(scene=config["scene"], view=config["view"], app_id=app_id)

    else:
        return knackpy.Knack(
            scene=config["scene"],
            view=config["view"],
            ref_obj=config["ref_obj"],
            api_key=api_key,
            app_id=app_id,
        )
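A minimal usage sketch of the function above, assuming a hypothetical config dict and an app name present in KNACK_CREDENTIALS:

# Hypothetical scene/view/object keys, for illustration only.
config = {"scene": "scene_1", "view": "view_1", "ref_obj": ["object_1"]}

# Private (default) fetch: sends the API key and requests field metadata.
kn_private = get_records_knack("data_tracker_prod", config)

# Public fetch: rendered view data only, no API key sent.
kn_public = get_records_knack("data_tracker_prod", config, endpoint_type="public")

print(len(kn_private.data))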
Example #2
def main():

    args = cli_args()
    app_name = args.app_name

    CONFIG = cfg["task_orders"]
    KNACK_CREDS = KNACK_CREDENTIALS[app_name]

    html = get_html(TASK_ORDERS_ENDPOINT)
    data = handle_html(html)
    rows = handle_rows(data)

    kn = knackpy.Knack(
        scene=CONFIG["scene"],
        view=CONFIG["view"],
        ref_obj=CONFIG["ref_obj"],
        app_id=KNACK_CREDS["app_id"],
        api_key=KNACK_CREDS["api_key"],
    )

    new_rows = compare(rows, kn.data)

    new_rows = datautil.replace_keys(new_rows, kn.field_map)

    for record in new_rows:

        res = knackpy.record(
            record,
            obj_key=CONFIG["ref_obj"][0],
            app_id=KNACK_CREDS["app_id"],
            api_key=KNACK_CREDS["api_key"],
            method="create",
        )

    return len(new_rows)
Example #3
def get_knack_data(app_name, config):
    return knackpy.Knack(
        scene=config["scene"],
        view=config["view"],
        ref_obj=config["ref_obj"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
    )
Example #4
def knackpy_wrapper(cfg, auth, filters=None, raw_connections=False):

    return knackpy.Knack(obj=cfg.get("obj"),
                         scene=cfg.get("scene"),
                         view=cfg.get("view"),
                         ref_obj=cfg.get("ref_obj"),
                         app_id=auth["app_id"],
                         api_key=auth["api_key"],
                         filters=filters,
                         raw_connections=raw_connections)
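The filters argument is forwarded to the Knack API, which expects the standard Knack filter object; a hedged sketch of building one (the field key and value are hypothetical, and cfg/auth are the same config and credential dicts the wrapper expects):

# Hypothetical field key/value; knackpy 0.x passes this structure through
# to the Knack API unchanged.
my_filters = {
    "match": "and",
    "rules": [{"field": "field_123", "operator": "is", "value": "COMPLETED"}],
}

kn = knackpy_wrapper(cfg, auth, filters=my_filters)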
Example #5
def main():
    """Summary
    
    Args:
        job (TYPE): Description
        **kwargs: Description
    
    Returns:
        TYPE: Description
    """

    args = cli_args()

    device_type = args.device_type
    app_name = args.app_name

    primary_key = cfg[device_type]["primary_key"]
    status_field = cfg[device_type]["status_field"]
    status_filters = cfg[device_type]["status_filter_comm_status"]

    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        obj=cfg[device_type]["obj"],
        scene=cfg[device_type]["scene"],
        view=cfg[device_type]["view"],
        ref_obj=cfg[device_type]["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    kn_log = get_log_data(knack_creds)

    stats = defaultdict(int)

    stats["DEVICE_TYPE"] = device_type

    for device in kn.data:
        #  count stats only for devices that are TURNED_ON
        if device[status_field] in status_filters:
            status = device["IP_COMM_STATUS"]
            stats[status] += 1

    payload = build_payload(stats, args.device_type)
    payload = datautil.replace_keys([payload], kn_log.field_map)

    res = knackpy.record(
        payload[0],
        obj_key=LOG_OBJ,
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        method="create",
    )

    return len(payload)
Example #6
def get_data(app_name, cfg):
    #  get data at the public endpoint and also get
    #  necessary field metadata (which is not public);
    #  field data is fetched because we provide a ref_obj array
    return knackpy.Knack(
        ref_obj=cfg["ref_obj"],
        view=cfg["view"],
        scene=cfg["scene"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
    )
Example #7
def get_log_data(knack_creds):
    """Summary
    
    Args:
        knack_creds (TYPE): Description
    
    Returns:
        TYPE: Description
    """
    return knackpy.Knack(
        obj=LOG_OBJ,
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        rows_per_page=1,
        page_limit=1,
    )
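The rows_per_page=1 / page_limit=1 arguments keep the request small: the object is fetched mainly for its field_map, which the caller (see example #5) uses to translate readable keys into Knack field IDs before writing a record. A minimal sketch of that pattern, assuming the datautil helper used throughout these examples and a hypothetical payload:

kn_log = get_log_data(knack_creds)

# Hypothetical payload keyed by human-readable field names.
payload = [{"DEVICE_TYPE": "signals", "ONLINE": 42}]

# Translate readable keys to Knack field IDs (e.g. "field_123") before create.
payload = datautil.replace_keys(payload, kn_log.field_map)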
Example #8
def knackpy_wrapper(cfg, auth, filters=None, raw_connections=False):
    """
    Fetch records which need to be processed from a pre-filtered
    Knack view which does not require authentication.
    """
    return knackpy.Knack(
        scene=cfg.get("scene"),
        view=cfg.get("view"),
        obj=cfg.get("obj"),
        ref_obj=cfg.get("ref_obj"),
        app_id=auth["app_id"],
        api_key=auth["api_key"],
        page_limit=100,
        rows_per_page=1000,
        filters=filters,
        raw_connections=raw_connections,
    )
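With page_limit=100 and rows_per_page=1000, this wrapper fetches at most 100 × 1000 = 100,000 records per call, assuming knackpy 0.x treats rows_per_page as the per-request page size and page_limit as the maximum number of pages requested.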
Example #9
def check_for_data(app_name, cfg):
    #  check for data at public endpoint
    #  this api call does not count against
    #  daily subscription limit because we do not
    #  provide reference objects
    kn = knackpy.Knack(
        view=cfg["view"],
        scene=cfg["scene"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key="knack",
        page_limit=1,
        rows_per_page=1,
    )

    return bool(kn.data_raw)
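A sketch of how this check might gate the heavier, authenticated fetch from example #6 (the run() wrapper itself is hypothetical):

def run(app_name, cfg):
    # Skip the metered, authenticated request when the public view is empty.
    if not check_for_data(app_name, cfg):
        return 0

    kn = get_data(app_name, cfg)  # full fetch with ref_obj + api_key (example #6)
    return len(kn.data)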
Example #10
def knackpy_wrapper(cfg_dataset, auth, filters=None):
    """Summary
    
    Args:
        cfg_dataset (TYPE): Description
        auth (TYPE): Description
        filters (None, optional): Description
    
    Returns:
        TYPE: Description
    """
    return knackpy.Knack(
        obj=cfg_dataset["obj"],
        scene=cfg_dataset["scene"],
        view=cfg_dataset["view"],
        ref_obj=cfg_dataset["ref_obj"],
        app_id=auth["app_id"],
        api_key=auth["api_key"],
        filters=filters,
        page_limit=10000,
    )
Example #11
def main():

    args = cli_args()

    app_name = args.app_name
    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        scene=cfg["scene"],
        view=cfg["view"],
        ref_obj=cfg["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    primary_signals_old = get_old_prim_signals(kn.data)
    primary_signals_new = get_new_prim_signals(kn.data)

    payload = []

    for signal_id in primary_signals_new:
        """
        identify new and changed primary-secondary relationships
        """
        if signal_id in primary_signals_old:
            new_secondaries = collections.Counter(primary_signals_new[signal_id])
            old_secondaries = collections.Counter(primary_signals_old[signal_id])

            if old_secondaries != new_secondaries:

                payload.append(
                    {
                        "id": signal_id,
                        cfg["update_field"]: primary_signals_new[signal_id],
                    }
                )

        else:
            payload.append(
                {"id": signal_id, cfg["update_field"]: primary_signals_new[signal_id]}
            )

    for signal_id in primary_signals_old:
        """
        identify primary-secondary relationships that have been removed
        """
        if signal_id not in primary_signals_new:
            payload.append({"id": signal_id, cfg["update_field"]: []})

    if len(payload) == 0:
        return 0

    for record in payload:

        res = knackpy.record(
            record,
            obj_key=cfg["ref_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    return len(payload)
Example #12
def main():

    args = cli_args()

    app_name = args.app_name

    # look up the corresponding configuration based on app name;
    # with this pattern, "data_tracker_prod" and "data_tracker_test"
    # resolve to the same config, which is what we want
    for cfg_name in SR_DUE_DATE.keys():
        if cfg_name in app_name:
            cfg = SR_DUE_DATE[cfg_name]

    srs = knackpy.Knack(
        view=cfg["issues"]["view"],
        scene=cfg["issues"]["scene"],
        ref_obj=cfg["issues"]["ref_obj"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
    )

    count = 0

    if not srs.data:
        return 0

    for sr in srs.data_raw:

        filters = sr_filter(
            sr[cfg["issues"]["sr_field_id"]],
            cfg["flex_notes"]["flex_question_code_field_id"],
            cfg["flex_notes"]["sr_id_field"],
        )

        flex_note = knackpy.Knack(
            view=cfg["flex_notes"]["view"],
            scene=cfg["flex_notes"]["scene"],
            ref_obj=cfg["flex_notes"]["ref_obj"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            filters=filters,
            page_limit=1,  # limit records, to be safe (there are lots)
            rows_per_page=10,
        )

        if not flex_note.data:
            continue
        """
        Always take the first due date in the list. There are occasionally
        duplicate due date flex records for one SR. We don't know why.
        """
        due_date = get_due_date(flex_note.data[0]["FLEX_ATTRIBUTE_VALUE"])

        record = {cfg["issues"]["due_date_field_id"]: due_date, "id": sr["id"]}

        res = knackpy.record(
            record,
            obj_key=cfg["issues"]["ref_obj"][0],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            method="update",
        )

        count += 1

    return count
Example #13
def main():

    args = cli_args()

    app_name = args.app_name

    update_fields = [
        field for layer in cfg["layers"] for field in layer["updateFields"]
    ]

    kn = knackpy.Knack(
        obj=cfg["obj"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
        filters=cfg["filters"],
        timeout=30,
    )

    unmatched_locations = []

    if not kn.data:
        return 0
    """
    Remove "update fields" from record. these are re-appended via
    spatial lookup and thus the fieldnames must match those of the source
    dataset or be mapped in the field map config dict.
    """
    keep_fields = [
        field for field in kn.fieldnames if field not in update_fields
    ]
    kn.data = datautil.reduce_to_keys(kn.data, keep_fields)

    for location in kn.data:

        point = [location["LOCATION_longitude"], location["LOCATION_latitude"]]

        for layer in cfg["layers"]:
            layer["geometry"] = point
            field_map = cfg["field_maps"].get(layer["service_name"])
            params = get_params(layer)

            try:
                res = agolutil.point_in_poly(layer["service_name"],
                                             layer["layer_id"], params)

                if res.get("error"):
                    raise Exception(str(res))

                if res.get("features"):
                    location = join_features_to_record(res["features"], layer,
                                                       location)

                    if field_map:
                        location = map_fields(location, field_map)

                    continue

                if "service_name_secondary" in layer:
                    res = agolutil.point_in_poly(
                        layer["service_name_secondary"], layer["layer_id"],
                        params)

                    if len(res["features"]) > 0:
                        location = join_features_to_record(
                            res["features"], layer, location)

                        if field_map:
                            location = map_fields(location, field_map)
                            continue

                #  no intersecting features found
                for field in layer["updateFields"]:
                    """
                    set corresponding fields on location record to null to
                    overwrite any existing data
                    """
                    location[field] = ""

                continue

            except Exception as e:
                unmatched_locations.append(location)
                continue

        location["UPDATE_PROCESSED"] = True

        location["MODIFIED_DATE"] = datautil.local_timestamp()

        location = datautil.reduce_to_keys(
            [location],
            update_fields + ["id", "UPDATE_PROCESSED", "MODIFIED_DATE"])
        location = datautil.replace_keys(location, kn.field_map)

        res = knackpy.record(
            location[0],
            obj_key=cfg["obj"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            method="update",
        )

    if len(unmatched_locations) > 0:
        error_text = "Location Point/Poly Match Failure(s): {}".format(
            ", ".join(str(x) for x in unmatched_locations))
        raise Exception(error_text)

    else:
        return len(kn.data)
Example #14
def main():

    args = cli_args()
    app_name = args.app_name

    knack_creds = KNACK_CREDENTIALS[app_name]

    knack_data_pm = knackpy.Knack(
        view=cfg["params_pm"]["view"],
        scene=cfg["params_pm"]["scene"],
        ref_obj=cfg["params_pm"]["field_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    data_pm = []

    if not knack_data_pm.data:
        return 0

    for pm in knack_data_pm.data:
        #  verify there is data that needs to be processed
        #  the source view is also filtered by these conditions
        #  so this is a redundant check, to be safe
        if (not pm["COPIED_TO_SECONDARY"] and pm["PM_STATUS"] == "COMPLETED"
                and int(pm["SECONDARY_SIGNALS_COUNT"]) > 0):

            data_pm.append(pm)

    if not data_pm:
        return 0

    #  get signal data
    #  TODO: filter for signals and their secondaries based on PM data
    #  this would reduce size of request
    knack_data_signals = knackpy.Knack(
        view=cfg["params_signal"]["view"],
        scene=cfg["params_signal"]["scene"],
        ref_obj=cfg["params_signal"]["field_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    primary_signals_with_children = get_prim_signals(knack_data_signals.data)

    pm_payload_insert = []
    pm_payload_update = []
    signals_update = []

    for pm in data_pm:
        """
        Check all preventative maintenance records at signals with secondary signals
        Copy pm record to secondary signal if needed
        """
        if "SIGNAL" in pm:

            primary_signal_id = pm["SIGNAL"][0]["id"]

            if primary_signal_id in primary_signals_with_children:
                #  update original pm record with copied to secondary = True
                pm_payload_update.append({
                    "id": pm["id"],
                    "COPIED_TO_SECONDARY": True
                })

                for secondary in primary_signals_with_children[
                        primary_signal_id]:
                    #  create new pm record for secondary signal(s)
                    new_record = copy_pm_record(secondary["id"], pm,
                                                cfg["copy_fields"])

                    signals_update.append({"id": secondary["id"]})
                    pm_payload_insert.append(new_record)

    # update modified date of secondary signals which have a new PM
    signals_payload_update = apply_modified_date(signals_update)

    signals_payload_update = datautil.replace_keys(
        signals_payload_update, knack_data_signals.field_map)

    pm_payload_update = datautil.replace_keys(pm_payload_update,
                                              knack_data_pm.field_map)

    pm_payload_insert = datautil.replace_keys(pm_payload_insert,
                                              knack_data_pm.field_map)

    for record in signals_payload_update:
        res = knackpy.record(
            record,
            obj_key=cfg["params_signal"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    for record in pm_payload_update:
        res = knackpy.record(
            record,
            obj_key=cfg["params_pm"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    for record in pm_payload_insert:
        res = knackpy.record(
            record,
            obj_key=cfg["params_pm"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="create",
        )

    return len(pm_payload_insert) + len(pm_payload_update) + len(
        signals_payload_update)
Example #15
def main():
    """Summary
    
    Args:
        job (TYPE): Description
        **kwargs: Description
    
    Returns:
        TYPE: Description
    """
    kn = knackpy.Knack(
        scene=KITS_CONFIG.get("knack_scene"),
        view=KITS_CONFIG.get("knack_view"),
        ref_obj=["object_53", "object_11"],
        app_id=KNACK_CREDENTIALS[KITS_CONFIG.get("app_name")]["app_id"],
        api_key=KNACK_CREDENTIALS[KITS_CONFIG.get("app_name")]["api_key"],
    )

    field_names = kn.fieldnames
    kn.data = datautil.filter_by_key_exists(
        kn.data, KITS_CONFIG.get("primary_key_knack"))
    fieldmap_knack_kits = {
        fieldmap[x]["knack_id"]: x
        for x in fieldmap.keys() if fieldmap[x]["knack_id"] is not None
    }

    for key in KITS_CONFIG["filters"].keys():
        knack_data_filtered = datautil.filter_by_key_exists(kn.data, key)

    for key in KITS_CONFIG["filters"].keys():
        knack_data_filtered = datautil.filter_by_val(
            knack_data_filtered, key, KITS_CONFIG["filters"][key])

    knack_data_repl = datautil.replace_keys(knack_data_filtered,
                                            fieldmap_knack_kits)

    knack_data_repl = datautil.reduce_to_keys(knack_data_repl,
                                              fieldmap_knack_kits.values())

    knack_data_def = setDefaults(knack_data_repl, fieldmap)
    knack_data_repl = create_cam_comment(knack_data_repl)

    camera_query = create_camera_query(KITS_CONFIG.get("kits_table_camera"))
    kits_data = kitsutil.data_as_dict(KITS_CREDENTIALS, camera_query)

    kits_data_conv = convert_data(kits_data, fieldmap)

    compare_keys = [
        key for key in fieldmap.keys() if fieldmap[key]["detect_changes"]
    ]
    data_cd = datautil.detect_changes(kits_data_conv,
                                      knack_data_repl,
                                      "CAMNUMBER",
                                      keys=compare_keys)

    if data_cd["new"]:
        # logger.info('new: {}'.format( len(data_cd['new']) ))

        max_cam_id = get_max_id(KITS_CONFIG.get("kits_table_camera"), "CAMID")
        data_cd["new"] = map_bools(data_cd["new"])

        for record in data_cd["new"]:
            time.sleep(
                1
            )  #  connection will fail if queries are pushed too frequently

            max_cam_id += 1
            record["CAMID"] = max_cam_id
            query_camera = create_insert_query(
                KITS_CONFIG.get("kits_table_camera"), record)

            record_geom = {}
            geometry = "geometry::Point({}, {}, 4326)".format(
                record["LONGITUDE"], record["LATITUDE"])
            record_geom["GeometryItem"] = geometry
            record_geom["CamID"] = max_cam_id
            query_geom = create_insert_query(
                KITS_CONFIG.get("kits_table_geom"), record_geom)
            query_geom = query_geom.replace(
                "'", "")  #  strip single quotes from geometry value

            record_web = {}
            record_web["WebType"] = 2
            record_web["WebComments"] = ""
            record_web["WebID"] = max_cam_id
            record_web["WebURL"] = "http://{}".format(record["VIDEOIP"])
            query_web = create_insert_query(KITS_CONFIG.get("kits_table_web"),
                                            record_web)

            insert_results = kitsutil.insert_multi_table(
                KITS_CREDENTIALS, [query_camera, query_geom, query_web])

    if data_cd["change"]:

        data_cd["change"] = map_bools(data_cd["change"])

        # logger.info('change: {}'.format( len(data_cd['change']) ))

        for record in data_cd["change"]:
            time.sleep(
                1
            )  #  connection will fail if queries are pushed too frequently
            # fetch camid field, which relates camera, geometry, and webconfig table records
            match_query = create_match_query(
                KITS_CONFIG.get("kits_table_camera"),
                "CAMID",
                "CAMNUMBER",
                record["CAMNUMBER"],
            )
            match_id = kitsutil.data_as_dict(KITS_CREDENTIALS, match_query)
            match_id = int(match_id[0]["CAMID"])

            query_camera = create_update_query(
                KITS_CONFIG.get("kits_table_camera"), record, "CAMNUMBER")

            record_geom = {}
            geometry = "geometry::Point({}, {}, 4326)".format(
                record["LONGITUDE"], record["LATITUDE"])
            record_geom["GeometryItem"] = geometry
            record_geom["CamID"] = match_id
            query_geom = create_update_query(
                KITS_CONFIG.get("kits_table_geom"), record_geom, "CamID")

            record_web = {}
            record_web["WebType"] = 2
            record_web["WebID"] = match_id
            record_web["WebURL"] = "http://{}".format(record["VIDEOIP"])
            query_web = create_update_query(KITS_CONFIG.get("kits_table_web"),
                                            record_web, "WebID")

            insert_results = kitsutil.insert_multi_table(
                KITS_CREDENTIALS, [query_camera, query_geom, query_web])

    if data_cd["delete"]:

        # logger.info('delete: {}'.format( len(data_cd['delete']) ))

        for record in data_cd["delete"]:
            time.sleep(
                1
            )  #  connection will fail if queries are pushed too frequently
            # fetch camid field, which relates camera, geometry, and webconfig table records
            match_query = create_match_query(
                KITS_CONFIG.get("kits_table_camera"),
                "CAMID",
                "CAMNUMBER",
                record["CAMNUMBER"],
            )
            match_id = kitsutil.data_as_dict(KITS_CREDENTIALS, match_query)
            match_id = int(match_id[0]["CAMID"])

            query_camera = create_delete_query(
                KITS_CONFIG.get("kits_table_camera"), "CAMID", match_id)

            query_geo = create_delete_query(KITS_CONFIG.get("kits_table_geom"),
                                            "CamID", match_id)

            query_web = create_delete_query(KITS_CONFIG.get("kits_table_web"),
                                            "WebID", match_id)

            insert_results = kitsutil.insert_multi_table(
                KITS_CREDENTIALS, [query_camera, query_geo, query_web])

    # if data_cd['no_change']:
    # logger.info('no_change: {}'.format( len(data_cd['no_change']) ))

    # logger.info('END AT {}'.format( arrow.now().format() ))

    results = {"total": 0}

    for result in ["new", "change", "no_change", "delete"]:
        results["total"] += len(data_cd[result])
        results[result] = len(data_cd[result])

    return results.get("change")
Example #16
def pull(self):
    kn = knackpy.Knack(obj="object_1", app_id=self.id, api_key=self.secret)
    self.data = [kn.data, time.time()]
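The method above assumes an enclosing class that stores Knack credentials on self.id and self.secret; a minimal sketch of such a wrapper (the class name and constructor are assumptions):

import time

import knackpy


class KnackSource:
    """Hypothetical container for the pull() method shown above."""

    def __init__(self, app_id, api_key):
        self.id = app_id        # Knack application ID, read by pull()
        self.secret = api_key   # Knack API key, read by pull()
        self.data = None

    def pull(self):
        # Fetch object_1 and record when the pull happened.
        kn = knackpy.Knack(obj="object_1", app_id=self.id, api_key=self.secret)
        self.data = [kn.data, time.time()]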