Code Example #1
def main():

    args = cli_args()

    app_name = args.app_name

    CONFIG = cfg["dms"]
    KNACK_CREDS = KNACK_CREDENTIALS[app_name]

    kits_query = """
        SELECT DMSID as KITS_ID
        ,Multistring as DMS_MESSAGE
        ,LastUpdated as MESSAGE_TIME
        FROM [KITS].[DMS_RealtimeData]
        """

    kits_data = kitsutil.data_as_dict(KITS_CREDENTIALS, kits_query)

    for record in kits_data:
        #  convert message time to epoch milliseconds
        #  note: arrow >= 1.0 exposes timestamp() as a method
        new_date = arrow.get(record["MESSAGE_TIME"])
        record["MESSAGE_TIME"] = int(new_date.timestamp() * 1000)

    kn = knackpy.Knack(
        scene=CONFIG["scene"],
        view=CONFIG["view"],
        ref_obj=CONFIG["ref_obj"],
        app_id=KNACK_CREDS["app_id"],
        api_key=KNACK_CREDS["api_key"],
    )
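    #  note: this is the pre-1.0 knackpy API. scene and view identify the
    #  Knack page view the records are pulled from, and ref_obj lists the
    #  object(s) whose field metadata populates kn.field_map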

    #  hack to avoid ref_obj field meta replacing primary obj modified date
    #  this is a knackpy issue
    #  TODO: fix knackpy field meta handling
    kn.field_map[
        CONFIG["modified_date_field"]] = CONFIG["modified_date_field_id"]

    knack_data = kn.data

    if not kits_data:
        #  nothing to merge; new_data would otherwise be undefined below
        return 0

    new_data = datautil.merge_dicts(knack_data, kits_data, "KITS_ID",
                                    ["DMS_MESSAGE", "MESSAGE_TIME"])

    for record in new_data:
        #  remove DMS formatting artifacts
        for artifact, replacement in (
            ("[np]", "\n"),
            ("[nl]", " "),
            ("[pt40o0]", ""),
            ("[pt30o0]", ""),
            ("[fo13]", ""),
            ("[fo2]", ""),
            ("[jl3]", ""),
            ("[pt30]", ""),
        ):
            record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace(
                artifact, replacement)

        record[CONFIG["modified_date_field"]] = datautil.local_timestamp()

    new_data = datautil.reduce_to_keys(
        new_data,
        ["id", "MESSAGE_TIME", "DMS_MESSAGE", CONFIG["modified_date_field"]])

    new_data = datautil.replace_keys(new_data, kn.field_map)

    for count, record in enumerate(new_data, start=1):
        print("updating record {} of {}".format(count, len(new_data)))

        res = knackpy.record(
            record,
            obj_key=CONFIG["ref_obj"][0],
            app_id=KNACK_CREDS["app_id"],
            api_key=KNACK_CREDS["api_key"],
            method="update",
        )

    return len(new_data)
Code Example #2
def main():
    """Summary
    
    Returns:
        TYPE: Description
    """

    args = cli_args()
    app_name = args.app_name

    pgrest_records = get_postgre_records()
    knack_records = get_knack_pm_records(app_name)
    signals_records = get_signals_records(app_name)
    knack_technicians_records = get_technicians_records(app_name)

    last_run_date = get_last_run(args, knack_records)

    if args.replace:
        signal_results = replace_pm_records(
            pgrest_records,
            knack_records,
            signals_records,
            knack_technicians_records,
            app_name,
        )

    else:
        pm_payloads = prepare_pm_payloads(
            last_run_date,
            pgrest_records,
            signals_records,
            knack_records,
            knack_technicians_records,
        )

        if not pm_payloads:
            return 0
        else:
            signal_payloads = prepare_signals_payloads(pm_payloads,
                                                       signals_records)

            pm_payloads = datautil.replace_keys(pm_payloads,
                                                knack_records.field_map)

            signals_payloads = datautil.replace_keys(signal_payloads,
                                                     signals_records.field_map)

            signal_results = update_signals_modified_time(
                signals_payloads, app_name)

            insert_pms(pm_payloads, app_name)

    return signal_results
Code Example #3
def main():

    args = cli_args()
    app_name = args.app_name

    creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        scene=config["scene"],
        view=config["view"],
        ref_obj=[config["obj"]],
        app_id=creds["app_id"],
        api_key=creds["api_key"],
    )

    calendar = get_calendar()

    kn.data = handle_records(kn.data, config["start_key"], config["end_key"],
                             config["elapsed_key"], calendar)

    if kn.data:
        kn.data = datautil.reduce_to_keys(kn.data, config["update_fields"])
        kn.data = datautil.replace_keys(kn.data, kn.field_map)

        for i, record in enumerate(kn.data, start=1):
            print("Update record {} of {}".format(i, len(kn.data)))
            update_record(record, config["obj"], creds)
    return len(kn.data)
Code Example #4
def main():
    """Summary
    
    Args:
        jobs (TYPE): Description
        **kwargs: Description
    
    Returns:
        TYPE: Descriptio
    """
    script_name = os.path.basename(__file__).replace(".py", "")

    # job_agol = jobutil.Job(
    #     name=f"{script_name}_agol",
    #     url=JOB_DB_API_URL,
    #     source="dropbox",
    #     destination="agol",
    #     auth=JOB_DB_API_TOKEN,
    # )

    # job_agol.start()

    # job_socrata = jobutil.Job(
    #     name=f"{script_name}_socrata",
    #     url=JOB_DB_API_URL,
    #     source="dropbox",
    #     destination="socrata",
    #     auth=JOB_DB_API_TOKEN,
    # )

    # job_socrata.start()

    data = get_data(dropbox_path, DROPBOX_BCYCLE_TOKEN)
    data = handle_data(data)
    data = datautil.upper_case_keys(data)

    data = datautil.replace_keys(data, {"STATUS": "KIOSK_STATUS"})

    layer = agolutil.get_item(auth=AGOL_CREDENTIALS, service_id=service_id)

    res = layer.manager.truncate()
    agolutil.handle_response(res)
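    #  truncating and then re-adding every feature amounts to a full replace
    #  of the hosted feature layer, mirroring the replace=True Socrata load
    #  below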

    adds = agolutil.feature_collection(data)

    res = layer.edit_features(adds=adds)
    agolutil.handle_response(res)

    socratautil.Soda(
        auth=SOCRATA_CREDENTIALS,
        records=data,
        resource=socrata_resource_id,
        lat_field="latitude",
        lon_field="longitude",
        location_field="location",
        replace=True,
    )

    return len(data)
Code Example #5
def main():
    """Summary
    
    Args:
        job (TYPE): Description
        **kwargs: Description
    
    Returns:
        TYPE: Description
    """

    args = cli_args()

    device_type = args.device_type
    app_name = args.app_name

    primary_key = cfg[device_type]["primary_key"]
    status_field = cfg[device_type]["status_field"]
    status_filters = cfg[device_type]["status_filter_comm_status"]

    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        obj=cfg[device_type]["obj"],
        scene=cfg[device_type]["scene"],
        view=cfg[device_type]["view"],
        ref_obj=cfg[device_type]["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    kn_log = get_log_data(knack_creds)

    stats = defaultdict(int)
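    #  defaultdict(int) starts each status tally at zero the first time a
    #  status key is seen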

    stats["DEVICE_TYPE"] = device_type

    for device in kn.data:
        #  count stats only for devices that are TURNED_ON
        if device[status_field] in status_filters:
            status = device["IP_COMM_STATUS"]
            stats[status] += 1

    payload = build_payload(stats, args.device_type)
    payload = datautil.replace_keys([payload], kn_log.field_map)

    res = knackpy.record(
        payload[0],
        obj_key=LOG_OBJ,
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        method="create",
    )

    return len(payload)
Code Example #6
def main():

    args = cli_args()
    app_name = args.app_name

    CONFIG = cfg["task_orders"][app_name]

    KNACK_CREDS = KNACK_CREDENTIALS[app_name]

    html = get_html(TASK_ORDERS_ENDPOINT)
    data = handle_html(html)
    rows = handle_rows(data)

    kn = knackpy.Knack(
        scene=CONFIG["scene"],
        view=CONFIG["view"],
        ref_obj=CONFIG["ref_obj"],
        app_id=KNACK_CREDS["app_id"],
        api_key=KNACK_CREDS["api_key"],
    )

    rows = handle_bools(rows)

    new_rows = compare(rows, kn.data)
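    #  rows returned by compare carry a Knack record "id" only when they
    #  matched an existing record, which drives the update-vs-create choice
    #  below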

    payload = datautil.replace_keys(new_rows, kn.field_map)

    for record in payload:

        method = "update" if record.get("id") else "create"

        res = knackpy.record(
            record,
            obj_key=CONFIG["ref_obj"][0],
            app_id=KNACK_CREDS["app_id"],
            api_key=KNACK_CREDS["api_key"],
            method=method,
            timeout=20,
        )

    return len(new_rows)
Code Example #7
def main():

    args = cli_args(
        "finance_scraper.py",
        "Scrape financial codes from COA Controller website and upload to Knack application.",
    )

    config = CFG[args.resource]

    scraper = Scraper(config)

    kn = get_knack_data(args.app_name, config)

    new_records = compare(scraper.data, kn.data, config["primary_key"])

    new_records = datautil.replace_keys(new_records, kn.field_map)

    for record in new_records:

        create_record(record, args.app_name, config)

    return len(new_records)
Code Example #8
def main():

    args = cli_args()

    device_type = args.device_type
    app_name = args.app_name

    primary_key = cfg[device_type]["primary_key"]
    ip_field = cfg[device_type]["ip_field"]

    #  module-level timeout is presumably read by get_status in the workers
    global timeout
    timeout = cfg[device_type].get("timeout") or 3

    knack_creds = KNACK_CREDENTIALS[app_name]

    out_fields_upload = [
        "id",
        ip_field,
        "IP_COMM_STATUS",
        "COMM_STATUS_DATETIME_UTC",
        "MODIFIED_DATE",
        "MODIFIED_BY",
    ]

    #  get device data from Knack application
    kn = knackpy.Knack(
        obj=cfg[device_type]["obj"],
        scene=cfg[device_type]["scene"],
        view=cfg[device_type]["view"],
        ref_obj=cfg[device_type]["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    #  append config data to each item to be processed
    #  this is a hacky way to pass args to each thread
    for i in kn.data:
        i["ip_field"] = ip_field
        i["device_type"] = device_type

    pool = ThreadPool(8)
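    #  pool.map blocks until every device has been checked and returns the
    #  results in the same order as kn.data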

    results = pool.map(get_status, kn.data)

    for result in results:
        #  result is None if the device status has not changed; otherwise
        #  it is the device record dict
        if result:
            #  format for upload to Knack
            result = [result]
            result = apply_modified_date(result)
            result = apply_modified_by(result)
            result = datautil.reduce_to_keys(result, out_fields_upload)
            result = datautil.replace_keys(result, kn.field_map)

            res = knackpy.record(
                result[0],
                #  assumes the record's object is the first element of the
                #  config ref_obj array
                obj_key=cfg[device_type]["ref_obj"][0],
                app_id=knack_creds["app_id"],
                api_key=knack_creds["api_key"],
                method="update",
            )

    # close the pool and wait for the work to finish
    pool.close()
    pool.join()

    return len([record for record in results if record])
Code Example #9
def main():

    args = cli_args()
    app_name = args.app_name
    last_run_date = args.last_run_date

    knack_creds = KNACK_CREDENTIALS[app_name]

    if not last_run_date:
        # replace the dataset by setting the last run date to a long, long
        # time ago; the arrow package needs a specific date and time format
        last_run_date = "1970-01-01"

    filters = knackutil.date_filter_on_or_after(
        last_run_date, config["modified_date_field_id"])
    """
    We include a filter in our API call to limit to records which have
    been modified on or after the date the last time this job ran
    successfully. The Knack API supports filter requests by date only
    (not time), so we must apply an additional filter on the data after
    we receive it.
    """
    kn = knackpy.Knack(
        scene=config["scene"],
        view=config["view"],
        ref_obj=config["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        filters=filters,
    )

    if kn.data:
        # Filter data for records that have been modified after the last
        # job run (see comment above)
        # note: arrow >= 1.0 exposes timestamp() as a method
        last_run_timestamp = int(arrow.get(last_run_date).timestamp() * 1000)
        kn.data = filter_by_date(kn.data, config["modified_date_field"],
                                 last_run_timestamp)

    payload = []
    unmatched_segments = []

    if not kn.data:
        # logger.info('No records to update.')
        return 0

    for street_segment in kn.data:

        token = agolutil.get_token(AGOL_CREDENTIALS)
        features = agolutil.query_atx_street(
            street_segment[config["primary_key"]], token)

        if features.get("features"):
            segment_data = features["features"][0]["attributes"]
        else:
            unmatched_segments.append(street_segment[config["primary_key"]])
            continue

        #  we don't compare modified dates because we don't keep that value
        #  in sync with the source data on AGOL; we use our own modified
        #  date set in the data tracker
        segment_data.pop(config["modified_date_field"])
        street_segment.pop(config["modified_date_field"])

        #  compare new data (segment data) against old (street_segment)
        #  we only want to upload values that have changed
        if not are_equal(street_segment, segment_data):
            segment_data["id"] = street_segment["id"]
            segment_data[
                config["modified_date_field"]] = datautil.local_timestamp()
            payload.append(segment_data)

    payload = datautil.reduce_to_keys(payload, kn.fieldnames)
    payload = datautil.replace_keys(payload, kn.field_map)

    update_response = []
    count = 0

    for record in payload:
        count += 1

        print("updating record {} of {}".format(count, len(payload)))

        #  remove whitespace from janky Esri attributes
        for field in record:
            if isinstance(record[field], str):
                record[field] = record[field].strip()

        res = knackpy.record(
            record,
            obj_key=config["ref_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

        update_response.append(res)

    if unmatched_segments:
        error_text = "Unmatched street segments: {}".format(", ".join(
            str(x) for x in unmatched_segments))

        raise Exception(error_text)

    return count
Code Example #10
def main():

    args = cli_args()

    app_name = args.app_name

    eval_type = args.eval_type

    obj = cfg["eval_types"][eval_type]

    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        obj=cfg["eval_types"][eval_type],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    data = datautil.filter_by_val(kn.data, cfg["status_key"],
                                  cfg["status_vals"])

    #  new records will not have a score key. add it here.
    data = datautil.add_missing_keys(data, {cfg["score_key"]: 0})

    #  create a ranking month_year field
    data = datautil.concat_key_values(data, cfg["concat_keys"],
                                      cfg["group_key"], "_")

    knack_data_exclude = [
        record for record in data if record["EXCLUDE_FROM_RANKING"]
    ]
    knack_data_include = [
        record for record in data if not record["EXCLUDE_FROM_RANKING"]
    ]

    #  create list of scores grouped by group key
    score_dict = {}

    for row in knack_data_include:
        key = row[cfg["group_key"]]
        score = int(row[cfg["score_key"]])

        if key not in score_dict:
            score_dict[key] = []

        score_dict[key].append(score)

    for key in score_dict:
        score_dict[key].sort()
        score_dict[key].reverse()
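    #  with each group's scores sorted descending, a score's first position
    #  in the list plus one is its rank; datautil.min_index presumably
    #  returns that first position, so tied scores share a rank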

    #  get score rank and append record to payload
    payload = []

    for record in knack_data_include:
        score = int(record[cfg["score_key"]])
        key = record[cfg["group_key"]]
        #  add one because list indices start at 0
        rank = datautil.min_index(score_dict[key], score) + 1

        if cfg["rank_key"] in record:
            if record[cfg["rank_key"]] != rank:
                record[cfg["rank_key"]] = rank
                record[cfg["modified_date_key"]] = datautil.local_timestamp()
                payload.append(record)

        else:
            #  record has no rank yet; assign one and include it in the
            #  payload so newly ranked records are uploaded too
            record[cfg["rank_key"]] = rank
            record[cfg["modified_date_key"]] = datautil.local_timestamp()
            payload.append(record)

    #  assign null ranks to records flagged as exclude from ranking
    for record in knack_data_exclude:

        if cfg["rank_key"] in record:
            #  update excluded records if rank found
            if record[cfg["rank_key"]] != "":
                record[cfg["rank_key"]] = ""
                record[cfg["modified_date_key"]] = datautil.local_timestamp()
                payload.append(record)

    if payload:
        payload = datautil.reduce_to_keys(
            payload, [cfg["rank_key"], "id", cfg["modified_date_key"]])

        payload = datautil.replace_keys(payload, kn.field_map)

        update_response = []

        count = 0
        for record in payload:
            count += 1

            print("Updating record {} of {}".format(count, len(payload)))

            res = knackpy.record(
                record,
                obj_key=obj,
                app_id=knack_creds["app_id"],
                api_key=knack_creds["api_key"],
                method="update",
            )

            update_response.append(res)

        return len(payload)

    else:
        return 0
Code Example #11
def main():
    args = cli_args()

    app_name = args.app_name

    records_processed = 0

    record_types = ["objects", "fields"]

    app_data = get_app_data(KNACK_CREDENTIALS[app_name]["app_id"])

    for record_type in record_types:

        data_new = app_data["objects"]

        if record_type == "fields":
            #  we get latest object data within this for loop
            #  because it may change when objects are processed
            data_existing_objects = get_existing_data(
                cfg["objects"]["obj"],
                KNACK_CREDENTIALS[app_name]["app_id"],
                KNACK_CREDENTIALS[app_name]["api_key"],
            )

            obj_row_id_lookup = get_object_row_ids(
                data_existing_objects.data_raw, cfg["objects"]["id_field_key"])

            data_new = parse_fields(
                data_new, cfg[record_type]["object_connection_field"],
                obj_row_id_lookup)

        data_existing = get_existing_data(
            cfg[record_type]["obj"],
            KNACK_CREDENTIALS[app_name]["app_id"],
            KNACK_CREDENTIALS[app_name]["api_key"],
        )

        payload = evaluate_ids(data_new, data_existing.data_raw,
                               cfg[record_type]["id_field_key"])
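        #  evaluate_ids presumably buckets records into payloads keyed by
        #  Knack method: "create", "update", and "delete" (tallied below)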

        for method in payload.keys():

            if record_type == "fields":

                data_existing.data_raw = format_connections(
                    data_existing.data_raw,
                    cfg[record_type]["object_connection_field"])

            payload[method] = convert_bools_nones_arrays(payload[method])
            data_existing.data_raw = convert_bools_nones_arrays(
                data_existing.data_raw)

            payload[method] = datautil.stringify_key_values(
                payload[method], cfg[record_type]["stringify_keys"])

            payload[method] = datautil.replace_keys(payload[method],
                                                    data_existing.field_map)

            if method == "update":
                # verify if data has changed
                changed = []

                for rec_new in payload[method]:
                    rec_old = next(
                        record for record in data_existing.data_raw
                        if record["id"] == rec_new["id"])

                    # identify fields whose contents don't match
                    diff = [
                        k for k in rec_new.keys() if rec_old[k] != rec_new[k]
                    ]

                    if diff:
                        changed.append(rec_new)

                payload[method] = changed

            update_records(payload[method], cfg[record_type]["obj"], method,
                           app_name)

        records_processed += sum([
            len(payload["create"]),
            len(payload["update"]),
            len(payload["delete"])
        ])

    return records_processed
Code Example #12
def main():

    args = cli_args()

    app_name = args.app_name

    update_fields = [
        field for layer in cfg["layers"] for field in layer["updateFields"]
    ]

    kn = knackpy.Knack(
        obj=cfg["obj"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
        filters=cfg["filters"],
        timeout=30,
    )

    unmatched_locations = []

    if not kn.data:
        return 0
    """
    Remove "update fields" from record. these are re-appended via
    spatial lookup and thus the fieldnames must match those of the source
    dataset or be mapped in the field map config dict.
    """
    keep_fields = [
        field for field in kn.fieldnames if field not in update_fields
    ]
    kn.data = datautil.reduce_to_keys(kn.data, keep_fields)

    for location in kn.data:

        point = [location["LOCATION_longitude"], location["LOCATION_latitude"]]

        for layer in cfg["layers"]:
            layer["geometry"] = point
            field_map = cfg["field_maps"].get(layer["service_name"])
            params = get_params(layer)
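            #  test the location point against this layer's polygons; if the
            #  primary service returns no features, a secondary service (when
            #  configured) is tried before the layer's update fields are
            #  nulled out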

            try:
                res = agolutil.point_in_poly(layer["service_name"],
                                             layer["layer_id"], params)

                if res.get("error"):
                    raise Exception(str(res))

                if res.get("features"):
                    location = join_features_to_record(res["features"], layer,
                                                       location)

                    if field_map:
                        location = map_fields(location, field_map)

                    continue

                if "service_name_secondary" in layer:
                    res = agolutil.point_in_poly(
                        layer["service_name_secondary"], layer["layer_id"],
                        params)

                    if len(res["features"]) > 0:
                        location = join_features_to_record(
                            res["features"], layer, location)

                        if field_map:
                            location = map_fields(location, field_map)

                        continue

                #  no intersecting features found: set the corresponding
                #  fields on the location record to null to overwrite any
                #  existing data
                for field in layer["updateFields"]:
                    location[field] = ""

                continue

            except Exception:
                unmatched_locations.append(location)
                continue

        location["UPDATE_PROCESSED"] = True

        location["MODIFIED_DATE"] = datautil.local_timestamp()

        location = datautil.reduce_to_keys(
            [location],
            update_fields + ["id", "UPDATE_PROCESSED", "MODIFIED_DATE"])
        location = datautil.replace_keys(location, kn.field_map)

        res = knackpy.record(
            location[0],
            obj_key=cfg["obj"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            method="update",
        )

    if unmatched_locations:
        error_text = "Location Point/Poly Match Failure(s): {}".format(
            ", ".join(str(x) for x in unmatched_locations))
        raise Exception(error_text)

    return len(kn.data)
Code Example #13
def main():

    args = cli_args()
    app_name = args.app_name

    knack_creds = KNACK_CREDENTIALS[app_name]

    knack_data_pm = knackpy.Knack(
        view=cfg["params_pm"]["view"],
        scene=cfg["params_pm"]["scene"],
        ref_obj=cfg["params_pm"]["field_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    data_pm = []

    if not knack_data_pm.data:
        return 0

    for pm in knack_data_pm.data:
        #  verify there is data that needs to be processed
        #  the source view is also filtered by these conditions
        #  so this is a redundant check, to be safe
        if (not pm["COPIED_TO_SECONDARY"] and pm["PM_STATUS"] == "COMPLETED"
                and int(pm["SECONDARY_SIGNALS_COUNT"]) > 0):

            data_pm.append(pm)

    if not data_pm:
        return 0

    #  get signal data
    #  TODO: filter for signals and their secondaries based on PM data
    #  this would reduce size of request
    knack_data_signals = knackpy.Knack(
        view=cfg["params_signal"]["view"],
        scene=cfg["params_signal"]["scene"],
        ref_obj=cfg["params_signal"]["field_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    primary_signals_with_children = get_prim_signals(knack_data_signals.data)
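    #  get_prim_signals presumably maps each primary signal's record id to
    #  the list of its secondary signal records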

    pm_payload_insert = []
    pm_payload_update = []
    signals_update = []

    for pm in data_pm:
        #  check all preventative maintenance records at signals with
        #  secondary signals; copy the pm record to each secondary if needed
        if "SIGNAL" in pm:

            primary_signal_id = pm["SIGNAL"][0]["id"]

            if primary_signal_id in primary_signals_with_children:
                #  update original pm record with copied to secondary = True
                pm_payload_update.append({
                    "id": pm["id"],
                    "COPIED_TO_SECONDARY": True
                })

                for secondary in primary_signals_with_children[
                        primary_signal_id]:
                    #  create new pm record for secondary signal(s)
                    new_record = copy_pm_record(secondary["id"], pm,
                                                cfg["copy_fields"])

                    signals_update.append({"id": secondary["id"]})
                    pm_payload_insert.append(new_record)

    # update modified date of secondary signals which have a new PM
    signals_payload_update = apply_modified_date(signals_update)

    signals_payload_update = datautil.replace_keys(
        signals_payload_update, knack_data_signals.field_map)

    pm_payload_update = datautil.replace_keys(pm_payload_update,
                                              knack_data_pm.field_map)

    pm_payload_insert = datautil.replace_keys(pm_payload_insert,
                                              knack_data_pm.field_map)

    for record in signals_payload_update:
        res = knackpy.record(
            record,
            obj_key=cfg["params_signal"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    for record in pm_payload_update:
        res = knackpy.record(
            record,
            obj_key=cfg["params_pm"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    for record in pm_payload_insert:
        res = knackpy.record(
            record,
            obj_key=cfg["params_pm"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="create",
        )

    return len(pm_payload_insert) + len(pm_payload_update) + len(
        signals_payload_update)
Code Example #14
def main():

    app_name = "finance_admin_prod"  #TODO: add to argutil

    if "finance" not in app_name:
        raise Exception(
            'Unsupported application specified. Must be finance_admin_prod or finance_admin_test.'
        )

    knack_creds = KNACK_CREDENTIALS[app_name]
    '''
    We start by making a "free" call to the API endpoint to check for records.
    This calls an endpoint that is not behind login, and we do not provide a
    reference object, which avoivds making a call for field data.

    This way we do not accure API usage when checking for records to process.
    '''
    free_creds = {'app_id': knack_creds['app_id'], 'api_key': None}

    free_cfg = dict(cfg["purchase_requests"])

    free_cfg.pop('ref_obj')

    free_prs = knackpy_wrapper(
        free_cfg,
        free_creds,
        raw_connections=True,
    )

    if not free_prs.data_raw:
        return 0
    '''
    There is data to be processed, so make a standard request for the record
    and field data.
    '''
    prs = knackpy_wrapper(
        cfg["purchase_requests"],
        knack_creds,
        raw_connections=True,
    )

    for record in prs.data:
        # this grabs the auto-increment field value, which is then dropped
        pr_filter_id = record.get(
            cfg["purchase_requests"]['unique_id_field_name'])

        old_record_id = record.pop('id')

        record = handle_fields(record, prs.fields, prs.field_map)

        record = datautil.replace_keys([record], prs.field_map)[0]

        record = assign_requester(cfg["purchase_requests"], record)

        #  Set the "copy" field to No
        record[cfg["purchase_requests"]['copy_field_id']] = False

        copied_record = knackpy.record(
            record,
            obj_key=cfg["purchase_requests"]['ref_obj'][0],
            app_id=knack_creds['app_id'],
            api_key=knack_creds['api_key'],
            method='create')

        #  update the older record with need_to_copy=false
        old_record_payload = {
            'id': old_record_id,
            cfg["purchase_requests"]['copy_field_id']: False
        }

        old_record_update = knackpy.record(
            old_record_payload,
            obj_key=cfg["purchase_requests"]['ref_obj'][0],
            app_id=knack_creds['app_id'],
            api_key=knack_creds['api_key'],
            method='update')

        # fetch item records related to the copied purchase request, and copy
        # them to the new purchase request
        item_filter = get_filter(cfg["items"]['pr_field_id'], pr_filter_id)
        items = get_items(cfg["items"], item_filter, knack_creds)

        for item in items.data:
            item = handle_fields(item, items.fields, items.field_map)

            # set item connection to copied purchase request record
            item[cfg["items"]['pr_connection_field_name']] = [
                copied_record['id']
            ]

            item.pop('id')

            item = datautil.replace_keys([item], items.field_map)[0]

            new_item = knackpy.record(item,
                                      obj_key=cfg["items"]['obj'],
                                      app_id=knack_creds['app_id'],
                                      api_key=knack_creds['api_key'],
                                      method='create')

    return len(prs.data)
Code Example #15
def main():
    """Summary
    
    Parameters
    ----------
    None
    
    Returns
    -------
    count_sig
        number of signals that have been updated
    """
    args = cli_args()
    app_name = args.app_name

    api_key = KNACK_CREDENTIALS[app_name]["api_key"]
    app_id = KNACK_CREDENTIALS[app_name]["app_id"]

    detectors = knackpy.Knack(
        scene=cfg["CONFIG_DETECTORS"]["scene"],
        view=cfg["CONFIG_DETECTORS"]["view"],
        ref_obj=cfg["CONFIG_DETECTORS"]["objects"],
        api_key=api_key,
        app_id=app_id,
        timeout=30,
    )

    signals = knackpy.Knack(
        scene=cfg["CONFIG_SIGNALS"]["scene"],
        view=cfg["CONFIG_SIGNALS"]["view"],
        ref_obj=cfg["CONFIG_SIGNALS"]["objects"],
        api_key=api_key,
        app_id=app_id,
        timeout=30,
    )

    signals.data = datautil.filter_by_key_exists(signals.data, "SIGNAL_STATUS")
    signals.data = datautil.filter_by_val(signals.data, "SIGNAL_STATUS",
                                          ["TURNED_ON"])

    lookup = groupBySignal(detectors.data)

    count_sig = 0

    for sig in signals.data:

        old_status = None
        new_status = getStatus(sig, lookup)
        new_status_date = getMaxDate(sig, lookup)

        if cfg["SIG_STATUS_LABEL"] in sig:
            old_status = sig[cfg["SIG_STATUS_LABEL"]]

            if old_status == new_status:
                continue

        payload_signals = {
            "id": sig["id"],
            cfg["SIG_STATUS_LABEL"]: new_status,
            cfg["SIG_DATE_LABEL"]: new_status_date,
        }

        payload_signals = datautil.replace_keys([payload_signals],
                                                signals.field_map)

        #  update signal record with detection status and date
        res = knackpy.record(
            payload_signals[0],
            obj_key=cfg["CONFIG_SIGNALS"]["objects"][0],
            app_id=app_id,
            api_key=api_key,
            method="update",
        )

        count_sig += 1

    return count_sig
Code Example #16
def replace_pm_records(postgre_records, knack_pm_records, signal_records,
                       knack_technicians, app_name):
    """Summary
    
    Args:
        postgre_records (TYPE): Description
        knack_pm_records (TYPE): Description
        signal_records (TYPE): Description
    
    Returns:
        TYPE: Description
    """
    postgre_records_df = pd.DataFrame.from_dict(postgre_records)
    knack_pm_records_df = pd.DataFrame.from_dict(knack_pm_records.data)
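    #  split the source rows on fulcrum_id: rows not yet present in Knack
    #  become inserts, and rows already present become updates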

    pm_insert_payloads = postgre_records_df[
        ~postgre_records_df["fulcrum_id"].
        isin(knack_pm_records_df["FULCRUM_ID"])].copy()

    pm_update_payloads = postgre_records_df[
        postgre_records_df["fulcrum_id"].isin(
            knack_pm_records_df["FULCRUM_ID"])].copy()

    pm_insert_payloads["MODIFIED_DATE"] = datautil.local_timestamp()
    pm_update_payloads["MODIFIED_DATE"] = datautil.local_timestamp()

    pm_insert_payloads = map_knack_id_signal_id(signal_records,
                                                pm_insert_payloads)
    pm_update_payloads = map_knack_id_signal_id(signal_records,
                                                pm_update_payloads)

    knack_pm_records_id_df = knack_pm_records_df[["FULCRUM_ID", "id"]]
    pm_update_payloads = pm_update_payloads.merge(
        right=knack_pm_records_id_df,
        left_on="fulcrum_id",
        right_on="FULCRUM_ID",
        how="left",
    )

    pm_insert_payloads["PM_STATUS"] = "COMPLETED"
    pm_update_payloads["PM_STATUS"] = "COMPLETED"

    pm_insert_payloads.columns = map(str.upper, pm_insert_payloads.columns)
    pm_update_payloads.columns = map(str.upper, pm_update_payloads.columns)

    pm_update_payloads = pm_update_payloads.rename(columns={"ID": "id"})

    pm_insert_payloads = pm_insert_payloads.to_dict(orient="records")
    pm_update_payloads = pm_update_payloads.to_dict(orient="records")

    if pm_insert_payloads:
        pm_insert_payloads = map_technicians_id_pm_payloads(
            pm_insert_payloads, knack_technicians)

    pm_update_payloads = map_technicians_id_pm_payloads(
        pm_update_payloads, knack_technicians)

    # update signal modified time in replace method

    pm_replace_payloads_shallow = pm_update_payloads + pm_insert_payloads
    pm_replace_payloads = copy.deepcopy(pm_replace_payloads_shallow)

    for d in pm_replace_payloads:
        if "id" in d:
            del d["id"]

    signal_payloads = prepare_signals_payloads(pm_replace_payloads,
                                               signal_records)
    signals_payloads = datautil.replace_keys(signal_payloads,
                                             signal_records.field_map)
    signal_results = update_signals_modified_time(signals_payloads, app_name)

    # end update signal modified time in replace method

    pm_insert_payloads = datautil.replace_keys(pm_insert_payloads,
                                               knack_pm_records.field_map)

    pm_update_payloads = datautil.replace_keys(pm_update_payloads,
                                               knack_pm_records.field_map)

    for payload in pm_insert_payloads:
        print("inserting", payload)

        insert_res = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="create",
        )

    for payload in pm_update_payloads:
        print("updating", payload)

        update_res = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="update",
        )

    return len(pm_insert_payloads) + len(pm_update_payloads)