def main():

    args = cli_args()

    app_name = args.app_name

    CONFIG = cfg["dms"]
    KNACK_CREDS = KNACK_CREDENTIALS[app_name]

    kits_query = """
        SELECT DMSID as KITS_ID
        ,Multistring as DMS_MESSAGE
        ,LastUpdated as MESSAGE_TIME
        FROM [KITS].[DMS_RealtimeData]
        """

    kits_data = kitsutil.data_as_dict(KITS_CREDENTIALS, kits_query)

    for record in kits_data:
        new_date = arrow.get(record["MESSAGE_TIME"])
        #  arrow < 1.0 exposes .timestamp as a property; on arrow >= 1.0 call .timestamp()
        record["MESSAGE_TIME"] = new_date.timestamp * 1000

    kn = knackpy.Knack(
        scene=CONFIG["scene"],
        view=CONFIG["view"],
        ref_obj=CONFIG["ref_obj"],
        app_id=KNACK_CREDS["app_id"],
        api_key=KNACK_CREDS["api_key"],
    )

    #  hack to avoid ref_obj field meta replacing primary obj modified date
    #  this is a knackpy issue
    #  TODO: fix knackpy field meta handling
    kn.field_map[
        CONFIG["modified_date_field"]] = CONFIG["modified_date_field_id"]

    knack_data = kn.data

    if not kits_data:
        #  new_data would otherwise be undefined below
        return 0

    new_data = datautil.merge_dicts(knack_data, kits_data, "KITS_ID",
                                    ["DMS_MESSAGE", "MESSAGE_TIME"])

    #  map DMS formatting artifacts to their replacements
    dms_artifacts = {
        "[np]": "\n",
        "[nl]": " ",
        "[pt40o0]": "",
        "[pt30o0]": "",
        "[fo13]": "",
        "[fo2]": "",
        "[jl3]": "",
        "[pt30]": "",
    }

    for record in new_data:
        #  remove DMS formatting artifacts
        for artifact, replacement in dms_artifacts.items():
            record["DMS_MESSAGE"] = record["DMS_MESSAGE"].replace(
                artifact, replacement)

        record[CONFIG["modified_date_field"]] = datautil.local_timestamp()

    new_data = datautil.reduce_to_keys(
        new_data,
        ["id", "MESSAGE_TIME", "DMS_MESSAGE", CONFIG["modified_date_field"]])

    new_data = datautil.replace_keys(new_data, kn.field_map)

    for count, record in enumerate(new_data, start=1):
        print("updating record {} of {}".format(count, len(new_data)))

        res = knackpy.record(
            record,
            obj_key=CONFIG["ref_obj"][0],
            app_id=KNACK_CREDS["app_id"],
            api_key=KNACK_CREDS["api_key"],
            method="update",
        )

    return len(new_data)
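
These examples call a cli_args() helper that is not shown. A minimal sketch of what it might look like, assuming it parses app_name (and, where needed, other options) with argparse:

import argparse

def cli_args():
    # hypothetical reconstruction; the real helper may accept more options
    parser = argparse.ArgumentParser()
    parser.add_argument("app_name", help="name of the Knack app in KNACK_CREDENTIALS")
    parser.add_argument("--device_type", help="device type key in the config dict")
    parser.add_argument("--last_run_date", help="date of the last successful run")
    return parser.parse_args()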
def insert_pms(payloads, app_name):
    """Summary
    
    Args:
        payloads (TYPE): Description
    
    Returns:
        TYPE: Description
    """

    responses_list = []

    for payload in payloads:
        print("inserting", payload)

        response = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="create",
        )

        responses_list.append(response)

    return responses_list
def main():

    args = cli_args()
    app_name = args.app_name

    CONFIG = cfg["task_orders"]
    KNACK_CREDS = KNACK_CREDENTIALS[app_name]

    html = get_html(TASK_ORDERS_ENDPOINT)
    data = handle_html(html)
    rows = handle_rows(data)

    kn = knackpy.Knack(
        scene=CONFIG["scene"],
        view=CONFIG["view"],
        ref_obj=CONFIG["ref_obj"],
        app_id=KNACK_CREDS["app_id"],
        api_key=KNACK_CREDS["api_key"],
    )

    new_rows = compare(rows, kn.data)

    new_rows = datautil.replace_keys(new_rows, kn.field_map)

    for record in new_rows:

        res = knackpy.record(
            record,
            obj_key=CONFIG["ref_obj"][0],
            app_id=KNACK_CREDS["app_id"],
            api_key=KNACK_CREDS["api_key"],
            method="create",
        )

    return len(new_rows)
def update_signals_modified_time(signals_payloads, app_name):
    """Summary
    
    Args:
        signals_payloads (TYPE): Description
    
    Returns:
        TYPE: Description
    """
    responses_list = []

    responses_list = []

    for signal_payload in signals_payloads:

        response = knackpy.record(
            signal_payload,
            obj_key="object_12",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="update",
        )

        responses_list.append(response)

    return responses_list
Example #5
def main(app_id):

    auth = KNACK_CREDENTIALS[app_id]

    work_orders = knackpy_wrapper(cfg["work_orders"],
                                  auth,
                                  raw_connections=True)

    if not work_orders.data:
        return 200, "no data to process"

    for wo in work_orders.data_raw:
        payload = {}

        sr_id = wo.get(cfg["work_orders"]["sr_record_id_field"])[0].get("id")

        payload["id"] = wo.get("id")

        payload[cfg["work_orders"]
                ["flex_notes_url_field_id"]] = flex_notes_url(sr_id)

        print(sr_id)

        res = knackpy.record(
            payload,
            obj_key=cfg["work_orders"]["ref_obj"][0],
            app_id=auth["app_id"],
            api_key=auth["api_key"],
            method="update",
            timeout=50,
        )

    return 200, f"{len(work_orders.data_raw)} records processed."
def update_records(payload, obj, method, app_name):
    """
    CRUD for Knack
    
    Args:
        payload (TYPE): Description
        obj (TYPE): Description
        method (TYPE): Description
        app_name (TYPE): Description
    
    Returns:
        TYPE: Description
    """
    results = []

    for record in payload:

        res = knackpy.record(
            record,
            obj_key=obj,
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            method=method,
        )

        results.append(res)

    return results
def update_record(payload, auth, obj):
    return knackpy.record(
        payload,
        obj_key=obj,
        app_id=auth["app_id"],
        api_key=auth["api_key"],
        method="update",
    )
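
For context, a call to this wrapper updates a single field on an existing record. The field key and ids below are hypothetical:

auth = {"app_id": "my-app-id", "api_key": "my-api-key"}  # hypothetical credentials
payload = {"id": "5d1234abcdef", "field_99": "new value"}  # record id plus a Knack field key
response = update_record(payload, auth, "object_12")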
Example #8
def create_record(record, app_name, config):

    return knackpy.record(
        record,
        obj_key=config["ref_obj"][0],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
        method="create",
    )
Example #9
def main():
    """Summary
    
    Args:
        job (TYPE): Description
        **kwargs: Description
    
    Returns:
        TYPE: Description
    """

    args = cli_args()

    device_type = args.device_type
    app_name = args.app_name

    primary_key = cfg[device_type]["primary_key"]
    status_field = cfg[device_type]["status_field"]
    status_filters = cfg[device_type]["status_filter_comm_status"]

    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        obj=cfg[device_type]["obj"],
        scene=cfg[device_type]["scene"],
        view=cfg[device_type]["view"],
        ref_obj=cfg[device_type]["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    kn_log = get_log_data(knack_creds)

    stats = defaultdict(int)

    stats["DEVICE_TYPE"] = device_type

    for device in kn.data:
        #  count stats only for devices that are TURNED_ON
        if device[status_field] in status_filters:
            status = device["IP_COMM_STATUS"]
            stats[status] += 1

    payload = build_payload(stats, args.device_type)
    payload = datautil.replace_keys([payload], kn_log.field_map)

    res = knackpy.record(
        payload[0],
        obj_key=LOG_OBJ,
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        method="create",
    )

    return len(payload)
def main():
    set_workdir()

    args = cli_args()

    app_name = args.app_name

    knack_creds = KNACK_CREDENTIALS[app_name]

    cfg = CONFIG["tmc_activities"]

    base_path = os.path.abspath(ESB_XML_DIRECTORY)
    inpath = "{}/{}".format(base_path, "ready_to_send")
    outpath = "{}/{}".format(base_path, "sent")

    if not os.path.exists(inpath):
        os.makedirs(inpath)

    if not os.path.exists(outpath):
        os.makedirs(outpath)

    """
    Get files in order by incremental ID. This ensures messages
    are transmitted chronologically.
    """
    files = get_sorted_file_list(inpath)

    for filename in files:
        """
        Extract record id, send message to ESB, move file to 'sent' folder,
        and update Knack record with status of SENT.
        """
        record_id = get_record_id_from_file(inpath, filename)

        msg = get_msg(inpath, filename)

        res = send_msg(msg, ESB_ENDPOINT["prod"], cfg["path_cert"], cfg["path_key"])

        res.raise_for_status()

        payload = create_payload(record_id, cfg["esb_status_field"])

        res = knackpy.record(
            payload,
            obj_key=cfg["obj"],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

        move_file(inpath, outpath, filename)

    return len(files)
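
get_sorted_file_list() is not shown. A minimal sketch, assuming each filename begins with the incremental record ID (e.g. "<id>_message.xml") so a numeric sort yields chronological order:

import os

def get_sorted_file_list(path):
    # hypothetical reconstruction: sort by the integer ID embedded in each
    # filename so messages transmit in the order they were created
    files = os.listdir(path)
    return sorted(files, key=lambda f: int(os.path.splitext(f)[0].split("_")[0]))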
Example #11
    def send(self):
        """
        Send the record payload to the dest app.
        """
        obj = self.knack_cfg.get(self.app_name_dest).get("object")
        app_id = self.app_id_dest
        api_key = KNACK_CREDENTIALS[app_id]["api_key"]
        method = self.method

        res = knackpy.record(self.payload,
                             obj_key=obj,
                             app_id=app_id,
                             api_key=api_key,
                             method=method)

        return res
Example #12
def update_record(record, obj_key, creds):
    """Summary
    
    Args:
        record (TYPE): Description
        obj_key (TYPE): Description
        creds (TYPE): Description
    
    Returns:
        TYPE: Description
    """
    res = knackpy.record(
        record,
        obj_key=obj_key,
        app_id=creds["app_id"],
        api_key=creds["api_key"],
        method="update",
    )

    return res
Example #13
def main():

    args = cli_args()

    app_name = args.app_name

    # look up the corresponding configuration based on app name
    # with this pattern, "data_tracker_prod" and "data_tracker_test"
    # return the same config, which is what we want
    cfg = None

    for cfg_name in SR_DUE_DATE.keys():
        if cfg_name in app_name:
            cfg = SR_DUE_DATE[cfg_name]

    if cfg is None:
        raise ValueError("No configuration found for app: {}".format(app_name))

    srs = knackpy.Knack(
        view=cfg["issues"]["view"],
        scene=cfg["issues"]["scene"],
        ref_obj=cfg["issues"]["ref_obj"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
    )

    count = 0

    if not srs.data:
        return 0

    for sr in srs.data_raw:

        filters = sr_filter(
            sr[cfg["issues"]["sr_field_id"]],
            cfg["flex_notes"]["flex_question_code_field_id"],
            cfg["flex_notes"]["sr_id_field"],
        )

        flex_note = knackpy.Knack(
            view=cfg["flex_notes"]["view"],
            scene=cfg["flex_notes"]["scene"],
            ref_obj=cfg["flex_notes"]["ref_obj"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            filters=filters,
            page_limit=1,  # limit records, to be safe (there are lots)
            rows_per_page=10,
        )

        if not flex_note.data:
            continue
        """
        Always take the first due date in the list. there are occasionally duplicate
        due date flex records for one SR. We don't know why.
        """
        due_date = get_due_date(flex_note.data[0]["FLEX_ATTRIBUTE_VALUE"])

        record = {cfg["issues"]["due_date_field_id"]: due_date, "id": sr["id"]}

        res = knackpy.record(
            record,
            obj_key=cfg["issues"]["ref_obj"][0],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            method="update",
        )

        count += 1

    return count
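
sr_filter() builds a Knack API filter object. A sketch under the assumption that it matches flex notes on the SR id and a due-date question code (the "DUE_DATE" value is hypothetical):

def sr_filter(sr_id, flex_question_code_field, sr_id_field):
    # hypothetical reconstruction using the standard Knack filter format
    return {
        "match": "and",
        "rules": [
            {"field": sr_id_field, "operator": "is", "value": sr_id},
            {"field": flex_question_code_field, "operator": "is", "value": "DUE_DATE"},  # assumed code
        ],
    }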
def main():

    args = cli_args()
    app_name = args.app_name
    last_run_date = args.last_run_date

    knack_creds = KNACK_CREDENTIALS[app_name]

    if not last_run_date:
        # replace the dataset by setting the last run date to a long, long time ago
        # the arrow package needs a specific date and time format
        last_run_date = "1970-01-01"

    filters = knackutil.date_filter_on_or_after(
        last_run_date, config["modified_date_field_id"])
    """
    We include a filter in our API call to limit to records which have
    been modified on or after the date the last time this job ran
    successfully. The Knack API supports filter requests by date only
    (not time), so we must apply an additional filter on the data after
    we receive it.
    """
    kn = knackpy.Knack(
        scene=config["scene"],
        view=config["view"],
        ref_obj=config["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        filters=filters,
    )

    if kn.data:
        # Filter data for records that have been modified after the last
        # job run (see comment above). Note: arrow < 1.0 exposes .timestamp
        # as a property; on arrow >= 1.0 call .timestamp()
        last_run_timestamp = arrow.get(last_run_date).timestamp * 1000
        kn.data = filter_by_date(kn.data, config["modified_date_field"],
                                 last_run_timestamp)

    payload = []
    unmatched_segments = []

    if not kn.data:
        # logger.info('No records to update.')
        return 0

    for street_segment in kn.data:

        token = agolutil.get_token(AGOL_CREDENTIALS)
        features = agolutil.query_atx_street(
            street_segment[config["primary_key"]], token)

        if features.get("features"):
            if len(features["features"]) > 0:
                segment_data = features["features"][0]["attributes"]
            else:
                unmatched_segments.append(
                    street_segment[config["primary_key"]])
                continue
        else:
            unmatched_segments.append(street_segment[config["primary_key"]])
            continue

        #  we don't want to compare modified dates
        #  because we don't keep that value in sync with the source data on AGOL
        #  because we use our own modified date set in the data tracker
        segment_data.pop(config["modified_date_field"])
        street_segment.pop(config["modified_date_field"])

        #  compare new data (segment data) against old (street_segment)
        #  we only want to upload values that have changed
        if not are_equal(street_segment, segment_data):
            segment_data["id"] = street_segment["id"]
            segment_data[
                config["modified_date_field"]] = datautil.local_timestamp()
            payload.append(segment_data)

    payload = datautil.reduce_to_keys(payload, kn.fieldnames)
    payload = datautil.replace_keys(payload, kn.field_map)

    update_response = []
    count = 0

    for record in payload:
        count += 1

        print("updating record {} of {}".format(count, len(payload)))

        #  remove whitespace from janky Esri attributes
        for field in record:
            if isinstance(record[field], str):
                record[field] = record[field].strip()

        res = knackpy.record(
            record,
            obj_key=config["ref_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

        update_response.append(res)

    if len(unmatched_segments) > 0:
        error_text = "Unmatched street segments: {}".format(", ".join(
            str(x) for x in unmatched_segments))

        raise Exception(error_text)

    return count
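
Because the Knack API filters by date only, filter_by_date() re-filters the results by timestamp. A minimal sketch, assuming records carry millisecond timestamps in the modified-date field:

def filter_by_date(data, date_field, timestamp_cutoff):
    # keep only records modified at or after the cutoff (milliseconds)
    return [record for record in data if record.get(date_field, 0) >= timestamp_cutoff]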
Example #15
def profile():
    kn = Knack(obj='object_17',
               app_id='59ca76c0e4c83424df3eee62',
               api_key='a0998110-a889-11e7-9b23-7d5afe966012')
    info = oidc.user_getinfo(["sub", "name", "email"])
    if request.method == 'POST':
        if 'file' not in request.files:
            print('No file attached in request')
            return redirect(request.url)
        knack_csv = pd.read_csv(request.files.get('file'))
        knack_db = pd.DataFrame(kn.data)
        knack_db.drop('Email', axis=1, inplace=True)
        knack_db['LinkedIn profile'] = knack_db['LinkedIn profile_url']
        #rename knack data to fit the new data
        knack_db.rename(columns={
            'Participant Name_first': 'First',
            'Participant Name_last': 'Last',
            'Address_city': 'City',
            'Email_email': 'Email',
            'Address_street': 'street',
            'Address_country': 'Country',
            'Address_state': 'State',
            'Address_zip': 'Zip'
        },
                        inplace=True)
        knack_db.id = ''
        knack_db.drop([
            'Address_latitude',
            'Address_longitude',
            'Address',
            'Participant Name_middle',
            'Participant Name_title',
        ],
                      axis=1,
                      inplace=True)

        knack_db = knack_db[knack_csv.columns.tolist()]
        knack_db['Current Time Equation'] = ''
        knack_db['Date added'] = ''
        knack_db['Last updated'] = ''
        knack_db.Phone = knack_db.Phone.apply(pd.Series).iloc[::, 3:4]

        for col in knack_csv.columns.tolist():
            knack_csv[col] = knack_csv[col].astype('object')

        list_of_columns = [
            'First', 'Last', 'Company is sponsor user', 'SU', 'Company name',
            'street', 'Zip', 'City', 'State', 'Country', 'Time zone',
            'Recruiting source', 'Phone', 'Email', 'LinkedIn profile',
            'Total compensation this year', 'Personas', 'Date added',
            'Last updated', 'Updated by',
            'Wants to participate in future activities?', 'Age range',
            'Years in current role', 'Years in current industry', 'Job duties',
            'Time Zone Selector', 'Tome Zone Hours', 'Current Time Equation',
            'Business Model', 'Company size', 'Company Revenue', 'Team size',
            'Industry', 'Job title', 'Role/Responsibilities',
            'WCE Products used', 'UserTesting ID', 'id'
        ]

        knack_csv = knack_csv[list_of_columns]
        knack_csv = knack_csv.apply(lambda x: x.astype('str'))
        knack_csv = knack_csv.replace('nan', '', regex=True)
        knack_csv['Company size'] = knack_csv['Company size'].str.split(
            '.', expand=True)[0]

        knack_db = knack_db[list_of_columns]
        knack_db = knack_db.apply(lambda x: x.astype('str'))
        knack_db = knack_db.replace('nan', '', regex=True)

        knack_csv = knack_csv.apply(lambda x: x.str.title())
        knack_db = knack_db.apply(lambda x: x.str.title())

        knack_csv = knack_csv.astype(knack_db.dtypes.to_dict())
        df_knack = knack_csv.merge(knack_db, how='left', indicator=True)
        received = df_knack.shape[0]
        df_knack = df_knack[df_knack['_merge'] == 'left_only']
        df_knack.drop('_merge', axis=1, inplace=True)

        df_knack.columns = cols.values()
        # Extract the other non-object values into their own dataframe
        df_knack_sub1 = df_knack.iloc[::, 2:5]
        df_knack_sub2 = df_knack.iloc[::, 10:38]
        df_knack_sub = pd.concat([df_knack_sub1, df_knack_sub2], axis=1)

        # convert data into dict
        df_knack_sub_dict = df_knack_sub.to_dict(orient='records')

        # only retrieve those with values
        dict_with_values = []
        for dict_data in df_knack_sub_dict:
            dic = {i: j for i, j in dict_data.items() if j != 'N/A'}
            dict_with_values.append(dic)

        # lets get the name objects
        bio_object = df_knack.iloc[::, 0:2]
        bio_object_dict = bio_object.to_dict(orient='records')

        #lets get address objects
        address_object = df_knack.iloc[::, 5:10]
        address_object.columns = address_object.columns.str.lower()
        address_object_dict = address_object.to_dict(orient='records')

        bio_val = []
        for i, dicti in enumerate(dict_with_values):
            dicti['field_298'] = bio_object_dict[i]
            bio_val.append(dicti)

        bio_val_addr = []
        for i, dicti in enumerate(bio_val):
            dicti["field_301"] = address_object_dict[i]
            bio_val_addr.append(dicti)

        all_data = []
        for record in bio_val_addr:
            response = knackpy.record(
                record,
                obj_key='object_17',
                app_id='59ca76c0e4c83424df3eee62',
                api_key='a0998110-a889-11e7-9b23-7d5afe966012',
                method='create')
            all_data.append(record)
        # output = pd.DataFrame(all_data)
        text = '{} records received, {} unique records successfully loaded into Knack'.format(
            received, len(all_data))
        return render_template('profile.html', text=text, oidc=oidc)
    return render_template("profile.html", profile=info, oidc=oidc)
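
The dedupe step in profile() is a left anti-join built from pandas' merge indicator. The pattern in isolation:

import pandas as pd

csv_df = pd.DataFrame({"Email": ["a@x.com", "b@x.com"]})
db_df = pd.DataFrame({"Email": ["b@x.com"]})

merged = csv_df.merge(db_df, how="left", indicator=True)
new_only = merged[merged["_merge"] == "left_only"].drop(columns="_merge")
# new_only holds only the rows not already present in Knack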
def main():

    args = cli_args()

    app_name = args.app_name

    eval_type = args.eval_type

    obj = cfg["eval_types"][eval_type]

    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        obj=obj,
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    data = datautil.filter_by_val(kn.data, cfg["status_key"],
                                  cfg["status_vals"])

    #  new records will not have a score key. add it here.
    data = datautil.add_missing_keys(data, {cfg["score_key"]: 0})

    #  create a ranking month_year field
    data = datautil.concat_key_values(data, cfg["concat_keys"],
                                      cfg["group_key"], "_")

    knack_data_exclude = [
        record for record in data if record["EXCLUDE_FROM_RANKING"] == True
    ]
    knack_data_include = [
        record for record in data if record["EXCLUDE_FROM_RANKING"] == False
    ]

    #  create list of scores grouped by group key
    score_dict = {}

    for row in knack_data_include:
        key = row[cfg["group_key"]]
        score = int(row[cfg["score_key"]])

        if key not in score_dict:
            score_dict[key] = []

        score_dict[key].append(score)

    for key in score_dict:
        score_dict[key].sort()
        score_dict[key].reverse()

    #  get score rank and append record to payload
    payload = []

    for record in knack_data_include:
        score = int(record[cfg["score_key"]])
        key = record[cfg["group_key"]]
        #  add one because list indices start at 0
        rank = datautil.min_index(score_dict[key], score) + 1

        if cfg["rank_key"] in record:
            if record[cfg["rank_key"]] != rank:
                record[cfg["rank_key"]] = rank
                record[cfg["modified_date_key"]] = datautil.local_timestamp()
                payload.append(record)

        else:
            #  record has no existing rank; assign it in memory only
            #  (it is not added to the payload)
            record[cfg["rank_key"]] = rank

    #  assign null ranks to records flagged as exclude from ranking
    for record in knack_data_exclude:

        if cfg["rank_key"] in record:
            #  update excluded records if rank found
            if record[cfg["rank_key"]] != "":
                record[cfg["rank_key"]] = ""
                record[cfg["modified_date_key"]] = datautil.local_timestamp()
                payload.append(record)

    if payload:
        payload = datautil.reduce_to_keys(
            payload, [cfg["rank_key"], "id", cfg["modified_date_key"]])

        payload = datautil.replace_keys(payload, kn.field_map)

        update_response = []

        count = 0
        for record in payload:
            count += 1

            print("Updating record {} of {}".format(count, len(payload)))

            res = knackpy.record(
                record,
                obj_key=obj,
                app_id=knack_creds["app_id"],
                api_key=knack_creds["api_key"],
                method="update",
            )

            update_response.append(res)

        return len(payload)

    else:
        return 0
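
The rank computation assumes datautil.min_index returns the first index of a score in the descending list, so tied scores share a rank. A worked example:

scores = [90, 75, 75, 60]    # one group's scores, sorted descending
rank = scores.index(75) + 1  # 2 -- both records with a 75 rank second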
def main():
    """Summary
    
    Parameters
    ----------
    None
    
    Returns
    -------
    count_sig
        number of signals that has been updated
    """
    args = cli_args()
    app_name = args.app_name

    api_key = KNACK_CREDENTIALS[app_name]["api_key"]
    app_id = KNACK_CREDENTIALS[app_name]["app_id"]

    detectors = knackpy.Knack(
        scene=cfg["CONFIG_DETECTORS"]["scene"],
        view=cfg["CONFIG_DETECTORS"]["view"],
        ref_obj=cfg["CONFIG_DETECTORS"]["objects"],
        api_key=api_key,
        app_id=app_id,
        timeout=30,
    )

    signals = knackpy.Knack(
        scene=cfg["CONFIG_SIGNALS"]["scene"],
        view=cfg["CONFIG_SIGNALS"]["view"],
        ref_obj=cfg["CONFIG_SIGNALS"]["objects"],
        api_key=api_key,
        app_id=app_id,
        timeout=30,
    )

    signals.data = datautil.filter_by_key_exists(signals.data, "SIGNAL_STATUS")
    signals.data = datautil.filter_by_val(signals.data, "SIGNAL_STATUS", ["TURNED_ON"])

    lookup = groupBySignal(detectors.data)

    count_sig = 0

    for sig in signals.data:

        old_status = None
        new_status = getStatus(sig, lookup)
        new_status_date = getMaxDate(sig, lookup)

        if cfg["SIG_STATUS_LABEL"] in sig:
            old_status = sig[cfg["SIG_STATUS_LABEL"]]

            if old_status == new_status:
                continue

        payload_signals = {
            "id": sig["id"],
            cfg["SIG_STATUS_LABEL"]: new_status,
            cfg["SIG_DATE_LABEL"]: getMaxDate(sig, lookup),
        }

        payload_signals = datautil.replace_keys([payload_signals], signals.field_map)

        #  update signal record with detection status and date
        res = knackpy.record(
            payload_signals[0],
            obj_key=cfg["CONFIG_SIGNALS"]["objects"][0],
            app_id=app_id,
            api_key=api_key,
            method="update",
        )

        count_sig += 1

    return count_sig
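
groupBySignal() is not shown. A sketch, assuming it indexes detector records by the id of their parent signal (the "SIGNAL_ID" field name is an assumption):

from collections import defaultdict

def groupBySignal(detectors):
    # hypothetical reconstruction: map each signal id to its detector records
    lookup = defaultdict(list)
    for detector in detectors:
        lookup[detector.get("SIGNAL_ID")].append(detector)  # assumed field name
    return lookup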
Example #18
def main():

    app_name = "finance_admin_prod" #TODO: add to argutil

    if "finance" not in app_name:
        raise Exception('Unsupported application specified. Must be finance_admin_prod or finance_admin_test.')
    
    knack_creds = KNACK_CREDENTIALS[app_name]

    '''
    We start by making a "free" call to the API endpoint to check for records.
    This calls an endpoint that is not behind login, and we do not provide a
    reference object, which avoids making a call for field data.

    This way we do not accrue API usage when checking for records to process.
    '''
    free_creds = {
        'app_id' : knack_creds['app_id'],
        'api_key' : None
    }

    free_cfg = dict(cfg["purchase_requests"])

    free_cfg.pop('ref_obj')

    free_prs = knackpy_wrapper(
        free_cfg,
        free_creds,
        raw_connections=True,
    )
    
    if not free_prs.data_raw:
        return 0

    '''
    There is data to be processed, so make a standard request for the record
    and field data.
    '''
    prs = knackpy_wrapper(
        cfg["purchase_requests"],
        knack_creds, 
        raw_connections=True,
    )

    for record in prs.data:
        # this grabs the auto-increment field value, which is then dropped
        pr_filter_id = record.get(cfg["purchase_requests"]['unique_id_field_name'])

        old_record_id = record.pop('id')

        record = handle_fields(record, prs.fields, prs.field_map)

        record = datautil.replace_keys(
            [record],
            prs.field_map
        )[0]

        record = assign_requester(cfg["purchase_requests"], record)

        #  Set the "copy" field to No
        record[cfg["purchase_requests"]['copy_field_id']] = False

        copied_record = knackpy.record(
            record,
            obj_key=cfg["purchase_requests"]['ref_obj'][0],
            app_id=knack_creds['app_id'],
            api_key=knack_creds['api_key'],
            method='create'
        )

        #  update the older record with need_to_copy=false
        old_record_payload = {
            'id' : old_record_id,
            cfg["purchase_requests"]['copy_field_id'] : False
        }

        old_record_update = knackpy.record(
            old_record_payload,
            obj_key=cfg["purchase_requests"]['ref_obj'][0],
            app_id=knack_creds['app_id'],
            api_key=knack_creds['api_key'],
            method='update'
        )

        # fetch item records related to the copied purchase request, and copy
        # them to the new purchase request
        item_filter = get_filter(cfg["items"]['pr_field_id'], pr_filter_id)
        items = get_items(cfg["items"], item_filter, knack_creds)

        for item in items.data:
            item = handle_fields(item, items.fields, items.field_map)
            
            # set item connection to copied purchase request record
            item[cfg["items"]['pr_connection_field_name']] = [copied_record['id']]

            item.pop('id')
            
            item = datautil.replace_keys(
                [item],
                items.field_map
            )[0]

            new_item = knackpy.record(
                item,
                obj_key=cfg["items"]['obj'],
                app_id=knack_creds['app_id'],
                api_key=knack_creds['api_key'],
                method='create'
            )

    return len(prs.data)
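
knackpy_wrapper() appears in several of these examples. A minimal sketch, assuming it simply forwards a config dict and credentials to knackpy.Knack:

def knackpy_wrapper(cfg, auth, **kwargs):
    # hypothetical reconstruction of the convenience wrapper
    return knackpy.Knack(
        scene=cfg["scene"],
        view=cfg["view"],
        ref_obj=cfg.get("ref_obj"),
        app_id=auth["app_id"],
        api_key=auth["api_key"],
        **kwargs,
    )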
def replace_pm_records(postgre_records, knack_pm_records, signal_records,
                       knack_technicians, app_name):
    """Summary
    
    Args:
        postgre_records (TYPE): Description
        knack_pm_records (TYPE): Description
        signal_records (TYPE): Description
    
    Returns:
        TYPE: Description
    """

    postgre_records_df = pd.DataFrame.from_dict(postgre_records)
    knack_pm_records_df = pd.DataFrame.from_dict(knack_pm_records.data)

    pm_insert_payloads = postgre_records_df[
        ~postgre_records_df["fulcrum_id"].
        isin(knack_pm_records_df["FULCRUM_ID"])].copy()

    pm_update_payloads = postgre_records_df[
        postgre_records_df["fulcrum_id"].isin(
            knack_pm_records_df["FULCRUM_ID"])].copy()

    pm_insert_payloads["MODIFIED_DATE"] = datautil.local_timestamp()
    pm_update_payloads["MODIFIED_DATE"] = datautil.local_timestamp()

    pm_insert_payloads = map_knack_id_signal_id(signal_records,
                                                pm_insert_payloads)
    pm_update_payloads = map_knack_id_signal_id(signal_records,
                                                pm_update_payloads)

    knack_pm_records_id_df = knack_pm_records_df[["FULCRUM_ID", "id"]]
    pm_update_payloads = pm_update_payloads.merge(
        right=knack_pm_records_id_df,
        left_on="fulcrum_id",
        right_on="FULCRUM_ID",
        how="left",
    )

    pm_insert_payloads["PM_STATUS"] = "COMPLETED"
    pm_update_payloads["PM_STATUS"] = "COMPLETED"

    pm_insert_payloads.columns = map(str.upper, pm_insert_payloads.columns)
    pm_update_payloads.columns = map(str.upper, pm_update_payloads.columns)

    pm_update_payloads = pm_update_payloads.rename(columns={"ID": "id"})

    pm_insert_payloads = pm_insert_payloads.to_dict(orient="records")
    pm_update_payloads = pm_update_payloads.to_dict(orient="records")

    if len(pm_insert_payloads) != 0:
        pm_insert_payloads = map_technicians_id_pm_payloads(
            pm_insert_payloads, knack_technicians)

    pm_update_payloads = map_technicians_id_pm_payloads(
        pm_update_payloads, knack_technicians)

    # update signal modified time in replace method

    pm_replace_payloads_shallow = pm_update_payloads + pm_insert_payloads
    pm_replace_payloads = copy.deepcopy(pm_replace_payloads_shallow)

    for d in pm_replace_payloads:
        if "id" in d:
            del d["id"]

    signal_payloads = prepare_signals_payloads(pm_replace_payloads,
                                               signal_records)
    signals_payloads = datautil.replace_keys(signal_payloads,
                                             signal_records.field_map)
    signal_results = update_signals_modified_time(signals_payloads, app_name)

    # end update signal modified time in replace method

    pm_insert_payloads = datautil.replace_keys(pm_insert_payloads,
                                               knack_pm_records.field_map)

    pm_update_payloads = datautil.replace_keys(pm_update_payloads,
                                               knack_pm_records.field_map)

    for payload in pm_insert_payloads:
        print("inserting", payload)

        insert_res = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="create",
        )

    for payload in pm_update_payloads:
        print("updating", payload)

        update_res = knackpy.record(
            payload,
            obj_key="object_84",
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            method="update",
        )

    return len(pm_insert_payloads) + len(pm_update_payloads)
Example #20
def update_output(contents):
    kn = Knack(obj='object_17',
               app_id='59ca76c0e4c83424df3eee62',
               api_key='a0998110-a889-11e7-9b23-7d5afe966012')  # added
    if contents is not None:
        content_type, content_string = contents.split(',')
        if 'csv' in content_type:
            knack_csv = pd.read_csv(
                io.StringIO(
                    pybase64.b64decode(content_string).decode('utf-8')))
            knack_db = pd.DataFrame(kn.data)
            knack_db.drop('Email', axis=1, inplace=True)
            knack_db['LinkedIn profile'] = knack_db['LinkedIn profile_url']
            #rename knack data to fit the new data
            knack_db.rename(columns={
                'Participant Name_first': 'First',
                'Participant Name_last': 'Last',
                'Address_city': 'City',
                'Email_email': 'Email',
                'Address_street': 'street',
                'Address_country': 'Country',
                'Address_state': 'State',
                'Address_zip': 'Zip'
            },
                            inplace=True)
            knack_db.id = ''
            knack_db.drop([
                'Address_latitude',
                'Address_longitude',
                'Address',
                'Participant Name_middle',
                'Participant Name_title',
            ],
                          axis=1,
                          inplace=True)

            knack_db = knack_db[knack_csv.columns.tolist()]
            knack_db['Current Time Equation'] = ''
            knack_db['Date added'] = ''
            knack_db['Last updated'] = ''
            knack_db.Phone = knack_db.Phone.apply(pd.Series).iloc[::, 3:4]

            for col in knack_csv.columns.tolist():
                knack_csv[col] = knack_csv[col].astype('object')
            list_of_columns = [
                'First', 'Last', 'Company is sponsor user', 'SU',
                'Company name', 'street', 'Zip', 'City', 'State', 'Country',
                'Time zone', 'Recruiting source', 'Phone', 'Email',
                'LinkedIn profile', 'Total compensation this year', 'Personas',
                'Date added', 'Last updated', 'Updated by',
                'Wants to participate in future activities?', 'Age range',
                'Years in current role', 'Years in current industry',
                'Job duties', 'Time Zone Selector', 'Tome Zone Hours',
                'Current Time Equation', 'Business Model', 'Company size',
                'Company Revenue', 'Team size', 'Industry', 'Job title',
                'Role/Responsibilities', 'WCE Products used', 'UserTesting ID',
                'id'
            ]
            knack_csv = knack_csv[list_of_columns]
            knack_csv = knack_csv.apply(lambda x: x.astype('str'))
            knack_csv = knack_csv.replace('nan', '', regex=True)
            knack_csv['Company size'] = knack_csv['Company size'].str.split(
                '.', expand=True)[0]

            knack_db = knack_db[list_of_columns]
            knack_db = knack_db.apply(lambda x: x.astype('str'))
            knack_db = knack_db.replace('nan', '', regex=True)

            knack_csv = knack_csv.apply(lambda x: x.str.title())
            knack_db = knack_db.apply(lambda x: x.str.title())

            knack_csv = knack_csv.astype(knack_db.dtypes.to_dict())
            df_knack = knack_csv.merge(knack_db, how='left', indicator=True)
            received = df_knack.shape[0]
            df_knack = df_knack[df_knack['_merge'] == 'left_only']
            df_knack.drop('_merge', axis=1, inplace=True)

            df_knack.columns = cols.values()
            # Extract the other non-object values into their own dataframe
            df_knack_sub1 = df_knack.iloc[::, 2:5]
            df_knack_sub2 = df_knack.iloc[::, 10:38]
            df_knack_sub = pd.concat([df_knack_sub1, df_knack_sub2], axis=1)

            # convert data into dict
            df_knack_sub_dict = df_knack_sub.to_dict(orient='records')

            dict_with_values = []
            for dict_data in df_knack_sub_dict:
                dic = {i: j for i, j in dict_data.items() if j != 'N/A'}
                dict_with_values.append(dic)

            # lets get the name objects
            bio_object = df_knack.iloc[::, 0:2]
            bio_object_dict = bio_object.to_dict(orient='records')

            #lets get address objects
            address_object = df_knack.iloc[::, 5:10]
            address_object.columns = address_object.columns.str.lower()
            address_object_dict = address_object.to_dict(orient='records')

            bio_val = []
            for i, dicti in enumerate(dict_with_values):
                dicti['field_298'] = bio_object_dict[i]
                bio_val.append(dicti)

            bio_val_addr = []
            for i, dicti in enumerate(bio_val):
                dicti["field_301"] = address_object_dict[i]
                bio_val_addr.append(dicti)

            all_data = []
            for record in bio_val_addr:
                response = knackpy.record(
                    record,
                    obj_key='object_17',
                    app_id='59ca76c0e4c83424df3eee62',
                    api_key='a0998110-a889-11e7-9b23-7d5afe966012',
                    method='create')
                all_data.append(record)
            return html.Div(children=[
                '{} records received, {} unique records successfully loaded into Knack'
                .format(received, len(all_data))
            ],
                            style={
                                'text-align': 'center',
                                'width': '600px',
                                'margin-left': '520px',
                                'margin-bottom': '300px',
                                'backgroundColor': '#e3c9c9'
                            },
                            className=
                            "alert alert-success alert-dismissible fade show")

        elif 'xlsx' in content_type:

            return html.Div([
                html.H5(
                    "Knack migrate does not allow Excel files; save your file as csv"
                )
            ])
        else:
            return html.Div([html.H5("Wrong file upload")])
def main():

    args = cli_args()
    app_name = args.app_name

    knack_creds = KNACK_CREDENTIALS[app_name]

    knack_data_pm = knackpy.Knack(
        view=cfg["params_pm"]["view"],
        scene=cfg["params_pm"]["scene"],
        ref_obj=cfg["params_pm"]["field_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    data_pm = []

    if not knack_data_pm.data:
        return 0

    for pm in knack_data_pm.data:
        #  verify there is data that needs to be processed
        #  the source view is also filtered by these conditions
        #  so this is a redundant check, to be safe
        if (not pm["COPIED_TO_SECONDARY"] and pm["PM_STATUS"] == "COMPLETED"
                and int(pm["SECONDARY_SIGNALS_COUNT"]) > 0):

            data_pm.append(pm)

    if not data_pm:
        return 0

    #  get signal data
    #  TODO: filter for signals and their secondaries based on PM data
    #  this would reduce size of request
    knack_data_signals = knackpy.Knack(
        view=cfg["params_signal"]["view"],
        scene=cfg["params_signal"]["scene"],
        ref_obj=cfg["params_signal"]["field_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    primary_signals_with_children = get_prim_signals(knack_data_signals.data)

    pm_payload_insert = []
    pm_payload_update = []
    signals_update = []

    for pm in data_pm:
        """
        Check all preventative maintenance records at signals with secondary signals
        Copy pm record to secondary signal if needed
        """
        if "SIGNAL" in pm:

            primary_signal_id = pm["SIGNAL"][0]["id"]

            if primary_signal_id in primary_signals_with_children:
                #  update original pm record with copied to secondary = True
                pm_payload_update.append({
                    "id": pm["id"],
                    "COPIED_TO_SECONDARY": True
                })

                for secondary in primary_signals_with_children[
                        primary_signal_id]:
                    #  create new pm record for secondary signal(s)
                    new_record = copy_pm_record(secondary["id"], pm,
                                                cfg["copy_fields"])

                    signals_update.append({"id": secondary["id"]})
                    pm_payload_insert.append(new_record)

    # update modified date of secondary signals which have a new PM
    signals_payload_update = apply_modified_date(signals_update)

    signals_payload_update = datautil.replace_keys(
        signals_payload_update, knack_data_signals.field_map)

    pm_payload_update = datautil.replace_keys(pm_payload_update,
                                              knack_data_pm.field_map)

    pm_payload_insert = datautil.replace_keys(pm_payload_insert,
                                              knack_data_pm.field_map)

    for record in signals_payload_update:
        res = knackpy.record(
            record,
            obj_key=cfg["params_signal"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    for record in pm_payload_update:
        res = knackpy.record(
            record,
            obj_key=cfg["params_pm"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    for record in pm_payload_insert:
        res = knackpy.record(
            record,
            obj_key=cfg["params_pm"]["field_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="create",
        )

    return len(pm_payload_insert) + len(pm_payload_update) + len(
        signals_payload_update)
Example #22
def main():

    args = cli_args()

    app_name = args.app_name

    update_fields = [
        field for layer in cfg["layers"] for field in layer["updateFields"]
    ]

    kn = knackpy.Knack(
        obj=cfg["obj"],
        app_id=KNACK_CREDENTIALS[app_name]["app_id"],
        api_key=KNACK_CREDENTIALS[app_name]["api_key"],
        filters=cfg["filters"],
        timeout=30,
    )

    unmatched_locations = []

    if not kn.data:
        return 0
    """
    Remove "update fields" from record. these are re-appended via
    spatial lookup and thus the fieldnames must match those of the source
    dataset or be mapped in the field map config dict.
    """
    keep_fields = [
        field for field in kn.fieldnames if field not in update_fields
    ]
    kn.data = datautil.reduce_to_keys(kn.data, keep_fields)

    for location in kn.data:

        point = [location["LOCATION_longitude"], location["LOCATION_latitude"]]

        for layer in cfg["layers"]:
            layer["geometry"] = point
            field_map = cfg["field_maps"].get(layer["service_name"])
            params = get_params(layer)

            try:
                res = agolutil.point_in_poly(layer["service_name"],
                                             layer["layer_id"], params)

                if res.get("error"):
                    raise Exception(str(res))

                if res.get("features"):
                    location = join_features_to_record(res["features"], layer,
                                                       location)

                    if field_map:
                        location = map_fields(location, field_map)

                    continue

                if "service_name_secondary" in layer:
                    res = agolutil.point_in_poly(
                        layer["service_name_secondary"], layer["layer_id"],
                        params)

                    if len(res["features"]) > 0:
                        location = join_features_to_record(
                            res["features"], layer, location)

                        if field_map:
                            location = map_fields(location, field_map)
                            continue

                #  no intersecting features found
                for field in layer["updateFields"]:
                    """
                    set corresponding fields on location record to null to
                    overwrite any existing data
                    """
                    location[field] = ""

                continue

            except Exception as e:
                unmatched_locations.append(location)
                continue

        location["UPDATE_PROCESSED"] = True

        location["MODIFIED_DATE"] = datautil.local_timestamp()

        location = datautil.reduce_to_keys(
            [location],
            update_fields + ["id", "UPDATE_PROCESSED", "MODIFIED_DATE"])
        location = datautil.replace_keys(location, kn.field_map)

        res = knackpy.record(
            location[0],
            obj_key=cfg["obj"],
            app_id=KNACK_CREDENTIALS[app_name]["app_id"],
            api_key=KNACK_CREDENTIALS[app_name]["api_key"],
            method="update",
        )

    if len(unmatched_locations) > 0:
        error_text = "Location Point/Poly Match Failure(s): {}".format(
            ", ".join(str(x) for x in unmatched_locations))
        raise Exception(error_text)

    else:
        return len(kn.data)
Example #23
def main():

    args = cli_args()

    device_type = args.device_type
    app_name = args.app_name

    primary_key = cfg[device_type]["primary_key"]
    ip_field = cfg[device_type]["ip_field"]

    global timeout
    timeout = cfg[device_type].get("timeout") or 3

    knack_creds = KNACK_CREDENTIALS[app_name]

    out_fields_upload = [
        "id",
        ip_field,
        "IP_COMM_STATUS",
        "COMM_STATUS_DATETIME_UTC",
        "MODIFIED_DATE",
        "MODIFIED_BY",
    ]

    #  get device data from Knack application
    kn = knackpy.Knack(
        obj=cfg[device_type]["obj"],
        scene=cfg[device_type]["scene"],
        view=cfg[device_type]["view"],
        ref_obj=cfg[device_type]["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
    )

    #  append config data to each item to be processed
    #  this is a hacky way to pass args to each thread
    for i in kn.data:
        i["ip_field"] = ip_field
        i["device_type"] = device_type

    pool = ThreadPool(8)

    results = pool.map(get_status, kn.data)

    for result in results:
        """
        Result is None if status has not changed. Otherwise result
        is device record dict
        """
        if result:
            #  format for upload to Knack
            result = [result]
            result = apply_modified_date(result)
            result = apply_modified_by(result)
            result = datautil.reduce_to_keys(result, out_fields_upload)
            result = datautil.replace_keys(result, kn.field_map)

            res = knackpy.record(
                result[0],
                #  assumes the record object is the first element of the
                #  config ref_obj array
                obj_key=cfg[device_type]["ref_obj"][0],
                app_id=knack_creds["app_id"],
                api_key=knack_creds["api_key"],
                method="update",
            )

    # close the pool and wait for the work to finish
    pool.close()
    pool.join()

    return len([record for record in results if record])
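
get_status() runs in the thread pool and returns the record only when a device's status changed. A sketch, assuming it shells out to a Linux ping and that statuses are "ONLINE"/"OFFLINE" (both assumptions):

import subprocess

def get_status(device):
    # hypothetical reconstruction: one ping per device, using the module-level timeout
    ip = device.get(device["ip_field"])
    responsive = subprocess.call(
        ["ping", "-c", "1", "-W", str(timeout), str(ip)],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    ) == 0
    new_status = "ONLINE" if responsive else "OFFLINE"  # assumed status labels
    if device.get("IP_COMM_STATUS") == new_status:
        return None  # unchanged; nothing to upload
    device["IP_COMM_STATUS"] = new_status
    return device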
def main():

    args = cli_args()

    app_name = args.app_name
    knack_creds = KNACK_CREDENTIALS[app_name]

    kn = knackpy.Knack(
        scene=cfg["scene"],
        view=cfg["view"],
        ref_obj=cfg["ref_obj"],
        app_id=knack_creds["app_id"],
        api_key=knack_creds["api_key"],
        raw_connections=True,
    )

    primary_signals_old = get_old_prim_signals(kn.data)
    primary_signals_new = get_new_prim_signals(kn.data)

    payload = []

    for signal_id in primary_signals_new:
        """
        identify new and changed primary-secondary relationships
        """
        if signal_id in primary_signals_old:
            new_secondaries = collections.Counter(primary_signals_new[signal_id])
            old_secondaries = collections.Counter(primary_signals_old[signal_id])

            if old_secondaries != new_secondaries:

                payload.append(
                    {
                        "id": signal_id,
                        cfg["update_field"]: primary_signals_new[signal_id],
                    }
                )

        else:
            payload.append(
                {"id": signal_id, cfg["update_field"]: primary_signals_new[signal_id]}
            )

    for signal_id in primary_signals_old:
        """
        identify primary-secondary relationships that have been removed
        """
        if signal_id not in primary_signals_new:
            payload.append({"id": signal_id, cfg["update_field"]: []})

    if len(payload) == 0:
        return 0

    for record in payload:

        res = knackpy.record(
            record,
            obj_key=cfg["ref_obj"][0],
            app_id=knack_creds["app_id"],
            api_key=knack_creds["api_key"],
            method="update",
        )

    return len(payload)
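
Comparing collections.Counter objects checks membership regardless of order while still respecting multiplicity, which is why it works for detecting changed secondary-signal lists:

from collections import Counter

Counter(["a", "b"]) == Counter(["b", "a"])  # True: order is ignored
Counter(["a"]) == Counter(["a", "a"])       # False: counts must match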