def upsert(input_dir, month_day_year, awam_host_instance_name="Austin"):
    """Upload a Bluetooth individual address file (IAF) to Socrata.

    Reads ``<instance>_bt_<date>.csv`` from *input_dir*, maps each row onto
    the IAF field names, and upserts the records to Socrata dataset
    ``qnpj-zrb9`` using the module-level ``SOCRATA_CREDENTIALS``.
    """
    # Build the path to this instance/date's IAF export
    iaf_filename = "%s_bt_%s.csv" % (awam_host_instance_name, month_day_year)
    iaf_input_path = os.path.join(input_dir, iaf_filename)

    # Column order of the AWAM IAF export
    fieldnames = [
        'record_id',
        'host_read_time',
        'field_device_read_time',
        'reader_identifier',
        'device_address',
    ]

    # Read every row into a dict keyed by field name, then upload
    with open(iaf_input_path, 'r') as iaf_input_file:
        records = [
            dict(zip(fieldnames, row)) for row in csv.reader(iaf_input_file)
        ]

    upsert_data(SOCRATA_CREDENTIALS, records, "qnpj-zrb9")
# --- Exemplo n.º 2 (scraper artifact; the stray "0" was a vote count) ---
def upsert(input_dir, month_day_year, awam_host_instance_name="Austin"):
    """Upload a Bluetooth individual traffic match file (ITMF) to Socrata.

    Reads ``<instance>_btmatch_<date>.csv`` from *input_dir*, skips the
    header row, maps each data row onto the ITMF field names, and upserts
    the records to Socrata dataset ``x44q-icha`` using the module-level
    ``SOCRATA_CREDENTIALS``.
    """
    # Set ITMF path parameters (source files are CSV, formerly .txt)
    itmf_filename = "%s_btmatch_%s.csv" % (awam_host_instance_name,
                                           month_day_year)
    itmf_input_path = os.path.join(input_dir, itmf_filename)

    # Upload the data
    with open(itmf_input_path, 'r') as itmf_input_file:
        reader = csv.reader(itmf_input_file)
        fieldnames = [
            'record_id', 'device_address', 'origin_reader_identifier',
            'destination_reader_identifier', 'start_time', 'end_time',
            'travel_time_seconds', 'speed_miles_per_hour', 'match_validity',
            'filter_identifier', 'day_of_week'
        ]

        # Skip the header row. The builtin next() works on both Python 2
        # and 3 (reader.next() was Python-2-only); the None default keeps
        # an empty file from raising StopIteration.
        next(reader, None)
        data = [dict(zip(fieldnames, record)) for record in reader]
        upsert_data(SOCRATA_CREDENTIALS, data, "x44q-icha")
                included += 1
                row[9] = get_timestamp(row[9])

                # create unique row id
                row.insert(0, '{}{}{}'.format(row[9], row[0], row[1]) )

                rows.append(row)
            else:
                excluded += 1

        print('incuded: {}'.format(included) )
        print('excluded: {}'.format(excluded) )
        
        return rows



# Walk the archive tree and upload every Austin Bluetooth summary file found.
# NOTE(review): this script fragment depends on module-level names defined
# elsewhere in the file: rootDir, fieldnames, process_data, socrata_helpers,
# secrets, and resouce_id. "resouce_id" looks like a typo for "resource_id" --
# confirm how the module-level variable is actually spelled before renaming.
for dirpath, subdirs, files in os.walk(rootDir):
    for fname in files:
        if 'Austin_bt_summary_' in fname:
            print(fname)
            # process_data presumably returns one list per row -- verify
            data = process_data( os.path.join(dirpath, fname) )
            
            # Pair each row's values with the expected column names
            payload = [dict(zip(fieldnames, record)) for record in data]

            socrata_helpers.upsert_data(secrets.SOCRATA_CREDENTIALS, payload, resouce_id)




def main(date_time):
    """Sync signal flash statuses from KITS into Socrata and log the result.

    Pulls signal metadata from Knack, queries KITS for current operation
    states, aborts (after emailing an alert and logging) if the KITS data
    is stale, then diffs flash statuses against the published Socrata
    dataset, upserting any changes, a publication log entry, and an
    archive of signals that left flash status.

    Returns a dict with keys 'res' (status upsert response) and
    'res_historical' (historical upsert response, or None when nothing
    was archived). Emails an alert and re-raises on any exception.
    """
    print('starting stuff now')

    try:
        #  fetch and parse signal metadata from Knack
        field_list = knack_helpers.get_fields(KNACK_PARAMS)

        knack_data = knack_helpers.get_data(KNACK_PARAMS)

        knack_data_parsed = knack_helpers.parse_data(knack_data,
                                                     field_list,
                                                     KNACK_PARAMS,
                                                     require_locations=True,
                                                     convert_to_unix=True)

        #  query KITS for the current operation state of each signal
        kits_query = kits_helpers.generate_status_id_query(
            knack_data_parsed, 'ATD_SIGNAL_ID')

        kits_data = kits_helpers.data_as_dict(secrets.KITS_CREDENTIALS,
                                              kits_query)

        kits_data = data_helpers.stringify_key_values(kits_data)

        #  bail out early if KITS has not updated in the last 15 minutes
        stale = kits_helpers.check_for_stale(kits_data,
                                             'OPERATION_STATE_DATETIME', 15)

        if stale['stale']:
            email_helpers.send_stale_email(stale['delta_minutes'],
                                           secrets.ALERTS_DISTRIBUTION)

            response_obj = {
                'Errors': 1,
                'message': 'WARNING: stale data detected',
                'Rows Updated': 0,
                'Rows Created': 0,
                'Rows Deleted': 0
            }

            stale_data_log = socrata_helpers.prep_pub_log(
                date_time, 'signal_status_update', response_obj)

            socrata_helpers.upsert_data(secrets.SOCRATA_CREDENTIALS,
                                        stale_data_log, SOCRATA_PUB_LOG_ID)

            #  SystemExit is not an Exception subclass, so the handler
            #  below will not swallow it
            sys.exit()

        kits_data = data_helpers.filter_by_key(
            kits_data, 'OPERATION_STATE',
            FLASH_STATUSES)  #  filter by flash statuses

        if kits_data:
            new_data = data_helpers.merge_dicts(
                knack_data_parsed, kits_data, 'ATD_SIGNAL_ID',
                ['OPERATION_STATE_DATETIME', 'OPERATION_STATE', 'PLAN_ID'])

            new_data = data_helpers.iso_to_unix(new_data, replace_tz=True)

            new_data = data_helpers.stringify_key_values(new_data)

        else:
            new_data = []

        #  normalize the published dataset so it can be diffed vs. new_data
        socrata_data = socrata_helpers.get_public_data(SOCRATA_SIGNAL_STATUS)

        socrata_data = socrata_helpers.strip_geocoding(socrata_data)

        socrata_data = data_helpers.upper_case_keys(socrata_data)

        socrata_data = data_helpers.iso_to_unix(socrata_data)

        socrata_data = data_helpers.stringify_key_values(socrata_data)

        cd_results = data_helpers.detect_changes(socrata_data,
                                                 new_data,
                                                 'ATD_SIGNAL_ID',
                                                 keys=['OPERATION_STATE'])

        for change_type in cd_results:
            print('{} : {}'.format(change_type, len(cd_results[change_type])))

        if cd_results['new'] or cd_results['change'] or cd_results['delete']:
            socrata_payload = socrata_helpers.create_payload(
                cd_results, 'ATD_SIGNAL_ID')

            socrata_payload = socrata_helpers.create_location_fields(
                socrata_payload)

            socrata_payload = data_helpers.lower_case_keys(socrata_payload)

            socrata_payload = data_helpers.unix_to_iso(socrata_payload)

            status_upsert_response = socrata_helpers.upsert_data(
                secrets.SOCRATA_CREDENTIALS, socrata_payload,
                SOCRATA_SIGNAL_STATUS)

        else:
            status_upsert_response = {
                'Errors': 0,
                'message': 'No signal status change detected',
                'Rows Updated': 0,
                'Rows Created': 0,
                'Rows Deleted': 0
            }

        log_payload = socrata_helpers.prep_pub_log(date_time,
                                                   'signal_status_update',
                                                   status_upsert_response)

        pub_log_response = socrata_helpers.upsert_data(
            secrets.SOCRATA_CREDENTIALS, log_payload, SOCRATA_PUB_LOG_ID)

        #  a Socrata failure surfaces either as an 'error' key or a nonzero
        #  'Errors' count; alert on both (previously two duplicated branches,
        #  and 'Errors' is now read with .get to avoid a KeyError on
        #  responses that have neither key)
        if ('error' in status_upsert_response
                or status_upsert_response.get('Errors')):
            email_helpers.send_socrata_alert(secrets.ALERTS_DISTRIBUTION,
                                             SOCRATA_SIGNAL_STATUS,
                                             status_upsert_response)

        if cd_results['delete']:
            #  archive signals that dropped out of flash status
            historical_payload = data_helpers.lower_case_keys(
                cd_results['delete'])

            historical_payload = socrata_helpers.add_hist_fields(
                historical_payload)

            status_upsert_historical_response = socrata_helpers.upsert_data(
                secrets.SOCRATA_CREDENTIALS, historical_payload,
                SOCRATA_SIGNAL_STATUS_HISTORICAL)

            historical_log_payload = socrata_helpers.prep_pub_log(
                date_time, 'signal_status_historical_update',
                status_upsert_historical_response)

            pub_log_historical_response = socrata_helpers.upsert_data(
                secrets.SOCRATA_CREDENTIALS, historical_log_payload,
                SOCRATA_PUB_LOG_ID)

            if ('error' in status_upsert_historical_response
                    or status_upsert_historical_response.get('Errors')):
                email_helpers.send_socrata_alert(
                    secrets.ALERTS_DISTRIBUTION, SOCRATA_SIGNAL_STATUS,
                    status_upsert_historical_response)

        else:
            print('no new historical status data to upload')
            status_upsert_historical_response = None

        return {
            'res': status_upsert_response,
            'res_historical': status_upsert_historical_response,
        }

    except Exception as e:
        print('Failed to process data for {}'.format(date_time))
        print(e)
        email_helpers.send_email(
            secrets.ALERTS_DISTRIBUTION,
            'DATA PROCESSING ALERT: Signal Status Update Failure', str(e))
        raise  # bare raise preserves the original traceback
def main(date_time):
    """Sync sensor records from Knack to Socrata and log the result.

    Fetches and normalizes sensor records from Knack, diffs them against
    the private Socrata dataset, upserts any new/changed/deleted records,
    writes a publication log entry, and emails an alert if the upsert
    reports errors.

    Returns the publication log payload. Prints and re-raises on any
    exception.
    """
    print('starting stuff now')

    try:
        #  fetch and normalize sensor records from Knack
        field_list = knack_helpers.get_fields(KNACK_PARAMS)

        knack_data = knack_helpers.get_data(KNACK_PARAMS)

        knack_data = knack_helpers.parse_data(knack_data,
                                              field_list,
                                              KNACK_PARAMS,
                                              require_locations=True,
                                              convert_to_unix=True)

        knack_data = data_helpers.stringify_key_values(knack_data)

        knack_data = data_helpers.remove_linebreaks(knack_data,
                                                    ['LOCATION_NAME'])

        #  NOTE(review): knack_data_mills is only consumed by the disabled
        #  AGOL publication steps below; kept so they can be re-enabled
        knack_data_mills = data_helpers.unix_to_mills(deepcopy(knack_data))

        # token = agol_helpers.get_token(secrets.AGOL_CREDENTIALS)

        # agol_payload = agol_helpers.build_payload(knack_data_mills)

        # del_response = agol_helpers.delete_features(SERVICE_URL, token)

        # add_response = agol_helpers.add_features(SERVICE_URL, token, agol_payload)

        #  normalize the published dataset so it can be diffed vs. Knack
        socrata_data = socrata_helpers.get_private_data(
            secrets.SOCRATA_CREDENTIALS, SOCRATA_RESOURCE_ID)

        socrata_data = data_helpers.upper_case_keys(socrata_data)

        socrata_data = data_helpers.stringify_key_values(socrata_data)

        socrata_data = data_helpers.iso_to_unix(socrata_data, replace_tz=True)

        cd_results = data_helpers.detect_changes(
            socrata_data,
            knack_data,
            PRIMARY_KEY,
            keys=KNACK_PARAMS['FIELD_NAMES'] + ['LATITUDE', 'LONGITUDE'])

        if cd_results['new'] or cd_results['change'] or cd_results['delete']:
            socrata_payload = socrata_helpers.create_payload(
                cd_results, PRIMARY_KEY)

            socrata_payload = socrata_helpers.create_location_fields(
                socrata_payload)

        else:
            socrata_payload = []

        socrata_payload = data_helpers.lower_case_keys(socrata_payload)

        socrata_payload = data_helpers.unix_to_iso(socrata_payload)

        upsert_response = socrata_helpers.upsert_data(
            secrets.SOCRATA_CREDENTIALS, socrata_payload, SOCRATA_RESOURCE_ID)

        #  a Socrata failure surfaces either as an 'error' key or a nonzero
        #  'Errors' count; alert on both (previously two duplicated branches,
        #  and 'Errors' is now read with .get to avoid a KeyError on
        #  responses that have neither key)
        if 'error' in upsert_response or upsert_response.get('Errors'):
            email_helpers.send_socrata_alert(secrets.ALERTS_DISTRIBUTION,
                                             SOCRATA_RESOURCE_ID,
                                             upsert_response)

        log_payload = socrata_helpers.prep_pub_log(date_time, 'sensors_update',
                                                   upsert_response)

        pub_log_response = socrata_helpers.upsert_data(
            secrets.SOCRATA_CREDENTIALS, log_payload, SOCRATA_PUB_LOG_ID)

        return log_payload

    except Exception as e:
        print('Failed to process data for {}'.format(date_time))
        print(e)
        raise  # bare raise preserves the original traceback
# --- Exemplo n.º 6 (scraper artifact; the stray "0" was a vote count) ---
def main(date_time):
    """Sync signal corridor records from Knack to Socrata and log the result.

    Fetches sync-signal records and their location info from Knack, merges
    them, diffs against the private Socrata dataset, upserts any changes
    plus a publication log entry, and emails an alert if the upsert
    reports errors.

    Returns the Socrata upsert response. Prints and re-raises on any
    exception.
    """
    print('starting stuff now')

    try:
        #  get and parse phb eval data
        field_list = knack_helpers.get_fields(KNACK_PARAMS_SYNC_SIGNALS)

        knack_data_sync_signals = knack_helpers.get_data(
            KNACK_PARAMS_SYNC_SIGNALS)

        knack_data_sync_signals = knack_helpers.parse_data(
            knack_data_sync_signals,
            field_list,
            KNACK_PARAMS_SYNC_SIGNALS,
            convert_to_unix=True)

        knack_data_sync_signals = data_helpers.stringify_key_values(
            knack_data_sync_signals)

        knack_data_sync_signals_mills = data_helpers.unix_to_mills(
            deepcopy(knack_data_sync_signals))

        #  get and parse location info
        field_list = knack_helpers.get_fields(KNACK_PARAMS_LOCATIONS)

        knack_data_loc = knack_helpers.get_data(KNACK_PARAMS_LOCATIONS)

        knack_data_loc = knack_helpers.parse_data(knack_data_loc,
                                                  field_list,
                                                  KNACK_PARAMS_LOCATIONS,
                                                  convert_to_unix=True)

        knack_data_loc = data_helpers.stringify_key_values(knack_data_loc)

        #  append location info to eval data dicts
        knack_data_master = data_helpers.merge_dicts(
            knack_data_sync_signals_mills, knack_data_loc, 'ATD_SIGNAL_ID',
            ['LATITUDE', 'LONGITUDE'])

        print(knack_data_master[0:3])

        #  get published request data from Socrata and compare to Knack
        socrata_data = socrata_helpers.get_private_data(
            secrets.SOCRATA_CREDENTIALS, SOCRATA_RESOURCE_ID)

        socrata_data = data_helpers.upper_case_keys(socrata_data)

        socrata_data = data_helpers.stringify_key_values(socrata_data)

        socrata_data = data_helpers.iso_to_unix(socrata_data, replace_tz=True)

        cd_results = data_helpers.detect_changes(
            socrata_data,
            knack_data_master,
            PRIMARY_KEY,
            keys=KNACK_PARAMS_SYNC_SIGNALS['FIELD_NAMES'] +
            ['LATITUDE', 'LONGITUDE'])

        if cd_results['new'] or cd_results['change'] or cd_results['delete']:
            socrata_payload = socrata_helpers.create_payload(
                cd_results, PRIMARY_KEY)

            #  socrata_payload = socrata_helpers.create_location_fields(socrata_payload)

        else:
            socrata_payload = []

        socrata_payload = data_helpers.lower_case_keys(socrata_payload)

        socrata_payload = data_helpers.unix_to_iso(socrata_payload)

        upsert_response = socrata_helpers.upsert_data(
            secrets.SOCRATA_CREDENTIALS, socrata_payload, SOCRATA_RESOURCE_ID)

        #  a Socrata failure surfaces either as an 'error' key or a nonzero
        #  'Errors' count; alert on both (previously two duplicated branches,
        #  and 'Errors' is now read with .get to avoid a KeyError on
        #  responses that have neither key)
        if 'error' in upsert_response or upsert_response.get('Errors'):
            email_helpers.send_socrata_alert(secrets.ALERTS_DISTRIBUTION,
                                             SOCRATA_RESOURCE_ID,
                                             upsert_response)

        log_payload = socrata_helpers.prep_pub_log(date_time,
                                                   'sync_signal_corridors',
                                                   upsert_response)

        pub_log_response = socrata_helpers.upsert_data(
            secrets.SOCRATA_CREDENTIALS, log_payload, SOCRATA_PUB_LOG_ID)

        return upsert_response

    except Exception as e:
        print('Failed to process data for {}'.format(date_time))
        print(e)
        raise  # bare raise preserves the original traceback