Example 1
def process_samples(samples):
    """
    Convert sensor-generated samples into data rows suitable for uploading to a Fusion Table.
    """
    fields_to_columns = ['seconds',
                         'kind',
                         'pin',
                         'Tf',
                         'RH']

    # Loop over all samples.  Convert each to a Fusion Table row.
    data_rows = []
    for info in samples:
        row = [info[n] for n in fields_to_columns]

        seconds = row[0]
        time_stamp = utility.pretty_timestamp(seconds)
        row.insert(0, time_stamp)

        data_rows.append(row)

    # Build the output column names, copying the field list so it is not mutated.
    column_names = list(fields_to_columns)
    column_names.insert(0, 'DateTime')

    # Done.
    return data_rows, column_names
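If the function above is pasted into a script on its own, it can be exercised with fabricated samples like the sketch below. The `utility` stand-in and the sample field values are assumptions for illustration, not part of the original project; the real `utility.pretty_timestamp` is only assumed to format epoch seconds as a readable string.

import time
import types

def _pretty_timestamp(seconds, fmt='%Y-%m-%d %H:%M:%S'):
    # Stand-in for the project's utility.pretty_timestamp.
    return time.strftime(fmt, time.localtime(seconds))

utility = types.SimpleNamespace(pretty_timestamp=_pretty_timestamp)

samples = [
    {'seconds': 1381363200, 'kind': 'DHT22', 'pin': 4, 'Tf': 68.5, 'RH': 41.0},
    {'seconds': 1381363260, 'kind': 'DHT22', 'pin': 4, 'Tf': 68.7, 'RH': 40.8},
]

data_rows, column_names = process_samples(samples)

print(column_names)   # ['DateTime', 'seconds', 'kind', 'pin', 'Tf', 'RH']
print(data_rows[0])   # formatted timestamp followed by the five field values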
Example 2
def record_data(channels, queue, service, tableId, info_config, power_cycle_interval=None):
    """
    Record data from the sensors and hand each batch of samples to the uploader.
    """

    if power_cycle_interval is None:
        power_cycle_interval = 30 * 60  # seconds

    # Status LED.
    pin_ok = int(info_config['pin_ok'])
    pin_upload = int(info_config['pin_err'])

    blink_sensors = blinker.Blinker(pin_ok)

    # Setup.
    source = sensors.data_collector(queue)                     # data producer / generator
    sink = upload.data_uploader(service, tableId, pin_upload)  # consumer coroutine

    # Main processing loop.
    time_power_zero = time.time()
    for samples in source:
        try:
            # Pass the data along to the uploader.
            blink_sensors.frequency = 0

            sink.send(samples)

            blink_sensors.frequency = len(samples)

            # Pretty status message.
            t = samples[0]['seconds']
            fmt = '%Y-%m-%d %H-%M-%S'
            time_stamp = utility.pretty_timestamp(t, fmt)
            print('samples:%3d [%s]' % (len(samples), time_stamp))

            # Do a power cycle?
            if time.time() - time_power_zero > power_cycle_interval:
                print('Power cycle')
                blink_sensors.frequency = 0.2

                power_cycle(channels, info_config)
                time_power_zero = time.time()

                blink_sensors.frequency = 0

        except fusion_tables.errors.Who8MyGoogleError as e:
            print()
            print('Error: %s' % e)
            break

        except KeyboardInterrupt:
            print()
            print('User stop!')
            break

        except Exception as e:
            # Clean up before re-raising an unexpected exception.
            print(e)
            blink_sensors.stop()
            sink.close()

            raise

    # Finish.
    blink_sensors.stop()
    sink.close()
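Here `sink` is a consumer coroutine: each batch of samples is pushed in with `send()` and the coroutine is shut down with `close()`. The real `upload.data_uploader` also talks to the Fusion Table service, drives the upload LED, and presumably primes itself, but the send/close pattern can be sketched in isolation:

def toy_uploader():
    # Minimal consumer coroutine with the send()/close() interface assumed
    # for upload.data_uploader; no actual uploading happens here.
    total = 0
    try:
        while True:
            samples = yield              # wait for the next batch
            total += len(samples)
            print('would upload %d samples (total %d)' % (len(samples), total))
    except GeneratorExit:
        print('uploader closed after %d samples' % total)

sink = toy_uploader()
next(sink)                               # prime the coroutine to the first yield

sink.send([{'Tf': 68.5, 'RH': 41.0}])
sink.send([{'Tf': 68.7, 'RH': 40.8}, {'Tf': 69.0, 'RH': 40.5}])
sink.close()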
Example 3
def update(year_start=None, month_start=None, day_start=None):
    """
    Update local data store from Google Fusion Table.

    The default start date is the day of the most recent data in storage.
    """

    dates = dates_in_storage()
    if dates:
        date_latest = max(dates)
    else:
        # Nothing stored yet: fall back to the project start date for any
        # part of the start date the caller did not supply.
        year_start = year_start or 2013
        month_start = month_start or 8
        day_start = day_start or 1

    # Start time, US/Pacific time.
    if not year_start:
        year_start = date_latest.year

    if not month_start:
        month_start = date_latest.month

    if not day_start:
        day_start = date_latest.day

    # End time, US/Pacific time.
    date_tomorrow = arrow.now().shift(days=1)
    year_end = date_tomorrow.year
    month_end = date_tomorrow.month
    day_end = date_tomorrow.day

    #
    # Download data from Google in one big chunk.
    #
    seconds_start = utility.timestamp_seconds(year_start, month_start,
                                              day_start)
    seconds_end = utility.timestamp_seconds(year_end, month_end, day_end)

    pretty_start = utility.pretty_timestamp(seconds_start)
    pretty_end = utility.pretty_timestamp(seconds_end)

    print('Start: {:s}'.format(pretty_start))
    print('End:   {:s}'.format(pretty_end))

    table_id = master_table.get_current_table_id()
    print('Fetch data from Fusion Table: {:s}'.format(table_id))

    data_table = download.data_between(table_id, seconds_start, seconds_end)

    #
    # Extract data, store in Pandas DataFrame.
    #
    print('Process data...')
    ix_seconds = 1
    ix_pin = 3
    ix_T = 4
    ix_RH = 5

    # Convert data seconds to a handy timestamp (US/Pacific) index.
    seconds = [row[ix_seconds] for row in data_table]
    timestamps = [utility.datetime_seconds(s) for s in seconds]
    timestamps_index = pd.DatetimeIndex(timestamps)

    # Extract data columns.
    col_pin = np.asarray([row[ix_pin] for row in data_table], dtype=np.uint8)
    col_T = np.asarray([row[ix_T] for row in data_table], dtype=np.float32)
    col_RH = np.asarray([row[ix_RH] for row in data_table], dtype=np.float32)

    data_dict = {'Pin': col_pin, 'Temperature': col_T, 'Humidity': col_RH}

    data_frame = pd.DataFrame(data_dict, index=timestamps_index)

    pins = np.unique(data_frame.Pin.values)
    print('GPIO pins: {:s}'.format(str(pins)))

    #
    # Main loop over days of data.
    #
    dt_start = utility.datetime_seconds(seconds_start)
    dt_end = utility.datetime_seconds(seconds_end)

    path_store = os.path.join(path_to_module(), _folder_store)
    if not os.path.isdir(path_store):
        os.makedirs(path_store)

    for date_k in daterange(dt_start, dt_end):
        date_filter = date_k.strftime("%Y-%m-%d")

        try:
            # One day of data.
            df_k = data_frame.loc[date_filter]

            # Save to file.
            fname = 'data_{:s}.h5'.format(date_filter)
            f = os.path.join(path_store, fname)

            if df_k.shape[0]:
                print(date_filter, df_k.shape)
                df_k.to_hdf(f, key='df', format='table')

        except KeyError:
            pass
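The day-by-day loop relies on pandas partial string indexing: selecting rows of a frame backed by a DatetimeIndex with a 'YYYY-MM-DD' string returns just that day's data. A minimal sketch with fabricated hourly readings (the HDF5 write is omitted since it needs PyTables):

import numpy as np
import pandas as pd

# Two days of hourly readings on a DatetimeIndex.
idx = pd.date_range('2013-08-01', periods=48, freq='H')
frame = pd.DataFrame({'Temperature': np.linspace(65.0, 75.0, 48)}, index=idx)

# Selecting with a date string keeps only that day's rows.
one_day = frame.loc['2013-08-01']
print(one_day.shape)        # (24, 1)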