def add_measurement_influxdb(self, channel, measurement):
    """Apply any configured unit conversion to a channel's measurement,
    then write the measurement dict to InfluxDB.

    :param channel: channel index into self.channels_measurement
    :param measurement: dict of {channel: {'measurement', 'unit', 'value',
        'timestamp_utc'}} to be stored (mutated in place by conversion)
    """
    # A conversion entry for this channel means value/unit must be converted
    # before storage
    if self.channels_conversion[channel]:
        conversion = db_retrieve_table_daemon(
            Conversion,
            unique_id=self.channels_measurement[channel].conversion_id)
        if conversion:
            converted = parse_measurement(
                self.channels_conversion[channel],
                self.channels_measurement[channel],
                measurement,
                channel,
                measurement[channel],
                timestamp=measurement[channel]['timestamp_utc'])
            # Overwrite the original measurement/unit/value with the
            # converted equivalents
            for key in ('measurement', 'unit', 'value'):
                measurement[channel][key] = converted[channel][key]

    if not measurement:
        return

    self.logger.debug(
        "Adding measurement to influxdb: {}".format(measurement))
    add_measurements_influxdb(
        self.unique_id,
        measurement,
        use_same_timestamp=INPUT_INFORMATION[
            'measurements_use_same_timestamp'])
def create_measurements_dict(self):
    """Build and return a measurements record from self.measurement,
    applying each channel's configured conversion via parse_measurement.
    """
    record = {}
    for channel, channel_data in self.measurement.values.items():
        device_measurement = self.device_measurements.filter(
            DeviceMeasurements.channel == channel).first()
        # Channels without a value carry nothing to record
        if 'value' not in channel_data:
            continue
        conversion = self.conversions.filter(
            Conversion.unique_id == device_measurement.conversion_id).first()
        record = parse_measurement(
            conversion, device_measurement, record, channel, channel_data)
    return record
def create_measurements_dict(self):
    """Assemble the measurements record from self.measurement, log it,
    and return it for storage in InfluxDB.
    """
    record = {}
    for ch, ch_data in self.measurement.values.items():
        meas = self.device_measurements.filter(
            DeviceMeasurements.channel == ch).first()
        # Only channels that produced a value are recorded
        if 'value' not in ch_data:
            continue
        conv = self.conversions.filter(
            Conversion.unique_id == meas.conversion_id).first()
        record = parse_measurement(conv, meas, record, ch, ch_data)

    self.logger.debug(
        "Adding measurements to InfluxDB with ID {}: {}".format(
            self.unique_id, record))
    return record
def create_measurements_dict(self):
    """Return a dict of this input's measurements with any configured
    conversions applied per channel.
    """
    results = {}
    for chan, raw_measurement in self.measurement.values.items():
        dev_meas = self.device_measurements.filter(
            DeviceMeasurements.channel == chan).first()
        if 'value' in raw_measurement:
            # Look up the conversion configured for this channel (may be None)
            conv = self.conversions.filter(
                Conversion.unique_id == dev_meas.conversion_id).first()
            results = parse_measurement(
                conv, dev_meas, results, chan, raw_measurement)
    return results
def loop(self):
    """Periodic worker: once per period, generate a random value for every
    channel, apply configured conversions, and store the results in InfluxDB.
    """
    # Not yet time for the next run
    if self.timer_loop >= time.time():
        return

    # Advance the timer past the present so the next run is one full
    # period away, even if several periods were missed
    while self.timer_loop < time.time():
        self.timer_loop += self.period

    measurements = {}
    for channel in self.channels_measurement:
        channel_measurement = self.channels_measurement[channel]

        # Original value/unit for this channel (value is randomly generated)
        measurements[channel] = {
            'measurement': channel_measurement.measurement,
            'unit': channel_measurement.unit,
            'value': random.randint(1, 100)
        }

        # Convert value/unit when a conversion is configured for the channel
        if self.channels_conversion[channel]:
            conversion = db_retrieve_table_daemon(
                Conversion,
                unique_id=channel_measurement.conversion_id)
            if conversion:
                converted = parse_measurement(
                    self.channels_conversion[channel],
                    channel_measurement,
                    measurements,
                    channel,
                    measurements[channel])
                for key in ('measurement', 'unit', 'value'):
                    measurements[channel][key] = converted[channel][key]

    if measurements:
        self.logger.debug(
            "Adding measurements to influxdb: {}".format(measurements))
        add_measurements_influxdb(self.unique_id, measurements)
    else:
        self.logger.debug("No measurements to add to influxdb.")
def get_new_data(self, past_seconds):
    """Fetch recent uplinks from The Things Network (v2 storage API) and
    store each enabled channel's value in InfluxDB, then persist the newest
    uplink timestamp so the next poll resumes from it.

    :param past_seconds: how far back (seconds) to query stored uplinks
    """
    # Basic implementation. Future development may use more complex library to access API
    endpoint = "https://{app}.data.thethingsnetwork.org/api/v2/query/{dev}?last={time}".format(
        app=self.application_id,
        dev=self.device_id,
        time="{}s".format(int(past_seconds)))
    headers = {"Authorization": "key {k}".format(k=self.app_api_key)}
    timestamp_format = '%Y-%m-%dT%H:%M:%S.%f'
    response = requests.get(endpoint, headers=headers)

    try:
        # Parse the body once and reuse it below (previously the body was
        # parsed a second time when iterating)
        responses = response.json()
    except ValueError:  # No data returned
        self.logger.debug(
            "Response Error. Response: {}. Likely there is no data to be retrieved on TTN"
            .format(response.content))
        return

    for each_resp in responses:
        if not self.running:
            break

        try:
            datetime_utc = datetime.datetime.strptime(
                each_resp['time'][:-7], timestamp_format)
        except Exception:
            # Sometimes the original timestamp is in milliseconds
            # instead of nanoseconds. Therefore, remove 3 less digits
            # past the decimal and try again to parse.
            try:
                datetime_utc = datetime.datetime.strptime(
                    each_resp['time'][:-4], timestamp_format)
            except Exception as e:
                self.logger.error(
                    "Could not parse timestamp '{}': {}".format(
                        each_resp['time'], e))
                continue  # Malformed timestamp encountered. Discard measurement.

        # Track the newest uplink timestamp seen in this batch
        if (not self.latest_datetime or
                self.latest_datetime < datetime_utc):
            self.latest_datetime = datetime_utc

        measurements = {}
        for channel in self.channels_measurement:
            if (self.is_enabled(channel) and
                    self.options_channels['variable_name'][channel] in each_resp and
                    each_resp[self.options_channels['variable_name'][channel]] is not None):
                # Original value/unit
                measurements[channel] = {
                    'measurement': self.channels_measurement[channel].measurement,
                    'unit': self.channels_measurement[channel].unit,
                    'value': each_resp[
                        self.options_channels['variable_name'][channel]],
                    'timestamp_utc': datetime_utc
                }

                # Convert value/unit if conversion_id present and valid
                if self.channels_conversion[channel]:
                    conversion = db_retrieve_table_daemon(
                        Conversion,
                        unique_id=self.channels_measurement[channel].conversion_id)
                    if conversion:
                        meas = parse_measurement(
                            self.channels_conversion[channel],
                            self.channels_measurement[channel],
                            measurements,
                            channel,
                            measurements[channel],
                            timestamp=datetime_utc)
                        measurements[channel]['measurement'] = meas[channel]['measurement']
                        measurements[channel]['unit'] = meas[channel]['unit']
                        measurements[channel]['value'] = meas[channel]['value']

        if measurements:
            self.logger.debug(
                "Adding measurements to influxdb: {}".format(measurements))
            add_measurements_influxdb(
                self.unique_id,
                measurements,
                use_same_timestamp=INPUT_INFORMATION[
                    'measurements_use_same_timestamp'])
        else:
            self.logger.debug("No measurements to add to influxdb.")

    # Persist the newest timestamp. Guard on latest_datetime: if no record
    # parsed successfully it may still be None, and comparing None against
    # a datetime raises TypeError.
    if self.running and self.latest_datetime:
        with session_scope(MYCODO_DB_PATH) as new_session:
            mod_input = new_session.query(Input).filter(
                Input.unique_id == self.unique_id).first()
            if not mod_input.datetime or mod_input.datetime < self.latest_datetime:
                mod_input.datetime = self.latest_datetime
                new_session.commit()
def get_new_data(self, past_seconds):
    """Query the TTN v3 storage API for stored uplinks, write decoded
    payload values for enabled channels to InfluxDB, and optionally
    forward radiation values (CPM, uSv/hr) to Safecast and GMC Map.

    :param past_seconds: how many seconds of stored uplinks to request
    """
    # Basic implementation. Future development may use more complex library to access API
    endpoint = "https://nam1.cloud.thethings.network/api/v3/as/applications/{app}/devices/{dev}/packages/storage/uplink_message?last={time}&field_mask=up.uplink_message.decoded_payload".format(
        app=self.application_id,
        dev=self.device_id,
        time="{}s".format(int(past_seconds)))
    headers = {
        "Authorization": "Bearer {k}".format(k=self.app_api_key),
        'Content-Type': 'application/json'
    }
    timestamp_format = '%Y-%m-%dT%H:%M:%S.%f'
    response = requests.get(endpoint, headers=headers)

    if response.status_code != 200:
        self.logger.info("response.status_code != 200: {}".format(
            response.reason))

    self.logger.debug("response.content: {}".format(response.content))

    # The v3 storage API returns newline-delimited JSON documents
    list_dicts = response.content.decode().split("\n")
    self.logger.debug("list_dicts: {}".format(list_dicts))

    cpm_value = None
    cpm_ts = None
    usv_h_value = None
    usv_h_ts = None

    for each_resp in list_dicts:
        if not each_resp:
            continue

        self.logger.debug("each_resp: {}".format(each_resp))

        # Reset per record so only the most recent record's radiation
        # values survive the loop for the forwarding steps below
        # (the *_ts variables are intentionally not reset)
        cpm_value = None
        usv_h_value = None

        try:
            resp_json = json.loads(each_resp)
        except:
            resp_json = {}

        self.logger.debug("resp_json: {}".format(resp_json))

        # NOTE(review): dict.copy() is shallow — the nested per-channel
        # dicts remain shared with the module-level measurements_dict, so
        # the writes below mutate that template. Confirm whether a
        # deepcopy is required here.
        self.return_dict = measurements_dict.copy()

        try:
            datetime_utc = datetime.datetime.strptime(
                resp_json['result']['received_at'][:-7], timestamp_format)
        except:
            # Sometimes the original timestamp is in milliseconds
            # instead of nanoseconds. Therefore, remove 3 less digits
            # past the decimal and try again to parse.
            try:
                datetime_utc = datetime.datetime.strptime(
                    resp_json['result']['received_at'][:-4],
                    timestamp_format)
            except:
                self.logger.error("Could not parse timestamp: {}".format(
                    resp_json['result']['received_at']))
                # NOTE(review): returns (abandoning remaining records)
                # rather than continue — confirm this is intended
                return

        # Track the newest uplink timestamp seen in this batch
        if (not self.latest_datetime or
                self.latest_datetime < datetime_utc):
            self.latest_datetime = datetime_utc

        for channel in self.return_dict:
            if (self.is_enabled(channel) and
                    self.return_dict[channel]['name'] in
                    resp_json['result']['uplink_message']['decoded_payload'] and
                    resp_json['result']['uplink_message']['decoded_payload'][
                        self.return_dict[channel]['name']] is not None):
                self.return_dict[channel]['value'] = resp_json['result'][
                    'uplink_message']['decoded_payload'][
                    self.return_dict[channel]['name']]
                self.return_dict[channel]['timestamp_utc'] = datetime_utc

                # Remember radiation values for Safecast/GMC forwarding
                if self.return_dict[channel]['unit'] == 'cpm':
                    cpm_value = float(self.return_dict[channel]['value'])
                    cpm_ts = self.return_dict[channel]['timestamp_utc']
                elif self.return_dict[channel]['unit'] == 'uSv_hr':
                    usv_h_value = float(self.return_dict[channel]['value'])
                    usv_h_ts = self.return_dict[channel]['timestamp_utc']

                # Convert value/unit if conversion_id present and valid
                if self.channels_conversion[channel]:
                    conversion = db_retrieve_table_daemon(
                        Conversion,
                        unique_id=self.channels_measurement[channel].conversion_id)
                    if conversion:
                        meas = parse_measurement(
                            self.channels_conversion[channel],
                            self.channels_measurement[channel],
                            self.return_dict,
                            channel,
                            self.return_dict[channel],
                            timestamp=datetime_utc)
                        self.return_dict[channel]['unit'] = meas[channel][
                            'unit']
                        self.return_dict[channel]['value'] = meas[channel][
                            'value']

        # Only store when both channel 0 and channel 1 received a value
        if 'value' in self.return_dict[0] and 'value' in self.return_dict[
                1]:
            self.logger.debug("Adding measurements to influxdb: {}".format(
                self.return_dict))
            add_measurements_influxdb(
                self.unique_id,
                self.return_dict,
                use_same_timestamp=INPUT_INFORMATION[
                    'measurements_use_same_timestamp'])
        else:
            self.logger.debug("No measurements to add to influxdb.")

    # Send uSv/hr to Safecast
    if self.send_safecast and cpm_value and usv_h_value:
        try:
            safecast = self.safecastpy.SafecastPy(
                api_key=self.safecast_api_key)
            measurement_usv = safecast.add_measurement(
                json={
                    'latitude': self.safecast_latitude,
                    'longitude': self.safecast_longitude,
                    'value': usv_h_value,
                    'unit': self.safecastpy.UNIT_USV,
                    'captured_at': usv_h_ts.isoformat() + '+00:00',
                    'device_id': self.safecast_device_id,
                    'location_name': self.safecast_location_name
                })
            measurement_cpm = safecast.add_measurement(
                json={
                    'latitude': self.safecast_latitude,
                    'longitude': self.safecast_longitude,
                    'value': cpm_value,
                    'unit': self.safecastpy.UNIT_CPM,
                    'captured_at': cpm_ts.isoformat() + '+00:00',
                    'device_id': self.safecast_device_id,
                    'location_name': self.safecast_location_name
                })
            self.logger.debug('uSv/hr measurement id: {0}'.format(
                measurement_usv['id']))
            self.logger.debug('CPM measurement id: {0}'.format(
                measurement_cpm['id']))
        except Exception as e:
            self.logger.error(
                "Error adding data to Safecast: {}".format(e))

    # Send to GMC Map (doesn't accept time, so can only send the latest measurement)
    if (self.send_gmcmap and cpm_value and usv_h_value):
        try:
            # NOTE(review): the URL hard-codes AID/GID while the aid/gcid
            # format arguments below go unused — confirm whether the
            # configured account/counter IDs should be substituted instead
            gmcmap = 'http://www.GMCmap.com/log2.asp?AID=02376&GID=22044260632&CPM={cpm:.0f}&uSV={usv:.3f}'.format(
                aid=self.gmcmap_account_id,
                gcid=self.gmcmap_geiger_counter_id,
                cpm=cpm_value,
                usv=usv_h_value)
            contents = urllib.request.urlopen(gmcmap).read()
            self.logger.debug("GMCMap: {}".format(contents))
        except Exception as e:
            self.logger.error("Error adding data to GMC Map: {}".format(e))

    # set datetime to latest timestamp
    if self.running:
        with session_scope(MYCODO_DB_PATH) as new_session:
            mod_input = new_session.query(Input).filter(
                Input.unique_id == self.unique_id).first()
            if not mod_input.datetime or mod_input.datetime < self.latest_datetime:
                mod_input.datetime = self.latest_datetime
                new_session.commit()
def download_data(self):
    """Download logged temperature/humidity from the SHT gadget (BLE) and
    store them in InfluxDB.

    Stores temperature (channel 0) and humidity (channel 1) within
    acceptable physical ranges, then calculates dewpoint (channel 3) and
    vapor pressure deficit (channel 4) for every timestamp present in
    both series.
    """
    self.logger.debug("Downloading Data")

    # Clear data previously stored in dictionary
    self.gadget.loggedDataReadout = {'Temp': {}, 'Humi': {}}

    # Download stored data starting from self.gadget.newestTimeStampMs
    self.gadget.readLoggedDataInterval(
        startMs=self.gadget.newestTimeStampMs)

    # Block until the gadget stops sending notifications or the readout
    # finishes; also exits promptly if the input is stopped
    while self.running:
        if (not self.gadget.waitForNotifications(5) or
                not self.gadget.isLogReadoutInProgress()):
            break  # Done reading data
    self.logger.debug("Downloaded Data")

    self.logger.debug("Parsing/saving data")

    list_timestamps_temp = []
    list_timestamps_humi = []

    # Store logged temperature
    self.logger.debug("Storing {} temperatures".format(
        len(self.gadget.loggedDataReadout['Temp'])))
    for each_ts, each_measure in self.gadget.loggedDataReadout['Temp'].items():
        if not self.running:
            break
        if -40 > each_measure or each_measure > 125:
            continue  # Temperature outside acceptable range
        list_timestamps_temp.append(each_ts)
        if self.is_enabled(0):
            # Timestamps from the gadget are in milliseconds
            datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)
            measurement_single = {
                0: {
                    'measurement': 'temperature',
                    'unit': 'C',
                    'value': each_measure
                }
            }
            measurement_single = parse_measurement(
                self.channels_conversion[0],
                self.channels_measurement[0],
                measurement_single,
                self.channels_measurement[0].channel,
                measurement_single[0])
            write_influxdb_value(
                self.unique_id,
                measurement_single[0]['unit'],
                value=measurement_single[0]['value'],
                measure=measurement_single[0]['measurement'],
                channel=0,
                timestamp=datetime_ts)

    # Store logged humidity
    self.logger.debug("Storing {} humidities".format(
        len(self.gadget.loggedDataReadout['Humi'])))
    for each_ts, each_measure in self.gadget.loggedDataReadout['Humi'].items():
        if not self.running:
            break
        if 0 >= each_measure or each_measure > 100:
            continue  # Humidity outside acceptable range
        list_timestamps_humi.append(each_ts)
        if self.is_enabled(1):
            datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)
            measurement_single = {
                1: {
                    'measurement': 'humidity',
                    'unit': 'percent',
                    'value': each_measure
                }
            }
            measurement_single = parse_measurement(
                self.channels_conversion[1],
                self.channels_measurement[1],
                measurement_single,
                self.channels_measurement[1].channel,
                measurement_single[1])
            write_influxdb_value(
                self.unique_id,
                measurement_single[1]['unit'],
                value=measurement_single[1]['value'],
                measure=measurement_single[1]['measurement'],
                channel=1,
                timestamp=datetime_ts)

    # Find common timestamps from both temperature and humidity lists
    list_timestamps_both = list(
        set(list_timestamps_temp).intersection(list_timestamps_humi))

    self.logger.debug("Calculating/storing {} dewpoint and vpd".format(
        len(list_timestamps_both)))
    for each_ts in list_timestamps_both:
        if not self.running:
            break

        temperature = self.gadget.loggedDataReadout['Temp'][each_ts]
        humidity = self.gadget.loggedDataReadout['Humi'][each_ts]
        if ((-200 > temperature or temperature > 200) or
                (0 > humidity or humidity > 100)):
            continue  # Measurement outside acceptable range
        datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)

        # Calculate and store dew point
        # (requires temperature and humidity channels enabled)
        if (self.is_enabled(3) and
                self.is_enabled(0) and
                self.is_enabled(1)):
            dewpoint = calculate_dewpoint(temperature, humidity)
            measurement_single = {
                3: {
                    'measurement': 'dewpoint',
                    'unit': 'C',
                    'value': dewpoint
                }
            }
            measurement_single = parse_measurement(
                self.channels_conversion[3],
                self.channels_measurement[3],
                measurement_single,
                self.channels_measurement[3].channel,
                measurement_single[3])
            write_influxdb_value(
                self.unique_id,
                measurement_single[3]['unit'],
                value=measurement_single[3]['value'],
                measure=measurement_single[3]['measurement'],
                channel=3,
                timestamp=datetime_ts)

        # Calculate and store vapor pressure deficit
        if (self.is_enabled(4) and
                self.is_enabled(0) and
                self.is_enabled(1)):
            vpd = calculate_vapor_pressure_deficit(temperature, humidity)
            measurement_single = {
                4: {
                    'measurement': 'vapor_pressure_deficit',
                    'unit': 'Pa',
                    'value': vpd
                }
            }
            measurement_single = parse_measurement(
                self.channels_conversion[4],
                self.channels_measurement[4],
                measurement_single,
                self.channels_measurement[4].channel,
                measurement_single[4])
            write_influxdb_value(
                self.unique_id,
                measurement_single[4]['unit'],
                value=measurement_single[4]['value'],
                measure=measurement_single[4]['measurement'],
                channel=4,
                timestamp=datetime_ts)

    # Download successfully finished, set newest timestamp
    self.gadget.newestTimeStampMs = self.gadget.tmp_newestTimeStampMs
    self.logger.debug("Parsed/saved data")
def download_data(self):
    """Download logged temperature/humidity from the SHT gadget (BLE) and
    store them in InfluxDB.

    Stores temperature (channel 0) and humidity (channel 1), then derives
    dewpoint (channel 3) and vapor pressure deficit (channel 4) for every
    timestamp present in both series.
    """
    # Clear data previously stored in dictionary
    self.gadget.loggedDataReadout = {'Temp': {}, 'Humi': {}}

    # Download stored data starting from self.gadget.newestTimeStampMs
    self.gadget.readLoggedDataInterval(
        startMs=self.gadget.newestTimeStampMs)

    # Block until the gadget stops notifying or readout completes
    while self.running:
        if (not self.gadget.waitForNotifications(5) or
                not self.gadget.isLogReadoutInProgress()):
            break  # Done reading data

    list_timestamps_temp = []
    list_timestamps_humi = []

    # Store logged temperature
    measurement = self.device_measurements.filter(
        DeviceMeasurements.channel == 0).first()
    conversion = db_retrieve_table_daemon(
        Conversion, unique_id=measurement.conversion_id)
    for each_ts, each_measure in self.gadget.loggedDataReadout[
            'Temp'].items():
        list_timestamps_temp.append(each_ts)
        # Timestamps from the gadget are in milliseconds
        datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)
        if self.is_enabled(0):
            measurement_single = {
                0: {
                    'measurement': 'temperature',
                    'unit': 'C',
                    'value': each_measure
                }
            }
            measurement_single = parse_measurement(conversion,
                                                   measurement,
                                                   measurement_single,
                                                   measurement.channel,
                                                   measurement_single[0])
            write_influxdb_value(
                self.unique_id,
                measurement_single[0]['unit'],
                value=measurement_single[0]['value'],
                measure=measurement_single[0]['measurement'],
                channel=0,
                timestamp=datetime_ts)

    # Store logged humidity
    measurement = self.device_measurements.filter(
        DeviceMeasurements.channel == 1).first()
    conversion = db_retrieve_table_daemon(
        Conversion, unique_id=measurement.conversion_id)
    for each_ts, each_measure in self.gadget.loggedDataReadout[
            'Humi'].items():
        list_timestamps_humi.append(each_ts)
        datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)
        if self.is_enabled(1):
            measurement_single = {
                1: {
                    'measurement': 'humidity',
                    'unit': 'percent',
                    'value': each_measure
                }
            }
            measurement_single = parse_measurement(conversion,
                                                   measurement,
                                                   measurement_single,
                                                   measurement.channel,
                                                   measurement_single[1])
            write_influxdb_value(
                self.unique_id,
                measurement_single[1]['unit'],
                value=measurement_single[1]['value'],
                measure=measurement_single[1]['measurement'],
                channel=1,
                timestamp=datetime_ts)

    # Find common timestamps from both temperature and humidity lists
    list_timestamps_both = list(
        set(list_timestamps_temp).intersection(list_timestamps_humi))

    for each_ts in list_timestamps_both:
        datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)

        # Calculate and store dew point
        # (requires temperature and humidity channels enabled)
        if (self.is_enabled(3) and
                self.is_enabled(0) and
                self.is_enabled(1)):
            measurement = self.device_measurements.filter(
                DeviceMeasurements.channel == 3).first()
            conversion = db_retrieve_table_daemon(
                Conversion, unique_id=measurement.conversion_id)
            dewpoint = calculate_dewpoint(
                self.gadget.loggedDataReadout['Temp'][each_ts],
                self.gadget.loggedDataReadout['Humi'][each_ts])
            measurement_single = {
                3: {
                    'measurement': 'dewpoint',
                    'unit': 'C',
                    'value': dewpoint
                }
            }
            measurement_single = parse_measurement(conversion,
                                                   measurement,
                                                   measurement_single,
                                                   measurement.channel,
                                                   measurement_single[3])
            write_influxdb_value(
                self.unique_id,
                measurement_single[3]['unit'],
                value=measurement_single[3]['value'],
                measure=measurement_single[3]['measurement'],
                channel=3,
                timestamp=datetime_ts)

        # Calculate and store vapor pressure deficit
        if (self.is_enabled(4) and
                self.is_enabled(0) and
                self.is_enabled(1)):
            measurement = self.device_measurements.filter(
                DeviceMeasurements.channel == 4).first()
            conversion = db_retrieve_table_daemon(
                Conversion, unique_id=measurement.conversion_id)
            vpd = calculate_vapor_pressure_deficit(
                self.gadget.loggedDataReadout['Temp'][each_ts],
                self.gadget.loggedDataReadout['Humi'][each_ts])
            measurement_single = {
                4: {
                    'measurement': 'vapor_pressure_deficit',
                    'unit': 'Pa',
                    'value': vpd
                }
            }
            measurement_single = parse_measurement(conversion,
                                                   measurement,
                                                   measurement_single,
                                                   measurement.channel,
                                                   measurement_single[4])
            write_influxdb_value(
                self.unique_id,
                measurement_single[4]['unit'],
                value=measurement_single[4]['value'],
                measure=measurement_single[4]['measurement'],
                channel=4,
                timestamp=datetime_ts)

    # Download successfully finished, set newest timestamp
    self.gadget.newestTimeStampMs = self.gadget.tmp_newestTimeStampMs
def download_data(self):
    """Download logged temperature/humidity from the SHT gadget (BLE) and
    store them in InfluxDB.

    Channel layout: 0 temperature (C), 1 humidity (percent), 3 dewpoint
    (derived), 4 vapor pressure deficit (derived). Derived values are
    only calculated for timestamps present in both raw series.
    """
    # Clear data previously stored in dictionary
    self.gadget.loggedDataReadout = {'Temp': {}, 'Humi': {}}

    # Download stored data starting from self.gadget.newestTimeStampMs
    self.gadget.readLoggedDataInterval(
        startMs=self.gadget.newestTimeStampMs)

    # Block until the gadget stops notifying or readout completes
    while self.running:
        if (not self.gadget.waitForNotifications(5) or
                not self.gadget.isLogReadoutInProgress()):
            break  # Done reading data

    list_timestamps_temp = []
    list_timestamps_humi = []

    # Store logged temperature
    measurement = self.device_measurements.filter(
        DeviceMeasurements.channel == 0).first()
    conversion = db_retrieve_table_daemon(
        Conversion, unique_id=measurement.conversion_id)
    for each_ts, each_measure in self.gadget.loggedDataReadout['Temp'].items():
        list_timestamps_temp.append(each_ts)
        # Timestamps from the gadget are in milliseconds
        datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)
        if self.is_enabled(0):
            measurement_single = {
                0: {
                    'measurement': 'temperature',
                    'unit': 'C',
                    'value': each_measure
                }
            }
            measurement_single = parse_measurement(
                conversion,
                measurement,
                measurement_single,
                measurement.channel,
                measurement_single[0])
            write_influxdb_value(
                self.unique_id,
                measurement_single[0]['unit'],
                value=measurement_single[0]['value'],
                measure=measurement_single[0]['measurement'],
                channel=0,
                timestamp=datetime_ts)

    # Store logged humidity
    measurement = self.device_measurements.filter(
        DeviceMeasurements.channel == 1).first()
    conversion = db_retrieve_table_daemon(
        Conversion, unique_id=measurement.conversion_id)
    for each_ts, each_measure in self.gadget.loggedDataReadout['Humi'].items():
        list_timestamps_humi.append(each_ts)
        datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)
        if self.is_enabled(1):
            measurement_single = {
                1: {
                    'measurement': 'humidity',
                    'unit': 'percent',
                    'value': each_measure
                }
            }
            measurement_single = parse_measurement(
                conversion,
                measurement,
                measurement_single,
                measurement.channel,
                measurement_single[1])
            write_influxdb_value(
                self.unique_id,
                measurement_single[1]['unit'],
                value=measurement_single[1]['value'],
                measure=measurement_single[1]['measurement'],
                channel=1,
                timestamp=datetime_ts)

    # Find common timestamps from both temperature and humidity lists
    list_timestamps_both = list(
        set(list_timestamps_temp).intersection(list_timestamps_humi))

    for each_ts in list_timestamps_both:
        datetime_ts = datetime.datetime.utcfromtimestamp(each_ts / 1000)

        # Calculate and store dew point
        # (requires temperature and humidity channels enabled)
        if (self.is_enabled(3) and
                self.is_enabled(0) and
                self.is_enabled(1)):
            measurement = self.device_measurements.filter(
                DeviceMeasurements.channel == 3).first()
            conversion = db_retrieve_table_daemon(
                Conversion, unique_id=measurement.conversion_id)
            dewpoint = calculate_dewpoint(
                self.gadget.loggedDataReadout['Temp'][each_ts],
                self.gadget.loggedDataReadout['Humi'][each_ts])
            measurement_single = {
                3: {
                    'measurement': 'dewpoint',
                    'unit': 'C',
                    'value': dewpoint
                }
            }
            measurement_single = parse_measurement(
                conversion,
                measurement,
                measurement_single,
                measurement.channel,
                measurement_single[3])
            write_influxdb_value(
                self.unique_id,
                measurement_single[3]['unit'],
                value=measurement_single[3]['value'],
                measure=measurement_single[3]['measurement'],
                channel=3,
                timestamp=datetime_ts)

        # Calculate and store vapor pressure deficit
        if (self.is_enabled(4) and
                self.is_enabled(0) and
                self.is_enabled(1)):
            measurement = self.device_measurements.filter(
                DeviceMeasurements.channel == 4).first()
            conversion = db_retrieve_table_daemon(
                Conversion, unique_id=measurement.conversion_id)
            vpd = calculate_vapor_pressure_deficit(
                self.gadget.loggedDataReadout['Temp'][each_ts],
                self.gadget.loggedDataReadout['Humi'][each_ts])
            measurement_single = {
                4: {
                    'measurement': 'vapor_pressure_deficit',
                    'unit': 'Pa',
                    'value': vpd
                }
            }
            measurement_single = parse_measurement(
                conversion,
                measurement,
                measurement_single,
                measurement.channel,
                measurement_single[4])
            write_influxdb_value(
                self.unique_id,
                measurement_single[4]['unit'],
                value=measurement_single[4]['value'],
                measure=measurement_single[4]['measurement'],
                channel=4,
                timestamp=datetime_ts)

    # Download successfully finished, set newest timestamp
    self.gadget.newestTimeStampMs = self.gadget.tmp_newestTimeStampMs
def get_new_data(self, past_seconds):
    """Query the TTN (v2) storage API for uplinks from the past
    `past_seconds` seconds and store enabled measurements in InfluxDB,
    then persist the newest uplink timestamp for the next poll.

    :param past_seconds: how far back (seconds) to query stored uplinks
    """
    # Basic implementation. Future development may use more complex library to access API
    endpoint = "https://{app}.data.thethingsnetwork.org/api/v2/query/{dev}?last={time}".format(
        app=self.application_id,
        dev=self.device_id,
        time="{}s".format(int(past_seconds)))
    headers = {"Authorization": "key {k}".format(k=self.app_api_key)}
    timestamp_format = '%Y-%m-%dT%H:%M:%S.%f'
    response = requests.get(endpoint, headers=headers)

    try:
        responses = response.json()
    except ValueError:  # No data returned
        self.logger.debug("Response Error. Response: {}".format(
            response.content))
        return

    # Iterate the already-parsed body (previously response.json() was
    # parsed a second time here)
    for each_resp in responses:
        if not self.running:
            break

        try:
            datetime_utc = datetime.datetime.strptime(
                each_resp['time'][:-7], timestamp_format)
        except Exception:
            # Sometimes the original timestamp is in milliseconds
            # instead of nanoseconds. Therefore, remove 3 less digits
            # past the decimal and try again to parse.
            try:
                datetime_utc = datetime.datetime.strptime(
                    each_resp['time'][:-4], timestamp_format)
            except Exception:
                self.logger.error("Could not parse timestamp: {}".format(
                    each_resp['time']))
                continue  # Malformed timestamp encountered. Discard measurement.

        # Track the newest uplink timestamp seen in this batch
        if (not self.latest_datetime or
                self.latest_datetime < datetime_utc):
            self.latest_datetime = datetime_utc

        measurements = {}
        for each_meas in self.device_measurements.all():
            if (self.is_enabled(each_meas.channel) and
                    each_meas.name in each_resp and
                    each_resp[each_meas.name] is not None):
                # Original value/unit
                measurements[each_meas.channel] = {
                    'measurement': each_meas.measurement,
                    'unit': each_meas.unit,
                    'value': each_resp[each_meas.name],
                    'timestamp': datetime_utc
                }

                # Convert value/unit if conversion_id present and valid
                if each_meas.conversion_id:
                    conversion = db_retrieve_table_daemon(
                        Conversion, unique_id=each_meas.conversion_id)
                    if conversion:
                        meas = parse_measurement(
                            conversion,
                            each_meas,
                            measurements,
                            each_meas.channel,
                            measurements[each_meas.channel])
                        measurements[each_meas.channel]['measurement'] = \
                            meas[each_meas.channel]['measurement']
                        measurements[each_meas.channel]['unit'] = \
                            meas[each_meas.channel]['unit']
                        measurements[each_meas.channel]['value'] = \
                            meas[each_meas.channel]['value']
                        measurements[each_meas.channel]['timestamp'] = \
                            datetime_utc

        add_measurements_influxdb(self.unique_id, measurements)

    # set datetime to latest timestamp. Guard latest_datetime: it may
    # still be None if no record parsed, and comparing None against a
    # datetime raises TypeError.
    if self.running and self.latest_datetime:
        with session_scope(MYCODO_DB_PATH) as new_session:
            mod_input = new_session.query(Input).filter(
                Input.unique_id == self.unique_id).first()
            if not mod_input.datetime or mod_input.datetime < self.latest_datetime:
                mod_input.datetime = self.latest_datetime
                new_session.commit()
def get_measurement(self):
    """ Gets the temperature and humidity """
    measurements = {}

    for channel in self.channels_measurement:
        if not self.is_enabled(channel):
            continue

        # Record the channel's configured measurement/unit alongside a
        # randomly generated value
        measurements[channel] = {
            'measurement': self.channels_measurement[channel].measurement,
            'unit': self.channels_measurement[channel].unit,
            'value': self.random.randint(50, 70)
        }

        self.logger.info(
            "Channel {} is enabled and storing a value of {} "
            "with measurement {} and unit {}".format(
                channel,
                measurements[channel]['value'],
                measurements[channel]['measurement'],
                measurements[channel]['unit']))

        # Convert value/unit when a conversion is configured for this channel
        if self.channels_conversion[channel]:
            conversion = db_retrieve_table_daemon(
                Conversion,
                unique_id=self.channels_measurement[channel].conversion_id)
            if conversion:
                converted = parse_measurement(
                    self.channels_conversion[channel],
                    self.channels_measurement[channel],
                    measurements,
                    channel,
                    measurements[channel])
                for key in ('measurement', 'unit', 'value'):
                    measurements[channel][key] = converted[channel][key]

    if measurements:
        self.logger.debug(
            "Adding measurements to influxdb: {}".format(measurements))
        add_measurements_influxdb(
            self.unique_id,
            measurements,
            use_same_timestamp=INPUT_INFORMATION[
                'measurements_use_same_timestamp'])
    else:
        self.logger.debug("No measurements to add to influxdb.")

    self.logger.info("This INFO message will always be displayed. "
                     "self.fan_modulate: {}, "
                     "self.fan_seconds: {}, "
                     "self.measure_range: {}.".format(
                         self.fan_modulate,
                         self.fan_seconds,
                         self.measure_range))

    self.logger.debug("This DEBUG message will only be displayed if the "
                      "Debug option is enabled.")

    # NOTE(review): self.return_dict is not populated in this method —
    # presumably set elsewhere; verify callers receive what they expect
    return self.return_dict
def get_new_data(self, past_seconds):
    """Fetch stored uplinks from The Things Network (v2 storage API),
    write enabled channel values to InfluxDB, and optionally forward
    radiation measurements to GMC Map and Safecast.

    :param past_seconds: how far back (seconds) to query stored uplinks
    """
    import copy  # local: needed for deep-copying the measurements template

    # Basic implementation. Future development may use more complex library to access API
    endpoint = "https://{app}.data.thethingsnetwork.org/api/v2/query/{dev}?last={time}".format(
        app=self.application_id,
        dev=self.device_id,
        time="{}s".format(int(past_seconds)))
    headers = {"Authorization": "key {k}".format(k=self.app_api_key)}
    timestamp_format = '%Y-%m-%dT%H:%M:%S.%f'
    response = requests.get(endpoint, headers=headers)

    try:
        responses = response.json()
    except ValueError:  # No data returned
        self.logger.debug(
            "Response Error. Response: {}. Likely there is no data to be retrieved on TTN"
            .format(response.content))
        return

    # Iterate the parsed body once (previously response.json() was also
    # re-parsed on every iteration for the length check below)
    for i, each_resp in enumerate(responses, 1):
        # Deep copy: measurements_dict holds nested per-channel dicts; a
        # shallow .copy() shares those inner dicts, so writes would leak
        # into the module-level template across iterations
        self.return_dict = copy.deepcopy(measurements_dict)

        if not self.running:
            break

        try:
            datetime_utc = datetime.datetime.strptime(
                each_resp['time'][:-7], timestamp_format)
        except Exception:
            # Sometimes the original timestamp is in milliseconds
            # instead of nanoseconds. Therefore, remove 3 less digits
            # past the decimal and try again to parse.
            try:
                datetime_utc = datetime.datetime.strptime(
                    each_resp['time'][:-4], timestamp_format)
            except Exception:
                self.logger.error("Could not parse timestamp: {}".format(
                    each_resp['time']))
                continue  # Malformed timestamp. Discard this record.

        # Track the newest uplink timestamp seen in this batch
        if (not self.latest_datetime or
                self.latest_datetime < datetime_utc):
            self.latest_datetime = datetime_utc

        cpm_value = None
        cpm_ts = None
        usv_h_value = None
        usv_h_ts = None

        for channel in self.return_dict:
            if (self.is_enabled(channel) and
                    self.return_dict[channel]['name'] in each_resp and
                    each_resp[self.return_dict[channel]['name']] is not None):
                self.return_dict[channel]['value'] = each_resp[
                    self.return_dict[channel]['name']]
                self.return_dict[channel]['timestamp_utc'] = datetime_utc

                # Remember radiation values for forwarding below
                if self.return_dict[channel]['unit'] == 'cpm':
                    cpm_value = float(self.return_dict[channel]['value'])
                    cpm_ts = self.return_dict[channel]['timestamp_utc']
                elif self.return_dict[channel]['unit'] == 'uSv_hr':
                    usv_h_value = float(self.return_dict[channel]['value'])
                    usv_h_ts = self.return_dict[channel]['timestamp_utc']

                # Convert value/unit if conversion_id present and valid
                if self.channels_conversion[channel]:
                    conversion = db_retrieve_table_daemon(
                        Conversion,
                        unique_id=self.channels_measurement[channel].conversion_id)
                    if conversion:
                        meas = parse_measurement(
                            self.channels_conversion[channel],
                            self.channels_measurement[channel],
                            self.return_dict,
                            channel,
                            self.return_dict[channel],
                            timestamp=datetime_utc)
                        self.return_dict[channel]['unit'] = meas[channel][
                            'unit']
                        self.return_dict[channel]['value'] = meas[channel][
                            'value']

        # Only store when both channel 0 and channel 1 received a value
        if 'value' in self.return_dict[0] and 'value' in self.return_dict[1]:
            self.logger.debug("Adding measurements to influxdb: {}".format(
                self.return_dict))
            add_measurements_influxdb(
                self.unique_id,
                self.return_dict,
                use_same_timestamp=INPUT_INFORMATION[
                    'measurements_use_same_timestamp'])
        else:
            self.logger.debug("No measurements to add to influxdb.")

        # Send to GMC Map. It doesn't accept a timestamp, so only the last
        # (latest) record is sent. The `is not None` guards prevent a
        # TypeError when a record lacked one of the radiation values.
        if (self.send_gmcmap and i == len(responses) and
                cpm_value is not None and cpm_value > 0 and
                usv_h_value is not None and usv_h_value > 0):
            try:
                # Use the configured account/counter IDs (previously the
                # URL hard-coded AID/GID and ignored these settings)
                gmcmap = ('http://www.GMCmap.com/log2.asp'
                          '?AID={aid}&GID={gcid}'
                          '&CPM={cpm:.0f}&uSV={usv:.3f}').format(
                    aid=self.gmcmap_account_id,
                    gcid=self.gmcmap_geiger_counter_id,
                    cpm=cpm_value,
                    usv=usv_h_value)
                contents = urllib.request.urlopen(gmcmap).read()
                self.logger.debug("GMCMap: {}".format(contents))
            except Exception as e:
                self.logger.error(
                    "Error adding data to GMC Map: {}".format(e))

        # Send uSv/hr (and CPM) to Safecast, with capture timestamps
        if (self.send_safecast and
                cpm_value is not None and cpm_value > 0 and
                usv_h_value is not None and usv_h_value > 0):
            try:
                safecast = self.safecastpy.SafecastPy(
                    api_key=self.safecast_api_key)
                measurement_usv = safecast.add_measurement(
                    json={
                        'latitude': self.safecast_latitude,
                        'longitude': self.safecast_longitude,
                        'value': usv_h_value,
                        'unit': self.safecastpy.UNIT_USV,
                        'captured_at': usv_h_ts.isoformat() + '+00:00',
                        'device_id': self.safecast_device_id,
                        'location_name': self.safecast_location_name
                    })
                measurement_cpm = safecast.add_measurement(
                    json={
                        'latitude': self.safecast_latitude,
                        'longitude': self.safecast_longitude,
                        'value': cpm_value,
                        'unit': self.safecastpy.UNIT_CPM,
                        'captured_at': cpm_ts.isoformat() + '+00:00',
                        'device_id': self.safecast_device_id,
                        'location_name': self.safecast_location_name
                    })
                self.logger.debug('uSv/hr measurement id: {0}'.format(
                    measurement_usv['id']))
                self.logger.debug('CPM measurement id: {0}'.format(
                    measurement_cpm['id']))
            except Exception as e:
                self.logger.error(
                    "Error adding data to Safecast: {}".format(e))

    # set datetime to latest timestamp. Guard latest_datetime: it may
    # still be None if no record parsed, and comparing None against a
    # datetime raises TypeError.
    if self.running and self.latest_datetime:
        with session_scope(MYCODO_DB_PATH) as new_session:
            mod_input = new_session.query(Input).filter(
                Input.unique_id == self.unique_id).first()
            if not mod_input.datetime or mod_input.datetime < self.latest_datetime:
                mod_input.datetime = self.latest_datetime
                new_session.commit()