def test_PublicData(auth, requests_mock):
    """PublicData must report an "ok" status, with and without a data-type filter."""
    with open("fixtures/public_data_simple.json") as fixture_file:
        fixture_payload = json.load(fixture_file)
    requests_mock.post(
        pyatmo.public_data._GETPUBLIC_DATA,
        json=fixture_payload,
        headers={"content-type": "application/json"},
    )

    # Unfiltered request.
    public_data = pyatmo.PublicData(auth)
    assert public_data.status == "ok"

    # Request restricted to specific data types.
    public_data = pyatmo.PublicData(auth, required_data_type="temperature,rain_live")
    assert public_data.status == "ok"
def publicData(auth, requests_mock):
    """Fixture: a PublicData instance backed by the simple public-data payload."""
    with open("fixtures/public_data_simple.json") as fixture_file:
        payload = json.load(fixture_file)
    requests_mock.post(
        pyatmo.public_data._GETPUBLIC_DATA,
        json=payload,
        headers={"content-type": "application/json"},
    )
    return pyatmo.PublicData(auth)
def test_public_data_error(auth, requests_mock):
    """An error payload from the API must raise pyatmo.NoDevice."""
    with open("fixtures/public_data_error_mongo.json") as fixture_file:
        error_payload = json.load(fixture_file)
    requests_mock.post(
        pyatmo.public_data._GETPUBLIC_DATA,
        json=error_payload,
        headers={"content-type": "application/json"},
    )

    with pytest.raises(pyatmo.NoDevice):
        pyatmo.PublicData(auth, LAT_NE, LON_NE, LAT_SW, LON_SW)
def update(self):
    """Request an update from the Netatmo API.

    Fetches public station data for the configured bounding box and stores
    it in ``self.data``.  On ``pyatmo.NoDevice`` (no data returned by the
    API) or when no station is found in the area, ``self.data`` is left
    untouched.
    """
    import pyatmo

    try:
        data = pyatmo.PublicData(
            self.auth,
            LAT_NE=self.lat_ne,
            LON_NE=self.lon_ne,
            LAT_SW=self.lat_sw,
            LON_SW=self.lon_sw,
            filtering=True,
        )
    except pyatmo.NoDevice:
        # Previously this exception escaped and aborted the update cycle;
        # handle it like the sibling update() implementation does.
        _LOGGER.debug("No data received when updating public station data")
        return

    if data.CountStationInArea() == 0:
        _LOGGER.warning('No Stations available in this area.')
        return

    self.data = data
def public_data(auth, requests_mock):
    """Fixture: a PublicData instance for a fixed bounding box near Lausanne."""
    with open("fixtures/public_data_simple.json") as fixture_file:
        payload = json.load(fixture_file)
    requests_mock.post(
        pyatmo.public_data._GETPUBLIC_DATA,
        json=payload,
        headers={"content-type": "application/json"},
    )

    # Bounding box: (lat_ne, lon_ne) north-east corner, (lat_sw, lon_sw) south-west.
    lat_ne, lon_ne = 46.610870, 6.221652
    lat_sw, lon_sw = 46.596485, 6.217828
    return pyatmo.PublicData(auth, lat_ne, lon_ne, lat_sw, lon_sw)
def update(self):
    """Request an update from the Netatmo API."""
    try:
        fetched = pyatmo.PublicData(
            self.auth,
            LAT_NE=self.lat_ne,
            LON_NE=self.lon_ne,
            LAT_SW=self.lat_sw,
            LON_SW=self.lon_sw,
            filtering=True,
        )
    except pyatmo.NoDevice:
        fetched = None

    # Keep the previous self.data unless a usable result came back.
    if fetched and fetched.CountStationInArea() != 0:
        self.data = fetched
    elif fetched:
        _LOGGER.warning("No Stations available in this area.")
    else:
        _LOGGER.debug("No data received when updating public station data")
def test_public_data_unavailable(auth, requests_mock):
    """A 404 from the public-data endpoint must surface as pyatmo.ApiError."""
    endpoint = pyatmo.public_data._GETPUBLIC_DATA
    requests_mock.post(endpoint, status_code=404)

    with pytest.raises(pyatmo.ApiError):
        pyatmo.PublicData(auth, LAT_NE, LON_NE, LAT_SW, LON_SW)
def main(collect, configuration, sensors):
    """
    Fetch cavelink and weather data to store them to influxDB.

    A config file is mandatory to ensure proper data manipulation
    and storage in the database.

    A sensors file (JSON formatted) is mandatory to input
    a list of sensors to collect data from.

    Please specify the data type you are interested in.
    """
    click.secho('Executing speleo %s' % __version__, fg='yellow')

    configuration_file = configuration
    sensors_file = sensors

    # Parse the config file content and configure logging from it.
    config = configparser.ConfigParser()
    config.read(configuration_file)
    _configure_logging(config.get('logging', 'loglevel'))

    logging.info('============= [ START ] =============')
    logging.info('Configuration file = %s' % (configuration_file))
    click.secho('Configuration file = %s' % (configuration_file), fg='yellow')

    logging.info('Sensors definition file = %s' % (sensors_file))
    click.secho('Sensors definition file = %s' % (sensors_file), fg='yellow')

    # Number of rows to be queried from cavelink.
    nb_rows = config.get('cavelink', 'rows')
    logging.info('Config file: fetching %s last record(s) from cavelink.'
                 % (nb_rows))
    click.secho('Config file: fetching %s last record(s) from cavelink.'
                % (nb_rows), fg='green')

    # Credentials and coordinate tolerance/margin for the NetAtmo API.
    netatmo_client_id = config.get('netatmo', 'CLIENT_ID')
    netatmo_client_secret = config.get('netatmo', 'CLIENT_SECRET')
    netatmo_username = config.get('netatmo', 'USERNAME')
    netatmo_password = config.get('netatmo', 'PASSWORD')
    margin = float(config.get('netatmo', 'margin'))

    # Connection parameters for influxDB.
    influxDB_url = config.get('database', 'influxDB_url')
    influxDB_token = config.get('database', 'influxDB_token')
    influxDB_org = config.get('database', 'influxDB_org')
    influxDB_bucket = config.get('database', 'influxDB_bucket')

    # Read sensors definition file.
    with open(sensors_file, 'r') as f:
        sensors = json.load(f)

    logging.info('Found %d sensor(s) in %s.' % (len(sensors), sensors_file))
    click.secho('Found %d sensor(s) in %s.' % (len(sensors), sensors_file),
                fg='green')

    # Filtering sensors list. Keeping only those to process.
    active = [s for s in sensors if s['active'].lower() == "true"]
    if collect == 'speleo':
        active_sensors = [s for s in active
                          if s['type'].lower() == 'cavelink']
    elif collect == 'weather':
        active_sensors = [s for s in active
                          if s['type'].lower() == 'netatmo']
    else:  # collect = all
        active_sensors = active

    logging.info('Found %s active sensor(s) of type: "%s"'
                 % (len(active_sensors), collect))
    click.secho('Found %s active sensor(s) of type: "%s".'
                % (len(active_sensors), collect), fg='green')

    # Login at Netatmo
    logging.debug("Loging to Netatmo")
    authorization = pyatmo.ClientAuth(client_id=netatmo_client_id,
                                      client_secret=netatmo_client_secret,
                                      username=netatmo_username,
                                      password=netatmo_password,
                                      scope="read_station")

    # Collect one batch of measurements across all active sensors.
    measurements = []
    with click.progressbar(active_sensors, label='Collecting data') as bar:
        for sensor in bar:
            if sensor['type'] == 'cavelink':
                measurements.extend(_cavelink_measurements(sensor, nb_rows))
            elif sensor['type'] == 'netatmo':
                measurements.extend(
                    _netatmo_measurements(sensor, authorization, margin))

    # Post the measurements in batch to influxDB, if not empty.
    if measurements:
        client = InfluxDBClient(url=influxDB_url, token=influxDB_token)
        write_api = client.write_api(write_options=SYNCHRONOUS)
        write_api.write(influxDB_bucket, influxDB_org, measurements)
        logging.info('%s measurements written in DB.' % len(measurements))
        click.secho('%s measurements written in DB.' % len(measurements),
                    fg='green')
    else:
        logging.info('No measurement to write.')
        click.secho('No measurement to write.', fg='red')


def _configure_logging(loglevel):
    """Attach a console handler to the root logger at *loglevel*."""
    logger = logging.getLogger()
    level = getattr(logging, loglevel.upper(), None)
    logger.setLevel(level)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(level)
    console_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s: %(message)s'))
    logger.addHandler(console_handler)


def _cavelink_measurements(sensor, nb_rows):
    """Query one cavelink sensor and return a list of influxDB measurements."""
    webpage = sensor['url']
    logging.debug('url selected: %s' % (webpage))
    logging.info('Querying %s rows of %s' % (nb_rows, sensor['description']))

    s = cavelink.Sensor(webpage, nb_rows)
    cl = json.loads(s.getJSON('epoch'))
    logging.debug('%s records received for sensor : %s'
                  % (len(cl['measures']), sensor['description']))
    logging.debug('Records recevied: %s' % (cl['measures']))

    measurements = []
    valuetype = sensor['tags']['sensor']
    for timestamp in cl['measures']:
        measurement = {
            'measurement': sensor['table'],
            # Copy the tags so every measurement owns its own dict instead of
            # all of them sharing (and mutating) sensor['tags'].
            'tags': dict(sensor['tags'], unit=cl['sensor']['unit']),
            'time': int(timestamp) * 1_000_000_000,  # nanoseconds
            'fields': {valuetype: cl['measures'][timestamp]},
        }
        measurements.append(measurement)
        # in case of debug, show each measurement of the sensor
        logging.debug(measurement)
    return measurements


def _netatmo_measurements(sensor, authorization, margin):
    """Query Netatmo public data around a sensor's coordinates.

    Returns a list with at most one influxDB measurement; empty when the
    station could not be queried or reported no rain.
    """
    latitude = float(sensor['latitude'])
    longitude = float(sensor['longitude'])
    logging.info('Looking for Netatmo: %s at %s'
                 % (sensor['description'], sensor['address']))
    logging.debug('Fetch netatmo data for location: Lat %s and Lon %s'
                  % (latitude, longitude))

    try:
        weather_pubdata = pyatmo.PublicData(
            authorization,
            lat_ne=str(latitude + margin),
            lon_ne=str(longitude + margin),
            lat_sw=str(latitude - margin),
            lon_sw=str(longitude - margin),
        )
        weather_pubdata.update()
        logging.debug('Got public data, %s stations in area.'
                      % weather_pubdata.stations_in_area())
    except Exception:
        # A bare `except BaseException` previously swallowed the error and
        # execution continued into code using the unbound weather_pubdata
        # (NameError). Log and skip this sensor instead.
        logging.debug(sys.exc_info())
        return []

    avg_rainGauge = weather_pubdata.get_average_60_min_rain()
    # Argument order fixed: the address belongs after "at", then the value.
    logging.debug('Average rain at %s: %s'
                  % (sensor["address"], avg_rainGauge))

    if avg_rainGauge > 0:  # avoid insert data with rain=0
        measurement = {
            'measurement': sensor['table'],
            'tags': dict(sensor['tags'], unit=sensor['unit']),
            'time': int(round(time.time() * 1000000000)),  # nanoseconds
            'fields': {'value': avg_rainGauge},
        }
        logging.debug('Add measurement: %s mm. at %s' % (
            avg_rainGauge,
            sensor["address"],
        ))
        return [measurement]
    return []