def test_coverage(self):
    """Normalized daily data for a full year must have 100% coverage."""

    # Request all of 2018 for Frankfurt Airport (station 10637)
    query = Daily(
        ['10637'],
        start=datetime(2018, 1, 1),
        end=datetime(2018, 12, 31))

    coverage = query.normalize().coverage()

    # After normalization every day of the period must be present
    self.assertEqual(
        coverage,
        1,
        f'Normalized daily data returns coverage of {coverage}, should be 1')
def test_aggregate(self):
    """Weekly aggregation of one year of daily data should yield 53 rows."""

    # Request all of 2018 for Frankfurt Airport (station 10637)
    query = Daily(
        ['10637'],
        start=datetime(2018, 1, 1),
        end=datetime(2018, 12, 31))

    count = query.normalize().aggregate(freq='1W').count()

    # 2018 spans 53 (partly incomplete) calendar weeks
    self.assertEqual(
        count,
        53,
        f'Aggregated daily data returns count of {count}, should be 53')
def test_normalize(self):
    """Normalizing a full (non-leap) year of daily data should yield 365 rows."""

    # Request all of 2018 for Frankfurt Airport (station 10637)
    query = Daily(
        ['10637'],
        start=datetime(2018, 1, 1),
        end=datetime(2018, 12, 31))

    count = query.normalize().count()

    # 2018 is not a leap year, so a gapless series has exactly 365 days
    self.assertEqual(
        count,
        365,
        f'Normalized daily data returns count of {count}, should be 365')
"""Plot weekly temperature aggregates for Frankfurt Airport, 2018."""

from meteostat import Stations, Daily
from datetime import datetime
import matplotlib.pyplot as plt

# Daily observations for station 10637 across the full year 2018
query = Daily(['10637'], start=datetime(2018, 1, 1), end=datetime(2018, 12, 31))

# Fill gaps in the time index, then roll the series up to weekly values
weekly = query.normalize().aggregate(freq='1W').fetch()

# Line chart of average, minimum and maximum temperature
weekly.plot(x='time', y=['tavg', 'tmin', 'tmax'], kind='line')
plt.show()
"""Chart the average annual US temperature, 1980-2019, from sampled stations."""

from datetime import datetime
import matplotlib.pyplot as plt
from meteostat import Stations, Daily

# Configuration: allow up to five parallel download threads
Daily.max_threads = 5

# Time period
start = datetime(1980, 1, 1)
end = datetime(2019, 12, 31)

# Sample 20 random US weather stations that report daily data for the period
stations = (
    Stations()
    .region('US')
    .inventory('daily', (start, end))
    .fetch(limit=20, sample=True)
)

# Daily data, filled and aggregated to yearly means across all stations
data = (
    Daily(stations, start, end)
    .normalize()
    .aggregate('1Y', spatial=True)
    .fetch()
)

# Plot chart
data.plot(y=['tavg'], title='Average US Annual Temperature from 1980 to 2019')
plt.show()
""" Spatial sampling with Meteostat """ from meteostat import Stations, Daily from datetime import datetime import matplotlib.pyplot as plt # Get 20 random weather stations in Germany stations = Stations(country='DE', daily=datetime(2005, 1, 1)).fetch(limit=20, sample=True) # Get daily data data = Daily(stations, max_threads=5, start=datetime(1980, 1, 1), end=datetime(2019, 12, 31)) # Normalize data and aggregate data = data.normalize().aggregate(freq='5Y', spatial=True).fetch() # Plot chart data.plot(y=['tavg'], kind='line', title='Sampled DE Annual Mean Temperature from 1980 to 2019') plt.show()
from meteostat import Stations, Daily
from datetime import datetime
import pandas as pd
import matplotlib.pyplot as plt

# Look up the weather stations of interest by their Meteostat IDs
stations = Stations(id=['D1424', '10729', '10803', '10513']).fetch()

# Daily observations since 1980, fetched with up to five worker threads
data = Daily(
    stations,
    max_threads=5,
    start=datetime(1980, 1, 1),
    end=datetime(2019, 12, 31),
)

# Fill gaps in the time index
data = data.normalize().fetch()


def direction(value):
    """Return 1 when *value* (degrees) falls in the north-east sector, else 0."""
    return 1 if 337 <= value <= 360 or value <= 114 else 0


# Convert wind direction to the Boolean-style north-east flag
data['wdir'] = data['wdir'].apply(direction)

# Filter for DEC and JAN only
time = data.index.get_level_values('time')
def stations_daily():
    """
    Return daily station data in JSON format.

    Reads the request parameters via utils.get_parameters(): 'station',
    'start'/'end' (YYYY-MM-DD strings), and optional 'model', 'freq' and
    'units'. Responds with {"meta": {...}, "data": [...]} and a cache
    header, or aborts with HTTP 400 on invalid input or processing errors.
    """

    # Get query parameters
    args = utils.get_parameters(parameters)

    # Check if required parameters are set ('YYYY-MM-DD' is 10 chars)
    if args['station'] and len(args['start']) == 10 and len(args['end']) == 10:

        try:

            # Convert start & end date strings to datetime;
            # the end date is extended to the last second of that day
            start = datetime.strptime(args['start'], '%Y-%m-%d')
            end = datetime.strptime(
                f'{args["end"]} 23:59:59', '%Y-%m-%d %H:%M:%S')

            # Get number of days between start and end date
            date_diff = (end - start).days

            # Check date range: reject inverted or oversized ranges
            if date_diff < 0 or date_diff > max_days:
                # Bad request
                abort(400)

            # Caching: recent data still changes, so cache it for one day;
            # older data is stable and may be cached for three days
            now_diff = (datetime.now() - end).days
            if now_diff < 30:
                cache_time = 60 * 60 * 24
            else:
                cache_time = 60 * 60 * 24 * 3

            Daily.max_age = cache_time

            # Get data
            data = Daily(args['station'], start, end, model=args['model'])

            # Check if any data
            if data.count() > 0:

                # Normalize data
                data = data.normalize()

                # Aggregate
                if args['freq']:
                    data = data.aggregate(args['freq'])

                # Unit conversion (default is metric)
                if args['units'] == 'imperial':
                    data = data.convert(units.imperial)
                elif args['units'] == 'scientific':
                    data = data.convert(units.scientific)

                # Fetch DataFrame
                data = data.fetch()

                # Convert sunshine minutes to nullable integer
                data['tsun'] = data['tsun'].astype('Int64')

                # DateTime Index to String
                data.index = data.index.strftime('%Y-%m-%d')
                data.index.rename('date', inplace=True)
                data = data.reset_index().to_json(orient="records")

            else:

                # No data
                data = '[]'

            # Inject meta data
            meta = {}
            meta['generated'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

            # Generate output string
            output = f'''{{"meta":{json.dumps(meta)},"data":{data}}}'''

            # Return
            return utils.send_response(output, cache_time)

        except Exception:
            # NOTE(review): was `except BaseException`, which also swallowed
            # SystemExit/KeyboardInterrupt; any processing error in the
            # block above is reported to the client as a bad request.
            abort(400)

    else:

        # Bad request
        abort(400)