def _load(self) -> None:
    """
    Load file from Meteostat
    """

    # Name of the file on the Meteostat bulk endpoint
    file = 'stations/slim.csv.gz'

    # Resolve the local cache location for this file
    path = get_file_path(self.cache_dir, self.cache_subdir, file)

    # Serve from cache when caching is enabled and the file is still fresh
    use_cache = self.max_age > 0 and file_in_cache(path, self.max_age)

    if use_cache:
        # Read cached data
        stations = pd.read_pickle(path)
    else:
        # Download from Meteostat and index by station ID
        stations = load_handler(
            self.endpoint,
            file,
            self._columns,
            self._types,
            self._parse_dates,
            True
        ).set_index('id')

        # Persist to cache when caching is enabled
        if self.max_age > 0:
            stations.to_pickle(path)

    # Expose the loaded data on the instance
    self._data = stations
def _load(
    self,
    station: str
) -> pd.DataFrame:
    """
    Load the climate normals file for a single weather station
    from Meteostat

    Args:
        station: Meteostat weather station ID

    Returns:
        DataFrame of climate normals indexed by
        (station, start, end, month), filtered to the requested
        reference period end year when `self._end` is set

    Note: the original return annotation was `-> None`, but this
    method returns a DataFrame on every path — fixed to
    `pd.DataFrame`.
    """

    # File name
    file = f'normals/{station}.csv.gz'

    # Get local file path
    path = get_file_path(self.cache_dir, self.cache_subdir, file)

    # Check if file in cache
    if self.max_age > 0 and file_in_cache(path, self.max_age):
        # Read cached data
        df = pd.read_pickle(path)
    else:
        # Get data from Meteostat (no date columns to parse for normals)
        df = load_handler(
            self.endpoint,
            file,
            self._columns,
            self._types,
            None)

        if df.index.size > 0:
            # Add weather station ID
            df['station'] = station

            # Set multi-index
            df = df.set_index(['station', 'start', 'end', 'month'])

        # Save as Pickle
        if self.max_age > 0:
            df.to_pickle(path)

    # Filter by requested reference period end year, if any
    if df.index.size > 0 and self._end:
        # Get time index
        end = df.index.get_level_values('end')
        # Filter & return
        return df.loc[end == self._end]

    return df
def _load(
    self,
    station: str
) -> pd.DataFrame:
    """
    Load the daily data file for a single station from Meteostat

    Args:
        station: Meteostat weather station ID

    Returns:
        DataFrame of daily records for the station, filtered to the
        requested time period when both `self._start` and
        `self._end` are set

    Note: the original return annotation was `-> None`, but this
    method returns a DataFrame on every path — fixed to
    `pd.DataFrame`.
    """

    # File name — 'full' includes model data, 'obs' is observations only
    file = 'daily/' + ('full' if self._model else 'obs') + \
        '/' + station + '.csv.gz'

    # Get local file path
    path = get_file_path(self.cache_dir, self.cache_subdir, file)

    # Check if file in cache
    if self.max_age > 0 and file_in_cache(path, self.max_age):
        # Read cached data
        df = pd.read_pickle(path)
    else:
        # Get data from Meteostat
        df = load_handler(
            self.endpoint,
            file,
            self._columns,
            self._types,
            self._parse_dates)

        # Validate Series
        df = validate_series(df, station)

        # Save as Pickle
        if self.max_age > 0:
            df.to_pickle(path)

    # Filter time period and return
    if self._start and self._end:
        # Get time index
        time = df.index.get_level_values('time')
        # Filter & return
        return df.loc[(time >= self._start) & (time <= self._end)]

    # Return unfiltered data
    return df
def _load(
    self,
    station: str,
    year: str = None
) -> pd.DataFrame:
    """
    Load an hourly data file for a single station from Meteostat

    Args:
        station: Meteostat weather station ID
        year: Optional year for chunked (per-year) files; when
            None, the station's full file is loaded

    Returns:
        DataFrame of hourly records, localized to `self._timezone`
        if set, and filtered to the requested time period when both
        `self._start` and `self._end` are set

    Note: the original return annotation was `-> None`, but this
    method returns a DataFrame on every path — fixed to
    `pd.DataFrame`.
    """

    # File name — 'full' includes model data, 'obs' is observations only;
    # per-year files live in a year subdirectory
    file = 'hourly/' + ('full' if self._model else 'obs') + '/' + \
        (year + '/' if year else '') + station + '.csv.gz'

    # Get local file path
    path = get_file_path(self.cache_dir, self.cache_subdir, file)

    # Check if file in cache
    if self.max_age > 0 and file_in_cache(path, self.max_age):
        # Read cached data
        df = pd.read_pickle(path)
    else:
        # Get data from Meteostat
        df = load_handler(
            self.endpoint,
            file,
            self._columns,
            self._types,
            self._parse_dates)

        # Validate Series
        df = validate_series(df, station)

        # Save as Pickle
        if self.max_age > 0:
            df.to_pickle(path)

    # Localize the 'time' index level from UTC to the target timezone
    if self._timezone is not None and len(df.index) > 0:
        df = df.tz_localize(
            'UTC', level='time'
        ).tz_convert(
            self._timezone, level='time'
        )

    # Filter time period and return
    if self._start and self._end:
        # Get time index
        time = df.index.get_level_values('time')
        # Filter & return
        return df.loc[(time >= self._start) & (time <= self._end)]

    # Return unfiltered data
    return df