# NOTE(review): whitespace-mangled chunk — formatting reconstructed; code tokens unchanged.
# The opening of this dict literal and its enclosing function start before this view;
# the fragment's indentation below is assumed — TODO confirm against the full file.
    '86': 22, '66': 10, '67': 11,
    '56': 10, '57': 11, '95': 25
}
    # Look up the internal condition id for a raw code; None when unmapped.
    return condicodes.get(str(code), None)


# Create new task
task = Routine('import.dwd.hourly.model')

# Get counter value — number of station rows already processed in earlier cycles
counter = task.get_var('station_counter')
skip = 0 if counter is None else int(counter)

# Get the next slice of MOSMIX stations from the CSV station list
try:
    stations = pd.read_csv(
        MOSMIX_PATH,
        dtype='str',
        skiprows=skip,
        nrows=STATIONS_PER_CYCLE,
        names=['id', 'mosmix'])
except pd.errors.EmptyDataError:
    # No rows left past `skip` — treat as end of the station list
    stations = None
    pass  # NOTE(review): redundant after the assignment above

# Update counter
# NOTE(review): truncated here — the body of this `if` lies outside this view.
if stations is None or len(stations.index) < STATIONS_PER_CYCLE:
pass return file # Create task task = Routine('import.dwd.hourly.national') # Connect to DWD FTP server ftp = FTP(DWD_FTP_SERVER) ftp.login() ftp.cwd('/climate_environment/CDC/observations_germany/climate/hourly/' + BASE_DIR) # Get counter value counter = task.get_var(f'station_counter_{MODE}') counter = int(counter) if counter is not None else 0 skip = 3 if counter is None else 3 + counter # Get all files in directory try: endpos = STATIONS_PER_CYCLE + skip stations = ftp.nlst()[skip:endpos] except BaseException: stations = None pass # Update counter if stations is None or len(stations) < STATIONS_PER_CYCLE: task.set_var(f'station_counter_{MODE}', 0) exit()
# NOTE(review): whitespace-mangled chunk — formatting reconstructed; code tokens unchanged.

# Path of the USAF/WBAN station list shipped in the package's resources directory
USAF_WBAN_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../..', 'resources')
) + '/usaf_wban.csv'
# NOTE(review): naive local time — presumably fine for a year boundary; confirm.
CURRENT_YEAR = datetime.now().year

# Required columns
usecols = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10]

# Column names
NAMES = ['time', 'temp', 'dwpt', 'pres', 'wdir', 'wspd', 'prcp']

# Create new task
task = Routine('import.noaa.hourly.global')

# Get counter value — number of station rows already processed in earlier cycles
counter = task.get_var('station_counter_' + MODE)
skip = 0 if counter is None else int(counter)

# Get year — historical mode walks one year at a time, starting at 1901
if MODE == 'historical':
    year = task.get_var('year')
    year = 1901 if year is None else int(year)

# Get the next slice of ISD Lite stations from the CSV station list
try:
    stations = pd.read_csv(
        USAF_WBAN_PATH,
        dtype='str',
        skiprows=skip,
        nrows=STATIONS_PER_CYCLE,
        names=['id', 'usaf', 'wban'])
# NOTE(review): truncated here — the handler body for EmptyDataError lies
# outside this view.
except pd.errors.EmptyDataError: