Example #1
    def __call__(self, stack):
        # Dispatch on the parent node type: a Reader pushes the referenced
        # value onto the stack, a Setter pops a value off the stack and
        # stores it, and a Caller invokes the referenced routine.
        if isinstance(self.parent, Reader):
            stack.append(self.parent.parent[self.name])
        elif isinstance(self.parent, Setter):
            self.parent.parent[self.name] = stack.pop()
        elif isinstance(self.parent, Caller):
            item = self.parent.parent[self.name]
            item = Routine(item)
            item(stack)
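
For context, here is a minimal, self-contained sketch of how such a dispatcher could be exercised. Reader, Setter and the Name wrapper below are hypothetical stand-ins for illustration only, not the actual classes from the routines package:

class Reader:
    def __init__(self, parent):
        self.parent = parent


class Setter:
    def __init__(self, parent):
        self.parent = parent


class Name:
    def __init__(self, parent, name):
        self.parent = parent
        self.name = name

    def __call__(self, stack):
        if isinstance(self.parent, Reader):
            stack.append(self.parent.parent[self.name])
        elif isinstance(self.parent, Setter):
            self.parent.parent[self.name] = stack.pop()


env = {'x': 41}                 # mapping that names are resolved against
stack = []
Name(Reader(env), 'x')(stack)   # read: push env['x'] onto the stack
stack[-1] += 1                  # work with the value on the stack
Name(Setter(env), 'x')(stack)   # write: pop the stack back into env['x']
assert env['x'] == 42
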
"""
Export meta data for weather stations

The code is licensed under the MIT license.
"""

from io import BytesIO, StringIO
from gzip import GzipFile
import csv
import json
from routines import Routine

task = Routine('export.bulk.stations.meta', True)


def write_json_dump(data: list, name: str) -> None:
    """Gzip the station meta data as JSON and upload it via bulk FTP."""

    file = BytesIO()

    if len(data) > 0:

        # Compress the JSON payload into the in-memory buffer
        with GzipFile(fileobj=file, mode='w') as gz:
            gz.write(json.dumps(data, indent=4, default=str).encode())

        # Rewind the buffer and upload it to the bulk FTP server
        file.seek(0)
        task.bulk_ftp.storbinary(f'STOR /stations/meta/{name}.json.gz', file)
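
The same in-memory gzip pattern can be reused for other formats. Below is a hedged sketch of a CSV counterpart; the write_csv_dump name, the target path and the assumption that data is a list of rows are illustrative, while the storbinary call mirrors the one above:

def write_csv_dump(data: list, name: str) -> None:
    """Sketch only: gzip an in-memory CSV and upload it via bulk FTP."""

    if len(data) == 0:
        return

    # Render the rows as CSV text
    text = StringIO()
    csv.writer(text).writerows(data)

    # Compress the CSV payload into an in-memory buffer
    file = BytesIO()
    with GzipFile(fileobj=file, mode='w') as gz:
        gz.write(text.getvalue().encode())

    # Rewind and upload (path is hypothetical)
    file.seek(0)
    task.bulk_ftp.storbinary(f'STOR /stations/meta/{name}.csv.gz', file)
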

Example #3
        # Change into the observation directory and pick the first file
        # whose name contains the needle
        ftp.cwd(
            '/climate_environment/CDC/observations_germany/climate/hourly/' +
            path)
        files = ftp.nlst()
        matching = [f for f in files if needle in f]
        file = matching[0]

    except BaseException:
        # Ignore lookup failures
        pass

    return file


# Create task
task = Routine('import.dwd.hourly.national')

# Connect to DWD FTP server
ftp = FTP(DWD_FTP_SERVER)
ftp.login()
ftp.cwd('/climate_environment/CDC/observations_germany/climate/hourly/' +
        BASE_DIR)

# Get counter value
counter = task.get_var(f'station_counter_{MODE}')
counter = int(counter) if counter is not None else 0
skip = 3 + counter  # offset of 3 plus the stations already processed

# Get all files in directory
try:
    endpos = STATIONS_PER_CYCLE + skip
"""
Export daily bulk data

The code is licensed under the MIT license.
"""

from sys import argv
from io import BytesIO, StringIO
from gzip import GzipFile
import csv
from routines import Routine

# Configuration
SCOPE = argv[1]
STATIONS_PER_CYCLE = 10

task = Routine('export.bulk.daily.' + SCOPE.lower(), True)

stations = task.get_stations(
    f'''
    SELECT
        `stations`.`id` AS `id`,
        `stations`.`tz` AS `tz`
    FROM `stations`
    WHERE
        `stations`.`id` IN (
            SELECT DISTINCT `station`
            FROM `inventory`
            WHERE
                `mode` IN {"('D', 'H', 'P')" if SCOPE == 'full' else "('D', 'H')"}
        )
''', STATIONS_PER_CYCLE)
"""
Get hourly model forecasts for weather stations based on geo location

The code is licensed under the MIT license.
"""

from urllib import request, error
import json
import pandas as pd
from routines import Routine
from routines.schema import hourly_model

# Configuration
STATIONS_PER_CYCLE = 20

task = Routine('import.metno.hourly.model')

stations = task.get_stations("""
    SELECT
        `stations`.`id` AS `id`,
        `stations`.`latitude` AS `latitude`,
        `stations`.`longitude` AS `longitude`,
        `stations`.`altitude` AS `altitude`
    FROM `stations`
    WHERE
        `stations`.`latitude` IS NOT NULL AND
        `stations`.`longitude` IS NOT NULL AND
        `stations`.`altitude` IS NOT NULL AND
        `stations`.`mosmix` IS NULL AND
        `stations`.`id` IN (
            SELECT DISTINCT `station`
correct_message = TextStim(win=mywin, text="Correct!", color=text_correct_color, height=text_height)
incorrect_message = TextStim(win=mywin, text="Incorrect!", color=text_incorrect_color, height=text_height)
too_slow_message = TextStim(win=mywin, text="Too slow!", color=text_too_slow, height=text_height)

beginning_learning_block_message = TextStim(win=mywin, text="A block of trials will start now. Here, you will receive feedback after each choice you made.", color=text_beginning_block, height=text_height)
beginning_transfer_block_message = TextStim(win=mywin, text="A block of trials will start now. Here, you will no longer receive feedback.", color=text_beginning_block, height=text_height)
messages_beginning = [beginning_learning_block_message, beginning_transfer_block_message]

end_transfer_message = TextStim(win=mywin, color=text_beginning_block, height=text_height)

# Create the dataframe that will hold the trial data
data = pd.DataFrame([])

# Create the trial routine that presents the stimuli
trial_routine = Routine(window=mywin, frames_per_second=frames_per_second, escape_key=escape_key)

for bl in range(n_blocks):
    block = blocks[bl]

    trial_routine.wait_for_time_limit(
            components=[messages_beginning[bl]], 
            time_seconds=message_beginning_duration, 
            label='message_beginning')

    for t in range(n_trials):
        # Set up everything that changes at the beginning of every trial
        image_trial = 'patch{}.png'.format(block['image_number'][t])
        correct_resp_trial = block['correct_response'][t]
        patch_image.image = os.path.join(directory_stimuli, image_trial)
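
A minimal sketch of how a finished trial could be appended to the data frame created above; the column names and the point at which a trial is recorded are assumptions for illustration, not taken from the original experiment script:

trial_record = pd.DataFrame([{
    'block': bl,                              # block index
    'trial': t,                               # trial index within the block
    'image': image_trial,                     # stimulus shown on this trial
    'correct_response': correct_resp_trial,   # expected response
}])
data = pd.concat([data, trial_record], ignore_index=True)  # grow the results table
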
Example #7
from urllib import request, error
import pandas as pd
from metar import Metar
from metar.Datatypes import (
    temperature,
    pressure,
    speed,
    distance,
    direction,
    precipitation,
)
from routines import Routine
from routines.convert import temp_dwpt_to_rhum
from routines.schema import hourly_metar

task = Routine('import.noaa.hourly.metar')

# Map METAR codes to Meteostat condicodes


def get_condicode(weather: list):

    try:

        code = weather[0][3]

        condicodes = {
            'RA': 8,
            'SHRA': 17,
            'DZ': 7,
            'DZRA': 7,
Example #8
        '83': 19,
        '84': 20,
        '85': 21,
        '86': 22,
        '66': 10,
        '67': 11,
        '56': 10,
        '57': 11,
        '95': 25
    }

    return condicodes.get(str(code), None)


# Create new task
task = Routine('import.dwd.hourly.model')

# Get counter value
counter = task.get_var('station_counter')
skip = 0 if counter is None else int(counter)

# Get MOSMIX stations
try:
    stations = pd.read_csv(MOSMIX_PATH,
                           dtype='str',
                           skiprows=skip,
                           nrows=STATIONS_PER_CYCLE,
                           names=['id', 'mosmix'])
except pd.errors.EmptyDataError:
    stations = None
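
The station_counter variable is what lets the routine walk through the station list in chunks of STATIONS_PER_CYCLE rows per run. Below is a hedged sketch of how the counter might be advanced after a cycle; task.set_var is assumed to exist as the counterpart of the get_var call above and may differ in the actual routines package:

if stations is None or len(stations) < STATIONS_PER_CYCLE:
    # End of the station list reached: start over on the next run
    task.set_var('station_counter', 0)  # assumption: setter counterpart of get_var
else:
    # Continue with the next chunk of stations on the next run
    task.set_var('station_counter', skip + STATIONS_PER_CYCLE)  # assumption, see above
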
"""
ZAMG hourly synop import routine

Get hourly synop data for selected weather stations in Austria.

The code is licensed under the MIT license.
"""

import pandas as pd
from routines import Routine
from routines.schema import hourly_synop

task = Routine('import.zamg.hourly.synop')

# Configuration
parse_dates = {
    'time': [1, 2]
}

usecols = [0, 3, 4, 5, 7, 8, 9, 11, 12, 13, 15]

names = {
    'Station': 'station',
    'T °C': 'temp',
    'RF %': 'rhum',
    'WR °': 'wdir',
    'WG km/h': 'wspd',
    'WSG km/h': 'wpgt',
    'N l/m²': 'prcp',
    'LDred hPa': 'pres',
    'SO %': 'tsun'
"""
Update hourly inventory

The code is licensed under the MIT license.
"""

from routines import Routine

task = Routine('task.inventory.hourly')

task.query('''
    INSERT INTO
        `inventory`(`station`, `mode`, `start`)
    SELECT
        `station`,
        'H' AS `mode`,
        MIN(`mindate`) AS `start` FROM (
            (SELECT
                `station`,
                DATE(MIN(`time`)) as `mindate`
            FROM `hourly_synop`
            GROUP BY `station`)
        UNION ALL
            (SELECT
                `station`,
                DATE(MIN(`time`)) as `mindate`
            FROM `hourly_metar`
            GROUP BY `station`)
        UNION ALL
            (SELECT
                `station`,
Example #11
# Column names
names = {
    'Temperatur (2m)': 'temp',
    'Windgeschwindigkeit': 'wspd',
    'Windboen (letzte Stunde)': 'wpgt',
    'Niederschlag (letzte Stunde)': 'prcp',
    'Relative Feuchte': 'rhum',
    'Windrichtung': 'wdir',
    'Druck (auf Meereshoehe)': 'pres',
    'Sonnenscheindauer (letzte Stunde)': 'tsun',
    'aktuelles Wetter': 'coco',
    'Schneehoehe': 'snow'
}

# Create new task
task = Routine('import.dwd.hourly.synop')

# Map DWD codes to Meteostat condicodes


def get_condicode(code: str):
    """ Check docs/dwd_poi_codes.pdf for more information """

    condicodes = {
        '1': 1,
        '2': 2,
        '3': 3,
        '4': 4,
        '5': 5,
        '6': 6,
        '7': 7,
Example #12
# Configuration
MODE = argv[1]
STATIONS_PER_CYCLE = 1 if MODE == 'recent' else 4
USAF_WBAN_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../..',
                 'resources')) + '/usaf_wban.csv'
CURRENT_YEAR = datetime.now().year

# Required columns
usecols = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10]

# Column names
NAMES = ['time', 'temp', 'dwpt', 'pres', 'wdir', 'wspd', 'prcp']

# Create new task
task = Routine('import.noaa.hourly.global')

# Get counter value
counter = task.get_var('station_counter_' + MODE)
skip = 0 if counter is None else int(counter)

# Get year
if MODE == 'historical':
    year = task.get_var('year')
    year = 1901 if year is None else int(year)

# Get ISD Lite stations
try:
    stations = pd.read_csv(USAF_WBAN_PATH,
                           dtype='str',
                           skiprows=skip,
The code is licensed under the MIT license.
"""

from sys import argv
from io import BytesIO, StringIO
from gzip import GzipFile
import csv
from datetime import datetime
from routines import Routine

# Configuration
SCOPE = argv[1]
MODE = argv[2]
STATIONS_PER_CYCLE = 8 if MODE == 'recent' else 1

task = Routine(f'export.bulk.hourly.{SCOPE}.{MODE}', True)

stations = task.get_stations(
    f'''
    SELECT
        `stations`.`id` AS `id`
    FROM `stations`
    WHERE
        `stations`.`id` IN (
            SELECT DISTINCT `station`
            FROM `inventory`
            WHERE
                `mode` IN {"('H', 'P')" if SCOPE == 'full' else "('H')"}
        )
''', STATIONS_PER_CYCLE)
# Column names
names = {
    'MM/DD/YYYY': 'time',
    'TMAX': 'tmax',
    'TMIN': 'tmin',
    'TAVG': 'tavg',
    'PRCP': 'prcp',
    'SNWD': 'snow',
    'AWDR': 'wdir',
    'AWND': 'wspd',
    'TSUN': 'tsun',
    'WSFG': 'wpgt'
}

# Create new task
task = Routine('import.noaa.daily.global')

# Get counter value
counter = task.get_var('station_counter')
skip = 0 if counter is None else int(counter)

# Get GHCN stations
try:
    stations = pd.read_csv(GHCN_PATH,
                           dtype='str',
                           skiprows=skip,
                           nrows=STATIONS_PER_CYCLE,
                           names=['id', 'ghcn'])
except pd.errors.EmptyDataError:
    stations = None
NAMES = {
    'FX': 'wpgt',
    'FM': 'wspd',
    'RSK': 'prcp',
    'SDK': 'tsun',
    'SHK_TAG': 'snow',
    'PM': 'pres',
    'TMK': 'tavg',
    'UPM': 'rhum',
    'TXK': 'tmax',
    'TNK': 'tmin'
}

# Create task
task = Routine('import.dwd.daily.national')

# Connect to DWD FTP server
ftp = FTP(DWD_FTP_SERVER)
ftp.login()
ftp.cwd('/climate_environment/CDC/observations_germany/climate/daily/kl/' +
        MODE)

# Get counter value
counter = task.get_var(f'station_counter_{MODE}')
counter = int(counter) if counter is not None else 0
skip = 3 + counter  # offset of 3 plus the stations already processed

# Get all files in directory
try:
    endpos = STATIONS_PER_CYCLE + skip
Example #16
"""
Update daily inventory

The code is licensed under the MIT license.
"""

from routines import Routine

task = Routine('task.inventory.daily')

task.query('''
    INSERT INTO
        `inventory`(`station`, `mode`, `start`)
    SELECT
        `station`,
        'D' AS `mode`,
        MIN(`mindate`) AS `start` FROM (
            (SELECT
                `station`,
                MIN(`date`) as `mindate`
            FROM `daily_national`
            GROUP BY `station`)
        UNION ALL
            (SELECT
                `station`,
                MIN(`date`) as `mindate`
            FROM `daily_ghcn`
            GROUP BY `station`)
        ) AS `daily_inventory`
    GROUP BY `station`
    ON DUPLICATE KEY UPDATE