def __call__(self, stack):
    """Apply this named reference to *stack*, dispatching on the parent's kind.

    Reader -> push the value bound to this name; Setter -> pop the stack
    top into the name's slot; Caller -> run the bound value as a Routine.
    """
    parent = self.parent
    if isinstance(parent, Reader):
        # Read: push the named value from the enclosing scope.
        stack.append(parent.parent[self.name])
        return
    if isinstance(parent, Setter):
        # Write: store the popped stack top under this name.
        parent.parent[self.name] = stack.pop()
        return
    if isinstance(parent, Caller):
        # Call: wrap the named value as a Routine and invoke it on the stack.
        Routine(parent.parent[self.name])(stack)
def select_card(self, card):
    """Coroutine: reveal *card* and, once two cards are showing, resolve them.

    Yields while the reveal delay runs so the scheduler can keep the UI
    responsive instead of blocking in a thread sleep.
    """
    pool = self.selected_cards
    # Guard against misselection: re-clicking the first card, or clicking
    # while a pair is already face up.
    clicked_same = bool(pool) and pool[0] == card
    pair_showing = len(pool) >= 2
    if clicked_same or pair_showing:
        return
    # Reveal the card and add it to the selection pool.
    card.set_shape_visible(True)
    pool.append(card)
    if len(pool) != 2:
        return
    # Two cards are up: pause briefly before showing the result,
    # yielding on every tick of the wait generator.
    waiter = Routine.wait_for_seconds(1)
    tick = True
    while tick != False:  # deliberate `!= False`: the waiter may yield non-bool values
        yield True
        tick = next(waiter, False)
    first, second = pool[0], pool[1]
    # A correct pair needs both the same figure and the same colour.
    shapes_match = first.shape == second.shape
    colors_match = first.shape_color == second.shape_color
    if shapes_match and colors_match:
        # RIGHT CARDS: take both out of play.
        for chosen in pool:
            self.cards.remove(chosen)
        self.on_correct()
    else:
        # WRONG CARDS: flip both face down again.
        for chosen in pool:
            chosen.set_shape_visible(False)
        self.on_wrong()
    # Empty the selection pool for the next attempt.
    pool.clear()
    # No cards left on the board means the game has been won.
    if len(self.cards) == 0:
        self.on_finish_game()
Export daily bulk data

The code is licensed under the MIT license.
"""

from sys import argv
from io import BytesIO, StringIO
from gzip import GzipFile
import csv

from routines import Routine

# Configuration
# Export scope comes from the CLI; 'full' widens the inventory filter below.
SCOPE = argv[1]
# Number of stations handled per scheduler cycle.
STATIONS_PER_CYCLE = 10

# Create the bulk-export task.
# NOTE(review): the second argument presumably enables bulk/FTP mode — confirm
# against the Routine constructor.
task = Routine('export.bulk.daily.' + SCOPE.lower(), True)

# Fetch the next batch of stations that have matching inventory data.
# The 'full' scope additionally includes stations with inventory mode 'P'.
stations = task.get_stations(
    f'''
    SELECT
        `stations`.`id` AS `id`,
        `stations`.`tz` AS `tz`
    FROM `stations`
    WHERE
        `stations`.`id` IN (
            SELECT DISTINCT `station`
            FROM `inventory`
            WHERE
                `mode` IN {"('D', 'H', 'P')" if SCOPE == 'full' else "('D', 'H')"}
        )
    ''',
    STATIONS_PER_CYCLE)
from urllib import request, error import pandas as pd from metar import Metar from metar.Datatypes import ( temperature, pressure, speed, distance, direction, precipitation, ) from routines import Routine from routines.convert import temp_dwpt_to_rhum from routines.schema import hourly_metar task = Routine('import.noaa.hourly.metar') # Map METAR codes to Meteostat condicodes def get_condicode(weather: list): try: code = weather[0][3] condicodes = { 'RA': 8, 'SHRA': 17, 'DZ': 7, 'DZRA': 7,
correct_message = TextStim(win=mywin, text="Correct!", color=text_correct_color, height=text_height) incorrect_message = TextStim(win=mywin, text="Incorrect!", color=text_incorrect_color, height=text_height) too_slow_message = TextStim(win=mywin, text="Too slow!", color=text_too_slow, height=text_height) beginning_learning_block_message = TextStim(win=mywin, text="A block of trials will start now. Here, you will receive feedback after each choice you made.", color=text_beginning_block, height=text_height) beginning_transfer_block_message = TextStim(win=mywin, text="A block of trials will start now. Here, you will no longer receive feedback.", color=text_beginning_block, height=text_height) messages_beginning = [beginning_learning_block_message, beginning_transfer_block_message] end_transfer_message = TextStim(win=mywin, color=text_beginning_block, height=text_height) #create the dataframe data = pd.DataFrame([]) #draw the stimuli trial_routine = Routine(window=mywin, frames_per_second=frames_per_second, escape_key=escape_key) for bl in range(n_blocks): block = blocks[bl] trial_routine.wait_for_time_limit( components=[messages_beginning[bl]], time_seconds=message_beginning_duration, label='message_beginning') for t in range(n_trials): # put here things that change at the beginning of every trial image_trial = 'patch{}.png'.format(block['image_number'][t]) correct_resp_trial = block['correct_response'][t] patch_image.image = os.path.join(directory_stimuli, image_trial)
""" ZAMG hourly synop import routine Get hourly synop data for selected weather stations in Austria. The code is licensed under the MIT license. """ import pandas as pd from routines import Routine from routines.schema import hourly_synop task = Routine('import.zamg.hourly.synop') # Configuration parse_dates = { 'time': [1, 2] } usecols = [0, 3, 4, 5, 7, 8, 9, 11, 12, 13, 15] names = { 'Station': 'station', 'T °C': 'temp', 'RF %': 'rhum', 'WR °': 'wdir', 'WG km/h': 'wspd', 'WSG km/h': 'wpgt', 'N l/m²': 'prcp', 'LDred hPa': 'pres', 'SO %': 'tsun'
'83': 19, '84': 20, '85': 21, '86': 22, '66': 10, '67': 11, '56': 10, '57': 11, '95': 25 } return condicodes.get(str(code), None) # Create new task task = Routine('import.dwd.hourly.model') # Get counter value counter = task.get_var('station_counter') skip = 0 if counter is None else int(counter) # Get MOSMIX stations try: stations = pd.read_csv(MOSMIX_PATH, dtype='str', skiprows=skip, nrows=STATIONS_PER_CYCLE, names=['id', 'mosmix']) except pd.errors.EmptyDataError: stations = None pass
# Configuration MODE = argv[1] STATIONS_PER_CYCLE = 1 if MODE == 'recent' else 4 USAF_WBAN_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '../../..', 'resources')) + '/usaf_wban.csv' CURRENT_YEAR = datetime.now().year # Required columns usecols = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10] # Column names NAMES = ['time', 'temp', 'dwpt', 'pres', 'wdir', 'wspd', 'prcp'] # Create new task task = Routine('import.noaa.hourly.global') # Get counter value counter = task.get_var('station_counter_' + MODE) skip = 0 if counter is None else int(counter) # Get year if MODE == 'historical': year = task.get_var('year') year = 1901 if year is None else int(year) # Get ISD Lite stations try: stations = pd.read_csv(USAF_WBAN_PATH, dtype='str', skiprows=skip,
'right_feedback': 0 }) stimuli.loc[stimuli.right_image == 'B.png', 'right_feedback'] = rewards_B stimuli.loc[stimuli.right_image == 'C.png', 'right_feedback'] = rewards_C stimuli = stimuli.sample(frac=1).reset_index(drop=True) stimuli['trial'] = np.arange(n_trials) + 1 #create the dataframe data = pd.DataFrame([]) #draw the stimuli trial_routine = Routine(window=mywin, frames_per_second=frames_per_second, escape_key=escape_key) for t in range(n_trials): # put here things that change every trial left_feedback.text = '%s' % stimuli.loc[t, 'left_feedback'] right_feedback.text = '%s' % stimuli.loc[t, 'right_feedback'] left_picture.image = os.path.join(os.getcwd(), 'stimuli', 'example_4', stimuli.loc[t, 'left_image']) right_picture.image = os.path.join(os.getcwd(), 'stimuli', 'example_4', stimuli.loc[t, 'right_image']) # first event trial_routine.wait_for_time_limit(components=[fixation_cross], time_seconds=fixation_duration,
The code is licensed under the MIT license.
"""

from sys import argv
from io import BytesIO, StringIO
from gzip import GzipFile
import csv
from datetime import datetime

from routines import Routine

# Configuration
# Export scope and mode come from the CLI.
SCOPE = argv[1]
MODE = argv[2]
# More stations per cycle in 'recent' mode, fewer otherwise.
STATIONS_PER_CYCLE = 8 if MODE == 'recent' else 1

# Create the bulk-export task.
# NOTE(review): the second argument presumably enables bulk/FTP mode — confirm
# against the Routine constructor.
task = Routine(f'export.bulk.hourly.{SCOPE}.{MODE}', True)

# Fetch the next batch of stations that have hourly inventory data.
# The 'full' scope additionally includes stations with inventory mode 'P'.
stations = task.get_stations(
    f'''
    SELECT
        `stations`.`id` AS `id`
    FROM `stations`
    WHERE
        `stations`.`id` IN (
            SELECT DISTINCT `station`
            FROM `inventory`
            WHERE
                `mode` IN {"('H', 'P')" if SCOPE == 'full' else "('H')"}
        )
    ''',
    STATIONS_PER_CYCLE)
# Column names
# Map GHCN daily source columns onto the target schema's field names.
names = {
    'MM/DD/YYYY': 'time',
    'TMAX': 'tmax',
    'TMIN': 'tmin',
    'TAVG': 'tavg',
    'PRCP': 'prcp',
    'SNWD': 'snow',
    'AWDR': 'wdir',
    'AWND': 'wspd',
    'TSUN': 'tsun',
    'WSFG': 'wpgt'
}

# Create new task
task = Routine('import.noaa.daily.global')

# Get counter value
# The counter persists the station-list offset between scheduler cycles.
counter = task.get_var('station_counter')
skip = 0 if counter is None else int(counter)

# Get GHCN stations
# Read the next STATIONS_PER_CYCLE rows of the station list, starting at the
# persisted offset. (GHCN_PATH and STATIONS_PER_CYCLE are defined earlier in
# this file.)
try:
    stations = pd.read_csv(
        GHCN_PATH,
        dtype='str',
        skiprows=skip,
        nrows=STATIONS_PER_CYCLE,
        names=['id', 'ghcn'])
except pd.errors.EmptyDataError:
    # Offset is past the end of the list: no stations this cycle.
    # (Removed a dead `pass` that followed this assignment.)
    stations = None
NAMES = { 'FX': 'wpgt', 'FM': 'wspd', 'RSK': 'prcp', 'SDK': 'tsun', 'SHK_TAG': 'snow', 'PM': 'pres', 'TMK': 'tavg', 'UPM': 'rhum', 'TXK': 'tmax', 'TNK': 'tmin' } # Create task task = Routine('import.dwd.daily.national') # Connect to DWD FTP server ftp = FTP(DWD_FTP_SERVER) ftp.login() ftp.cwd('/climate_environment/CDC/observations_germany/climate/daily/kl/' + MODE) # Get counter value counter = task.get_var(f'station_counter_{MODE}') counter = int(counter) if counter is not None else 0 skip = 3 if counter is None else 3 + counter # Get all files in directory try: endpos = STATIONS_PER_CYCLE + skip
def new_game(self, columns, rows):
    """Reset all game state and lay out a fresh columns x rows board."""
    # RESET GAME
    self.cards.clear()
    self.score = 0
    self.selected_cards.clear()
    self.wrong_attempts = 0
    self.score_text.text = Game.defaultScoreText + "0"
    self.win_text.is_visible = False

    # Window dimensions drive the whole layout.
    screen_h = self.screen.get_height()
    screen_w = self.screen.get_width()

    # Card size is relative to the screen (both axes scale with height).
    self.card_size.y = (screen_h * 0.8) / rows
    self.card_size.x = (screen_h * 0.6) / columns

    # Offset that centres the full grid (cards plus gaps) in the window.
    grid_w = (columns - 1) * self.cards_offset + columns * self.card_size.x
    grid_h = (rows - 1) * self.cards_offset + rows * self.card_size.y
    offset = vector2()
    offset.x = (screen_w - grid_w) / 2
    offset.y = (screen_h - grid_h) / 2

    # Instantiate one card button per grid cell.
    for col in range(columns):
        for row in range(rows):
            pos = vector2()
            pos.x = col * (self.cards_offset + self.card_size.x) + offset.x
            pos.y = row * (self.cards_offset + self.card_size.y) + offset.y
            btn = FigureButton(pos, self.card_size, str(row * columns + col))
            # Run select_card as a coroutine so the reveal delay never blocks
            # the application with a thread sleep; the default argument pins
            # each button to its own lambda (avoids late binding).
            btn.on_click = lambda b=btn: Routine.start_coroutine(
                self.select_card(b))
            self.cards.append(btn)

    # Shuffle twice to get a better mix.
    random.shuffle(self.cards)
    random.shuffle(self.cards)

    # Available colours and figure shapes.
    colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 0, 255),
              (255, 255, 0), (0, 255, 255)]
    figures = [0, 1, 2]
    total = rows * columns

    # Assign colour/figure pairs: each group of six cards covers the three
    # figures twice (so every card has a partner); once the colour list is
    # exhausted it starts to repeat.
    i = 0
    while i // 6 < total:
        for figure_index in range(3):  # for each shape
            for _ in range(2):  # twice, to form pairs
                if i >= total:
                    return
                self.cards[i].shape_color = colors[(i // 6) % len(colors)]
                self.cards[i].shape = figures[figure_index]
                i += 1
Get hourly model forecasts for weather stations based on geo location The code is licensed under the MIT license. """ from urllib import request, error import json import pandas as pd from routines import Routine from routines.schema import hourly_model # Configuration STATIONS_PER_CYCLE = 20 task = Routine('import.metno.hourly.model') stations = task.get_stations(""" SELECT `stations`.`id` AS `id`, `stations`.`latitude` AS `latitude`, `stations`.`longitude` AS `longitude`, `stations`.`altitude` AS `altitude` FROM `stations` WHERE `stations`.`latitude` IS NOT NULL AND `stations`.`longitude` IS NOT NULL AND `stations`.`altitude` IS NOT NULL AND `stations`.`mosmix` IS NULL AND `stations`.`id` IN ( SELECT DISTINCT `station`
#create some stimuli fixed_gamble = TextStim(win=mywin, text='50% chance of winning CHF 600', color=text_color, pos=(-options_x_offset, 0)) changing_gamble = TextStim(win=mywin, color=text_color, pos=(options_x_offset, 0)) fixation_cross = TextStim(win=mywin, text='+', color=text_color) rewards = np.arange(0, 601, 30) rewards[0] += 1 n_trials = len(rewards) #create the dataframe data = pd.DataFrame([]) #draw the stimuli trial_routine = Routine(window=mywin, frames_per_second=frames_per_second, escape_key=escape_key) for t in range(n_trials): # put here things that change every trial changing_gamble.text = 'a sure gain of CHF %s' % rewards[t] # first event trial_routine.wait_for_time_limit( components=[fixation_cross], time_seconds=fixation_duration, label='fixation_cross') # second event key, rt = trial_routine.wait_for_keys_or_time_limit( components=[fixed_gamble, changing_gamble], valid_keys=choice_keys,
ftp.cwd( '/climate_environment/CDC/observations_germany/climate/hourly/' + path) files = ftp.nlst() matching = [f for f in files if needle in f] file = matching[0] except BaseException: pass return file # Create task task = Routine('import.dwd.hourly.national') # Connect to DWD FTP server ftp = FTP(DWD_FTP_SERVER) ftp.login() ftp.cwd('/climate_environment/CDC/observations_germany/climate/hourly/' + BASE_DIR) # Get counter value counter = task.get_var(f'station_counter_{MODE}') counter = int(counter) if counter is not None else 0 skip = 3 if counter is None else 3 + counter # Get all files in directory try: endpos = STATIONS_PER_CYCLE + skip
""" Update hourly inventory The code is licensed under the MIT license. """ from routines import Routine task = Routine('task.inventory.hourly') task.query(''' INSERT INTO `inventory`(`station`, `mode`, `start`) SELECT `station`, 'H' AS `mode`, MIN(`mindate`) AS `start` FROM ( (SELECT `station`, DATE(MIN(`time`)) as `mindate` FROM `hourly_synop` GROUP BY `station`) UNION ALL (SELECT `station`, DATE(MIN(`time`)) as `mindate` FROM `hourly_metar` GROUP BY `station`) UNION ALL (SELECT `station`,
""" Export meta data for weather stations The code is licensed under the MIT license. """ from io import BytesIO, StringIO from gzip import GzipFile import csv import json from routines import Routine task = Routine('export.bulk.stations.meta', True) def write_json_dump(data: list, name: str) -> None: global task file = BytesIO() if len(data) > 0: with GzipFile(fileobj=file, mode='w') as gz: gz.write(json.dumps(data, indent=4, default=str).encode()) gz.close() file.seek(0) task.bulk_ftp.storbinary(f'STOR /stations/meta/{name}.json.gz', file)
""" Update daily inventory The code is licensed under the MIT license. """ from routines import Routine task = Routine('task.inventory.daily') task.query(''' INSERT INTO `inventory`(`station`, `mode`, `start`) SELECT `station`, 'D' AS `mode`, MIN(`mindate`) AS `start` FROM ( (SELECT `station`, MIN(`date`) as `mindate` FROM `daily_national` GROUP BY `station`) UNION ALL (SELECT `station`, MIN(`date`) as `mindate` FROM `daily_ghcn` GROUP BY `station`) ) AS `daily_inventory` GROUP BY `station` ON DUPLICATE KEY UPDATE