def db_setup():
    """Provide a clean psiTurk database for a test.

    Initializes the schema, empties all tables before handing control to
    the test, and empties them again afterwards.
    """
    # Importing the models module registers the tables before init_db runs.
    import psiturk.models
    from psiturk import db as ptdb

    ptdb.init_db()
    ptdb.truncate_tables()  # start from an empty database
    yield
    ptdb.truncate_tables()  # leave no rows behind for the next test
def launch():
    """Launch the experiment.

    Initializes the database tables, opens recruitment for the initial
    batch of participants, commits both sessions, and returns a success
    response for the launch request.
    """
    exp = experiment(db.init_db(drop_all=False))
    exp.log("Launching experiment...", "-----")
    init_db()

    # Open recruitment for the configured initial batch size.
    recruiter = exp.recruiter()
    recruiter.open_recruitment(n=exp.initial_recruitment_size)

    # Persist any state touched during recruitment in both sessions.
    session_psiturk.commit()
    session.commit()

    return success_response(request_type="launch")
def db_setup(mocker, experiment_dir, tmpdir, request):
    # Fixture: rebuild psiturk's DB layer from scratch so each test sees
    # freshly created tables. The parameters are presumably pytest fixtures
    # requested for their side effects (temp dirs, mocking) -- none are
    # referenced directly here.
    import psiturk.db
    reload(psiturk.db)  # re-run module-level engine/session setup
    import psiturk.models
    # Clear previously registered table definitions so the models module can
    # re-declare them cleanly on reload (avoids "table already defined").
    psiturk.models.Base.metadata.clear()
    reload(psiturk.models)
    from psiturk.db import init_db
    init_db()  # create the tables against the rebuilt metadata
    # NOTE(review): `reload` is not imported in this block -- assumes the
    # Python 2 builtin or a file-level `from importlib import reload`; confirm.
    yield
def launch():
    """Launch the experiment.

    Initializes the database tables, opens recruitment for the initial
    batch of participants, commits both sessions, and returns a JSON
    success payload with HTTP 200.
    """
    exp = experiment(db.init_db(drop_all=False))
    exp.log("Launch route hit, initializing tables and opening recruitment.", "-----")
    init_db()

    exp.recruiter().open_recruitment(n=exp.initial_recruitment_size)

    # Persist any state touched during recruitment in both sessions.
    session_psiturk.commit()
    session.commit()

    exp.log("Experiment successfully launched!", "-----")
    payload = dumps({"status": "success"})
    return Response(payload, status=200, mimetype='application/json')
def do_it(participant_attributes=None):
    """Create, persist, and return a Participant row.

    Generates fake worker/HIT/assignment identifiers as defaults; any keys
    supplied in *participant_attributes* override the generated values.

    :param participant_attributes: optional dict of column overrides.
    :return: the committed Participant instance.
    """
    # Fix: the original used a mutable default argument (`={}`), a classic
    # Python pitfall -- the same dict object is shared across all calls.
    # A None sentinel preserves the call-site behavior exactly.
    if participant_attributes is None:
        participant_attributes = {}

    participant_attribute_defaults = {
        'workerid': faker.md5(raw_output=False),
        'hitid': faker.md5(raw_output=False),
        'assignmentid': faker.md5(raw_output=False),
    }
    # Merge with caller-supplied values winning, same as the original
    # list-concatenation merge but without building throwaway lists.
    merged = dict(participant_attribute_defaults)
    merged.update(participant_attributes)

    init_db()
    participant = Participant(**merged)
    db_session.add(participant)
    db_session.commit()
    return participant
# Seed script: slice a target image into tiles and register each tile in the
# Pixels table. Written for Python 2 (print statements below).
import image_slicer
import os
from psiturk.db import db_session, init_db, Base
from custom_models import Pixels

init_db()  # initialize the database, creating tables for custom_models.py if necessary

TILEPATH = './static/images/tiles/'  # output directory for generated tiles
TARGETFILE = 'brain.png'             # source image to slice
NTILES = 16                          # number of tiles to produce

# first delete all the files in the static/images/tiles folder
# (placeholder.txt is kept so the directory survives in version control)
files = os.listdir(TILEPATH)
for myfile in files:
    if myfile != 'placeholder.txt':
        print "removing existing file", TILEPATH+myfile
        os.remove(TILEPATH+myfile)

# query existing pixels -- clear out rows from any previous run
Pixels.query.delete()

# create the tiles using the target image
tiles = image_slicer.slice(TARGETFILE, NTILES, save=False)
image_slicer.save_tiles(tiles, prefix='tile', directory='static/images/tiles', format='png')

# add tiles to database
# NOTE(review): the source chunk ends mid-expression here -- the dict() call
# (and presumably the db_session.add/commit that follows) continues beyond
# the visible text.
for tile in tiles:
    pixel_width, pixel_height = tile.image.size
    print "dimensions:", (pixel_width, pixel_height) , "filename:", tile.filename
    pixel_attributes = dict(filename = tile.filename, n_completed = 0,
    # NOTE(review): this chunk begins mid-function -- the `if` matching the
    # `else:` below is outside the visible source, and the original text has
    # no indentation. Structure reconstructed; confirm against the full file.
    print("created new game of type " + str(new_game.__class__.__name__))
    for user in [a, b]:
        games[user] = new_game
        # Payload telling each client its assigned role and the game pattern.
        arg = {
            "role": games[user].role_string(user),
            "pattern": new_game.__class__.__name__
        }
        #sio.emit("sendTrainingMessage", "* You've been matched as "+ games[user].role_string(user) + ".", room=user)
        sio.emit("instructions", arg, room=user)
        #initial task
        #new_game.new_task()
    db_session.commit()
else:
    # No partner available yet -- tell this client to wait.
    sio.emit("sendTrainingMessage", "* Waiting for a partner.", room=uid)
#queue = queue


if __name__ == "__main__":
    # app = Flask(__name__)
    # app.register_blueprint(custom_code)
    # app.wsgi_app = socketio.Middleware(sio, app.wsgi_app)
    # app.run(host='localhost', port=5000)
    import os, sqlalchemy
    # Create the schema only on first run, when the SQLite file is absent.
    if not os.path.exists("participants.db"):
        init_db()
# import image_slicer import os import csv from psiturk.db import db_session, init_db, Base from custom_models import Word, Feature, Rating init_db( ) # initialze the data base, creating tables for the custom_models.py if necessary # read in the words, definitions, and feature strings with open('./stims/amt_unique_words.csv', 'rb') as f: amt_words = list(tuple(rec) for rec in csv.reader(f, delimiter='\t')) with open('./stims/amt_unique_defs.csv', 'rb') as f: amt_defs = list(tuple(rec) for rec in csv.reader(f, delimiter='\t')) with open('./stims/amt_feats.csv', 'rb') as f: amt_feats = list(tuple(rec) for rec in csv.reader(f, delimiter='\t')) # populate words table with words and definitions for idx in range(len(amt_words[0])): word = Word() word.word_string = amt_words[0][idx] word.word_definition = amt_defs[0][idx] db_session.add(word) db_session.commit() # put features in the feature table for cfeat in amt_feats[0]: feature = Feature() feature.feature_string = cfeat