def route_preview_questionnaire(questionnaireName):
    """Preview a questionnaire's JSON definition and surface schema errors.

    Loads ``<root>/questionnaires/<questionnaireName>.json`` and renders it.
    If the questionnaire's backing table is out of sync with its model, the
    collected error messages are shown; passing ``?fix_errors`` attempts to
    automatically add the missing column or create the missing table.

    :param questionnaireName: bare questionnaire name (no path, no extension)
    :return: rendered ``preview_questionnaire.html``
    """
    errors = []
    # Was previously unbound when loading failed, which made the final
    # render_template call crash with NameError instead of showing errors.
    jsonData = None
    try:
        path = (current_app.root_path + '/questionnaires/'
                + questionnaireName + ".json")
        # 'with' guarantees the handle is closed (the old code leaked it).
        with open(path, 'r') as f:
            jsonData = json.loads(f.read())
    except (OSError, ValueError) as e:
        # OSError: file missing/unreadable; ValueError: malformed JSON.
        errors = list(e.args)

    tableName = "questionnaire_" + questionnaireName
    if questionnaireName in page_list.get_questionnaire_list():
        try:
            # Probe the table so schema mismatches show up as errors here.
            db.session.query(db.metadata.tables[tableName]).first()
        except Exception as e:
            errors.extend(list(e.args))
            if "(OperationalError) no such column:" in e.args[0]:
                errors.append(
                    "Click <a href=\"?fix_errors\">here</a> if you would like to try to automatically add "
                    "this column. Alternatively, you can drop the table and it will be recreated."
                )
            elif "(OperationalError) no such table:" in e.args[0]:
                errors.append(
                    "Click <a href=\"?fix_errors\">here</a> if you would like to try to automatically create "
                    "this table. Alternatively, you can restart the server and it will be created."
                )

    if 'fix_errors' in request.args:
        # Figure out what column it is by parsing the error messages.
        # Iterate over a snapshot: we append success messages to `errors`
        # while looping (the old code iterated the growing list directly).
        for message in list(errors):
            if "(OperationalError) no such column:" in message:
                # The column name follows "<tableName>." in the message.
                columnName = message.split(tableName + ".")[-1]
                dataType = db.metadata.tables[tableName].columns[columnName].type
                addColumn = db.DDL("ALTER TABLE {} ADD COLUMN {} {}".format(
                    tableName, columnName, dataType))
                db.engine.execute(addColumn)
                errors.append(
                    u"{} {} was added to {}. "
                    u"This error should be gone when you refresh.".format(
                        columnName, dataType, tableName))
            if "(OperationalError) no such table:" in message:
                db.create_all()
                errors.append(u"The error should be gone if you refresh.")
    return render_template("preview_questionnaire.html",
                           q=jsonData, errors=errors)
def route_results():
    """Compute descriptive statistics for every questionnaire and render them.

    For each questionnaire (including tagged variants) the stored responses
    are queried and descriptives are calculated; the per-questionnaire
    results objects are passed to ``results.html`` keyed by name-and-tag.
    """
    summaries = {}
    for entry in page_list.get_questionnaire_list(include_tags=True):
        name, tag = questionnaire_name_and_tag(entry)
        stats = QuestionnaireResults(questionnaires[name], tag)
        stats.run_query()
        stats.calc_descriptives()
        summaries[entry] = stats
    return render_template("results.html", results=summaries)
def route_export():
    """Export all questionnaire data as one CSV.

    Builds one row per participant containing participant metadata, every
    questionnaire's fields, per-questionnaire durations, calculated fields,
    and any custom repeated-measures exports configured in
    ``current_app.config['EXPORT']``.  Query args: ``includeUnfinished``
    keeps participants who never finished; ``includeMissing`` keeps
    participants with missing questionnaires (outer join).  Serves the CSV
    as a download when the URL ends in ``/download``, otherwise renders a
    preview page.
    """
    # Count of unfinished participants — for display only.
    unfinishedCount = db.session.query(db.Participant).filter(
        db.Participant.finished == False).count()
    missingCount = 0
    # innerJoins: participants with complete data.
    # leftJoins: participants with complete or incomplete data.
    innerJoins = db.session.query(db.Participant)
    leftJoins = db.session.query(db.Participant)
    includeUnfinished = request.args.get('includeUnfinished', False)
    includeMissing = request.args.get('includeMissing', False)
    qList = page_list.get_questionnaire_list(include_tags=True)

    columns = {'participant': [
        "participantID", "mTurkID", "condition", "duration", "finished"
    ]}
    calculatedColumns = {}

    # First loop constructs the queries and fetches the column names.
    for qNameAndTag in qList:
        qName, qTag = questionnaire_name_and_tag(qNameAndTag)
        # The python class that describes the questionnaire.
        questionnaire = questionnaires[qName]
        # Add the questionnaire's table/class to both queries.
        qDBC = db.aliased(questionnaire.dbClass, name=qNameAndTag)
        joinCondition = db.and_(
            qDBC.participantID == db.Participant.participantID,
            qDBC.tag == qTag)
        leftJoins = leftJoins.outerjoin(qDBC, joinCondition).add_entity(qDBC)
        innerJoins = innerJoins.join(qDBC, joinCondition).add_entity(qDBC)
        # Lists of (calculated) columns, used below for the CSV header row.
        columns[qNameAndTag] = [column.id for column in questionnaire.fields]
        calculatedColumns[qNameAndTag] = [
            column for column in questionnaire.calcFields]

    if not includeUnfinished:
        leftJoins = leftJoins.filter(db.Participant.finished == True)
        innerJoins = innerJoins.filter(db.Participant.finished == True)
    leftJoins = leftJoins.group_by(db.Participant.participantID)
    innerJoins = innerJoins.group_by(db.Participant.participantID)

    if includeMissing:
        rows = leftJoins.all()
    else:
        rows = innerJoins.all()
    # Finished participants that nonetheless lack at least one questionnaire.
    missingCount = leftJoins.filter(
        db.Participant.finished == True).count() - innerJoins.count()

    # Repeated measures in other tables...
    customExports = []
    for export in current_app.config['EXPORT']:
        levels, baseQuery = create_export_base_queries(export)
        customExports.append(
            {'options': export, 'base_query': baseQuery, 'levels': levels})

    # Now that the data is loaded, construct the CSV syntax.
    # Starting with the header row...  (copy, to avoid mutating `columns`)
    columnList = list(columns['participant'])
    for qNameAndTag in qList:
        qName, qTag = questionnaire_name_and_tag(qNameAndTag)
        # Every column of this questionnaire gets this prefix.
        prefix = (qName + "_" + qTag + "_") if qTag != "" else (qName + "_")
        for col in columns[qNameAndTag]:
            # If it's already prefixed, strip the full "name_" prefix once.
            # (The old code did col.replace(qName, ""), which replaced every
            # occurrence and left a stray underscore: "qName__field".)
            if col.startswith(qName + "_"):
                col = col[len(qName) + 1:]
            columnList.append(prefix + col)
        columnList.append(prefix + "duration")
        # Add any calculated columns to the CSV header.
        for calcCol in calculatedColumns[qNameAndTag]:
            columnList.append(prefix + calcCol)
    # For custom exports, add columns based on levels found by the prior query.
    for export in customExports:
        for level in export['levels']:
            for field in export['options']['fields']:
                columnList.append("{}_{}".format(
                    field, str(level[0]).replace(" ", "_")))

    # Accumulate CSV fragments in a list and join once at the end — the old
    # per-cell string += was quadratic in the output size.
    parts = [",".join(columnList) + "\n"]  # CSV header
    for row in rows:
        parts.append("{},{},{},{},{}".format(
            row.Participant.participantID,
            row.Participant.mTurkID,
            row.Participant.condition,
            row.Participant.duration,
            row.Participant.finished))
        for qNameAndTag in qList:
            qData = getattr(row, qNameAndTag)
            if qData:
                for col in columns[qNameAndTag]:
                    parts.append("," + escape_csv(getattr(qData, col)))
                # Special case for duration (a method, not a column).
                parts.append(",{}".format(qData.duration()))
                # Calculated fields are methods on the table class.
                for col in calculatedColumns[qNameAndTag]:
                    parts.append("," + escape_csv(getattr(qData, col)()))
            else:
                # Questionnaire missing for this participant: one blank cell
                # per field, plus duration, plus each calculated column.
                parts.append("," * (len(columns[qNameAndTag]) + 1
                                    + len(calculatedColumns[qNameAndTag])))
        for export in customExports:
            query = export['base_query'].filter(
                db.literal_column('participantID')
                == row.Participant.participantID)
            # Running separate queries per participant gets the job done,
            # but will be kind of slow with many participants...
            # Build a dictionary with one row per level.
            customExportRMs = {}
            for r in query.all():
                classValues = getattr(r, export['options']['table'])
                groupValue = getattr(classValues, export['options']['group_by'])
                customExportRMs[groupValue] = r
            for level in export['levels']:
                if level[0] not in customExportRMs:
                    # Missing data: one blank cell PER FIELD.  (The old code
                    # emitted a single comma and broke out of the field loop,
                    # misaligning the row whenever an export had >1 field.)
                    parts.append("," * len(export['options']['fields']))
                    continue
                rm = customExportRMs[level[0]]
                classValues = getattr(rm, export['options']['table'])
                for field in export['options']['fields']:
                    # The entire table class is added to the query, as well as
                    # the individual fields — try the class first since it
                    # also has access to python properties.
                    if hasattr(classValues, field):
                        value = getattr(classValues, field)
                    else:
                        value = getattr(rm, field)
                    if callable(value):
                        value = value()
                    parts.append("," + escape_csv(value))
        parts.append("\n")
    csvString = "".join(parts)

    if request.base_url.endswith("/download"):
        return Response(
            csvString,
            mimetype="text/csv",
            headers={
                "Content-disposition": "attachment; filename=%s.csv" % (
                    "export_" + datetime.now().strftime("%Y-%m-%d_%H-%M"))
            })
    else:
        return render_template("export.html", data=csvString,
                               rowCount=len(rows),
                               unfinishedCount=unfinishedCount,
                               missingCount=missingCount)