Example #1
import json

import pandas as pd

# Project-local dependencies assumed by this example: db and db_utils (the
# database access layer), the Data and SupportData ORM models, and the
# module-level column-name constants filter_feature and validation.


def main(file):
    # Create the Postgres database if needed and open a session.
    db_utils.create_postgres_db()
    db.dal.connect()
    session = db.dal.Session()

    # Split the spreadsheet into the two modeled columns and everything else.
    df = pd.read_excel(file)

    data_columns = [filter_feature, validation]

    data = df[data_columns]
    # Serialize the remaining columns into one JSON record per row.
    support_data = json.loads(df[df.columns.difference(data_columns)].to_json(orient='records'))

    for i in range(len(data)):
        data_row = data.iloc[i]
        support_data_row = support_data[i]

        data_obj = Data(filter_feature=str(data_row[filter_feature]),
                        validation=int(data_row[validation]))
        session.add(data_obj)
        # Flush so data_obj.id is populated before the foreign key is set below.
        session.flush()

        support_data_obj = SupportData(support_data=support_data_row)
        # Wire up both sides of the one-to-one relationship plus the foreign key.
        data_obj.support_data = support_data_obj
        support_data_obj.data = data_obj
        support_data_obj.data_id = support_data_obj.data.id
        session.add(support_data_obj)

    session.commit()
    print(f'Loaded {len(data)} records of data and support_data.')
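
The loader above assumes a one-to-one Data/SupportData schema. Below is a minimal sketch of what those SQLAlchemy models could look like, inferred only from the attributes used in Example #1; the table names, column types, and relationship configuration are assumptions, not taken from the original project.

# Hypothetical model sketch inferred from Example #1; names and types are assumptions.
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Data(Base):
    __tablename__ = 'data'
    id = Column(Integer, primary_key=True)
    filter_feature = Column(String)
    validation = Column(Integer)
    # One-to-one link; this is what lets the loader assign data_obj.support_data.
    support_data = relationship('SupportData', back_populates='data', uselist=False)


class SupportData(Base):
    __tablename__ = 'support_data'
    id = Column(Integer, primary_key=True)
    data_id = Column(Integer, ForeignKey('data.id'))
    support_data = Column(JSONB)  # the leftover spreadsheet columns, stored as JSON
    data = relationship('Data', back_populates='support_data')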
Example #2
    def __init__(self, last_response_id, apiToken=None, surveyId=None,
                 fileFormat='json', dataCenter='cemgsa'):
        print("Getting data from Qualtrics...")
        # Fall back to the stored site-wide credentials when neither an API
        # token nor a survey ID is supplied.
        if not apiToken and not surveyId:
            apiToken = qualtrics_sitewide_creds['apiToken']
            surveyId = qualtrics_sitewide_creds['surveyId']

        self.apiToken = apiToken
        self.surveyId = surveyId
        self.fileFormat = fileFormat
        self.dataCenter = dataCenter

        # If no starting point was given, look up the last RespondentID already
        # stored in Postgres and resume from there.
        if not last_response_id:
            db_utils.create_postgres_db()
            db.dal.connect()
            session = db.dal.Session()
            last_response_id = db_utils.fetch_last_RespondentID(session)
        self.lastResponseId = last_response_id
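
A minimal usage sketch for this constructor, assuming the enclosing class is named QualtricsApi; the class name is not shown above, and it and every argument value here are placeholders.

# Hypothetical usage; QualtricsApi and all argument values are placeholders.

# Explicit credentials plus a known response ID to resume from:
qa = QualtricsApi('R_exampleResponseId',
                  apiToken='example-api-token',
                  surveyId='SV_exampleSurveyId',
                  fileFormat='json',
                  dataCenter='cemgsa')

# Passing None falls back to the stored site-wide credentials and to the
# last RespondentID already recorded in Postgres:
qa = QualtricsApi(None)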
Example #3
def main(survey_name="Site-Wide Survey English",
         model_description="model_sw.pkl"):
    '''
    Create the db if it doesn't exist; fetch survey data from Qualtrics; make
    predictions; give the user a chance to validate the predictions in a
    spreadsheet; and insert the data into the db.
    '''
    db_utils.create_postgres_db()
    db.dal.connect()
    session = db.dal.Session()

    # Pull new survey responses and run the classifier over them.
    df = get_survey_data(session)
    results_path, df, id_pred_map, outfile = make_predictions(df)

    # Pause so the user can review and correct the predictions in the
    # spreadsheet, then read the validated labels back in.
    user_prompt(outfile)
    validated_id_pred_map = get_validations(results_path)

    insert_data(df, validated_id_pred_map, id_pred_map, survey_name,
                model_description, session)
    session.commit()

    # Retrain the model now that the validated records are in the database.
    retrain_model(session)

    print("DONE!")
Example #4
@app.route('/predict')
@auth.login_required
def predict():
    return jsonify({'task': 'predict',
                    'username': auth.username()
                    })


@app.route('/validate')  # , methods=['POST'])
@auth.login_required
def validate():
    return jsonify({'task': 'validate',
                    'username': auth.username(),
                    }), 200


@app.route('/train')  # , methods=['POST'])
@auth.login_required
def train():

    return jsonify({'task': 'train',
                    'username': auth.username()
                    }), 200


if __name__ == "__main__":
    # Make sure the database exists and the data access layer is connected
    # before the API starts serving requests.
    db_utils.create_postgres_db()
    db.dal.connect()
    session = db.dal.Session()
    port = int(config.APP_PORT)
    app.run(host='0.0.0.0', port=port)
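
Because each route above is wrapped in @auth.login_required, a client has to supply HTTP Basic credentials. Below is a small sketch of how the endpoints could be exercised with requests; the base URL, port, and credentials are placeholders, not values from the original project.

# Hypothetical client calls; base URL, port, and credentials are placeholders.
import requests

BASE = 'http://localhost:8000'
AUTH = ('some_user', 'some_password')

for task in ('validate', 'train'):
    resp = requests.get(f'{BASE}/{task}', auth=AUTH)
    resp.raise_for_status()
    print(resp.json())  # e.g. {'task': 'validate', 'username': 'some_user'}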