Example 1
def load_survey_data():
    """
        This function the survey data from the Azure SQL DB.

        :return: pandas.DataFrame: Returns dataframe with servey data.
    """
    logger.info("Started load_survey_data()")

    # open connection to Azure SQL DB
    conn, cursor = general_utils.connect_to_azure_sql_db()

    # get forecast date
    fcast_date = get_forecast_day()
    logger.info("fcast_date")
    logger.info(fcast_date)

    # extract the data from the Azure SQL DB and append one dummy row for the day to be predicted
    sql_stmt = """
        select * from sonntagsfrage.results_questionaire_clean
        union all
        select '""" + fcast_date + """', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '0', '0', '0'
        """
    df_survey_results = pd.read_sql(sql_stmt, conn)

    df_survey_results_clean = clean_survey_data(df_survey_results)

    df_survey_results_final = df_survey_results_clean
    utils.write_df_to_file(df_survey_results_final, 'load_survey_data')
    return df_survey_results_final
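
The examples on this page rely on a shared connect_to_azure_sql_db() helper that is not part of the snippets. A minimal sketch of what it might look like with pyodbc is shown below; the driver string and the environment variable names are assumptions, not taken from the original project.

import os
import pyodbc

def connect_to_azure_sql_db():
    """
        Opens a connection to the Azure SQL DB and returns (connection, cursor).

        Sketch only: driver name and environment variable names are assumptions.
    """
    conn_str = (
        "DRIVER={ODBC Driver 17 for SQL Server};"
        f"SERVER={os.environ['AZURE_SQL_SERVER']};"
        f"DATABASE={os.environ['AZURE_SQL_DATABASE']};"
        f"UID={os.environ['AZURE_SQL_USER']};"
        f"PWD={os.environ['AZURE_SQL_PASSWORD']}"
    )
    conn = pyodbc.connect(conn_str)
    return conn, conn.cursor()
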
Example 2
def export_results(df_input):
    """
        This function writes the generated predictions to variousu sources so that the preds can be consumed py other
            applications.

        :param df_input: The dataframe with predictions.
    """
    logger.info("Start export_results()")

    df_working = df_input.copy()
    df_working = utils.unset_datecol_as_index_if_needed(df_working)
    output_col_names = [DATE_COL] + get_pred_col_names() + ['estimator']
    target_table_name = 'sonntagsfrage.predictions_questionaire'

    df_output = df_working[output_col_names]

    # open connection
    conn, cursor = connect_to_azure_sql_db()

    # write to Azure SQL DB
    if WRITE_TO_AZURE:
        write_df_to_sql_db(df_output,
                           conn,
                           cursor,
                           target_table_name,
                           header=False,
                           delete_dates=False)
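
export_results() above and export_metrics() in Example 5 delegate the actual insert to write_df_to_sql_db(), which is not included in these snippets. Below is a minimal sketch under the assumption that it performs simple row-wise inserts with pyodbc; the header and delete_dates flags are accepted but their logic is omitted here.

def write_df_to_sql_db(df, conn, cursor, target_table_name, header=False, delete_dates=False):
    """
        Writes a dataframe to the given table in the Azure SQL DB.

        Sketch only: the real helper presumably also honours the header and
        delete_dates flags; they are ignored in this simplified version.
    """
    placeholders = ", ".join(["?"] * len(df.columns))
    sql_stmt = f"insert into {target_table_name} values ({placeholders})"
    cursor.executemany(sql_stmt, [tuple(row) for row in df.itertuples(index=False, name=None)])
    conn.commit()
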
Example 3
def main():
    """
        Main function, performs the data transfer from the Azure SQL DB to a Google Sheets document.
    """
    logger.info("Start main()")

    # open connections
    conn_azure, cursor_azure = conns.connect_to_azure_sql_db()
    conn_google = conns.connect_to_google_spreadsheets()

    # load worksheets
    sheet_data = conn_google.open(DATA_SPREADSHEET_NAME)
    sheet_preds = conn_google.open(PREDS_SPREADSHEET_NAME)
    data_worksheet = sheet_data.worksheet(DATA_WORKSHEET_NAME)
    preds_worksheet = sheet_preds.worksheet(PREDS_WORKSHEET_NAME)

    # load tables from Azure SQL DB
    sql_stmt = """select * from sonntagsfrage.v_predictions_questionaire_pivot"""
    df_table_preds = pd.read_sql(sql_stmt, conn_azure)
    sql_stmt = """select * from sonntagsfrage.v_results_questionaire_clean_pivot"""
    df_table_data = pd.read_sql(sql_stmt, conn_azure)

    empty_worksheet(data_worksheet)
    empty_worksheet(preds_worksheet)

    fill_worksheet_from_df(data_worksheet, df_table_data)
    fill_worksheet_from_df(preds_worksheet, df_table_preds)
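
empty_worksheet() and fill_worksheet_from_df() are project helpers that are not shown here. Assuming conn_google is a gspread client, a minimal sketch could look as follows; converting the dataframe to strings before upload is an assumption made to keep the values JSON-serialisable.

def empty_worksheet(worksheet):
    """
        Removes all values from a gspread worksheet.
    """
    worksheet.clear()

def fill_worksheet_from_df(worksheet, df):
    """
        Writes a dataframe to a gspread worksheet, header row first.
    """
    rows = [df.columns.tolist()] + df.astype(str).values.tolist()
    worksheet.append_rows(rows)
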
Example 4
def main(credentials=None):
    """
        Main function, performs the data transfer from the Azure SQL DB to a Google Sheets document.
    """
    logger.info("Start main()")

    # open connections
    conn_azure, cursor_azure = conns.connect_to_azure_sql_db()
    if credentials:
        conn_google = conns.connect_to_google_spreadsheets(
            auth_type='env', credentials=credentials)
    else:
        conn_google = conns.connect_to_google_spreadsheets()

    # load sheets
    sheet_data = conn_google.open(DATA_SPREADSHEET_NAME)
    sheet_preds = conn_google.open(PREDS_SPREADSHEET_NAME)
    sheet_metrics = conn_google.open(METRICS_SPREADSHEET_NAME)
    sheet_next_sunday = conn_google.open(NEXT_SUNDAY_SPREADSHEET_NAME)

    # load worksheets
    data_worksheet = sheet_data.worksheet(DATA_WORKSHEET_NAME)
    preds_worksheet = sheet_preds.worksheet(PREDS_WORKSHEET_NAME)
    metrics_worksheet = sheet_metrics.worksheet(METRICS_WORKSHEET_NAME)
    next_sunday_worksheet = sheet_next_sunday.worksheet(
        NEXT_SUNDAY_WORKSHEET_NAME)

    # load tables from Azure SQL DB
    sql_stmt = """select * from sonntagsfrage.v_predictions_questionaire_pivot"""
    df_table_preds = pd.read_sql(sql_stmt, conn_azure)
    sql_stmt = """select * from sonntagsfrage.v_results_questionaire_clean_pivot"""
    df_table_data = pd.read_sql(sql_stmt, conn_azure)
    sql_stmt = """select * from sonntagsfrage.v_metrics_pivot"""
    df_table_metrics = pd.read_sql(sql_stmt, conn_azure)
    sql_stmt = """select * from sonntagsfrage.v_prediction_next_sunday_pivot"""
    df_table_next_sunday = pd.read_sql(sql_stmt, conn_azure)

    empty_worksheet(data_worksheet)
    empty_worksheet(preds_worksheet)
    empty_worksheet(metrics_worksheet)
    empty_worksheet(next_sunday_worksheet)

    fill_worksheet_from_df(data_worksheet, df_table_data)
    fill_worksheet_from_df(preds_worksheet, df_table_preds)
    fill_worksheet_from_df(metrics_worksheet, df_table_metrics)
    fill_worksheet_from_df(next_sunday_worksheet, df_table_next_sunday)
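
conns.connect_to_google_spreadsheets() is also external to these snippets. Given the auth_type='env' branch above, a plausible sketch with gspread is shown below; the key file name and the handling of the credentials argument as a JSON string are assumptions.

import json
import gspread

def connect_to_google_spreadsheets(auth_type='file', credentials=None):
    """
        Returns an authorized gspread client.

        Sketch only: with auth_type='env' the credentials are assumed to be a
        JSON string (e.g. read from an environment variable); otherwise a
        service-account key file is read from disk.
    """
    if auth_type == 'env':
        return gspread.service_account_from_dict(json.loads(credentials))
    return gspread.service_account(filename='google_credentials.json')
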
Example 5
def export_metrics(df_input):
    """
        This function writes the metrics of the generated predictions to various sources so that the metrics can be
            consumed py other applications.

        :param df_input: The dataframe with predictions.
    """
    logger.info("Start export_results()")

    df_output = df_input.copy()
    target_table_name = 'sonntagsfrage.metric_results'

    # open connection
    conn, cursor = connect_to_azure_sql_db()

    # write to Azure SQL DB
    if WRITE_TO_AZURE:
        write_df_to_sql_db(df_output,
                           conn,
                           cursor,
                           target_table_name,
                           header=True,
                           delete_dates=False)
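
Taken together, the snippets suggest a load, predict, export pipeline. A usage sketch is given below; the fit_and_predict() step is hypothetical and not part of the original code.

def run_pipeline():
    """
        Hypothetical end-to-end run combining the snippets above.
    """
    df_survey = load_survey_data()                      # Example 1: survey data plus a dummy forecast row
    df_preds, df_metrics = fit_and_predict(df_survey)   # hypothetical modelling step, not shown in the snippets
    export_results(df_preds)                            # Example 2: predictions to sonntagsfrage.predictions_questionaire
    export_metrics(df_metrics)                          # Example 5: metrics to sonntagsfrage.metric_results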