def learn_non_conf(df, global_models):
    """
    Learn a per-day BayesianRidge model over all non-conference games.

    For each calendar day in the span of ``df.game_date``, fit a
    ``BayesianRidge`` on that day's non-conference games.  Days with no
    games inherit the previous day's model, except that a previous-day
    ``LinearRegression`` (the global fallback type) is replaced by that
    day's global model; the very first day falls back to the global
    model directly.

    Parameters
    ----------
    df : pandas.DataFrame
        Game rows with at least ``home_conf``, ``away_conf``,
        ``game_date`` and the rating/score columns listed in
        ``model_cols`` below.
    global_models : dict
        Maps date strings (``'YYYY-MM-DD'``) to already-fitted models,
        used as the fallback when no per-day model can be carried over.

    Returns
    -------
    dict
        Maps date strings to the model chosen/fitted for that day.
    """
    # Restrict to non-conference games (teams from different conferences)
    # and drop rows with missing values, then drop the now-redundant
    # conference columns.
    non_conf = df.loc[df.home_conf != df.away_conf].copy().dropna(axis=0)
    non_conf = non_conf.drop(['home_conf', 'away_conf'], axis=1)

    bayes_non_conf = dict()
    model_cols = [
        'sagarin_home', 'sagarin_away', 'kenpom_off_home', 'kenpom_def_home',
        'kenpom_off_away', 'kenpom_def_away', 'moore_home', 'moore_away',
        'score_diff'
    ]

    for day in pd.date_range(df.game_date.min(), df.game_date.max()):
        day = str(day.date())
        df_day = non_conf.loc[non_conf.game_date == day, model_cols]

        if len(df_day) == 0:
            # No non-conference games today: carry over yesterday's model.
            prev_day = str((pd.to_datetime(day) - pd.Timedelta(days=1)).date())
            try:
                prev_model = bayes_non_conf[prev_day]
            except KeyError:
                # First day of the range — nothing to carry over, so fall
                # back to the global model for this day.
                bayes_non_conf[day] = global_models[day]
            else:
                if isinstance(prev_model, LinearRegression):
                    # Yesterday was already a global fallback; use today's
                    # global model rather than propagating a stale one.
                    bayes_non_conf[day] = global_models[day]
                elif isinstance(prev_model, BayesianRidge):
                    bayes_non_conf[day] = prev_model
        else:
            # NOTE: ``normalize=`` was removed in scikit-learn 1.2; kept
            # here to preserve behavior on the pinned sklearn version.
            # (The original also preset ``bayes.coef_`` from the global
            # model, but ``fit`` re-estimates coefficients from scratch,
            # so that assignment was a no-op and has been removed.)
            bayes = BayesianRidge(normalize=True)

            X = df_day.drop('score_diff', axis=1)
            y = df_day.score_diff
            bayes_non_conf[day] = bayes.fit(X, y)

    return bayes_non_conf