Example #1
codes = Sectoring(sql, f"bea{vintage}", fillna='')
naics = pstat.build_lookup('lpermno', 'naics', fillna=0)
caps, counts, rets = [], [], []
for year in years:
    date = bd.endyr(year - 1)
    univ = crsp.get_universe(date)
    univ['bea'] = codes[naics(univ.index, date)]
    univ = univ[univ['bea'].ne('')]
    grouped = univ.groupby('bea')
    caps.append(grouped['cap'].sum().rename(year))
    counts.append(grouped['cap'].count().rename(year))
        
    months = bd.date_range(date, bd.endyr(year), 'endmo')
    for rebaldate, end in zip(months[:-1], months[1:]):
        r = pd.concat([crsp.get_ret(bd.begmo(end), end),
                       crsp.get_cap(rebaldate, use_permco=False),
                       univ['bea']], axis=1, join='inner').dropna()
        grp = r.groupby('bea')   # industry ret is sum of weighted rets
        r['wtdret'] = r['ret'].mul(r['cap'].div(grp['cap'].transform('sum')))
        rets.append(grp['wtdret'].sum(min_count=1).rename(end))
        print(end, len(r), r['wtdret'].sum() / len(grp))  # month-end, #stocks, average industry return
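
# A tiny self-contained check of the value-weighted industry return logic in
# the loop above (column names mirror the loop; the numbers are made up):
import numpy as np
import pandas as pd
_toy = pd.DataFrame({'bea': ['11', '11', '22'],
                     'ret': [0.10, -0.02, 0.05],
                     'cap': [300., 100., 50.]})
_toy['wtdret'] = _toy['ret'] * _toy['cap'] / _toy.groupby('bea')['cap'].transform('sum')
assert np.allclose(_toy.groupby('bea')['wtdret'].sum(),      # sum of weighted rets ...
                   (_toy['ret'] * _toy['cap']).groupby(_toy['bea']).sum()
                   / _toy.groupby('bea')['cap'].sum())       # ... equals cap-weighted mean return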

# collect and average market caps, counts and returns
caps = pd.concat(caps, axis=1).mean(axis=1)     # average cap over years
counts = pd.concat(counts, axis=1).mean(axis=1) # average count
rets = pd.concat(rets, axis=1)

# create node variables: count and cap (logs will be taken later)
nodevars = pd.concat([caps.rename('cap'), counts.rename('count')], axis=1)
rets = rets.T[nodevars.index]    # ensure same order of industries
n = len(nodevars.index)
print(ls.get_robustcov_results('HC0').summary())
print(ls.get_robustcov_results('HAC', maxlags=3).summary())
print(ls.get_robustcov_results('hac-panel', groups=rets['port'],
                               maxlags=3).summary())
print(ls.get_robustcov_results('cluster', groups=rets['port']).summary())
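
# `ls` above is a fitted statsmodels OLS result from a regression that is not
# shown in this excerpt.  A minimal self-contained sketch (synthetic data,
# illustrative names only) of producing such an object for the robust-covariance
# calls above:
import numpy as np
import statsmodels.api as sm
_rng = np.random.default_rng(0)
_X = sm.add_constant(_rng.normal(size=(120, 2)))    # constant plus two regressors
_y = _X @ np.array([0.1, 0.5, -0.3]) + _rng.normal(scale=0.2, size=120)
_ls = sm.OLS(_y, _X).fit()                          # then .get_robustcov_results(...) as above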

## Fama-MacBeth with individual stocks and standardized scores as loadings
rebalbeg = 19640601
rebalend = LAST_DATE
rebaldates = crsp.bd.date_range(rebalbeg, rebalend, 'endmo')
loadings = dict()
for pordate in rebaldates:  # retrieve signal values every month
    date = bd.june_universe(pordate)
    univ = crsp.get_universe(date)
    cap = np.sqrt(crsp.get_cap(date)['cap'])
    smb = -np.log(cap).rename('size')
    hml = signals('hml', date, bd.endmo(date, -12))['hml'].rename('value')
    beta = (signals('beta', pordate, bd.begmo(pordate))['beta'] * 2/3) + 1/3  # shrink beta toward 1
    mom = signals('mom', pordate)['mom'].rename('momentum')
    df = pd.concat((beta, hml, smb, mom), join='inner', axis=1)\
           .reindex(univ.index)\
           .dropna()   # inner join of the signals, restricted to the universe
    loadings[pordate] = winsorized(df, quantiles=[0.05, 0.95])
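
# winsorized() is a helper from the surrounding package; a minimal pandas
# equivalent of the assumed behavior (clip each column of the loadings at the
# 5th and 95th percentiles) would be:
def _winsorize(frame, quantiles=(0.05, 0.95)):
    lower, upper = frame.quantile(quantiles[0]), frame.quantile(quantiles[1])
    return frame.clip(lower=lower, upper=upper, axis=1)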

## Compute coefficients from FM cross-sectional regressions
riskpremium = RiskPremium(user, bench, 'RF', LAST_DATE)
riskpremium(
    crsp,
    ...)    # remaining arguments truncated in this excerpt
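
# RiskPremium is a class from the surrounding package and its call above is
# truncated in this excerpt.  As a rough, generic sketch of the Fama-MacBeth
# second stage it is meant to perform (assumed inputs, not the actual
# implementation): regress each month's stock returns cross-sectionally on the
# loadings built above, then average the monthly coefficients.
import numpy as np
import pandas as pd
import statsmodels.api as sm

def _fama_macbeth(loadings, monthly_rets):
    """loadings: dict of date -> DataFrame of loadings per stock (as built above);
    monthly_rets: dict of date -> Series of next-month stock returns (hypothetical)"""
    coefs = []
    for date, X in loadings.items():
        y = monthly_rets[date].reindex(X.index).dropna()
        fit = sm.OLS(y, sm.add_constant(X.loc[y.index])).fit()
        coefs.append(fit.params.rename(date))
    coefs = pd.concat(coefs, axis=1).T                  # one row of coefficients per month
    tstats = coefs.mean() / (coefs.std() / np.sqrt(len(coefs)))
    return coefs.mean(), tstats                         # average premia and their t-statistics
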
# fall back to pstk (preferred stock), then to zero, where the label item is missing
df[label] = np.where(df[label].isna(), df['pstk'], df[label])
df[label] = np.where(df[label].isna(), 0, df[label])
# book equity = stockholders' equity (seq) + deferred taxes (txditc) - preferred stock
df[label] = df['seq'] + df['txditc'].fillna(0) - df[label]
df.dropna(subset=[label], inplace=True)
df = df[df[label] > 0][['permno', 'gvkey', 'datadate', label]]  # keep positive book equity only

## years in Compustat
df = df.sort_values(by=['gvkey', 'datadate'])
df['count'] = df.groupby(['gvkey']).cumcount()
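
# cumcount() numbers each gvkey's records 0, 1, 2, ... in datadate order, so the
# value equals the number of earlier annual records (tiny made-up illustration):
import pandas as pd
_toy = pd.DataFrame({'gvkey': [1, 1, 1, 2],
                     'datadate': [20011231, 20021231, 20031231, 20021231]})
print(_toy.groupby('gvkey').cumcount().tolist())    # [0, 1, 2, 0]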

## construct b/m ratio
df['rebaldate'] = 0
for datadate in sorted(df['datadate'].unique()):
    f = df['datadate'].eq(datadate)
    df.loc[f, 'rebaldate'] = crsp.bd.endmo(datadate, abs(lag))  # 6 month lag
    df.loc[f, 'cap'] = crsp.get_cap(crsp.bd.endyr(datadate))\
                           .reindex(df.loc[f, 'permno']).values  # December market cap
    print(datadate, sum(f))
df[label] /= df['cap']
df = df[df[label].gt(0) & df['count'].gt(1)]  # 2+ years in Compustat

## compute HML portfolio holdings
signals = chunk_signal(df)
holdings = famafrench_sorts(crsp,
                            'hml',
                            signals,
                            rebalbeg,
                            LAST_DATE,
                            window=12,
                            months=[6],
                            rebals=rebals)['holdings']
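
# famafrench_sorts() is a helper from the surrounding package whose internals
# are not shown.  A rough sketch of the kind of HML-style assignment it is
# assumed to perform at each rebalance (illustrative only, not the actual
# breakpoint scheme): long the top 30% of the signal, short the bottom 30%,
# value-weighted within each leg.
import pandas as pd

def _hml_weights(signal, cap):
    """signal, cap: Series indexed by permno for one rebalance date"""
    lo, hi = signal.quantile([0.3, 0.7])
    long_cap, short_cap = cap[signal >= hi], cap[signal <= lo]
    return pd.concat([long_cap / long_cap.sum(),      # long leg sums to +1
                      -short_cap / short_cap.sum()])  # short leg sums to -1
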
Example #4
"""
- same-year window [yr]0101:[yr]1231 = bd.begyr(caldate) to caldate
- year-ahead (lagged) window [yr+1]0401:[yr+2]0331 = bd.begmo(caldate, 4) to bd.endmo(caldate, 15)
"""
for ifig, key in enumerate(['mdasent', 'mdachg', 'mdacos']):
    ret1 = {}  # to collect year-ahead spread returns
    ret0 = {}  # to collect current-year spread returns
    for year in sorted(np.unique(data['year'])):  # loop over years

        # compute current year spread returns
        beg = bd.begyr(year)
        end = bd.endyr(year)
        univ = data[data['year'] == year]\
                   .dropna(subset=[key])\
                   .set_index('permno')\
                   .join(crsp.get_cap(bd.offset(beg, -1)), how='inner')\
                   .join(crsp.get_ret(beg, end, delist=True), how='left')
        if len(univ):
            sub = fractiles(univ[key], [20, 80])
            pos = weighted_average(univ.loc[sub == 1, ['cap', 'ret']],
                                   'cap')['ret']
            neg = weighted_average(univ.loc[sub == 3, ['cap', 'ret']],
                                   'cap')['ret']
            ret0[end] = {
                'ret': pos - neg,
                'npos': sum(sub == 1),
                'nneg': sum(sub == 3)
            }
            if ECHO:
                print(end, len(univ), pos, neg)