Example 1
import itertools

import chainladder as cl


def test_pipeline():
    # Aggregate the CLRD sample by line of business and keep only the needed columns
    tri = cl.load_sample('clrd').groupby('LOB').sum()[[
        'CumPaidLoss', 'IncurLoss', 'EarnedPremDIR'
    ]]
    tri['CaseIncurredLoss'] = tri['IncurLoss'] - tri['CumPaidLoss']

    X = tri[['CumPaidLoss', 'CaseIncurredLoss']]
    sample_weight = tri['EarnedPremDIR'].latest_diagonal

    # Candidate estimators for each pipeline step
    dev = [
        cl.Development(),
        cl.ClarkLDF(),
        cl.Trend(),
        cl.IncrementalAdditive(),
        cl.MunichAdjustment(
            paid_to_incurred=('CumPaidLoss', 'CaseIncurredLoss')),
        cl.CaseOutstanding(
            paid_to_incurred=('CumPaidLoss', 'CaseIncurredLoss')),
    ]
    tail = [cl.TailCurve(), cl.TailConstant(), cl.TailBondy(), cl.TailClark()]
    ibnr = [
        cl.Chainladder(),
        cl.BornhuetterFerguson(),
        cl.Benktander(n_iters=2),
        cl.CapeCod(),
    ]

    # Fit every development/tail/IBNR combination through a Pipeline and
    # reduce the resulting IBNR triangle to a single total
    for dev_step, tail_step, ibnr_step in itertools.product(dev, tail, ibnr):
        print(dev_step, tail_step, ibnr_step)
        pipe = cl.Pipeline(
            steps=[('dev', dev_step), ('tail', tail_step), ('ibnr', ibnr_step)])
        ibnr_ = pipe.fit_predict(X, sample_weight=sample_weight).ibnr_
        ibnr_.sum('origin').sum('columns').sum()
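
The loop above sweeps all 96 combinations (6 development x 4 tail x 4 IBNR estimators). For orientation, a minimal sketch of a single combination, assuming the X and sample_weight built in the test above:

# One development/tail/IBNR combination through cl.Pipeline
ibnr_tri = cl.Pipeline(
    steps=[('dev', cl.Development()),
           ('tail', cl.TailCurve()),
           ('ibnr', cl.Chainladder())]
).fit_predict(X, sample_weight=sample_weight).ibnr_

# Collapse the IBNR triangle to a single grand total
print(ibnr_tri.sum('origin').sum('columns').sum())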
Example 2
import chainladder as cl


def test_basic_transform(raa):
    # `raa` is a test fixture supplying the RAA sample triangle
    # (the variant below loads the same sample explicitly)
    cl.Development().fit_transform(raa)
    cl.ClarkLDF().fit_transform(raa)
    cl.TailClark().fit_transform(raa)
    cl.TailBondy().fit_transform(raa)
    cl.TailConstant().fit_transform(raa)
    cl.TailCurve().fit_transform(raa)
    cl.BootstrapODPSample().fit_transform(raa)
    cl.IncrementalAdditive().fit_transform(raa,
                                           sample_weight=raa.latest_diagonal)
Example 3

import chainladder as cl


def test_basic_transform():
    # Same checks as above, but loading the RAA sample triangle directly
    tri = cl.load_sample("raa")
    cl.Development().fit_transform(tri)
    cl.ClarkLDF().fit_transform(tri)
    cl.TailClark().fit_transform(tri)
    cl.TailBondy().fit_transform(tri)
    cl.TailConstant().fit_transform(tri)
    cl.TailCurve().fit_transform(tri)
    cl.BootstrapODPSample().fit_transform(tri)
    cl.IncrementalAdditive().fit_transform(tri,
                                           sample_weight=tri.latest_diagonal)
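
Each fit_transform call returns the transformed Triangle, so a transformer's output can feed an IBNR estimator directly. A minimal sketch, assuming the RAA sample as above:

import chainladder as cl

tri = cl.load_sample("raa")
dev = cl.Development().fit_transform(tri)  # triangle with development pattern attached
model = cl.Chainladder().fit(dev)          # deterministic chain-ladder reserve model
print(model.ultimate_)                     # projected ultimate losses by origin
print(model.ibnr_)                         # IBNR = ultimate less the latest diagonal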
Example 4
import chainladder as cl

def test_bondy1():
    tri = cl.load_sample('tail_sample')['paid']
    dev = cl.Development(average='simple').fit_transform(tri)
    assert round(cl.TailBondy().fit(dev).cdf_.values[0, 0, 0, -2], 3) == 1.028
Example 5
import chainladder as cl

def test_bondy1():
    # Variant of the test above: pins earliest_age and casts to float before rounding
    tri = cl.load_sample("tail_sample")["paid"]
    dev = cl.Development(average="simple").fit_transform(tri)
    assert round(
        float(cl.TailBondy(earliest_age=12).fit(dev).cdf_.values[0, 0, 0, -2]),
        3) == 1.028
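
Besides cdf_, a fitted TailBondy also exposes the tail factor and Bondy exponent, which Example 6 below uses as scoring functions. A minimal sketch, assuming the dev triangle from the test above:

bondy = cl.TailBondy(earliest_age=12).fit(dev)
print(bondy.tail_.values[0, 0])  # fitted tail factor
print(bondy.b_.values[0, 0])     # fitted Bondy exponent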
Example 6
"""Bondy assumption sensitivity: vary TailBondy's earliest_age with GridSearch.
Setting earliest_age to the latest age in the triangle recovers the
traditional Bondy method."""

import chainladder as cl

# Fit basic development to a triangle
tri = cl.load_sample('tail_sample')['paid']
dev = cl.Development(average='simple').fit_transform(tri)

# Return both the tail factor and the Bondy exponent in the scoring function
scoring = {
    'tail_factor': lambda x: x.tail_.values[0, 0],
    'bondy_exponent': lambda x: x.b_.values[0, 0]
}

# Vary the 'earliest_age' assumption in GridSearch
param_grid = dict(earliest_age=list(range(12, 120, 12)))
grid = cl.GridSearch(cl.TailBondy(), param_grid, scoring)
results = grid.fit(dev).results_

ax = results.plot(x='earliest_age',
                  y='bondy_exponent',
                  title='Bondy Assumption Sensitivity',
                  marker='o')
results.plot(x='earliest_age',
             y='tail_factor',
             grid=True,
             secondary_y=True,
             ax=ax,
             marker='o')
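
Since results_ is plotted directly with the DataFrame .plot API above, the same sensitivity table can be inspected numerically. A short sketch, assuming the results frame built above:

# Tabulate the fitted Bondy exponent and tail factor by earliest_age assumption
print(results.set_index('earliest_age')[['bondy_exponent', 'tail_factor']])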