Ejemplo n.º 1
0
def test_validate_inputs():
    """
    Covers potential mistakes in parameters of the fit method that xlogit
    should be able to identify
    """
    model = MixedLogit()
    # Arguments shared by both invalid calls below.
    common = dict(varnames=varnames,
                  alts=alts,
                  ids=ids,
                  n_draws=10,
                  maxiter=0,
                  verbose=0,
                  halton=True)

    # An unrecognized distribution code must raise ValueError.
    with pytest.raises(ValueError):
        model.fit(X, y, randvars={'a': 'fake'}, **common)

    # A random variable that is not among varnames must raise ValueError.
    with pytest.raises(ValueError):
        model.fit(X, y, randvars={'fake': 'n'}, **common)
Ejemplo n.º 2
0
def test_fit():
    """
    Ensures the log-likelihood works for a single iteration with the default
    initial coefficients. The value of -1.794 was computed by hand for
    comparison purposes.
    """
    # There is no need to initialize a random seed as the halton draws produce
    # reproducible results
    model = MixedLogit()
    model.fit(X,
              y,
              varnames=varnames,
              alts=alts,
              n_draws=10,
              panels=panels,
              ids=ids,
              randvars=randvars,
              maxiter=0,
              verbose=0,
              halton=True)

    # BUG FIX: the original `assert pytest.approx(model.loglikelihood, -1.79...)`
    # always passes, because `pytest.approx(...)` returns an (always-truthy)
    # approx object instead of comparing anything. The approx object must be
    # compared with `==`, matching the pattern used in the other test here.
    assert model.loglikelihood == pytest.approx(-1.79451632)
Ejemplo n.º 3
0
def test_fit():
    """
    Ensures the log-likelihood works for multiple iterations with the default
    initial coefficients. The value of -1.473423 was computed by hand for
    comparison purposes
    """
    # Halton draws are deterministic, so no random seed is required.
    model = MixedLogit()
    # All arguments spelled out as keywords for readability; same values as
    # the original positional call (X, y, varnames, alts, ids, randvars, ...).
    model.fit(X,
              y,
              varnames=varnames,
              alts=alts,
              ids=ids,
              randvars=randvars,
              n_draws=10,
              panels=panels,
              maxiter=0,
              verbose=0,
              halton=True,
              init_coeff=np.repeat(.1, 4))

    expected_loglik = -1.473423
    assert model.loglikelihood == pytest.approx(expected_loglik)
Ejemplo n.º 4
0
    panels = None
    batch_size = 5000

# Benchmark run: estimate a MixedLogit model and, when profiling is enabled,
# record elapsed time, RAM growth and GPU memory usage.
if not use_gpu:
    device.disable_gpu_acceleration()  # force CPU-only execution for this run
if profile:
    ini_ram = curr_ram()  # RAM baseline taken before estimation starts
    profiler = Profiler().start(measure_gpu_mem=use_gpu)

np.random.seed(0)  # fixed seed so the simulated draws are reproducible
model = MixedLogit()
# NOTE(review): batch_size presumably splits the n_draws into chunks to cap
# memory usage — confirm against the xlogit fit() documentation.
model.fit(X,
          y,
          varnames,
          alts=alts,
          ids=ids,
          n_draws=n_draws,
          panels=panels,
          verbose=0,
          randvars=randvars,
          batch_size=batch_size)

if profile:
    ellapsed, max_ram, max_gpu = profiler.stop()
    # Columns: draws, time(s), log-likelihood, RAM delta, GPU mem, convergence
    log("{:6} {:7.2f} {:11.2f} {:7.3f} {:7.3f} {}".format(
        n_draws, ellapsed, model.loglikelihood, max_ram - ini_ram, max_gpu,
        model.convergence))
    profiler.export('xlogit' + ('_gpu' if use_gpu else ''), dataset, n_draws,
                    ellapsed, model.loglikelihood, max_ram - ini_ram, max_gpu,
                    model.convergence)

if not profile:
Ejemplo n.º 5
0
# ===== STEP 3. CREATE MODEL SPECIFICATION =====
# Alternative-specific constants: 1.0 on rows belonging to the alternative.
df['ASC_TRAIN'] = (df['alt'] == 'TRAIN').astype(float)
df['ASC_CAR'] = (df['alt'] == 'CAR').astype(float)
# Rescale travel time and cost for better numerical conditioning.
df['TT'] = df['TT'] / 100
df['CO'] = df['CO'] / 100
# Annual-pass (GA) holders pay nothing on TRAIN and SM alternatives.
has_pass = (df['GA'] == 1) & (df['alt'].isin(['TRAIN', 'SM']))
df.loc[has_pass, 'CO'] = 0

# ===== STEP 4. ESTIMATE MODEL PARAMETERS =====
from xlogit import MixedLogit

varnames = ['ASC_CAR', 'ASC_TRAIN', 'CO', 'TT']
model = MixedLogit()
model.fit(X=df[varnames],
          y=df['CHOICE'],
          varnames=varnames,
          alts=df['alt'],
          ids=df['custom_id'],
          panels=df["ID"],
          avail=df['AV'],
          randvars={'TT': 'n'},  # travel-time coefficient ~ Normal
          n_draws=1500)
model.summary()
"""
OUTPUT:
Estimation time= 1.3 seconds
---------------------------------------------------------------------------
Coefficient              Estimate      Std.Err.         z-val         P>|z|
---------------------------------------------------------------------------
ASC_CAR                 0.2831101     0.0564250     5.0174585      3.29e-06 ***
ASC_TRAIN              -0.5722759     0.0791059    -7.2343003      7.96e-12 ***
CO                     -1.6601672     0.0778264   -21.3316760      3.38e-78 ***
TT                     -3.2289890     0.1749384   -18.4578595      6.36e-62 ***
Ejemplo n.º 6
0
    randvars = {'meals': 'n', 'petfr': 'n', 'emipp': 'n'}
    alts = [1, 2, 3]
    panels = None

# Benchmark run: estimate a MixedLogit model and, when profiling is enabled,
# record elapsed time, RAM growth and GPU memory usage.
if not use_gpu:
    device.disable_gpu_acceleration()  # force CPU-only execution for this run
if profile:
    ini_ram = curr_ram()  # RAM baseline taken before estimation starts
    profiler = Profiler().start(measure_gpu_mem=use_gpu)

np.random.seed(0)  # fixed seed so the simulated draws are reproducible
model = MixedLogit()
model.fit(X,
          y,
          varnames,
          alts=alts,
          n_draws=n_draws,
          panels=panels,
          verbose=0,
          randvars=randvars)

if profile:
    ellapsed, max_ram, max_gpu = profiler.stop()
    # Columns: draws, time(s), log-likelihood, RAM delta, GPU mem, convergence
    log("{:6} {:7.2f} {:11.2f} {:7.3f} {:7.3f} {}".format(
        n_draws, ellapsed, model.loglikelihood, max_ram - ini_ram, max_gpu,
        model.convergence))
    profiler.export('xlogit' + ('_gpu' if use_gpu else ''), dataset, n_draws,
                    ellapsed, model.loglikelihood, max_ram - ini_ram, max_gpu,
                    model.convergence)

if not profile:
    log("Variable    Estimate   Std.Err.")