Example #1
display_cis(s_model)

print 'Fitting conditional model'
c_model = FixedMargins(StationaryLogistic())
net.new_row_covariate('r', np.int)[:] = r
net.new_col_covariate('c', np.int)[:] = c
c_model.fit = c_model.base_model.fit_conditional
for cov_name in cov_names:
    c_model.base_model.beta[cov_name] = None
c_model.fit(net)
print 'NLL: %.2f' % c_model.nll(net)
for cov_name in cov_names:
    print '%s: %.2f' % (cov_name, c_model.base_model.beta[cov_name])
print
for rep in range(params['n_samples']):
    c_samples[rep,:,:] = c_model.generate(net, coverage = 0.1)
c_model.confidence_boot(net, n_bootstrap = params['n_bootstrap'])
c_model.confidence_wald(net)
for cov_name in cov_names:
    c_model.confidence_cons(net, cov_name, L = 121, test = 'score')
    c_model.confidence_cons(net, cov_name, L = 121, test = 'lr')
display_cis(c_model)

# Offset extreme substructure only for the nonstationary model
net.offset_extremes()

print 'Fitting nonstationary model'
ns_model = NonstationaryLogistic()
for cov_name in cov_names:
    ns_model.beta[cov_name] = None
ns_model.fit(net)
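
# A hedged sketch (illustrative values, not from the source) of the probability
# model that the stationary and nonstationary fits above estimate: edge (i, j) is
# Bernoulli with logit P[i,j] = kappa + alpha[i] + beta[j] + theta * v[i,j]
# (the stationary model drops the alpha and beta terms). The same construction
# appears in generate_data further down.
import numpy as np

M, N = 4, 3
kappa = -1.0                             # density parameter
alpha = np.random.normal(size = (M, 1))  # row effects (nonstationary only)
beta = np.random.normal(size = (1, N))   # column effects (nonstationary only)
theta = 2.0                              # covariate coefficient
v = np.random.normal(size = (M, N))      # edge covariate

logit_P = kappa + alpha + beta + theta * v
P = 1.0 / (1.0 + np.exp(-logit_P))       # Bernoulli edge probabilities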
Example #2
    c_model.base_model.beta[cov_name] = None
c_model.base_model.fit_conditional(net, verbose = True)
print 'NLL: %.2f' % c_model.nll(net)
for cov_name in cov_names:
    print '%s: %.2f' % (cov_name, c_model.base_model.beta[cov_name])
print

# Sample typical networks from fit models
reps = 100
s_samples = np.empty((reps, net.N, net.N))
ns_samples = np.empty((reps, net.N, net.N))
c_samples = np.empty((reps, net.N, net.N))
for rep in range(reps):
    s_samples[rep,:,:] = s_model.generate(net)
    ns_samples[rep,:,:] = ns_model.generate(net)
    c_samples[rep,:,:] = c_model.generate(net, coverage = 0.2)

# Calculate sample means and standard deviations
s_samples_mean = np.mean(s_samples, axis = 0)
s_samples_sd = np.std(s_samples, axis = 0)
ns_samples_mean = np.mean(ns_samples, axis = 0)
ns_samples_sd = np.std(ns_samples, axis = 0)
c_samples_mean = np.mean(c_samples, axis = 0)
c_samples_sd = np.std(c_samples, axis = 0)

# Finish plotting
plt.subplot(334)
plt.title('Stationary')
heatmap(s_samples_mean)
plt.subplot(337)
residuals(s_samples_mean, s_samples_sd)
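
# The heatmap and residuals helpers used above are not shown in this listing;
# a minimal matplotlib sketch of what such helpers could look like (assumed,
# not the original implementation):
import matplotlib.pyplot as plt

def heatmap(m):
    # Plot a matrix of sampled edge frequencies on a common [0, 1] color scale
    plt.imshow(m, interpolation = 'nearest', vmin = 0.0, vmax = 1.0)
    plt.xticks([])
    plt.yticks([])

def residuals(mean, sd):
    # Summarize the spread of the sampled networks around their sample mean
    plt.imshow(sd, interpolation = 'nearest')
    plt.xticks([])
    plt.yticks([])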

from time import time

# Decorator: return the wrapped function's result together with its wall-clock
# running time in seconds
def timing(func):
    def inner(*args, **kwargs):
        start_time = time()
        val = func(*args, **kwargs)
        elapsed = time() - start_time
        return val, elapsed

    return inner
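
# Example use of the timing decorator above: the wrapped call returns a
# (result, elapsed-seconds) pair. fit_once is an illustrative name, not
# taken from the source.
@timing
def fit_once(model, arr):
    model.fit(arr)
    return model.nll(arr)

# nll, elapsed = fit_once(s_model, net)
# print 'NLL %.2f computed in %.2f s' % (nll, elapsed)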
def generate_data(case, theta, seed):
    # Advance random seed for parameter and covariate construction
    seed.next()

    # The case argument is overridden by the module-level params dict
    case = params['case']
    alpha = beta = kappa = offset = 0
    conditional_sample = False
    if 'fixed_example' in case:
        # Load parameters and covariates
        with open(case['fixed_example'], 'r') as example_file:
            example = json.load(example_file)

            v = np.array(example['nu'])
            M, N = v.shape

            if 'alpha' in example:
                alpha = np.array(example['alpha']).reshape((M,1))
            if 'beta' in example:
                beta = np.array(example['beta']).reshape((1,N))
            if 'kappa' in example:
                kappa = example['kappa']
            if 'offset' in example:
                offset = example['offset']

            if ('r' in example) and ('c' in example):
                conditional_sample = True
                r = example['r']
                c = example['c']

    else:
        # Generate parameters and covariates
        M, N = case['M'], case['N']
        if 'alpha_min' in case:
            alpha = np.random.uniform(size = (M,1)) + case['alpha_min']
        if 'beta_min' in case:
            beta = np.random.uniform(size = (1,N)) + case['beta_min']
        if 'kappa' in case:
            kappa = case['kappa']
        if case['v_discrete']:
            v = np.sign(np.random.random(size = (M,N)) - 0.5) 
        elif case['v_uniform']:
            v = np.random.uniform(size = (M,N))
        elif case['v_normal']:
            v = np.random.normal(size = (M,N))
        if 'v_scale' in case:
            v *= case['v_scale']
        if 'v_loc' in case:
            v += case['v_loc']
        
        if ('r' in case) and ('c' in case):
            conditional_sample = True
            r = case['r']
            c = case['c']

    # Generate Bernoulli probabilities from logistic regression model
    logit_P = np.zeros((M,N)) + kappa
    logit_P += alpha
    logit_P += beta
    logit_P += theta * v
    logit_P += offset

    if conditional_sample:
        arr = Array(M, N)
        arr.new_edge_covariate('x_0')[:] = logit_P
        arr.new_row_covariate('r', dtype = np.int)[:] = r
        arr.new_col_covariate('c', dtype = np.int)[:] = c
        
        base_model = StationaryLogistic()
        base_model.beta['x_0'] = 1.0
        data_model = FixedMargins(base_model)

    while True:
        # Advance random seed for data generation
        seed.next()

        # Generate data for this trial
        if conditional_sample:
            X = data_model.generate(arr, coverage = 100.0)
        else:
            P = 1.0 / (1.0 + np.exp(-logit_P))
            X = np.random.random((M,N)) < P

        yield X, v
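
# A minimal driver sketch for the generator above (assumed, not from the
# source). The seed argument only needs a next() method that advances the
# NumPy random state; Seed below is an illustrative stand-in for whatever
# object the original script passes in.
import numpy as np

class Seed(object):
    def __init__(self, start = 0):
        self.state = start

    def next(self):
        self.state += 1
        np.random.seed(self.state)

# data = generate_data(params['case'], theta = 2.0, seed = Seed())
# X, v = data.next()    # one (network, covariate) draw per trial (Python 2)
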
def generate_data(case, theta, seed):
    # Advance random seed for parameter and covariate construction
    seed.next()

    # The case argument is overridden by the module-level params dict
    case = params["case"]
    alpha = beta = kappa = offset = 0
    conditional_sample = False
    if "fixed_example" in case:
        # Load parameters and covariates
        with open(case["fixed_example"], "r") as example_file:
            example = json.load(example_file)

            v = np.array(example["nu"])
            M, N = v.shape

            if "alpha" in example:
                alpha = np.array(example["alpha"]).reshape((M, 1))
            if "beta" in example:
                beta = np.array(example["beta"]).reshape((1, N))
            if "kappa" in example:
                kappa = example["kappa"]
            if "offset" in example:
                offset = example["offset"]

            if ("r" in example) and ("c" in example):
                conditional_sample = True
                r = example["r"]
                c = example["c"]

    else:
        # Generate parameters and covariates
        M, N = case["M"], case["N"]
        if "alpha_min" in case:
            alpha = np.random.uniform(size=(M, 1)) + case["alpha_min"]
        if "beta_min" in case:
            beta = np.random.uniform(size=(1, N)) + case["beta_min"]
        if "kappa" in case:
            kappa = case["kappa"]
        if case["v_discrete"]:
            v = np.random.random(size=(M, N)) < 0.5
        else:
            v = np.random.uniform(size=(M, N))
        if "v_min" in case:
            v += case["v_min"]

        if ("r" in case) and ("c" in case):
            conditional_sample = True
            r = case["r"]
            c = case["c"]

    # Generate Bernoulli probabilities from logistic regression model
    logit_P = np.zeros((M, N)) + kappa
    logit_P += alpha
    logit_P += beta
    logit_P += theta * v
    logit_P += offset

    if conditional_sample:
        arr = Array(M, N)
        arr.new_edge_covariate("x_0")[:] = logit_P
        arr.new_row_covariate("r", dtype=np.int)[:] = r
        arr.new_col_covariate("c", dtype=np.int)[:] = c

        base_model = StationaryLogistic()
        base_model.beta["x_0"] = 1.0
        data_model = FixedMargins(base_model)

    while True:
        # Advance random seed for data generation
        seed.next()

        # Generate data for this trial
        if conditional_sample:
            X = data_model.generate(arr, coverage=2.0)
        else:
            P = 1.0 / (1.0 + np.exp(-logit_P))
            X = np.random.random((M, N)) < P

        yield X, v