Example #1
0
 def test_model1_otu(self):
     """otus built by model1_otu should match direct lognorm draws."""
     seed(0)  # fix RNG state so both draws see the identical stream
     expected = lognorm.rvs(2, 0, size=10)
     seed(0)  # rewind so model1_otu consumes the very same stream
     observed = model1_otu([lognorm, 2, 0], 10)
     assert_array_almost_equal(expected, observed)
Example #2
0
 def test_model1_otu(self):
     """model1_otu must reproduce raw lognorm.rvs output under the same seed."""
     seed(0)
     want = lognorm.rvs(2, 0, size=10)
     # reset the generator so the model draws an identical sequence
     seed(0)
     got = model1_otu([lognorm, 2, 0], 10)
     assert_array_almost_equal(want, got)
Example #3
0
def _generate_data(features, samples):
    '''Make featuresXsamples data matrix.'''
    return lognorm.rvs(3, 0, 1, size=features*samples).round(0).reshape(
        features, samples)
Example #4
0
#                 ecological table               #
##################################################

# seed at 0 for reproducibility
seed(0)

######################
# Amensalism 1d
#####################

# choose 60 otus and relate them via o1^o2-> decrease to o2.
# note, odd otus will be affected by the last even otu. o0 will decrease 01,
# o2 will decrease 03 etc.
D = 30
strength = .5
os = lognorm.rvs(3, 0, size=(60, 50))
amensally_related_1d_st_5 = []
# BUG FIX: np.zeros takes the shape as a single tuple; the original
# np.zeros(2 * D, 2 * D) passed the second 2*D as the dtype argument and
# raised TypeError at import time.
truth_amensally_related_1d_st_5 = np.zeros((2 * D, 2 * D))
for i in range(D):
    # even otu ind_i affects the following odd otu ind_j
    ind_i, ind_j = 2 * i, 2 * i + 1
    truth_amensally_related_1d_st_5[ind_i, ind_j] = 1
    am_otu = amensal_1d(os[ind_i], os[ind_j], strength)
    amensally_related_1d_st_5.extend([os[ind_i], am_otu])

strength = .3
os = lognorm.rvs(3, 0, size=(60, 50))
amensally_related_1d_st_3 = []
# same shape-tuple fix as above
truth_amensally_related_1d_st_3 = np.zeros((2 * D, 2 * D))
for i in range(D):
    # pair each even otu with the following odd otu
    ind_i, ind_j = 2 * i, 2 * i + 1
    truth_amensally_related_1d_st_3[ind_i, ind_j] = 1
    # NOTE(review): this loop body looks truncated relative to the
    # strength-.5 loop above (no amensal_1d call / extend) — confirm
    # against the full source.
Example #5
0
        pass
    def rvs(self, loc, scale, size):
        """Stub draw hook; accepts (loc, scale, size) and does nothing."""
        pass








# test 2/25/2013
# create 200 otu X 50 sample table. use 4 otu's per rule

# generate background otus which will be used to induce
w = lognorm.rvs(2, 1, size=10000).astype(int).reshape(200, 50)
# generate model 1 rules
model1_rules = []
for i in range(10):
    rule_vals = uniform.rvs(-100, 400, size=8)
    # clamp negatives to 0, pair into 4 rows, then sort each row
    # so that left is lb <= ub
    rule_vals = where(rule_vals > 0, rule_vals, 0).reshape(4, 2)
    rule_vals.sort()
    # BUG FIX: on Python 3, map() returns a one-shot lazy iterator, so the
    # original append(map(list, rule_vals)) stored exhausted-after-first-use
    # map objects; materialize real lists of [lb, ub] pairs instead.
    model1_rules.append([list(pair) for pair in rule_vals])
# define weights and new otu distribution function
weights = array([1.0, 1.0, 1.0, 1.0, .2])
df_and_params = [lognorm, 2, 1]
# create new otus
table_index = 0
new_otus = []
table_map = []
def init_data(params, num_groups=30, num_samps=50):
    """
    Fills in the contingency tables and correlation matrices
    for each interaction group

    Parameters
    ----------
    params : dict of dicts:
       First set of keys - type of group interaction and strength
       Second set of keys - attributes for group
           strength : float
              strength of interaction
           func : function
              generator function
           dim : int
              number of individuals per interaction
           data : np.array
              count table
           truth : np.array
              adjacency matrix
           name : str
              name of the interaction

    num_groups : int
        number of groups of interactions
    num_samps : int
        number of samples

    Returns
    -------
    dict of dicts:
       Same structure as ``params``, with ``data`` and ``truth`` filled in
       for every interaction group.
    """
    D, S = num_groups, num_samps
    positive = {'commensal', 'mutual', 'obligate_syntroph',
                'partial_obligate_syntroph'}
    negative = {'amensal', 'parasite', 'competition'}
    for key, grp in params.items():
        width = grp['dim'] + 1  # otus per group: dim inputs + 1 generated
        base = lognorm.rvs(3, 0, size=(width * D, S))
        strength = grp['strength']
        grp['data'] = np.zeros((S, width * D))
        grp['truth'] = np.zeros((width * D, width * D))
        generator = grp['func']
        for g in range(D):
            idx = range(width * g, width * g + width)
            members = base[idx]
            # partial_obligate_syntroph generators take no strength argument;
            # '1d' generators take otus unpacked as separate positional args
            if grp['name'] == 'partial_obligate_syntroph':
                if '1d' in key:
                    produced = generator(*list(members))
                else:
                    produced = generator(members)
            else:
                if '1d' in key:
                    produced = generator(*(list(members) + [strength]))
                else:
                    produced = generator(members, strength)

            # rows of the stacked output become columns of the count table
            grp['data'][:, idx] = np.vstack(produced).T

            # mark input->output edges in the adjacency ("truth") matrix
            if grp['name'] in positive:
                grp['truth'][idx[:-1], idx[-1]] = 1
            if grp['name'] in negative:
                grp['truth'][idx[:-1], idx[-1]] = -1

    return params
#                 ecological table               #
##################################################

# seed at 0 for reproducibility
seed(0)

######################
# Amensalism 1d
#####################

# choose 60 otus and relate them via o1^o2-> decrease to o2.
# note, odd otus will be affected by the last even otu. o0 will decrease 01,
# o2 will decrease 03 etc.
D = 30
strength = .5
os = lognorm.rvs(3, 0, size=(60, 50))
amensally_related_1d_st_5 = []
# BUG FIX: np.zeros takes the shape as a single tuple; the original
# np.zeros(2*D, 2*D) passed the second 2*D as the dtype argument and
# raised TypeError at import time.
truth_amensally_related_1d_st_5 = np.zeros((2*D, 2*D))
for i in range(D):
    # even otu ind_i affects the following odd otu ind_j
    ind_i, ind_j = 2*i, 2*i+1
    truth_amensally_related_1d_st_5[ind_i, ind_j] = 1
    am_otu = amensal_1d(os[ind_i], os[ind_j], strength)
    amensally_related_1d_st_5.extend([os[ind_i], am_otu])

strength = .3
os = lognorm.rvs(3, 0, size=(60, 50))
amensally_related_1d_st_3 = []
# same shape-tuple fix as above
truth_amensally_related_1d_st_3 = np.zeros((2*D, 2*D))
for i in range(D):
    # pair each even otu with the following odd otu
    ind_i, ind_j = 2*i, 2*i+1
    truth_amensally_related_1d_st_3[ind_i, ind_j] = 1
    # NOTE(review): this loop body looks truncated relative to the
    # strength-.5 loop above (no amensal_1d call / extend) — confirm
    # against the full source.