def anova1rm(Y, A, SUBJ, equal_var=True, roi=None):
    '''
    One-way repeated-measures ANOVA.

    :Parameters:
        - *Y* --- (J x Q) numpy array
        - *A* --- (J x 1) vector of integer group labels
        - *SUBJ* --- (J x 1) vector of subject labels
        - *equal_var* --- If *True*, equal group variance will be assumed

    :Returns:
        - F : An **spm1d._spm.SPM_F** instance

    :Example:

    >>> Y    = np.random.randn(9, 101)
    >>> A    = np.array([1,1,1, 2,2,2, 3,3,3])
    >>> SUBJ = np.array([1,2,3, 1,2,3, 1,2,3])
    >>> F    = spm1d.stats.anova1rm(Y, A, SUBJ)
    >>> Fi   = F.inference(alpha=0.05)
    >>> Fi.plot()
    '''
    if not equal_var:
        raise NotImplementedError('Non-sphericity corrections are not yet implemented. Set "equal_var" to "True" to force an assumption of equal variance.')
    design = designs.ANOVA1rm(A, SUBJ)
    model  = models.LinearModel(Y, design.X, roi=roi)
    if (model.dim == 1) and design.check_for_single_responses():
        model.fit(approx_residuals=design.contrasts.C[:3])
    else:
        model.fit()
    F = aov(model, design.contrasts, design.f_terms)[0]
    return F
def anova3tworm(Y, A, B, C, SUBJ, equal_var=True, roi=None):
    '''
    Three-way ANOVA with repeated measures on two factors.

    :Parameters:
        - *Y* --- (J x Q) numpy array
        - *A* --- (J x 1) vector of integer labels for Factor A
        - *B* --- (J x 1) vector of integer labels for Factor B (a repeated-measures factor)
        - *C* --- (J x 1) vector of integer labels for Factor C (a repeated-measures factor)
        - *SUBJ* --- (J x 1) vector of integer subject labels
        - *equal_var* --- If *True*, equal group variance will be assumed

    :Returns:
        - List of seven **spm1d._spm.SPM_F** instances in the following order:
            1. Main effect A
            2. Main effect B
            3. Main effect C
            4. Interaction AB
            5. Interaction AC
            6. Interaction BC
            7. Interaction ABC

    :Note:
        - Non-sphericity correction not implemented. Equal variance must be assumed by setting "equal_var=True".
    '''
    if equal_var is not True:
        raise NotImplementedError('Non-sphericity correction not implemented. To continue you must assume equal variance and set "equal_var=True".')
    design = designs.ANOVA3tworm(A, B, C, SUBJ)
    model  = models.LinearModel(Y, design.X, roi=roi)
    if (model.dim == 1) and design.check_for_single_responses():
        model.fit(approx_residuals=design.contrasts.C[:8])
    else:
        model.fit()
    F = aov(model, design.contrasts, design.f_terms)
    return F
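# Hedged usage sketch (not part of the original source): assumes Y is a
# (J x Q) array and A, B, C, SUBJ are (J,) integer label vectors assembled
# as described in the docstring above; it only shows the call pattern and
# the unpacking of the seven F statistics in their documented order.
#
# >>> FF = spm1d.stats.anova3tworm(Y, A, B, C, SUBJ)
# >>> FA, FB, FC, FAB, FAC, FBC, FABC = FF
# >>> FABCi = FABC.inference(alpha=0.05)
# >>> FABCi.plot()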
def anova3nested(Y, A, B, C, equal_var=True, roi=None):
    '''
    Three-way fully nested ANOVA.

    :Parameters:
        - *Y* --- (J x Q) numpy array
        - *A* --- (J x 1) vector of integer labels for Factor A
        - *B* --- (J x 1) vector of integer labels for Factor B (nested in A)
        - *C* --- (J x 1) vector of integer labels for Factor C (nested in B)
        - *equal_var* --- If *True*, equal group variance will be assumed

    :Returns:
        - List of three **spm1d._spm.SPM_F** instances in the following order:
            1. Main effect A
            2. Main effect B
            3. Main effect C

    :Note:
        - There are no interaction terms in fully-nested designs.
    '''
    if equal_var is not True:
        raise NotImplementedError('Non-sphericity correction not implemented. To continue you must assume equal variance and set "equal_var=True".')
    design = designs.ANOVA3nested(A, B, C)
    model  = models.LinearModel(Y, design.X, roi=roi)
    model.fit()
    F = aov(model, design.contrasts, design.f_terms)
    return F
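# Hedged usage sketch (not part of the original source): the label vectors
# below are illustrative assumptions for a small fully nested design
# (2 levels of A, 2 levels of B within each A, 2 levels of C within each B,
# 2 responses per C cell), following the docstring above.
#
# >>> Y  = np.random.randn(16, 101)
# >>> A  = np.array([1]*8 + [2]*8)
# >>> B  = np.array(([1]*4 + [2]*4) * 2)
# >>> C  = np.array([1,1,2,2] * 4)
# >>> FF = spm1d.stats.anova3nested(Y, A, B, C)
# >>> FAi = FF[0].inference(alpha=0.05)
# >>> FAi.plot()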
def anova2(Y, A, B, equal_var=True, roi=None):
    '''
    Two-way ANOVA.

    :Parameters:
        - *Y* --- (J x Q) numpy array
        - *A* --- (J x 1) vector of integer labels for Factor A
        - *B* --- (J x 1) vector of integer labels for Factor B
        - *equal_var* --- If *True*, equal group variance will be assumed

    :Returns:
        - List of three **spm1d._spm.SPM_F** instances in the following order:
            1. Main effect A
            2. Main effect B
            3. Interaction AB
    '''
    if not equal_var:
        raise NotImplementedError('Non-sphericity corrections are not yet implemented. Set "equal_var" to "True" to force an assumption of equal variance.')
    design = designs.ANOVA2(A, B)
    model  = models.LinearModel(Y, design.X, roi=roi)
    model.fit()
    F = aov(model, design.contrasts, design.f_terms)
    # if not equal_var:
    #     Y,X,r   = model.Y, model.X, model.eij
    #     QA,QB,C = design.A.get_Q(), design.B.get_Q(), design.contrasts.C.T
    #     Q       = QA + QB
    #     u1,u2   = _reml.estimate_df_anova2(Y, X, r, Q, C)
    #     for ff,u in zip(f,u1):
    #         ff.df = u,u2
    return F
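# Hedged usage sketch (not part of the original source): the data shapes and
# label vectors below are illustrative assumptions for a balanced 2 x 2
# design, following the docstring above.
#
# >>> Y   = np.random.randn(12, 101)
# >>> A   = np.array([1,1,1, 1,1,1, 2,2,2, 2,2,2])
# >>> B   = np.array([1,1,1, 2,2,2, 1,1,1, 2,2,2])
# >>> FF  = spm1d.stats.anova2(Y, A, B)
# >>> FAi = FF[0].inference(alpha=0.05)
# >>> FAi.plot()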
def test_linear_model(num_genes, num_mirs, num_max_sites, num_features, maxiter):
    # generate random data
    np.random.seed(0)

    # get a random number of sites per mRNA/miRNA interaction
    features = np.zeros([num_genes, num_mirs, num_max_sites, num_features])
    for i in range(num_genes):
        for j in range(num_mirs):
            nsites = np.random.choice(num_max_sites)
            features[i, j, :nsites, :] = np.random.rand(nsites, num_features)

    mask = (np.abs(np.sum(features, axis=3)) != 0).astype(int)

    true_weights = (np.arange(num_features) + 1.0).reshape([1, 1, 1, -1])
    true_weights = (true_weights - np.mean(true_weights)) / np.std(true_weights)
    labels = np.sum(np.multiply(np.sum(np.multiply(features, true_weights), axis=3), mask), axis=2)

    print(features.shape)
    print(mask.shape)
    print(labels.shape)

    tf.reset_default_graph()

    features_tensor = tf.placeholder(tf.float32, shape=[None, None, None, num_features], name='features')
    mask_tensor = tf.placeholder(tf.float32, shape=[None, None, None], name='nsites')
    labels_tensor = tf.placeholder(tf.float32, shape=[None, None], name='labels')

    data = {
        'features': features_tensor,
        'mask': mask_tensor,
        'labels': labels_tensor
    }

    feed_dict = {
        features_tensor: features,
        mask_tensor: mask,
        labels_tensor: labels
    }

    model = models.LinearModel(num_features)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        model.fit(sess, data, feed_dict, maxiter)

        print('True weight diff: {}'.format(np.sum(np.abs(model.vars_evals['coefs'] - true_weights))))
        print('Label r2: {}'.format(model.r2))
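# Hedged usage sketch (not part of the original source): the argument values
# below are arbitrary small settings chosen only to exercise the test quickly.
#
# >>> test_linear_model(num_genes=10, num_mirs=5, num_max_sites=4,
# ...                   num_features=3, maxiter=200)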
def anova1(Y, A=None, equal_var=False, roi=None):
    '''
    One-way ANOVA.

    :Parameters (Option 1):
        - *Y* --- A list or tuple of (J x Q) numpy arrays
        - *equal_var* --- If *True*, equal group variance will be assumed

    :Parameters (Option 2):
        - *Y* --- (J x Q) numpy array
        - *A* --- (J x 1) vector of integer group labels
        - *equal_var* --- If *True*, equal group variance will be assumed

    :Returns:
        - F : An **spm1d._spm.SPM_F** instance

    :Example:

    >>> F  = spm1d.stats.anova1((Y0,Y1,Y2))
    >>> Fi = F.inference(alpha=0.05)
    >>> Fi.plot()
    '''
    if isinstance(Y, (list, tuple)):
        _datachecks.check('anova1list', Y)
        A = np.hstack([[i]*y.shape[0] for i, y in enumerate(Y)])
        Y = np.hstack(Y) if Y[0].ndim == 1 else np.vstack(Y)
    else:
        _datachecks.check('anova1', Y, A)
    design = designs.ANOVA1(A)
    model  = models.LinearModel(Y, design.X, roi=roi)
    model.fit()
    F = aov(model, design.contrasts, design.f_terms)[0]
    if not equal_var:
        warnings.warn('\nWARNING: Non-sphericity corrections for one-way ANOVA are currently approximate and have not been verified.\n', UserWarning, stacklevel=2)
        Y, X, r = model.Y, model.X, model.eij
        Q, C    = design.A.get_Q(), design.contrasts.C.T
        F.df    = _reml.estimate_df_anova1(Y, X, r, Q, C)
    return F
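# Hedged usage sketch (not part of the original source): the "Option 2" call
# signature from the docstring above, with illustrative random data in place
# of real measurements.
#
# >>> Y  = np.random.randn(9, 101)
# >>> A  = np.array([1,1,1, 2,2,2, 3,3,3])
# >>> F  = spm1d.stats.anova1(Y, A)
# >>> Fi = F.inference(alpha=0.05)
# >>> Fi.plot()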
def __init__(self, n_stocks, feature_generator, gamma=0.95, alpha=0.1,
             epsilon=1.0, epsilon_min=0.001, epsilon_decay=0.995, momentum=0.9):
    self.gamma = gamma      # discount rate
    self.epsilon = epsilon  # exploration rate
    self.epsilon_min = epsilon_min
    self.epsilon_decay = epsilon_decay
    self.feature_generator = feature_generator
    self.action_mapping, self.action_reverse_mapping = get_all_actions(n_stocks)
    self.n_actions = len(self.action_mapping)
    self.model = models.LinearModel(self.feature_generator.size, self.n_actions,
                                    alpha=alpha, momentum=momentum)
def anova2rm(Y, A, B, SUBJ, equal_var=True, roi=None):
    '''
    Two-way repeated-measures ANOVA.

    :Parameters:
        - *Y* --- (J x Q) numpy array
        - *A* --- (J x 1) vector of integer labels for Factor A
        - *B* --- (J x 1) vector of integer labels for Factor B
        - *SUBJ* --- (J x 1) vector of integer subject labels
        - *equal_var* --- If *True*, equal group variance will be assumed

    :Returns:
        - List of three **spm1d._spm.SPM_F** instances in the following order:
            1. Main effect A
            2. Main effect B
            3. Interaction AB

    :Note:
        - Non-sphericity correction not implemented. Equal variance must be assumed by setting "equal_var=True".
    '''
    if equal_var is not True:
        raise NotImplementedError('Non-sphericity correction not implemented. To continue you must assume equal variance and set "equal_var=True".')
    design = designs.ANOVA2rm(A, B, SUBJ)
    model  = models.LinearModel(Y, design.X, roi=roi)
    if (model.dim == 1) and design.check_for_single_responses():
        model.fit(approx_residuals=design.contrasts.C[:5])
    else:
        model.fit()
    F = aov(model, design.contrasts, design.f_terms)
    # if not equal_var:
    #     Y,X,r   = solver.Y, solver.X, solver.eij
    #     QA,QB,C = design.A.get_Q(), design.B.get_Q(), [c.C.T for c in design.contrasts]
    #     Q       = QA + QB
    #     u1,u2   = _reml.estimate_df_anova2(Y, X, r, Q, C)
    #     for ff,u in zip(f,u1):
    #         ff.df = u,u2
    return F
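# Hedged usage sketch (not part of the original source): the label vectors
# below are illustrative assumptions for a fully within-subject 2 x 2 design
# with three subjects, each measured in all four (A, B) conditions.
#
# >>> Y    = np.random.randn(12, 101)
# >>> A    = np.array([1,1,1, 1,1,1, 2,2,2, 2,2,2])
# >>> B    = np.array([1,1,1, 2,2,2, 1,1,1, 2,2,2])
# >>> SUBJ = np.array([1,2,3, 1,2,3, 1,2,3, 1,2,3])
# >>> FF   = spm1d.stats.anova2rm(Y, A, B, SUBJ)
# >>> FABi = FF[2].inference(alpha=0.05)
# >>> FABi.plot()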
print("You failed to provide a configuration file. Usage: {0} config.json". format(sys.argv[0])) if '.json' not in sys.argv[1]: raise ValueError("The configuration file must be in JSON format.") # parse config config = json.load(open(config_fp, 'r')) # parse config config = ConfigParser().parse_config(config=config, mode='evaluate') # load model if config['model'] == 'linear': model = models.LinearModel(input_shape=(config['history_length'], ), nb_output_units=1, nb_hidden_units=config['nb_hidden_units']) elif config['model'] == 'mlp': model = models.MLPModel(input_shape=(config['history_length'], ), nb_output_units=1, nb_hidden_units=config['nb_hidden_units'], nb_layers=config['nb_layers']) elif config['model'] == 'gru': model = models.GRUModel(input_shape=(config['history_length'], 1), nb_output_units=1, nb_hidden_units=config['nb_hidden_units'], nb_layers=config['nb_layers'], dropout=config['dropout'], recurrent_dropout=config['recurrent_dropout']) elif config['model'] == 'lstm': model = models.LSTMModel(input_shape=(config['history_length'], 1),
results += [{"Train score": trainscore, "Test score": testscore}] results = pd.DataFrame(results) results.plot() plt.show() elif part == '2': data = du.load_dataset("digits") X = data['X'] y = data['y'] Xtest = data['Xtest'] ytest = data['ytest'] model = models.LinearModel(n_features=X.shape[1], n_outputs=10) results = [] for i in range(50): model.fit(X, y, epochs=10, batch_size=100, verbose=0, optimizer_name="adam", learning_rate=1e-3, weights_name="linaer") # EVALUATE TRAIN AND TEST CLASSIFICATION yhat = np.argmax(model.predict(X), axis=1) trainscore = (yhat == y).mean()