def _train(agent_skill, shared_data, validate=True, model_dirname=None):
    """Fit a model for one agent skill, optionally k-fold cross-validating first.

    Args:
        agent_skill: skill record providing `.data`, `.skill_model`,
            `.sub_arg_accuracy` and `.skill_name` (project type).
        shared_data: extra training examples shared across skills, or None.
        validate: when True, run k-fold CV and only commit the model if
            `models.total_validation` accepts the folds.
        model_dirname: if given, the fitted model is also pickled to
            `<model_dirname>/<skill_name>.pkl`.

    Returns:
        bool: whether the model passed validation (always True when
        `validate` is False).
    """
    model = models.catalog(DictTree(
        name=agent_skill.skill_model.name,
        arg_in_len=agent_skill.skill_model.arg_in_len,
        max_cnt=agent_skill.skill_model.max_cnt,
        num_sub=agent_skill.skill_model.num_sub,
        sub_arg_accuracy=agent_skill.sub_arg_accuracy,
    ))
    # Normalize the shared pool once: the original crashed in the CV branch
    # (`+ shared_data`) when shared_data was None, even though the final-fit
    # branch guarded against None.
    extra_data = list(shared_data) if shared_data is not None else []
    if validate:
        num_folds = min(len(agent_skill.data), NUM_FOLDS)
        kf = ms.KFold(num_folds, True)
        validation = []
        for new_train_idxs, valid_idxs in kf.split(agent_skill.data):
            train_data = _process(
                agent_skill,
                [agent_skill.data[idx] for idx in new_train_idxs] + extra_data)
            valid_data = _process(
                agent_skill, [agent_skill.data[idx] for idx in valid_idxs])
            model.fit(train_data)
            validation.append(models.validate(model, valid_data))
        validated = models.total_validation(validation, agent_skill.sub_arg_accuracy)
    else:
        validated = True
    if validated:
        # Copy before concatenating: the original `all_data += shared_data`
        # extended agent_skill.data in place (list aliasing), silently growing
        # the skill's own dataset on every call.
        all_data = list(agent_skill.data) + extra_data
        all_data = _process(agent_skill, all_data)
        model.fit(all_data)
        agent_skill.skill_model.model = model
        if model_dirname is not None:
            try:
                os.makedirs(model_dirname)
            except OSError:
                pass  # directory already exists
            model_fn = "{}/{}.pkl".format(model_dirname, agent_skill.skill_name)
            # Close the file deterministically instead of leaking the handle.
            with open(model_fn, 'wb') as f:
                pickle.dump(model, f, protocol=2)
    return validated
def _validate(agent_skill, shared_data, validate=True, model_dirname=None):
    """Fit a model on shared data and accept it if it validates on the skill's own data.

    Unlike `_train`, the model is fit only on `shared_data` and the skill's own
    `agent_skill.data` is used purely as a validation set.

    Args:
        agent_skill: skill record providing `.data`, `.skill_model`,
            `.sub_arg_accuracy` and `.skill_name` (project type).
        shared_data: training examples the model is fit on.
        validate: when True, check the fitted model against the skill's data.
        model_dirname: if given, the accepted model is pickled to
            `<model_dirname>/<skill_name>.pkl`.

    Returns:
        bool: whether the model passed validation (always True when
        `validate` is False).
    """
    model = models.catalog(DictTree(
        name=agent_skill.skill_model.name,
        arg_in_len=agent_skill.skill_model.arg_in_len,
        max_cnt=agent_skill.skill_model.max_cnt,
        num_sub=agent_skill.skill_model.num_sub,
        sub_arg_accuracy=agent_skill.sub_arg_accuracy,
    ))
    model.fit(shared_data)
    if validate:
        valid_data = _process(agent_skill, agent_skill.data)
        validated = models.validate(model, valid_data, agent_skill.sub_arg_accuracy)
    else:
        validated = True
    if validated:
        agent_skill.skill_model.model = model
        if model_dirname is not None:
            try:
                os.makedirs(model_dirname)
            except OSError:
                pass  # directory already exists
            model_fn = "{}/{}.pkl".format(model_dirname, agent_skill.skill_name)
            # Close the file deterministically instead of leaking the handle
            # left by the original `pickle.dump(model, open(...))`.
            with open(model_fn, 'wb') as f:
                pickle.dump(model, f, protocol=2)
    return validated
def post(self):
    """Validate the user input, and either prompt the user for a new answer,
    or prepare the page for a new question, by properly updating the
    CurrentQuestion entity for the given user."""
    user = users.get_current_user()
    current = search_by_user(user.user_id())
    answer = self.request.get('answer')
    if not validate(answer, current.solution):
        # Wrong answer: count the attempt and let the user retry.
        current.valid = "Try Again"
        current.attempts += 1
    else:
        current.valid = "Correct"
        current.solved += 1
        # Level up every 10 solved questions.
        if current.solved // 10 >= current.level:
            current.level += 1
        # Archive the solved question before replacing it.
        solved_text = current.stmt[0] + ' ' + current.stmt[1] + ' is solved by '
        solved_text += ' '.join(current.solution)
        current.previous.append(solved_text)
        # Draw a fresh question at the (possibly new) level.
        solution, stmt = next_question(current.level)
        current.stmt = stmt
        current.solution = solution
    current.put()
    self.get()
def post(self):
    """Check the submitted answer against the sample solution and record the result."""
    user = users.get_current_user()
    current = search_by_user(user.user_id())
    submitted = self.request.get('answer')
    # Persist the verdict on the user's current-question entity.
    current.valid = "Correct" if validate(submitted, current.sample_sol) else "Try Again"
    current.put()
    self.get()
def studentlogin():
    """Render the student login page, or process a submitted login form.

    On POST, credentials are checked via dbhandler.validate; success redirects
    to the student page, failure back to this login page.
    """
    if request.method != "POST":
        # Plain GET: just show the form.
        return render_template('studentlogin.html')
    print('inside post request')
    rollno = request.form['rollno']
    password = request.form['password']
    if dbhandler.validate(rollno, password):
        print('student login successfully!')
        return redirect(url_for("studentpage"))
    print('student login failed!')
    return redirect(url_for("studentlogin"))
def login():
    """Render the login page, or process a submitted login form.

    On POST, credentials are checked via dbhandler.validate and a plain
    success/failure string is returned; on GET the login template is rendered.
    """
    # Cleanup: removed stray trailing semicolon, `else :` spacing, and
    # commented-out debug prints; spacing normalized to match studentlogin().
    if request.method == "POST":
        print('inside post request')
        username = request.form['username']
        password = request.form['password']
        if dbhandler.validate(username, password):
            return 'login success'
        return 'login failed'
    return render_template('login.html')
def train_model_keras():
    """ Train model using keras API """
    # Project-local generator wrapping the dataset; shape/num_classes come from it.
    datagen = ImageGenerator(batch_size=64)
    generator = datagen.generator()
    model = models.build_graph(input_shape=datagen.shape, output_dim=datagen.num_classes)
    # NOTE(review): `lr=` is the legacy Adam kwarg (renamed `learning_rate` in
    # TF 2.x), and `models.precison` looks like a typo for `precision` — but it
    # is presumably the metric's actual name in the project's models module;
    # confirm before renaming either.
    model.compile(loss=tf.keras.losses.categorical_crossentropy, optimizer=tf.keras.optimizers.Adam(lr=0.01), metrics=['accuracy', models.precison, models.recall])
    callbacks = models.get_callbacks()
    # NOTE(review): Model.fit_generator is deprecated in TF >= 2.1 (Model.fit
    # accepts generators directly) — verify the pinned TF version before migrating.
    model.fit_generator(generator, steps_per_epoch=1000, callbacks=callbacks)
    results = models.validate(model)
    print(results)
# Load the cached dataset and build the validation feature matrix/labels.
# NOTE(review): `suffix`, `feature`, `label_list`, `Xtra` and `ytra` are
# defined elsewhere in this file/session — this looks like a notebook-style
# script (see the `# In[...]` cell markers).
with open(f"saved_data{suffix}.pkl", 'rb') as f:
    data = pickle.load(f)
Xval = feature(data, "sm")  # second arguments only "s", "m", "sm"
yval = label_list

# In[validate the algorithm through test dataset]
from models.validate import *
#import test.validate

# Evaluate validate() for 10 model indices, collecting per-run accuracy,
# log-loss and sensitivity.
acc_values = []
logloss_values = []
sen_values = []
for i in range(10):
    acc, ll, sen = validate(Xval, yval, i, "sm", 3, 15, 39)  # change the feature category blr[i], brf[i], bgb[i]
    acc_values.append(acc)
    logloss_values.append(ll)
    sen_values.append(sen)

# Stack the three metric tables vertically (accuracy rows, then log-loss,
# then sensitivity).
accdf = pd.DataFrame(acc_values)
lldf = pd.DataFrame(logloss_values)
sendf = pd.DataFrame(sen_values)
total = pd.concat([accdf, lldf, sendf])  # axis = 1
#total.to_csv("total_stamor.csv")  # sta, mor, sta_mor

# In[make ROC curves for train and test dataset]
# Alias train/validation splits under the names the ROC plotting code expects.
X_train = Xtra
y_train = ytra
X_test = Xval
y_test = yval
from plots.roc import *
# Build the chosen pretrained backbone, freeze it, and attach a fresh
# classifier head sized for the backbone's feature output.
if arg.arch == 'vgg':
    input_size = 25088  # vgg16 classifier input: 512 * 7 * 7 flattened features
    model = models.vgg16(pretrained=True)
elif arg.arch == 'densenet':  # fixed typo: was `arg.aech`, so this branch always raised AttributeError
    # densenet121's classifier input is 1024 features, not vgg's 25088;
    # the old value would crash the first Linear layer at runtime.
    input_size = 1024
    model = models.densenet121(pretrained=True)
else:
    # Fail with a clear message instead of a NameError on `model` below.
    raise ValueError("Unsupported architecture: {}".format(arg.arch))

# Freeze the pretrained feature extractor; only the new head is trained.
for param in model.parameters():
    param.requires_grad = False

model.classifier = nn.Sequential(nn.Linear(input_size, arg.hidden_units),
                                 nn.ReLU(),
                                 nn.Dropout(0.5),
                                 nn.Linear(arg.hidden_units, 1000),
                                 nn.LogSoftmax(dim=1))

criterion = nn.NLLLoss()  # pairs with the LogSoftmax output above
optimizer = optim.Adam(model.classifier.parameters(), lr=arg.learning_rate)
model.to(device)

print('First Step: Validation')
validate(model, validloader, device, criterion)
print('Second Step: Training')
train_classifer(model, trainloader, arg.epochs, device, optimizer, criterion, validloader)
print('Third Step: Testing')
classifier_test(model, device, testloader, criterion)
print('Fourth Step: Saving Checkpoint')
save_checkpoint(model, train_data, arg.arch, arg.epochs, arg.learning_rate, arg.hidden_units, input_size)
print('Finish')
transport.send_ignore() except Exception: raise Exception( f"Encountered connection issue with [{host.name}]") ssh.close() logger.info(f"Successful SSH connection to {host.name}") return if __name__ == '__main__': display_if_not_enabled = True while True: if config.config_file_has_been_updated(): logger.info("Config Updated!!!") models.populate_from_config() models.validate() if not config.ENABLED and display_if_not_enabled: logger.warning("Backup Conductor Is Disabled!") if config.RUNNING_IN_DOCKER: remove_jobs() display_if_not_enabled = False elif config.ENABLED and not display_if_not_enabled: logger.info("Backup Conductor Has Been Enabled!") display_if_not_enabled = True if config.ENABLED: test_connections() _ensure_backup_folders_exist() set_jobs() else: