def filter_simple_wifi(params):
    params['subject'] = "Filter WIFI Graphic "
    email_response = utils.show_info(params) + "\n"
    path_real, database_name = utils.download_database(params['filename'], full_path=False)
    only_name = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime()  # TIME
    params["a_func"] = utils.get_datetime()  # TIME
    try:
        email_response += "Exporting data (WIFI) ..."
        export_csv = "filter_wifi%s" % (only_name + ".csv")
        wifi_list_mac, wifi_list_name = utils.parse_wifi_list(params['wifi_list'])
        ExportWifi(wifi_list_mac, wifi_list_name, params['is_blacklist']).run(
            path_real + database_name, path_real + export_csv)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("CSV conversion", params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = " + utils.create_link(params['KEY_IML'], str(path_real), str(export_csv)) + "\n\n\n"
    email_response += time_txt
    try:
        email_response += "Building Graphic (WIFI) ..."
        path_graphic = "filter_wifi_%s" % (only_name + ".pdf")
        WifiGraphic().run(path_real + export_csv, path_real + path_graphic)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("Graphic creation", params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = " + utils.create_link(params['KEY_IML'], str(path_real), str(path_graphic)) + "\n\n\n"
    email_response += time_txt
    email_response += utils.end_func("All process", params["all_time"])
    return utils.response_email(params['email'], params['subject'], email_response)
def predict_full(self, testing_data, fine2coarse, results_file):
    x_test, y_test = testing_data
    yc_test = tf.linalg.matmul(y_test, fine2coarse)
    p = self.prediction_params

    self.load_best_cc_both_model()
    self.load_best_fc_both_model()
    self.build_full_model()

    [yh_s, ych_s] = self.full_model.predict(x_test, batch_size=p['batch_size'])

    fine_classification_error = utils.get_error(y_test, yh_s)
    logger.info('Fine Classifier Error: ' + str(fine_classification_error))

    coarse_classification_error = utils.get_error(yc_test, ych_s)
    logger.info('Coarse Classifier Error: ' + str(coarse_classification_error))

    mismatch = self.find_mismatch_error(yh_s, ych_s, fine2coarse)
    logger.info('Mismatch Error: ' + str(mismatch))

    results_dict = {'Fine Classifier Error': fine_classification_error,
                    'Coarse Classifier Error': coarse_classification_error,
                    'Mismatch Error': mismatch}
    self.write_results(results_file, results_dict=results_dict)

    np.save(self.model_directory + "/fine_predictions.npy", yh_s)
    np.save(self.model_directory + "/coarse_predictions.npy", ych_s)
    np.save(self.model_directory + "/fine_labels.npy", y_test)
    np.save(self.model_directory + "/coarse_labels.npy", yc_test)

    tf.keras.backend.clear_session()
    return yh_s, ych_s
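# A minimal sketch of the mismatch metric reported above, assuming
# `find_mismatch_error` measures how often the predicted fine class, mapped
# through fine2coarse, disagrees with the predicted coarse class. This is an
# illustrative reconstruction, not the project's actual implementation.
def find_mismatch_error_sketch(yh_s, ych_s, fine2coarse):
    fine_pred = np.argmax(yh_s, axis=1)                          # predicted fine class
    coarse_pred = np.argmax(ych_s, axis=1)                       # predicted coarse class
    implied_coarse = np.argmax(fine2coarse[fine_pred], axis=1)   # coarse class implied by fine
    return np.mean(implied_coarse != coarse_pred)                # disagreement rate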
def simple_sensorHub(params):
    params['subject'] = "SensorHub Graphic "
    email_response = ""
    path_real, database_name = utils.download_database(params['filename'], full_path=False)
    only_name = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime()  # TIME
    params["a_func"] = utils.get_datetime()  # TIME
    try:
        email_response += "Exporting data (Sensor Hub) ..."
        export_csv = "SensorHub_%s" % (only_name + ".csv")
        ExportSensorHub().run(path_real + database_name, path_real + export_csv)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("CSV conversion", params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = " + utils.create_link(params['KEY_IML'], str(path_real), str(export_csv)) + "\n\n\n"
    email_response += time_txt
    try:
        email_response += "Building Graphic (Sensor Hub) ..."
        path_graphic = "sensor_hub_%s" % (only_name + ".pdf")
        SensorHubGraphic().run(path_real + export_csv, path_real + path_graphic)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("Graphic creation", params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = " + utils.create_link(params['KEY_IML'], str(path_real), str(path_graphic)) + "\n\n\n"
    email_response += time_txt
    email_response += utils.end_func("All process", params["all_time"])
    return utils.response_email(params['email'], params['subject'], email_response)
def E_get_database(params):
    try:
        params["email_response"] += "download database ..."
        utils.download_database_full(params, full_path=False)
    except Exception as e:
        utils.get_error(e, params)
        raise
    params["email_response"] += "OK\n"
    params["email_response"] += "\n Database = " + utils.create_link(params['KEY_IML'], str(params['path_real']), str(params['database_name'])) + "\n\n\n"
    params["only_database_name"] = params['database_name'][:params['database_name'].rfind(".")]
    params["all_time"] = utils.get_datetime()  # TIME
def E_graphic(params):
    params["a_func"] = utils.get_datetime()  # TIME
    try:
        params["email_response"] += "Creating graphic..."
        params['pdfgraphic'] = (params["only_database_name"] + ".pdf").replace("(", "").replace(")", "")
        # build the command once, then log and run it
        cmd = "Rscript machine_learning/pdf/pdf_lines.R \"" + params['path_real'] + params['csvcluster'] + "\" \"" + params['path_real'] + params['pdfgraphic'] + "\""
        print(cmd)
        os.system(cmd)
    except Exception as e:
        utils.get_error(e, params)
        raise
    time_txt = utils.end_func("Graphic creation", params["a_func"])
    params["email_response"] += "OK\n"
    params["email_response"] += "\n Graphic = " + utils.create_link(params['KEY_IML'], str(params['path_real']), str(params['pdfgraphic'])) + "\n\n"
    params["email_response"] += time_txt
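# A hedged alternative to the os.system call above: subprocess.run with an
# argument list sidesteps manual shell quoting of the two paths. Sketch only;
# the helper name is hypothetical.
import subprocess

def run_pdf_lines(csv_path, pdf_path):
    subprocess.run(
        ["Rscript", "machine_learning/pdf/pdf_lines.R", csv_path, pdf_path],
        check=True)  # raises CalledProcessError on a non-zero exit status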
def register(self, data):
    # check all fields ("Недостатньо данних" = "Not enough data")
    if not check_all_parameters(data, [
            'id', 'name', 'surname', 'email', 'school_id', 'password', 'education']):
        return json.dumps({"error": "Недостатньо данних"}), 400
    if 'phd' not in data:
        data['phd'] = False
    # check fields that can be NULL
    data['patronymic'] = check_for_null(data, 'patronymic')
    data['phone'] = check_for_null(data, 'phone')
    # hash password
    data['password'] = get_hash(data['password'])
    # try to add to db
    try:
        sql = "INSERT INTO teachers (teacher_id, name, surname, patronymic, phd, email, phone, school_id, education, password) " \
              "VALUES ('%s', '%s', '%s', %s, '%s', '%s', %s, '%s', '%s', '%s');" % (
                  data['id'], data['name'], data['surname'], data['patronymic'],
                  data['phd'], data['email'], data['phone'], data['school_id'],
                  data['education'], data['password'])
        self.db.execute(sql)
    except Exception as e:
        return get_error(e, 1)
    return "ok", 201
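# The INSERT above splices request values directly into the SQL string, which
# is open to SQL injection. A minimal parameterized sketch, assuming access to
# a DB-API 2.0 style cursor (the project's self.db wrapper may not expose one):
def insert_teacher_parameterized(cursor, data):
    sql = ("INSERT INTO teachers (teacher_id, name, surname, patronymic, phd, "
           "email, phone, school_id, education, password) "
           "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
    cursor.execute(sql, (data['id'], data['name'], data['surname'],
                         data['patronymic'], data['phd'], data['email'],
                         data['phone'], data['school_id'], data['education'],
                         data['password']))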
def add_olimp(self, data):
    # check all fields
    if not check_all_parameters(data, ['title', 'discipline', 'teacher_id']):
        return json.dumps({"error": "Недостатньо данних"}), 400
    # check fields that can be NULL
    data['notes'] = check_for_null(data, 'notes')
    data['class_num'] = check_for_null(data, 'class_num')
    # generate code
    code = None
    while code is None:
        arr = [str(random.randint(0, 9)) for _ in range(10)]
        code = "".join(arr)
        res = self.db.execute("SELECT * FROM olimpiads WHERE olimp_id='%s';" % code)
        if len(res) > 0:
            code = None
    # create the linked competition row if no con_id was supplied
    if not check_parameter(data, 'con_id'):
        sql = "INSERT INTO competition (name_id, ev_date, place, stage, notes) " \
              "VALUES ('%s', '%s', '%s', '%s', %s);" % (
                  data['name_id'], datetime.strptime(data['ev-date'], "%Y-%m-%dT%H:%M"),
                  data['place'], data['stage'], data['con_notes'])
        res0 = self.db.execute(sql)
        data['con_id'] = res0
    # try to add to db
    try:
        sql = "INSERT INTO olimpiads (olimp_id, title, teach_id, con_id, discipline, class_num, notes) " \
              "VALUES ('%s', '%s', '%s', '%s', '%s', %s, %s);" % (
                  code, data['title'], data['teacher_id'], data['con_id'],
                  data['discipline'], data['class_num'], data['notes'])
        self.db.execute(sql)
        return json.dumps({"code": code}), 200
    except Exception as e:
        return get_error(e)
def delete_olimp(self, id):
    try:
        sql = "DELETE FROM olimpiads WHERE olimp_id='%s'" % id
        self.db.execute(sql)
        return json.dumps({"data": True}), 200
    except Exception as e:
        return get_error(e)
def get_all_pupils(self, id):
    try:
        # age is computed in SQL from birth_date relative to the current date
        sql = "SELECT p.student_id, p.name, p.surname, p.patronymic, p.birth_date, p.class, p.email, p.notes," \
              " p.phone, school_id, schools.name, YEAR(CURDATE()) - YEAR(birth_date) - If(Month(birth_date)<Month" \
              "(CURDate()),0,If(Month(birth_date)>Month(CURDate()),1,If(Day(birth_date)>Day(CURDate()),1,0))) AS age, AVG(mark) " \
              "FROM compete INNER JOIN pupils p on compete.student_id = p.student_id INNER JOIN schools" \
              " ON p.school_id = schools.code LEFT OUTER JOIN answers ON " \
              "p.student_id = answers.student_id WHERE compete.olimp_id='%s' GROUP BY student_id;" % id
        res = self.db.execute(sql)
        result = []
        for i in res:
            result.append({
                "id": i[0],
                "name": i[2] + " " + i[1] + " " + ("" if i[3] is None else i[3]),
                "birth_date": i[4].strftime("%Y.%m.%d %H:%M"),
                "age": i[11],
                "class": i[5],
                "email": i[6],
                "notes": i[7],
                "phone": i[8],
                "school_id": i[9],
                "schoolname": i[10],
                "avg": "-" if i[12] is None else float(i[12])
            })
        return json.dumps(result), 200
    except Exception as e:
        return get_error(e)
def delete_task(self, id):
    try:
        sql = "DELETE FROM competition_tasks WHERE task_id='%s';" % id
        self.db.execute(sql)
        return json.dumps({"data": True}), 200
    except Exception as e:
        return get_error(e)
def register(self, data):
    # check all fields
    if not check_all_parameters(data, [
            'id', 'name', 'surname', 'school_id', 'password', 'email', 'class']):
        return json.dumps({"error": "Недостатньо данних"}), 400
    # check fields that can be NULL
    data['patronymic'] = check_for_null(data, 'patronymic')
    data['phone'] = check_for_null(data, 'phone')
    data['birth_date'] = check_for_null(data, 'birth_date')
    # hash password
    data['password'] = get_hash(data['password'])
    # try to add to db
    try:
        sql = "INSERT INTO pupils (student_id, name, surname, patronymic, class, email, phone, birth_date, school_id, password) " \
              "VALUES ('%s', '%s', '%s', %s, '%s', '%s', %s, %s, '%s', '%s');" % (
                  data['id'], data['name'], data['surname'], data['patronymic'],
                  data['class'], data['email'], data['phone'], data['birth_date'],
                  data['school_id'], data['password'])
        self.db.execute(sql)
    except Exception as e:
        return get_error(e)
    return json.dumps({"data": True}), 201
def get_all_answers(self, id):
    try:
        sql = "SELECT answer_id, text, hyperlink, response, mark, p.name, p.surname, p.class FROM answers " \
              "INNER JOIN pupils p ON answers.student_id = p.student_id WHERE task_id='%s' ORDER BY surname;" % id
        res = self.db.execute(sql)
        result = []
        for i in res:
            result.append({
                "id": i[0],
                "text": i[1],
                "hyperlink": i[2],
                "response": "" if i[3] is None else i[3],
                "mark": "" if i[4] is None else i[4],
                "name": i[6] + ' ' + i[5] + ' (' + str(i[7]) + ')'
            })
        return json.dumps(result), 200
    except Exception as e:
        return get_error(e)
def POST(self):
    i = web.input('email', 'password', 'username', agreement="no")
    i.displayname = i.get('displayname') or i.username

    f = self.get_form()
    if not f.validates(i):
        return render['account/create'](f)
    if i.agreement != "yes":
        f.note = utils.get_error("account_create_tos_not_selected")
        return render['account/create'](f)

    ia_account = InternetArchiveAccount.get(email=i.email)
    # Require email to not already be used in IA or OL
    if ia_account:
        f.note = LOGIN_ERRORS['email_registered']
        return render['account/create'](f)

    try:
        # Create ia_account: require they activate via IA email
        # and then login to OL. Logging in after activation with
        # IA credentials will auto create and link OL account.
        ia_account = InternetArchiveAccount.create(
            screenname=i.username, email=i.email, password=i.password,
            verified=False, retries=USERNAME_RETRIES)
    except ValueError:
        f.note = LOGIN_ERRORS['max_retries_exceeded']
        return render['account/create'](f)

    return render['account/verify'](username=i.username, email=i.email)
def delete_hometask(self, id):
    try:
        sql = "DELETE FROM hometasks WHERE hw_id='%s';" % id
        self.db.execute(sql)
        return json.dumps({"data": True}), 200
    except Exception as e:
        return get_error(e)
def add(self, data):
    # check all fields
    if not check_all_parameters(data, ['cityid', 'name', 'street', 'house', 'phone']):
        return json.dumps({"error": "Недостатньо данних"}), 400
    # check fields that can be NULL
    data['notes'] = check_for_null(data, 'notes')
    data['region'] = check_for_null(data, 'region')
    # generate school code
    code = None
    while code is None:
        arr = [str(random.randint(0, 9)) for _ in range(10)]
        code = "".join(arr)
        res = self.db.execute("SELECT code FROM schools WHERE code='%s';" % code)
        if len(res) > 0:
            code = None
    # try to add to db
    try:
        sql = "INSERT INTO schools (code, name, city, region, street, house_number, phone, notes) " \
              "VALUES ('%s', '%s', '%s', %s, '%s', '%s', '%s', %s);" % (
                  code, data['name'], data['cityid'], data['region'],
                  data['street'], data['house'], data['phone'], data['notes'])
        self.db.execute(sql)
        return json.dumps({"code": code}), 200
    except Exception as e:
        return get_error(e)
def eval_on_test_set():
    # relies on module-level globals: net, test_data, test_label, bs, mean, std, device
    running_error = 0
    num_batches = 0
    for i in range(0, 10000, bs):
        minibatch_data = test_data[i:i + bs]
        minibatch_label = test_label[i:i + bs]
        minibatch_data = minibatch_data.to(device)
        minibatch_label = minibatch_label.to(device)
        inputs = (minibatch_data - mean) / std
        scores = net(inputs)
        error = utils.get_error(scores, minibatch_label)
        running_error += error.item()
        num_batches += 1
    total_error = running_error / num_batches
    print('error rate on test set =', total_error * 100, 'percent')
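# `utils.get_error` itself is not shown in this file. A plausible minimal
# sketch for the PyTorch call sites (top-1 classification error per minibatch,
# returned as a tensor so `.item()` works); the real helper may differ:
def get_error_sketch(scores, labels):
    predictions = scores.argmax(dim=1)           # predicted class per sample
    num_wrong = (predictions != labels).sum()    # misclassified count
    return num_wrong.float() / labels.size(0)    # error rate in [0, 1]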
def delete_sub(self, id):
    try:
        sql = "DELETE FROM subjects WHERE sub_id='%s';" % id
        self.db.execute(sql)
        return json.dumps({"data": True}), 200
    except Exception as e:
        return get_error(e)
def POST(self):
    i = web.input('email', 'password', 'username', agreement="no")
    i.displayname = i.get('displayname') or i.username

    recap_plugin_active = 'recaptcha' in config.get('plugins')
    if recap_plugin_active:
        public_key = config.plugin_recaptcha.public_key
        private_key = config.plugin_recaptcha.private_key
        recap = recaptcha.Recaptcha(public_key, private_key)
        if not recap.validate():
            return 'Recaptcha solution was incorrect. Please <a href="javascript:history.back()">go back</a> and try again.'

    f = forms.Register()
    if not f.validates(i):
        return render['account/create'](f)
    if i.agreement != "yes":
        f.note = utils.get_error("account_create_tos_not_selected")
        return render['account/create'](f)

    try:
        accounts.register(username=i.username, email=i.email,
                          password=i.password, displayname=i.displayname)
    except ClientException as e:
        f.note = str(e)
        return render['account/create'](f)
def add(self, data):
    # check all fields
    if not check_all_parameters(data, ['title', 'class_num', 'teacher_id']):
        return json.dumps({"error": "Недостатньо данних"}), 400
    # check fields that can be NULL
    data['notes'] = check_for_null(data, 'notes')
    # generate subject code
    code = None
    while code is None:
        arr = [str(random.randint(0, 9)) for _ in range(10)]
        code = "".join(arr)
        res = self.db.execute("SELECT * FROM subjects WHERE sub_id='%s';" % code)
        if len(res) > 0:
            code = None
    # try to add to db
    try:
        sql = "INSERT INTO subjects (sub_id, title, class_num, notes, teacher_id) " \
              "VALUES ('%s', '%s', '%s', %s, '%s');" % (
                  code, data['title'], data['class_num'], data['notes'], data['teacher_id'])
        self.db.execute(sql)
        return json.dumps({"code": code}), 200
    except Exception as e:
        return get_error(e)
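# The retry loop that draws a random 10-digit code until it is unused appears
# verbatim in add_olimp, the schools add, and the subjects add above. A sketch
# of a shared helper (hypothetical name; the query style follows the
# surrounding code and inherits its injection caveat):
def generate_unique_code(db, table, column, length=10):
    while True:
        code = "".join(str(random.randint(0, 9)) for _ in range(length))
        res = db.execute("SELECT %s FROM %s WHERE %s='%s';"
                         % (column, table, column, code))
        if len(res) == 0:
            return code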
def delete_school(self, id):
    try:
        sql = "DELETE FROM schools WHERE code='%s';" % id
        self.db.execute(sql)
        return json.dumps({"data": True}), 200
    except Exception as e:
        return get_error(e)
def get_hometask_info(self, id):
    try:
        sql = "SELECT * FROM hometasks WHERE hw_id=%s;" % id
        res1 = self.db.execute(sql)[0]
        sql = "SELECT hyperlink FROM hometask_hyperlinks WHERE homework_id=%s;" % id
        res2 = self.db.execute(sql)
        links = []
        if res2 is not None:
            for i in res2:
                links.append(i[0])
        # build an ISO 8601 string from the deadline, e.g. '2021-05-04T16:30'
        date = str(res1[3])[:-3].replace(" ", 'T')
        result = {
            "hw_title": res1[1],
            "content": res1[2],
            "deadline": res1[3].strftime("%Y.%m.%d %H:%M"),
            "deadline_iso": date,
            "subject_id": res1[4],
            "active": datetime.now() > res1[3],
            "notes": "" if res1[5] is None else res1[5],
            "remaining_time": str(abs(datetime.now() - res1[3])),
            "hyperlinks": links
        }
        return json.dumps(result), 200
    except Exception as e:
        return get_error(e)
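# The slicing in get_hometask_info ("str(...)[:-3]" plus a space-to-'T'
# replace) rebuilds an ISO 8601 timestamp by hand. Assuming res1[3] is a
# datetime, the standard library produces the same string directly (sketch):
def deadline_iso_sketch(deadline):
    # datetime(2021, 5, 4, 16, 30) -> '2021-05-04T16:30'
    return deadline.isoformat(timespec='minutes')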
def get_all_pupils_learn(self, data):
    surname = check_for_null(data, 'surname')
    if surname == 'NULL':
        return json.dumps({"error": "Недостатньо данних"}), 400
    try:
        # bare %s: `surname` presumably arrives from check_for_null already quoted
        sql = "SELECT student_id, surname, name, email, class, school_id FROM pupils WHERE NOT EXISTS (SELECT * FROM studying AS A WHERE subject_id IN (SELECT " \
              "sub_id FROM subjects WHERE teacher_id IN (SELECT teacher_id FROM teachers WHERE surname=%s)) AND " \
              "NOT EXISTS (SELECT * FROM studying WHERE studying.student_id=pupils.student_id AND " \
              "A.subject_id=studying.subject_id));" % surname
        res = self.db.execute(sql)
        sql2 = "SELECT teacher_id FROM teachers WHERE surname=%s;" % surname
        res2 = self.db.execute(sql2)
        if len(res2) < 1:
            return json.dumps([]), 200
        result = []
        for pupil in res:
            result.append({
                "id": pupil[0],
                "name": pupil[1] + " " + pupil[2],
                "email": pupil[3],
                "class": pupil[4],
                "school_id": pupil[5]
            })
        return json.dumps(result), 200
    except Exception as e:
        return get_error(e)
def get_avarage_pupil(self, id):
    try:
        sql = "SELECT AVG(mark) FROM answers WHERE student_id='%s' GROUP BY student_id" % id
        res = self.db.execute(sql)
        return json.dumps({"data": float(res[0][0])}), 200
    except Exception as e:
        return get_error(e)
def delete_sub(self, data):
    try:
        sql = "DELETE FROM studying WHERE student_id='%s' AND subject_id='%s';" % (
            data['student_id'], data['sub_id'])
        self.db.execute(sql)
        return json.dumps({"data": True}), 200
    except Exception as e:
        return get_error(e)
def delete_olimp(self, data):
    try:
        sql = "DELETE FROM compete WHERE student_id='%s' AND olimp_id='%s';" % (
            data['student_id'], data['olimp_id'])
        self.db.execute(sql)
        return json.dumps({"data": True}), 200
    except Exception as e:
        return get_error(e)
def train_model(model, dataset, num_samples=10000):
    loader, num_classes = _get_loader(dataset, num_samples=num_samples)
    loss_fn = torch.nn.CrossEntropyLoss()

    # Edit the model to match # classes
    model.fc = torch.nn.Linear(model.fc.in_features, num_classes).cuda()
    model.layers[-1] = model.fc
    model.reset_classifier()

    # Initial linear phase
    optimizer = torch.optim.Adam(model.classifier.parameters())
    for epoch_i in range(10):
        print("Linear Fit Epoch: {}/10".format(epoch_i))
        metrics = AverageMeter()
        for data, target in loader:
            data = data.cuda()
            target = target.cuda()
            optimizer.zero_grad()
            output = model(data)
            loss = loss_fn(output, target)
            error = get_error(output, target)
            loss.backward()
            optimizer.step()
            metrics.update(n=data.size(0), loss=loss.item(), error=error)
        print(f"[epoch {epoch_i}]: " + "\t".join(f"{k}: {v}" for k, v in metrics.avg.items()))

    # Full fine-tuning phase
    optimizer = torch.optim.SGD(model.parameters(), weight_decay=5e-4, lr=1e-3)
    for epoch_i in range(60):
        print("Finetuning Epoch: {}/60".format(epoch_i))
        metrics = AverageMeter()
        for data, target in loader:
            data = data.cuda()
            target = target.cuda()
            optimizer.zero_grad()
            output = model(data)
            loss = loss_fn(output, target)
            error = get_error(output, target)
            loss.backward()
            optimizer.step()
            metrics.update(n=data.size(0), loss=loss.item(), error=error)
        print(f"[epoch {epoch_i}]: " + "\t".join(f"{k}: {v}" for k, v in metrics.avg.items()))
        if epoch_i == 39:
            optimizer.param_groups[0]['lr'] *= 0.1
    return model
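# The manual "multiply lr by 0.1 after epoch 39" step above can also be
# expressed with a built-in scheduler; a hypothetical refactor (step the
# scheduler once per epoch instead of editing param_groups by hand):
def make_scheduler(optimizer):
    return torch.optim.lr_scheduler.MultiStepLR(
        optimizer, milestones=[40], gamma=0.1)  # decay takes effect at epoch 40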
def update_error_integrals(self, next_position):
    next_error = utils.get_error(next_position, self.goal_position)
    seconds_since_last_update = (rospy.Time.now() - self.last_position_update_time).to_sec()
    # accumulate error * dt for each of the six pose dimensions
    for dimension in range(6):
        self.error_integral[dimension] = (
            next_error[dimension] * seconds_since_last_update) + self.error_integral[dimension]
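# Sketch of how the accumulated integral typically feeds a PID control law.
# The gain names and the derivative term are illustrative assumptions, not
# taken from this controller:
def pid_output_sketch(error, error_integral, error_derivative, kp, ki, kd):
    return [kp * error[d] + ki * error_integral[d] + kd * error_derivative[d]
            for d in range(6)]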
def predict_coarse(self, testing_data, results_file, fine2coarse):
    x_test, y_test = testing_data
    p = self.prediction_params
    yh_s = self.full_classifier.predict(x_test, batch_size=p['batch_size'])

    single_classifier_error = utils.get_error(y_test, yh_s)
    logger.info('Single Classifier Error: ' + str(single_classifier_error))

    # project fine predictions and labels onto coarse categories
    yh_c = np.dot(yh_s, fine2coarse)
    y_test_c = np.dot(y_test, fine2coarse)
    coarse_classifier_error = utils.get_error(y_test_c, yh_c)
    logger.info('Coarse Classifier Error: ' + str(coarse_classifier_error))

    results_dict = {'Single Classifier Error': single_classifier_error,
                    'Coarse Classifier Error': coarse_classifier_error}
    utils.write_results(results_file, results_dict=results_dict)
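# fine2coarse acts here as a 0/1 mapping matrix: entry [f, c] is 1 when fine
# class f belongs to coarse class c, so a matrix product converts fine
# probabilities (or one-hot labels) into coarse ones. A tiny worked example
# with illustrative values:
fine2coarse_demo = np.array([[1, 0],    # fine 0 -> coarse 0
                             [1, 0],    # fine 1 -> coarse 0
                             [0, 1]])   # fine 2 -> coarse 1
yh_demo = np.array([[0.7, 0.2, 0.1]])   # fine prediction for one sample
print(np.dot(yh_demo, fine2coarse_demo))  # [[0.9 0.1]] -> coarse prediction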
def transfer_model(model, train_dataset, test_dataset, num_samples=10000):
    train_loader, num_classes = _get_loader(train_dataset, num_samples=num_samples)
    loss_fn = torch.nn.CrossEntropyLoss()

    # Edit the model to match # classes
    model.fc = torch.nn.Linear(model.fc.in_features, num_classes).cuda()
    model.layers[-1] = model.fc
    model.reset_classifier()

    # Initial linear phase
    optimizer = torch.optim.Adam(model.classifier.parameters(), lr=1e-4, weight_decay=5e-4)
    for epoch_i in range(16):
        print("Linear Fit Epoch: {}/16".format(epoch_i))
        metrics = AverageMeter()
        for data, target in train_loader:
            data = data.cuda()
            target = target.cuda()
            optimizer.zero_grad()
            output = model(data)
            loss = loss_fn(output, target)
            error = get_error(output, target)
            loss.backward()
            optimizer.step()
            metrics.update(n=data.size(0), loss=loss.item(), error=error)
        print(f"[epoch {epoch_i}]: " + "\t".join(f"{k}: {v}" for k, v in metrics.avg.items()))

    test_loader, num_classes = _get_loader(test_dataset, num_samples=num_samples)
    print("Validation")
    metrics = AverageMeter()
    with torch.no_grad():
        for data, target in test_loader:
            data = data.cuda()
            target = target.cuda()
            output = model(data)
            loss = loss_fn(output, target)
            error = get_error(output, target)
            metrics.update(n=data.size(0), loss=loss.item(), error=error)
    mean_error = metrics.avg['error']
    print("Error:", mean_error)
    return mean_error
def train_fine_classifiers(self, training_data, validation_data, fine2coarse):
    logger.info('Training fine classifiers')
    x_train, y_train = training_data
    x_val, y_val = validation_data
    p = self.fine_training_params

    for i in range(self.n_coarse_categories):
        logger.info(f'Training fine classifier {i+1}/{self.n_coarse_categories}')
        # Get all training data for the coarse category
        ix = np.where([(y_train[:, j] == 1) for j in [
            k for k, e in enumerate(fine2coarse[:, i]) if e != 0]])[1]
        x_tix = tf.gather(x_train, ix)
        y_tix = tf.gather(y_train, ix)
        # Get all validation data for the coarse category
        ix_v = np.where([(y_val[:, j] == 1) for j in [
            k for k, e in enumerate(fine2coarse[:, i]) if e != 0]])[1]
        x_vix = tf.gather(x_val, ix_v)
        y_vix = tf.gather(y_val, ix_v)

        sgd_coarse = tf.keras.optimizers.SGD(
            lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
        self.fine_classifiers['models'][i].compile(
            optimizer=sgd_coarse, loss='categorical_crossentropy',
            metrics=['accuracy'])
        index = 0
        while index < p['coarse_stop']:
            self.fine_classifiers['models'][i].fit(
                x_tix, y_tix, batch_size=p['batch_size'], initial_epoch=index,
                epochs=index + p['step'], validation_data=(x_vix, y_vix))
            index += p['step']

        sgd_fine = tf.keras.optimizers.SGD(
            lr=0.001, decay=1e-6, momentum=0.9, nesterov=True)
        self.fine_classifiers['models'][i].compile(
            optimizer=sgd_fine, loss='categorical_crossentropy',
            metrics=['accuracy'])
        while index < p['fine_stop']:
            self.fine_classifiers['models'][i].fit(
                x_tix, y_tix, batch_size=p['batch_size'], initial_epoch=index,
                epochs=index + p['step'], validation_data=(x_vix, y_vix))
            index += p['step']

        yh_f = self.fine_classifiers['models'][i].predict(
            x_val[ix_v], batch_size=p['batch_size'])
        logger.info('Fine Classifier ' + str(i) + ' Error: ' +
                    str(utils.get_error(y_val[ix_v], yh_f)))
def POST(self):
    i = web.input(email='')
    f = forms.ForgotPassword()
    if not f.validates(i):
        return render['account/password/forgot'](f)
    account = accounts.find(email=i.email)
    if account.is_blocked():
        f.note = utils.get_error("account_blocked")
        return render_template('account/password/forgot', f)
    send_forgot_password_email(account.username, i.email)
    return render['account/password/sent'](i.email)
def POST(self):
    i = web.input('email', 'password', 'username', agreement="no")
    i.displayname = i.get('displayname') or i.username
    f = forms.Register()
    if not f.validates(i):
        return render['account/create'](f)
    if i.agreement != "yes":
        f.note = utils.get_error("account_create_tos_not_selected")
        return render['account/create'](f)
    try:
        web.ctx.site.register(i.username, i.displayname, i.email, i.password)
    except ClientException as e:
        f.note = str(e)
        return render['account/create'](f)
def error(self, name, i):
    f = forms.Login()
    f.fill(i)
    f.note = utils.get_error(name)
    return render.login(f)
def error(name):
    # `i` (the web input) is presumably captured from the enclosing scope
    f = forms.Login()
    f.fill(i)
    f.note = utils.get_error(name)
    print("error: %r %r" % (f.note, web.websafe(f.note)))
    return render.login(f)
""" logging.info('starting median_unique') if not args.output_file: logging.error('no output file defined') sys.exit(-1) # init vars mf = MedianFinder(args.output_file) # read file bringing the word list tfile = TweetFile(args.input_file) for words in tfile.get_words(): # make an unique set of words and get its length mf.process_tweet(words) # save to file the current median mf.write_results() # log the program is finished logging.info('program finished') if __name__ == '__main__': args = parse_args() # run logging any error try: setup_log() main(args) except: logging.error(get_error()) logging.info('Exiting. Bye')