def self_home():
    """Render the logged-in user's home page.

    Requires a 'session_id' cookie; on any authentication failure the
    standard USER_NOT_LOGIN payload is returned via check_result().
    """
    info = ''
    req = {}
    ret_data = {}
    # Fix: validate the cookie *before* touching the session store.  The
    # original called getSessionInfo(session_id) first and only then
    # checked `if not session_id`.
    if not request.cookies.has_key('session_id'):
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    session_id = request.cookies['session_id']
    if not session_id:
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    sessionInfo = getSessionInfo(red, session_id, ['uid', 'nickname', 'head_url', 'authority'])
    req['uid'] = sessionInfo['uid']
    import loadSelfHome
    result, mtype, info, ret_data = loadSelfHome.loadSelfHome(req)
    # Fix: the original computed check_result(...) and discarded it,
    # rendering the template even when the load failed.  Propagate the
    # failure to the client instead.
    if not result:
        return check_result(result, mtype, info, ret_data)
    return render_template('selfhome.html', user=sessionInfo, userinfo=ret_data)
def update_userinfo():
    """Update one field of the logged-in user's profile.

    Expects a JSON body containing 'userinfo' (field name) and
    'content' (new value); requires a valid 'session_id' cookie.
    """
    req_data = request.get_data()
    info = ''
    ret_data = {}
    # Fix: reject before any session-store lookup when the cookie is
    # absent or empty (the original checked `not session_id` only after
    # getSessionInfo had already used it).
    if not request.cookies.has_key('session_id'):
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    session_id = request.cookies['session_id']
    if not session_id:
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    sessionInfo = getSessionInfo(red, session_id, ['uid'])
    try:
        req = json.loads(req_data)
    except ValueError:
        info = 'register failed:json analyse failure'
        logger.error(info)
        return check_result(False, JSON_ERR_ANALY, info, ret_data)
    # Robustness: a well-formed JSON body missing these keys previously
    # raised an unhandled KeyError; report a clean parse error instead.
    if 'userinfo' not in req or 'content' not in req:
        info = 'register failed:json analyse failure'
        logger.error(info)
        return check_result(False, JSON_ERR_ANALY, info, ret_data)
    req['uid'] = sessionInfo['uid']
    import updateUserInfo
    result, mtype, info, ret_data = updateUserInfo.updateUserInfo(req, req['userinfo'], req['content'])
    return check_result(result, mtype, info, ret_data)
def pause_rename_resume_pv(self, pv, new, debug=False):
    """Pause, rename and resume a PV

    :param pv: name of the pv
    :param new: new name of the pv
    :param bool debug: enable debug logging
    :return: None
    """
    # The old PV must currently be archived...
    result = self.get_pv_status(pv)
    if result[0]["status"] != "Being archived":
        sys.stderr.write(
            "PV {} isn't being archived. Skipping.\n".format(pv))
        return
    # ...and the new name must not already exist.
    result = self.get_pv_status(new)
    if result[0]["status"] != "Not being archived":
        sys.stderr.write(
            "New PV {} already exists. Skipping.\n".format(new))
        return
    result = self.pause_pv(pv)
    if not utils.check_result(result, "Error while pausing {}".format(pv)):
        return
    result = self.rename_pv(pv, new)
    # Bug fix: .format(pv, new) was applied to check_result()'s return
    # value instead of the message string, which raised AttributeError
    # at runtime (and left the message unformatted).
    if not utils.check_result(result,
                              "Error: renaming {} to {}".format(pv, new)):
        return
    result = self.resume_pv(new)
    if not utils.check_result(result,
                              "Error while resuming {}".format(new)):
        return
    if debug:
        print("PV {} successfully renamed to {}".format(pv, new))
def comment_article():
    """Post a comment (JSON body) as the logged-in user."""
    req_data = request.get_data()
    info = ''
    ret_data = {}
    # Fix: validate the session cookie before any session-store access
    # (the original checked `not session_id` only after getSessionInfo
    # had already consumed it).
    if not request.cookies.has_key('session_id'):
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    session_id = request.cookies['session_id']
    if not session_id:
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    sessionInfo = getSessionInfo(red, session_id, ['uid'])
    try:
        req = json.loads(req_data)
    except ValueError:
        info = 'register failed:json analyse failure'
        logger.error(info)
        return check_result(False, JSON_ERR_ANALY, info, ret_data)
    req['uid'] = sessionInfo['uid']
    import writeComment
    result, mtype, info, ret_data = writeComment.writeComment(req)
    return check_result(result, mtype, info, ret_data)
def user_login(request, red):
    """Authenticate a user from a JSON body; on success set the
    session_id/nickname cookies (30-day lifetime) on the response."""
    ret_data = {}
    req_data = request.get_data()
    # Fix: removed leftover debug `print` statements that dumped the raw
    # request body (credentials) and the login result to stdout.
    try:
        req = json.loads(req_data)
    except ValueError:
        info = 'register failed:json analyse failure'
        logger.error(info)
        return check_result(False, JSON_ERR_ANALY, info, ret_data)
    import login
    result, mtype, info, ret_data = login.login(req, red)
    response = make_response(check_result(result, mtype, info, ret_data))
    if result:  # idiom fix: truthiness instead of `== True`
        # 30 days, in seconds; hoisted so both cookies share one expiry.
        expires = time.time() + 30 * 60 * 60 * 24
        response.set_cookie('session_id', ret_data['session_id'], expires=expires)
        response.set_cookie('nickname', ret_data['nickname'], expires=expires)
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response
def _main():
    """Run the countPrimes checker over a fixed batch of cases."""
    from utils import check_result
    cases = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
    expected = [0, 0, 1, 2, 2, 3, 3, 4, 4, 4, 4, 5]
    runner = check_result(Solution().countPrimes)
    runner(cases, expected)
def upload_head():
    """Handle an avatar upload: save the image, update head_url in both
    the user record and the session cache, and return the public URL as
    JSON with a CORS header."""
    info = ''
    ret_data = {}
    req = {}
    # Validate the session cookie before any session-store access.
    if not request.cookies.has_key('session_id'):
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    session_id = request.cookies['session_id']
    if not session_id:
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    sessionInfo = getSessionInfo(red, session_id, ['uid'])
    req['uid'] = sessionInfo['uid']
    # Fix: the original fell through to `json.dumps(ret)` with `ret`
    # undefined (NameError) for non-POST requests or rejected files.
    # Provide a failure payload as the default.
    ret = {"success": 0, "message": "upload failed"}
    if request.method == 'POST':
        # `upload` instead of `file` -- avoid shadowing the builtin.
        upload = None
        try:
            upload = request.files['File1']
        except Exception as e:
            # Fix: replaced bare debug prints with structured logging.
            logger.error('upload_head: missing File1 field: %s' % e)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(server.config['UPLOAD_HEAD_FOLDER'], filename))
            ret = {
                "success": 1,
                "message": "success",
                "url": "http://" + HOST + ":" + str(PORT) + "/static/upload_head/" + filename
            }
            import updateUserInfo
            result, mtype, info, ret_data = updateUserInfo.updateUserInfo(req, 'head_url', ret['url'])
            # Keep the cached session copy of head_url in sync.
            red.hset(session_id, 'head_url', ret['url'])
    response = make_response(json.dumps(ret))
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response
def agree_question(request, red):
    """Record a vote on a question (vote direction fixed at 0 here) and
    return the result with a CORS header."""
    agree = 0
    ret_data = {}
    req_data = request.get_data()
    try:
        req = json.loads(req_data)
    except ValueError:
        info = 'register failed:json analyse failure'
        logger.error(info)
        return check_result(False, JSON_ERR_ANALY, info, ret_data)
    import voteQuestion
    result, mtype, info, ret_data = voteQuestion.voteQuestion(req, agree)
    # Bug fix: the original passed `request` (the Flask request object)
    # to check_result instead of the `result` success flag.
    response = make_response(check_result(result, mtype, info, ret_data))
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response
def multiprocess_link_tables(self, table_dir, dest_dir, alpha=0.5, beta=0.5, verbose=1, max_mention_cnt=100000, test_cnt=10000):
    """Link every table under table_dir in parallel, writing one output
    file per table into dest_dir.

    dest_dir is wiped and recreated first.  At most test_cnt tables are
    considered; tables with more than max_mention_cnt mentions are
    skipped.  alpha/beta/verbose are forwarded to the per-table worker.
    """
    # Start from a clean output directory.
    if os.path.exists(dest_dir):
        print('clear', dest_dir)
        shutil.rmtree(dest_dir)
    create_dir(dest_dir)
    time_begin = time.time()
    table_names = os.listdir(table_dir)
    all_param = []
    # Phase 1: build the per-table argument tuples serially.
    for i, table_name in enumerate(table_names):
        table_fp = os.path.join(table_dir, table_name)
        dest_fp = os.path.join(dest_dir, table_name)
        if i >= test_cnt:
            break
        with open(table_fp, 'r', encoding='utf-8') as f:
            table = json.load(f)
        # Skip oversized tables to keep worker memory/time bounded.
        mention_cnt = len(table)
        if mention_cnt > max_mention_cnt:
            print('Too large table({}), give up {}'.format(mention_cnt, table_fp))
            continue
        part_prior = self.get_part_prior(table)
        m_e_pred = self.get_m_e_pred(table)
        assert m_e_pred, 'm_e_pred没被正确'
        all_param.append((table_fp, dest_fp, m_e_pred, part_prior, alpha, beta, verbose))
        print('table param %d/%d' % (i+1, len(table_names)))
    print('*' * 40)
    print('prepare parameters', time.time() - time_begin, 's')
    print('execute tables', len(all_param))
    # Phase 2: fan out to a worker pool.  NOTE(review): the original
    # (Chinese) comment said "only four processes, to avoid excessive
    # resource usage", but the code actually uses 8 -- confirm intent.
    pool = Pool(processes=8)
    pool.map(run_proc, all_param)
    pool.close()
    pool.join()
    # Grace period before validating output -- presumably to let workers'
    # writes settle; TODO confirm whether this is still needed.
    time.sleep(10)
    print('***', 'alpha', alpha, 'beta', beta)
    check_result(dest_dir)
def write_article(request, red):
    """Create a new article authored by the logged-in user and return
    the result with a CORS header."""
    ret_data = {}
    req_data = request.get_data()
    try:
        req = json.loads(req_data)
    except ValueError:
        info = 'register failed:json analyse failure'
        logger.error(info)
        return check_result(False, JSON_ERR_ANALY, info, ret_data)
    # Consistency/robustness fix: sibling handlers verify the cookie
    # before using it; this one indexed request.cookies directly and
    # raised KeyError for anonymous callers.
    if not request.cookies.has_key('session_id'):
        info = 'user not login'
        return check_result(False, USER_NOT_LOGIN, info, ret_data)
    session_id = request.cookies['session_id']
    sessionInfo = getSessionInfo(red, session_id, ['uid'])
    req['uid'] = sessionInfo['uid']
    import writeArticle
    result, mtype, info, ret_data = writeArticle.writeArticle(req)
    response = make_response(check_result(result, mtype, info, ret_data))
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response
def link_tables(self, table_dir, dest_dir, max_mention_cnt=100, test_cnt=None, verbose=0, alpha=0.5, beta=0.5):
    """Sequentially link every table in table_dir into dest_dir.

    dest_dir is cleared first.  Stops after test_cnt successful links
    (all tables when test_cnt is None), then validates the output.
    """
    if os.path.exists(dest_dir):
        print('clear', dest_dir)
        shutil.rmtree(dest_dir)
    create_dir(dest_dir)
    table_names = os.listdir(table_dir)
    total = len(table_names)
    limit = total if test_cnt is None else test_cnt
    linked = 0
    for idx, table_name in enumerate(table_names):
        if linked >= limit:
            break
        src_fp = os.path.join(table_dir, table_name)
        dst_fp = os.path.join(dest_dir, table_name)
        # link_table reports success; only successful links count
        # toward the limit.
        if self.link_table(src_fp, dst_fp, verbose=verbose, max_mention_cnt=max_mention_cnt, alpha=alpha, beta=beta):
            linked += 1
        print_line(s='%d/%d' % (idx + 1, total))
    print('****', 'alpha', alpha, 'beta', beta)
    check_result(dest_dir)
def populate_graph(graph, node1, node2):
    """Add the directed edge node1 -> node2 to the adjacency dict and
    return the (mutated) graph."""
    # Idiom fix: setdefault replaces the `node1 in graph.keys()` test
    # plus two-branch insert -- one lookup, identical result.
    graph.setdefault(node1, []).append(node2)
    return graph


def calc(lines):
    """Compute the answer from orbit lines of the form "A)B" (B orbits A).

    Builds the child->parent adjacency, walks both the YOU and SAN
    paths back to COM, and returns the size of their symmetric
    difference (the transfer distance).
    """
    parser = re.compile("[A-Z0-9]+")
    values = [tuple(parser.findall(line.strip())) for line in lines]
    graph = {}
    nodes = []
    for value in values:
        nodes.append(value[1])
        graph = populate_graph(graph, value[0], value[1])
    you_path = find_path(graph, "YOU", "COM", [])
    san_path = find_path(graph, "SAN", "COM", [])
    xor = set(you_path) ^ set(san_path)
    return len(xor)


if __name__ == '__main__':
    lines = read_input()
    result = str(calc(lines))
    write_output(result)
    check_result(result)
optimizer_ft = optim.Adam(model.parameters(), lr=10e-4) # Decay LR by a factor of 0.1 every 7 epochs exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=7, gamma=0.1) if __name__ == '__main__': # Running the model print('Starting Training with params: \nN_EPOCH = {},' ' \nBATCH_SIZE = {},' ' \nTRAIN_ON_GPU = {},' ' \nSAVE_FILE_NAME = {},' ' \nPATH_TO_SAVE = {}'.format(N_EPOCH, BATCH_SIZE, train_on_gpu, SAVE_FILE_NAME, PATH)) model, history = train_loop(model, criterion, optimizer_ft, train_loader, valid_loader, train_on_gpu, save_file_name=SAVE_FILE_NAME, max_epochs_stop=3, n_epochs=N_EPOCH, print_every=1, path_to_save=PATH) print("Starting check result on test data") # Check test result check_result(model, test_loader, criterion, train_on_gpu)
__author__ = 'christianvriens'
__license__ = ''

import utils

# Challenge 1: convert the hex-encoded input to base64 and verify it
# against the known-good answer.
hex_input = '49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d'
expected_b64 = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'

converted = utils.hex_to_base64(hex_input)
print(utils.check_result(converted, expected_b64))
def _main():
    """Run the minSteps checker over a fixed batch of cases."""
    from utils import check_result
    cases = [27, 30, 2, 3, 4, 5, 6, 7, 8]
    expected = [9, 10, 2, 3, 4, 5, 5, 7, 6]
    verify = check_result(Solution().minSteps)
    verify(cases, expected)