def load_instagram():
    if request.args:
        instagram_name = cache.get('instagram_name')
        page = int(request.args.get('page'))
        instagram_embeds = []
        instagram_url = 'https://www.instagram.com'
        embed_url = 'https://graph.facebook.com/v9.0/instagram_oembed'
        access_token = environ.get('INSTAGRAM_APP_ID') + '|' + environ.get(
            'INSTAGRAM_CLIENT_ID')
        # busy wait until the posts are put into the cache
        while cache.get('instagram_cache') == []:
            pass
        instagram_cache = cache.get('instagram_cache')
        # delay embed request to lower the chance of reaching the embed API limit
        for post in instagram_cache[page * 12:(page + 1) * 12]:
            post_name = post
            post_url = instagram_url + post_name
            params = {
                'url': post_url,
                'access_token': access_token,
                'omitscript': 'true'
            }
            embed = requests.get(embed_url, params).json()['html']
            instagram_embeds.append(embed)
        return jsonify({'instagram': instagram_embeds})
    return jsonify({})
def index(key):
    """Confirm with the user that the requested action is to be performed."""
    message = cache.get('{0}/message'.format(key))
    agreement = cache.get('{0}/agreement'.format(key))
    severity = cache.get('{0}/severity'.format(key))
    backward = cache.get('{0}/previous'.format(key))
    forward = cache.get('{0}/next'.format(key))
    form = ConfirmationForm()
    if form.validate_on_submit():
        if form.confirmed.data:
            cache.set(forward, True)
            return redirect(forward)
        else:
            return redirect(backward)
    form.confirmed.label.text = agreement
    flash(message, severity)
    page_vars = {
        'title': 'Confirm Action',
        'form': form,
        'cancel_path': backward
    }
    return render_template('confirm/index.html', **page_vars)
def _fetch_polls(self, year):
    politician_position = self.politician_data.position
    registered_id = self.politician_data.registered_id
    polls_dataset_key = 'deputies_votes_dataset'
    polls_dataset = cache.get(polls_dataset_key)
    votes_key = "{}-votes".format(self.politician_data.id)
    saved_votes = cache.get(votes_key)
    filtered_df = None
    df = None
    if saved_votes is None:
        if politician_position == 'senator':
            df = fs.get_votes_from_senator(registered_id, year)
            update_cache_value(votes_key, df)
            filtered_df = df
        elif politician_position == 'federal-deputy':
            polls_df = pd.DataFrame(polls_dataset,
                                    columns=self.POLLS_DF_COLUMNS)
            if polls_dataset is None:
                polls_df = fd.get_voting_data(year)
            elif year not in polls_df.year.tolist():
                # pd.concat takes a list of frames
                polls_df = pd.concat([polls_df, fd.get_voting_data(year)])
            update_cache_value(polls_dataset_key, polls_df)
            df = fd.get_votes_from_deputy(registered_id, polls_df)
            update_cache_value(votes_key, df)
            filtered_df = df[df.year == year]
    else:
        df = pd.DataFrame(saved_votes, columns=self.VOTES_DF_COLUMNS)
        years = df.year.tolist()
        if year not in years:
            if politician_position == 'senator':
                df = pd.concat(
                    [df, fs.get_votes_from_senator(registered_id, year)],
                    sort=True)
                update_cache_value(votes_key, df)
            elif politician_position == 'federal-deputy':
                polls_df = pd.DataFrame(polls_dataset,
                                        columns=self.POLLS_DF_COLUMNS)
                if year not in polls_df.year.tolist():
                    polls_df = pd.concat(
                        [polls_df, fd.get_voting_data(year)])
                    update_cache_value(polls_dataset_key, polls_df)
                df = pd.concat([
                    df, fd.get_votes_from_deputy(registered_id, polls_df)
                ])
                update_cache_value(votes_key, df)
        filtered_df = df[df.year == year]
    return filtered_df
def get_playlist():
    current = cache.get('current')
    playlist = cache.get('playlist')
    if playlist is None:
        playlist = []
        cache.set('playlist', playlist)
        #playlist = []
    return current, playlist
def get_playlist():
    """Get the primary keys of the songs in the current playlist"""
    current = cache.get("current")
    playlist = cache.get("playlist")
    if playlist is None:
        playlist = []
        cache.set("playlist", playlist)
        # playlist = []
    return current, playlist
def adminLogin(params):
    # the captcha string is stored in the session/cache
    if cache.get('captcha') is None:
        return BaseController().error(msg='验证码错误')
    if params['captcha'].lower() != cache.get('captcha').lower():
        return BaseController().error(msg='验证码错误')
    res = AdminService().login(params)
    """
    if res['code'] == Code.SUCCESS:
        return BaseController.json(res)
    """
    return BaseController().json(res)
def generate_result(mission_id):
    mission = Mission.query.filter_by(id=mission_id).first()
    if mission:
        avg_rt, min_rt, max_rt, rt_p50, rt_p75, rt_p95, rt_p99, f_rate, samples, ok, ko, tps = \
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0
        if mission.loadtool == "gatling":
            Domino.get("report-%s.tar.gz" % mission_id, save=True, unzip=True,
                       savepath="report-gatling")
            stat = json.loads(cache.get("mission_%s_stats" % mission_id))
            api = stat.get("stats")
            samples = api.get("numberOfRequests").get("total")
            ok = api.get("numberOfRequests").get("ok")
            ko = api.get("numberOfRequests").get("ko")
            f_rate = round(float(ko) / ok * 100, 1) if ok else 100
            min_rt = api.get("minResponseTime").get("ok")
            max_rt = api.get("maxResponseTime").get("ok")
            avg_rt = api.get("meanResponseTime").get("ok")
            rt_p50 = api.get("percentiles1").get("ok")
            rt_p75 = api.get("percentiles2").get("ok")
            rt_p95 = api.get("percentiles3").get("ok")
            rt_p99 = api.get("percentiles4").get("ok")
        elif mission.loadtool == "jmeter":
            dashboard = cache.get("dashboard_js-%s" % mission_id)
            graph = cache.get("graph_js-%s" % mission_id)
            with open(os.path.join(Config.REPORT_FOLDER, "report-jmeter", "content", "js",
                                   "dashboard-%s.js" % mission_id), "wb") as f:
                f.write(dashboard)
            with open(os.path.join(Config.REPORT_FOLDER, "report-jmeter", "content", "js",
                                   "graph-%s.js" % mission_id), "wb") as f:
                f.write(graph)
            for page in ["OverTime", "ResponseTimes", "Throughput"]:
                with open(os.path.join(Config.REPORT_FOLDER, "report-jmeter", "content", "pages",
                                       "%s-%s.html" % (page, mission_id)), "wb") as f:
                    f.write(Template(jmeter_report_template_dict.get(page)).render(mission_id=mission_id))
            with open(os.path.join(Config.REPORT_FOLDER, "report-jmeter",
                                   "index-%s.html" % mission_id), "wb") as f:
                f.write(Template(jmeter_report_template_dict.get("jmeter_report_index_template")).render(mission_id=mission_id))
            stat = json.loads(dashboard.split("statisticsTable\"), ")[1].split(", function")[0])
            api = stat.get("overall").get("data")
            samples = api[1]
            ko = api[2]
            ok = samples - ko
            f_rate = round(api[3], 1)
            min_rt = api[5]
            max_rt = api[6]
            avg_rt = api[4]
            rt_p50 = 0
            rt_p75 = 0
            rt_p95 = api[8]
            rt_p99 = api[9]
            tps = api[10]
        result = Result(
            mission.project, mission.api_name, mission.concurrent,
            avg_rt, min_rt, max_rt, rt_p50, rt_p75, rt_p95, rt_p99,
            f_rate, samples, ok, ko, tps, mission_id
        )
        mission.status = 1
        db.session.add(result)
        db.session.add(mission)
        db.session.commit()
        db.session.close()
def outbound(account):
    if request.method != 'POST':
        abort(405, "Only Post allowed.")
    try:
        param_dict = get_param_dict(request.json)
        if 'error' in param_dict:
            return jsonify(param_dict)
        cache_val = cache.get({param_dict['to']: param_dict['from']})
        if cache_val:
            return jsonify({
                'message': '',
                'error': 'sms from %s to %s blocked by STOP request' % (
                    param_dict['from'], param_dict['to'])
            })
        phone_number = models.PhoneNumber.query.filter(
            and_(models.PhoneNumber.number == param_dict['from'],
                 models.PhoneNumber.account_id == account.id)).first()
        if not phone_number:
            return jsonify({'message': '', 'error': 'from parameter not found'})
        from_val = cache.get(param_dict['from'])
        if from_val:
            from_val = cPickle.loads(from_val)
            if from_val['counter'] >= config.API_LIMIT:
                time_diff = (
                    datetime.utcnow() - from_val['timestamp']).total_seconds()
                if time_diff < config.COUNTER_RESET_TIME:
                    return jsonify({
                        'message': '',
                        'error': 'limit reached for from %s' % param_dict['from']
                    })
                else:
                    # Reset counter and timestamp
                    from_val['counter'] = 1
                    from_val['timestamp'] = datetime.utcnow()
                    cache.set(param_dict['from'], cPickle.dumps(from_val))
            else:
                # Increase counter
                from_val['counter'] += 1
                cache.set(param_dict['from'], cPickle.dumps(from_val))
        else:
            cache.set(
                param_dict['from'],
                cPickle.dumps({'counter': 1,
                               'timestamp': datetime.utcnow()}))
        return jsonify({'message': 'outbound sms ok', 'error': ''})
    except Exception as ex:
        return jsonify({'message': '', 'error': 'unknown failure'})
def getcachedthreads():
    newcollection = None
    cachedmessagesetids = cache.get('cachedmessagesetids')
    if cachedmessagesetids:
        for emailthreadid in cachedmessagesetids:
            cachedthread = cache.get(emailthreadid['id'])
            if cachedthread:
                parsedmessageset.append(cachedthread)
        newcollection = deepcopy(parsedmessageset)
        parsedmessageset[:] = []
    return newcollection
def last_good_data():
    try:
        r = retrieve_all_data()
    except Exception as exception:
        logging.error(exception)
        return cache.get('last-good-data')
    if r.status_code != 200:
        logging.error('Problem getting data. Check logs.')
        return cache.get('last-good-data')
    cache.set('last-good-data', r, timeout=0)
    cache.set('last-good-time', datetime.now(), timeout=0)
    return r
def download_csv_custom(self):
    query = get_database_readable(db)
    if cache.get("search_query") is not None:
        ids = cache.get("search_query")
        query = query.filter(Exposure.id.in_(ids))
    df = pd.DataFrame(query)
    response = make_response(df.to_csv())
    cd = 'attachment; filename=celltox_database.csv'
    response.headers['Content-Disposition'] = cd
    response.mimetype = 'text/csv'
    return response
def load_user(user_id):
    """Load user by ID from cache, if not in cache, then cache it."""
    # make a unique cache key for each user
    user = '******'.format(user_id)
    # check if the user_object is cached
    user_obj = pickle.loads(cache.get(user)) if cache.get(user) else None
    if user_obj is None:
        query = User.query.get(int(user_id))
        user_obj = pickle.dumps(query)
        cache.set(user, user_obj, timeout=3600)
        return query
    return user_obj
def index():
    if request.remote_addr != "127.0.0.1":
        return "UNAUTHORIZED ACCESS ATTEMPT REJECTED"
    if cache.get('rerun_setup'):
        return "Please restart the application"
    if not cache.get('ip_dict_valid'):
        flash("You need to set up your profile!")
        return render_template("indexFirst.html")
        return redirect(url_for('edit_profile'))
    ip_dict = cache.get('ip_dict')
    return render_template('index.html', ipDict=ip_dict)
def load_youtube():
    youtube_id = cache.get('youtube_id')
    next_page_token = cache.get('youtube_next_page_token')
    result = get_youtube.delay(youtube_id, next_page_token)
    youtube_res, youtube_next_page_token = result.get()
    cache.set('youtube_next_page_token', youtube_next_page_token)
    youtube_embeds = []
    for post in youtube_res:
        time_posted, embed = post
        youtube_embeds.append(embed)
    return jsonify({'youtube': youtube_embeds})
def get_access_token():
    token = cache.get(TOKEN_KEY)
    token_expired_at = cache.get(TOKEN_EXPIRED_AT_KEY)
    if token:
        return token, token_expired_at
    b = WechatBasic(
        appid=settings.APP_ID,
        appsecret=settings.SECRET)
    print 'get_access_token at:', datetime.datetime.now()
    d = b.get_access_token()
    token = d['access_token']
    expired_at = d['access_token_expires_at']
    cache.set(TOKEN_KEY, token, int(expired_at - time.time()) * 60)
    cache.set(TOKEN_EXPIRED_AT_KEY, expired_at, int(expired_at - time.time()) * 60)
    return token, expired_at
def download_xls_custom(self):
    query = get_database_readable(db)
    if cache.get("search_query") is not None:
        ids = cache.get("search_query")
        query = query.filter(Exposure.id.in_(ids))
    output = io.BytesIO()
    writer = pd.ExcelWriter(output)
    df = pd.DataFrame(query)
    df.to_excel(writer, 'Tab1')
    writer.close()
    response = make_response(output.getvalue())
    response.headers[
        'Content-Disposition'] = 'attachment; filename=celltox_database.xlsx'
    response.headers["Content-type"] = "text/csv"
    return response
def post(self):
    '''
    Submit a comment and clear the blog_id_{blog_id} cache.
    Token-based authentication is used; user_id is read from the cache.
    '''
    if 'manager_id' not in session:
        return {'status_code': 400, 'message': 'illegal request'}, 400
    manager_id = session['manager_id']
    parser = reqparse.RequestParser()
    parser.add_argument('token', type=str)
    parser.add_argument('blog_id', type=int)
    parser.add_argument('content', type=str)
    args = parser.parse_args()
    token = args['token']
    if not token:
        return {'status_code': 400, 'message': 'bad requests'}, 400
    blog_id = args['blog_id']
    content = args['content']
    cache.delete(('blog_id_%s' % blog_id))  # clear the blog cache
    user_message = cache.get(token)  # get the user info from the cache
    if user_message is None:
        return {'status_code': 400, 'message': '登陆已过期'}, 400
    user_id = user_message['id']
    post_time = cache.get('%s_comment_post' % user_id)
    if post_time:
        return {'status_code': 400, 'message': '请求频繁'}, 400
    else:
        cache.set('%s_comment_post' % user_id, 1, timeout=30)
    comment_message = addComment(blog_id=blog_id, user_id=user_id,
                                 content=content, manager_id=manager_id)
    if comment_message is not None:
        # delete the comment_list cache under this blog_id
        cache.delete('blog_id_%s_comment' % blog_id)
        return {
            'status_code': 200,
            'message': 'add successfully',
            'data': {
                'comment_message': comment_message
            }
        }
    else:
        return {
            'status_code': 400,
            'message': 'some error happened, please check log'
        }, 400
def _fetch_propositions(self, registered_id, year, callback):
    """Fetches the proposition data for a given year.

    If the requested year is not cached yet, it is downloaded again and
    appended to the data already saved.

    Args:
        registered_id (int): the politician's id
        year (int): requested year
        callback (function): function that returns the data.

    Returns:
        list: requested data
    """
    propositions_key = "{}-propositions".format(self.politician_data.id)
    saved_propositions = cache.get(propositions_key)
    filtered_propositions = dict()
    df = None
    if saved_propositions is None:
        df = callback(registered_id, year)
        update_cache_value(propositions_key, df)
    else:
        df = pd.DataFrame(saved_propositions, columns=self.PROP_DF_COLUMNS)
        years = df.year.tolist()
        if year not in years:
            df = pd.concat([df, callback(registered_id, year)])
            update_cache_value(propositions_key, df)
    filtered_propositions = df[df.year == year]
    return filtered_propositions
def create_token(punya_toko=False):
    if punya_toko:
        cachename = 'test-punya-toko-token'
        data = {'email': '*****@*****.**', 'password': '******'}
    else:
        cachename = 'test-token'
        data = {'email': '*****@*****.**', 'password': '******'}
    token = cache.get(cachename)
    if token is None:
        # prepare request input and do request
        req = call_client(request)
        res = req.post('/auth/login', json=data)
        # store response
        res_json = json.loads(res.data)
        logging.warning('RESULT : %s', res_json)
        # assert / compare with expected result
        assert res.status_code == 200
        # save token into cache
        cache.set(cachename, res_json['token'], timeout=60)
        # return, because it is useful for other tests
        return res_json['token']
    else:
        return token
def test_token_auth(self):
    # add user
    from app.models import User
    username = '******'
    password = '******'
    User.add_user(User(username=username, password=password, is_admin=False,
                       email='*****@*****.**', confirmed=True))

    # request without token
    response = self.client.get(url_for('api_1.echo'),
                               headers=self.get_api_headers('bad-token', ''))
    self.assertEqual(response.status_code, 401)

    # get token
    response = self.client.post(url_for('api_1.login'),
                                data=json.dumps({'username': username,
                                                 'password': password}),
                                content_type='application/json')
    json_response = json.loads(response.data)
    token = json_response['auth_token']
    self.assertIsNotNone(token)
    self.assertIsNotNone(response.headers[2])
    self.assertEqual(cache.get(json_response['auth_token']),
                     User.query.filter_by(username=username).first().id)

    # request with correct token
    response = self.client.get(url_for('api_1.echo'),
                               headers=self.get_api_headers(token, ''),
                               data=json.dumps({'data': 'echo test'}))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(json.loads(response.data)['data'], 'echo test')
def show_tech_json(title, page=1, pagesize=app.config['PAGE_SIZE']):
    result = cache.get('tech_' + title + '_website_' + str(page) + '_' + str(pagesize))
    if not result:
        websites = Website.query\
            .filter(Website.technologies.any(Technology.title == title))\
            .order_by(Website.frequency.desc())\
            .paginate(page, pagesize, True)
        websites.items = filter_website(websites.items)
        result = []
        for i in websites.items:
            techs = []
            for t in i.technologies:
                if t.title != 'Unknown':
                    techs.append({
                        'title': t.title,
                        'detail': t.detail
                    })
            result.append({
                'hostname': i.hostname,
                'port': i.port,
                'technologies': techs
            })
        cache.set('tech_' + title + '_website_' + str(page) + '_' + str(pagesize),
                  result, timeout=app.config['SQL_CACHE_TIMEOUT'])
    return jsonify(status='ok', page=page, pagesize=pagesize, websites=result)
def create_token(role):
    if role == "user":
        cachename = 'test-token-user'
        data = {
            'username': '******',
            'password': "******"
        }
    elif role == "agent":
        cachename = 'test-token-agent'
        data = {
            'username': '******',
            'password': "******"
        }
    elif role == "seller":
        cachename = 'test-token-seller'
        data = {
            'username': '******',
            'password': "******"
        }
    elif role == "ok":
        cachename = 'test-token-admin'
        data = {
            'username': '******',
            'password': "******"
        }
    token = cache.get(cachename)
    if token is not None:
        return token
    req = call_client(request)
    res = req.get('/login', query_string=data)
    resjson = json.loads(res.data)
    logging.warning('RESULT: %s', resjson)
    assert res.status_code == 200
    cache.set(cachename, resjson['token'], timeout=60)
    return resjson['token']
def request_access_token(self, use_cache=True):
    if use_cache:
        cache_token = cache.get(ease_mob_token_cache_key)
        if cache_token:
            return cache_token
    params = {
        "grant_type": "client_credentials",
        "client_id": self.client_id,
        "client_secret": self.client_secret
    }
    request = requests.post(self.generate_token_url(),
                            headers=self.request_header,
                            data=json.dumps(params))
    access_token = request.json().get("access_token", None)
    # log when the access_token request fails
    if not access_token:
        log.error(
            json.dumps({
                "params": params,
                "result": request.json(),
            }))
    if access_token:
        cache.set(ease_mob_token_cache_key, access_token, 60 * 60 * 24)
    return access_token
def create_token_noninternal():
    token = cache.get('test-token-noninternal')
    if token is None:
        # prepare request input
        data = {'client_key': 'CLIENT01', 'client_secret': 'SECRET01'}
        # do request
        req = call_client(request)
        res = req.get('/token', query_string=data)
        # store response
        res_json = json.loads(res.data)
        logging.warning('RESULT : %s', res_json)
        # assert / compare with expected result
        assert res.status_code == 200
        # save token into cache
        cache.set('test-token-noninternal', res_json['token'], timeout=60)
        # return, because it is useful for other tests
        return res_json['token']
    else:
        return token
def create_token_noninternal():
    token = cache.get('test-token-noninternal')
    if token is None:
        # prepare request input
        data = {'client_id': 'testusername', 'client_secret': 'testpassword'}
        # do request
        req = call_client(request)
        res = req.post('/login', data=json.dumps(data),
                       content_type='application/json')
        # store response
        res_json = json.loads(res.data)
        logging.warning('RESULT : %s', res_json)
        # assert / compare with expected result
        assert res.status_code == 200
        # save token into cache
        cache.set('test-token-noninternal', res_json['token'], timeout=60)
        # return, because it is useful for other tests
        return res_json['token']
    else:
        return token
def get_num_commits():
    num = cache.get('num_commits')
    if num is None:
        num = sum((len(requests.get(GITHUB_API % i).json())
                   for i in range(1, MAX_PAGES)))
        cache.set('num_commits', num, timeout=5 * 60)
        print "calculated"
    return num
def get_top_technology(category, order='DESC', limit=5):
    """Get the most frequently used technologies of a given category."""
    result = cache.get('top_' + category + '_item_' + order)
    if result is None:
        rs = db.session.execute('select technology.title, count(website.id) from website \
            left join website_tech on website_tech.website_id = website.id \
            left join technology on technology.id = website_tech.tech_id \
            and technology.title in (\
            select distinct technology.title from technology \
            where technology.category = \'' + category + '\' \
            and technology.title != \'Unknown\') \
            group by technology.title \
            order by count(website.id) ' + order + ' limit 1,' + str(limit))
        # add the total number of websites
        result = []
        for i in rs:
            if i[0] and i[0] != 'Unknown':
                result.append({
                    'title': i[0],
                    'total': i[1]
                })
        cache.set('top_' + category + '_item_' + order, result,
                  timeout=app.config['SQL_CACHE_TIMEOUT'])
    return result
def fetch_cached_data(args=None):
    """
    Retrieves a cache object when given an optional cache key. Because most
    cache keys within this app are URL dependent, the code which retrieves
    the cache has been refactored here to maximize consistency.

    :param cache_key: The identifier for the cache object. This must be unique
    :type cache_key: str
    :returns: A dictionary of JSON data
    :rtype: dict
    """
    cache_key = request.base_url
    if args:
        cache_key += args
    cache_key = sha224(cache_key).hexdigest()
    rv = cache.get(cache_key)
    # logcat(str(rv))
    if rv is not None:
        rv["meta"]["loaded_from_cache"] = True
    return rv
def createTokenUser():
    token = cache.get('token-user')
    if token is None:
        # prepare request input
        data = {'email': '*****@*****.**', 'password': '******'}
        # do request
        req = call_client(request)
        res = req.post('/v1/auth', data=json.dumps(data),
                       content_type='application/json')
        # store response
        res_json = json.loads(res.data)
        logging.warning('RESULT : %s', res_json)
        # assert / compare with expected result
        assert res.status_code == 200
        # save token into cache
        cache.set('token-user', res_json['token'], timeout=60)
        # return because it is useful for other tests
        return res_json['token']
    else:
        return token
def query_black_people(user_id, refresh=False):
    """
    Query the list of users blocked by me or who have blocked me.
    :param user_id: id of the user to query
    :param refresh: whether to refresh the cache
    """
    cache_key = cache_black_key + str(user_id)
    if not refresh:
        result = cache.get(cache_key)
        if result:
            return result
    black_model_list = UserBlackModel.query.filter_by(user_id=user_id, status=1).all()
    if not black_model_list:
        black_model_list = []
    black_id_list = array_column(black_model_list, "black_user_id")
    be_black_model_list = UserBlackModel.query.filter_by(black_user_id=user_id, status=1).all()
    if not be_black_model_list:
        be_black_model_list = []
    be_black_id_list = array_column(be_black_model_list, "user_id")
    id_list = list(set(black_id_list).union(set(be_black_id_list)))
    if id_list:
        cache.set(cache_key, id_list)
    return id_list
def show_tech_json(title, page=1, pagesize=app.config['PAGE_SIZE']):
    result = cache.get('tech_' + title + '_website_' + str(page) + '_' + str(pagesize))
    if not result:
        websites = Website.query\
            .filter(Website.technologies.any(Technology.title == title))\
            .order_by(Website.frequency.desc())\
            .paginate(page, pagesize, True)
        websites.items = filter_website(websites.items)
        result = []
        for i in websites.items:
            techs = []
            for t in i.technologies:
                if t.title != 'Unknown':
                    techs.append({'title': t.title, 'detail': t.detail})
            result.append({
                'hostname': i.hostname,
                'port': i.port,
                'technologies': techs
            })
        cache.set('tech_' + title + '_website_' + str(page) + '_' + str(pagesize),
                  result, timeout=app.config['SQL_CACHE_TIMEOUT'])
    return jsonify(status='ok', page=page, pagesize=pagesize, websites=result)
def get_top_technology(category, order='DESC', limit=5):
    """Get the most frequently used technologies of a given category."""
    result = cache.get('top_' + category + '_item_' + order)
    if result is None:
        rs = db.session.execute(
            'select technology.title, count(website.id) from website \
            left join website_tech on website_tech.website_id = website.id \
            left join technology on technology.id = website_tech.tech_id \
            and technology.title in (\
            select distinct technology.title from technology \
            where technology.category = \'' + category + '\' \
            and technology.title != \'Unknown\') \
            group by technology.title \
            order by count(website.id) ' + order + ' limit 1,' + str(limit))
        # add the total number of websites
        result = []
        for i in rs:
            if i[0] and i[0] != 'Unknown':
                result.append({'title': i[0], 'total': i[1]})
        cache.set('top_' + category + '_item_' + order, result,
                  timeout=app.config['SQL_CACHE_TIMEOUT'])
    return result
def cache_clear_forced():
    from app import cache
    val = cache.get('clear_cache')
    if val and cache.cache and cache.cache.clear:
        cache.cache.clear()
        return True
    return False
def get_coverart(self, album):
    path = "app/static/media/"
    filename = "%s.jpg" % album.id
    musicbrainzngs.set_useragent("python-musicplayer-flask", "0.1", "*****@*****.**")
    covers = cache.get('covers')
    if album.id in covers:
        return
    covers.append(album.id)
    cache.set("covers", covers)
    if not album.musicbrainz_albumid or album.coverimage:
        return
        #raise NameError('musicbrainz_albumid not set')
    try:
        data = musicbrainzngs.get_image_list(album.musicbrainz_albumid)
    except Exception as e:
        return e
    if len(data['images']) == 0:
        raise NameError('No images returned from service')
    urllib.request.urlretrieve(data['images'][0]['image'], "%s%s" % (path, filename))
    ci = open("%s%s" % (path, filename), 'rb')
    album.coverimage.put(ci, content_type='image/jpeg')
    return
def get(self):
    '''
    Get the user information.
    '''
    parser = reqparse.RequestParser()
    parser.add_argument('token', type=str)
    args = parser.parse_args()
    token = args['token']
    if not token:
        return {
            'status_code': 400,
            'message': ''
        }, 400
    user_message = cache.get(token)  # get the user info from the cache
    if user_message is None:
        return {
            'status_code': 400,
            'message': '登陆已过期'
        }, 400
    cache.set(token, user_message, timeout=10080)
    return {
        'status_code': 200,
        'message': '',
        'data': {
            'user_message': user_message
        }
    }
def create_token(role):
    if role == 'officer':
        cache_user = "******"
    elif role == 'surveyor':
        cache_user = "******"
    else:
        cache_user = "******"
    token = cache.get(cache_user)
    if token is None:
        # prepare request input
        if role == 'officer':
            data = {"nip": "P2001", "pin": "11223344"}
        elif role == 'surveyor':
            data = {"nip": "P2002", "pin": "11223344"}
        else:
            data = {"npwpd": "P1002", "pin": "11223344"}
        # do request
        req = call_user(request)
        res = req.post("/login/", json=data)
        # store response
        res_json = json.loads(res.data)
        logging.warning("RESULT: %s", res_json)
        # compare with expected result
        assert res.status_code == 200
        assert res_json["message"] == "Token is successfully created"
        # save token into cache
        cache.set(cache_user, res_json["token"], timeout=30)
        # return
        return res_json["token"]
    return token
def getData(self, collection_id):
    from app import cache
    cache_key = 'collection_' + str(collection_id)
    collection_data = cache.get(cache_key)
    if collection_data:
        return collection_data
    cursor = mysql.connect().cursor()
    cursor.execute(
        """SELECT c.*,
        (select group_concat(ci.item_id order by ci.sort_order asc separator ',')
            from collections_items ci where ci.collection_id = c.collection_id) as item_ids,
        (select group_concat(concat(cm.meta_key,":",cm.meta_value) separator '&')
            from collections_metadata cm where cm.collection_id = c.collection_id) as metadata
        FROM collections c WHERE c.collection_id = %s""",
        (collection_id, ))
    data = Utils.fetchOneAssoc(cursor)
    if data['metadata']:
        collections_metadata_raw = data['metadata']
        data['metadata'] = {}
        for props in collections_metadata_raw.split('&'):
            props_formatted = props.split(':')
            data['metadata'][props_formatted[0]] = props_formatted[1]
    if data['item_ids']:
        data['item_ids'] = [int(_) for _ in data['item_ids'].split(',')]
        data['items'] = Search().getById(data['item_ids'])
    else:
        data['items'] = []
    if not data:
        data = {}
    cache.set(cache_key, data)
    return data
def create_token_internal():
    token = cache.get('token-internal')
    if token is None:
        # prepare request input
        data = {
            'client_key': 'aul',
            'client_secret': '123'
        }
        # do request
        req = call_client(request)
        res = req.get('/login', query_string=data)  # like calling an external API (e.g. weather.io)
        # store response
        res_json = json.loads(res.data)
        logging.warning('RESULT : %s', res_json)
        # assert / compare with expected result
        assert res.status_code == 200
        # save token into cache
        cache.set('token-internal', res_json['token'], timeout=60)
        # return because it is useful for other tests
        return res_json['token']
    else:
        return token
def edit_profile():
    if request.remote_addr != "127.0.0.1":
        return "UNAUTHORIZED ACCESS ATTEMPT REJECTED"
    form = ProfileForm()
    if form.validate_on_submit():
        if cache.get('ip_dict_valid'):
            cache.set('rerun_setup', True)
        cache.set('ip_dict_valid', True)
        file = request.files['picture']
        file.save(os.path.join(basedir, "app/static/profile.jpg"))
        pickling = {}
        # Get form data here!
        pickling["name"] = form.name.data
        pickling["location"] = form.location.data
        pickling["organization"] = form.organization.data
        pickling["about"] = form.about.data
        pickling["project"] = form.project.data
        pickling["project_description"] = form.project_description.data
        pickle.dump(pickling, open('pickledUser.p', 'wb'))
        # form.picture.save(filename)
        return redirect(url_for('profile'))
    # Get cpickle stuff here
    return render_template('edit_profile.html', form=form)
def create_token_int():
    token = cache.get('test-token-int')  # so the token can be reused later
    if token is None:
        data = {
            'client_key': 'internal1',
            'client_secret': 'th1s1s1nt3rn4lcl13nt'
        }
        # do request
        req = call_client(request)
        res = req.get('/auth', query_string=data, content_type='application/json')
        # store response
        res_json = json.loads(res.data)
        logging.warning('RESULT : %s', res_json)
        # assert if the result is as expected
        assert res.status_code == 200
        # save token into cache (key must match the one used by cache.get above)
        cache.set('test-token-int', res_json['token'], timeout=60)
        return res_json['token']
    else:
        return token
def create_token(isinternal=True):
    if isinternal:
        cachename = 'test-internal-token'
        data = {'client_name': 'admin', 'client_password': '******'}
    else:
        cachename = 'test-noninternal-token'
        data = {'client_name': 'non-admin', 'client_password': '******'}
    token = cache.get(cachename)
    if token is None:
        # data = {
        #     'client_name': 'admin',
        #     'client_password': '******'
        # }
        req = app.test_client(request)
        res = req.get('/login', query_string=data)
        res_json = json.loads(res.data)
        logging.warning('RESULT: %s', res_json)
        if res.status_code == 200:
            assert res.status_code == 200
            cache.set(cachename, res_json['token'], timeout=60)
            return res_json['token']
        else:
            pass
    else:
        return token
def contact():
    from app import cache
    contact = request.form['text']
    if not contact:
        return "Please enter address and phone after /contact"
    book_id = cache.get(request.form['user_id'] + '_book_id')
    if not book_id:
        return "Please initiate the rent process first using /rent command"
    group = re.search(r'\d+$', contact)
    if group:
        phone = group.group(0)
        if len(phone) != 10:
            return "Please enter a 10 digit phone number after address"
        address = contact.replace(phone, '')
        if not address:
            return "Please enter your address"
    else:
        return "Please enter a 10 digit phone number after address"
    user_data = {
        'email': '',
        'phone': phone,
        'address': address,
        'book_id': book_id,
        'org': request.form['team_domain']
    }
    placeOrder(request.form, user_data)
    return "Placing your order now :grinning:. Download me from <https://play.google.com/store/apps/details?id=in.hasslefree.ostrichbooks&hl=en|play store> to track your order status"
def decorated_function(*args, **kwargs):
    cache_key = key % request.path
    rv = cache.get(cache_key)
    if rv is not None:
        return rv
    rv = f(*args, **kwargs)
    cache.set(cache_key, rv, timeout=timeout)
    return rv
def get_home_page(commits):
    page = cache.get('homepage')
    if page is None:
        users = User.objects()
        page = render_template("home.html", users=users, commits=commits)
        cache.set('homepage', page, timeout=60)
        print 'rendered homepage'
    return page
def wrapper(*args, **kwargs):
    key = 'memo' + request.path + str(request.form.values())
    key = key.translate(dict.fromkeys(range(33)))
    output = cache.get(key)
    if output is None:
        output = func(*args, **kwargs)
        cache.set(key, output, 3600)
    return output
def view(ip):
    cached = cache.get(ip + "page")
    if cached:
        return cached
    else:
        output = urllib2.urlopen("http://" + ip + ":1337/profile").read().replace(
            "^url_placeholder^", ip)
        cache.set(ip + "page", output)
        return output
def top_arts(update=False):
    key = "top"
    all_art = cache.get(key)
    if all_art is None or update:
        logging.error("DB QUERY")
        all_art = AsciiArt.query.order_by(AsciiArt.id.desc()).all()
        all_art = list(all_art)
        cache.set(key, all_art)
    return all_art
def top_users(update=False):
    key = "all_uses"
    all_users = cache.get(key)
    if all_users is None or update:
        logging.error("Users DB QUERY")
        all_users = Users.query.all()
        all_users = list(all_users)
        cache.set(key, all_users)
    return all_users
def test_message(message):
    ip_dict = cache.get('ip_dict')
    for key, value in ip_dict.iteritems():
        print 'sending'
        req = urllib2.Request("http://" + key + ":1337/catch_message")
        req.add_data(urllib.urlencode({'data': message['data']}))
        output = urllib2.urlopen(req)
    emit('my response', {'data': message['data']}, broadcast=True)
def send_script(uri):
    compiled = cache.get(uri)
    if compiled is None:
        try:
            s = Script.get(Script.uri == uri)
            compiled = s.compiled
        except Script.DoesNotExist:
            abort(404)
        cache.set(uri, compiled, timeout=CACHE_TIMEOUT)
    return Response(compiled, status=200, content_type='application/javascript')
def fortune():
    options = {
        'all': ('fortune_count', 'fortune_%d'),
        'off': ('offensive_fortune_count', 'offensive_fortune_%d'),
        'tame': ('tame_fortune_count', 'tame_fortune_%d')
    }
    intersection = set(request.values.keys()) & set(options.keys())
    if intersection:
        selection = intersection.pop()
    else:
        selection = 'tame'
    count, key = options[selection]
    fortune_number = random.randrange(0, cache.get(count))
    fortune = cache.get(key % fortune_number)
    return Response(fortune.strip(), mimetype="text/plain")
def request_user_slack(user_id):
    identifier = 'slack_id/%s' % user_id
    username = cache.get(identifier)
    if username is None:
        username = request_username_slack(user_id)
        if username is not None:
            cache.set(identifier, username)
    if username is not None:
        user = User.query.filter_by(username=username.lower()).first()
        return user
    return None
def wrapper(*args, **kwargs):
    lock_id = "celery-single-instance-" + func.__name__
    check_lock = lambda: cache.get(lock_id)
    acquire_lock = lambda: cache.set(lock_id, True)
    release_lock = lambda: cache.delete(lock_id)
    # only run when no other instance currently holds the lock
    # (the lambda must be called; testing the function object is always truthy)
    if not check_lock():
        acquire_lock()
        try:
            func(*args, **kwargs)
        finally:
            release_lock()
def introduce_reply(loc, name):
    ip_dict = cache.get('ip_dict')
    ip_dict[request.remote_addr] = {'name': name, 'location': loc}
    cache.set('ip_dict', ip_dict)
    try:
        pickled = pickle.load(open('pickledUser.p', 'rb'))
    except Exception:
        pickled = {'name': "error", 'location': "error"}
    infodict = {'name': pickled['name'], 'location': pickled['location']}
    return json.dumps(infodict)
def get(uid):
    cached = cache.get('web_user_{}'.format(uid))
    if cached:
        return cached
    else:
        user = User.where('uid', uid).get()[0]
        if user:
            cache.set('web_user_{}'.format(uid), user)
            return user
        else:
            return False
def get(cid):
    cached = cache.get("web_chat_{}".format(cid))
    if cached:
        return cached
    else:
        chat = Chat.where("cid", cid).get()[0]
        if chat:
            cache.set("web_chat_{}".format(cid), chat)
            return chat
        else:
            return False
def get_jsapi_ticket():
    ticket = cache.get(TICKET_KEY)
    expired_at = cache.get(TICKET_EXPIRED_AT_KEY)
    if ticket:
        return ticket, expired_at
    token, expired_at = get_access_token()
    b = WechatBasic(
        appid=settings.APP_ID,
        appsecret=settings.SECRET,
        access_token=token,
        access_token_expires_at=expired_at)
    print 'get_ticket at:', datetime.datetime.now()
    d = b.get_jsapi_ticket()
    ticket = d['jsapi_ticket']
    expired_at = d['jsapi_ticket_expires_at']
    cache.set(TICKET_KEY, ticket, int(expired_at - time.time()) * 60)
    cache.set(TICKET_EXPIRED_AT_KEY, expired_at, int(expired_at - time.time()) * 60)
    return ticket, expired_at
def _setup_keybase(self, username):
    if cache.exists("keybase_" + username):
        miss = False
        # Load it without autofetching.
        k = keybaseapi.User(username, autofetch=False)
        # JSON load the data from the cache.
        data = json.loads(cache.get("keybase_" + username).decode())
        # Load the raw keybase data in.
        k.raw_keybase_data = k._translate_into_configkey(data)
        # Map the data structure.
        k._map_data()
    else:
        miss = True
        # Load it with autofetching.
        k = keybaseapi.User(username)
        # JSON dump the key structure.
        data = json.dumps(k.raw_keybase_data.dump())
        # Set it on the cache and set it to expire in a day.
        # Note: StrictRedis uses name,time,value. Normal redis uses name,value,time.
        cache.setex("keybase_" + username, 60 * 60 * 24, data)
    self.api_keybase = k.raw_keybase_data.dump()
    # Second cache pass, check if it was verified.
    if miss:
        try:
            k.verify_proofs()
        except keybaseapi.VerificationError:
            verified = False
        else:
            verified = True
        # Set it on cache.
        cache.setex("keybase_" + username + "_ver", 60 * 60 * 24 * 3,
                    "1" if verified else "0")
    else:
        # Load it from cache.
        verified = bool(int(cache.get("keybase_" + username + "_ver")))
    self.keybase = (k, verified)
def login(provider_name='nyuad'):
    """
    Login handler, must accept both GET and POST to be able to use OpenID.
    """
    if g.user is not None and g.user.is_authenticated():
        return redirect(url_for('home'))

    # We need a response object for the WerkzeugAdapter.
    response = make_response()

    # Log the user in, pass it the adapter and the provider name.
    result = authomatic.login(WerkzeugAdapter(request, response), provider_name)

    # If there is no LoginResult object, the login procedure is still pending.
    if result:
        if result.user:
            # We need to update the user to get more info.
            result.user.update()

            # If Passport returns an error, the user is not a student, so
            # redirect them to the landing page.
            if hasattr(result.user, "error"):
                flash("Sorry, it seems that you are not a student, so you can't use NYUAD Coursereview.", "error")
                return redirect(url_for('landing'))

            # Check the user's groups; if none is authorized, redirect to landing.
            for gr in result.user.groups:
                if gr in AUTHORIZED_GROUPS:
                    authorized = True
                    break
            else:
                authorized = False
            if not authorized:
                flash("Sorry, it seems that you are not a student, so you can't use NYUAD Coursereview.", "error")
                return redirect(url_for('landing'))

            # Check if the user is in the database already.
            user = User.query.filter_by(net_id=result.user.NetID).first()
            if user is None:
                user = User(net_id=result.user.NetID)
                db.session.add(user)
                db.session.commit()
            login_user(user)
            flash("You were logged in successfully.", "success")
            # The rest happens inside the template.
            return redirect(cache.get('next_url') or url_for('home'))

    # Don't forget to return the response.
    return response