def edit_project():
    """Update an existing project owned by the authenticated organization.

    Reads the edited fields from the multipart form, optionally uploads a new
    project picture to S3 (or accepts a direct URL), commits the changes, and
    evicts every cached article that recommends this project so stale
    recommendations are not served.

    :param: JWT identity (organization email); form fields: project_id,
            project_name, project_short_description, project_description,
            project_goal, project_city, project_state, project_country,
            newspaper_id, optional project_picture_link (file or URL)
    :return: JSON string: "Success", "Project does not belong to this
             account", or "Initiative Does Not Exist".
    """
    organization_email = get_jwt_identity()
    project_id = request.form.get('project_id')
    # Check if already in database.
    project = Project.query.filter_by(project_id=project_id).first()
    if not project:
        return jsonify("Initiative Does Not Exist")
    if organization_email != project.organization_id:
        return jsonify("Project does not belong to this account")
    project.project_name = request.form.get('project_name')
    project.project_short_description = request.form.get('project_short_description')
    project.project_description = request.form.get('project_description')
    project.project_goal = request.form.get('project_goal')
    project.project_city = request.form.get('project_city')
    project.project_state = request.form.get('project_state')
    project.project_country = request.form.get('project_country')
    project.newspaper_id = request.form.get('newspaper_id')
    url = ''
    if 'project_picture_link' in request.files:
        # A new picture was uploaded: push it to S3 under a per-project name.
        f = request.files['project_picture_link']
        base, _, extension = secure_filename(f.filename).rpartition('.')
        # BUG FIX: the old code did filename_split[0] + id + filename_split[1],
        # producing e.g. "photo42jpg" (no dot before the extension) and
        # breaking on filenames containing more than one dot.
        filename = '{}{}.{}'.format(base, project_id, extension)
        s3.put_object(ACL='public-read', Bucket='newspark-charity-data',
                      Key=filename, Body=f)
        project.project_picture_link = 'https://newspark-charity-data.s3.amazonaws.com/' + filename
        url = 'https://newspark-charity-data.s3.amazonaws.com/' + filename
    elif 'project_picture_link' in request.form:
        # No upload, but a direct picture URL was supplied instead.
        project.project_picture_link = request.form.get('project_picture_link')
        url = request.form.get('project_picture_link')
    db.session.commit()
    # Remove the edited versions from the cache. Use the id from the DB row
    # (an integer) rather than interpolating the raw form string, so the
    # .format() interpolation cannot inject SQL.
    pid = int(project.project_id)
    sql_query = '''select article_link from articles where project_id1={} or project_id2={} or project_id3={} or project_id4={} or project_id5={} or project_id6={};'''.format(pid, pid, pid, pid, pid, pid)
    conn = db.engine.connect().connection
    df = pd.read_sql(sql_query, conn)
    conn.close()
    for article_link in df['article_link'].unique():
        if redis.exists(article_link):
            redis.delete(article_link)
    return jsonify("Success")
def get_app_data(socket, uuid, app_uuid, app_info, global_data):
    """Execute one workflow app, or reuse its briefly-cached result.

    When no "<app_uuid>_result" key exists in redis, the app is executed via
    ``execute``, its produced data is merged into ``global_data``, its output
    is cached for 3 seconds, and an execution log entry is streamed to the
    client via ``add_execute_logs``.

    :param socket: websocket handle passed through to ``add_execute_logs``
    :param uuid: workflow run identifier (for logging)
    :param app_uuid: identifier of the app node being executed
    :param app_info: dict with "app_dir" and "data" (incl. "node_name")
    :param global_data: mutable dict of shared workflow state (updated here)
    :return: (success, result); on a cache hit this is (True, decoded str)
    """
    key = app_uuid + "_result"
    if redis.exists(key) == 0:
        # No cached result: actually run the app.
        s, result = execute(
            app_dir=app_info["app_dir"], data=app_info["data"], global_data=global_data
        )
        if not s:
            # Execution failed; propagate the failure result unchanged.
            return s, result
        # Fold the app's produced data into the shared workflow state.
        global_data.update(result["data"])
        output = result["output"]
        # Cache the textual output briefly so immediate revisits skip re-execution.
        redis.set(key, output, ex=3)
        print("uuid : ", app_uuid)
        print("name : ", app_info["data"]["node_name"])
        print("result : ", output)
        print("===================================")
        add_execute_logs(
            socket=socket,
            uuid=uuid,
            app_uuid=app_uuid,
            app_name=app_info["data"]["node_name"],
            result=output,
        )
        return s, result
    else:
        # Cache hit: return only the decoded output string.
        # NOTE(review): this return shape (str) differs from the cold path
        # (full result dict) — confirm callers handle both.
        return True, redis.get(key).decode()
def get_team_id(sport, team):
    """Resolve a team name to its 1-based ID for the given sport.

    Looks the sport's team list up in redis, normalizes the requested name,
    and returns the single matching team's ID, or None when the list is
    missing or the match is not unique.
    """
    sport_token = "nfl" if "fb" == sport else sport
    cache_key = app.config["REDIS_KEY_TEAMS"].replace(
        app.config["REDIS_KEY_TOKEN_SPORT"], sport_token)
    # First, check if the redis key exists and if it doesn't, recreate
    # if not redis.exists(redis_key): teams_helper(sport)
    # If we can't find it now, then something is definitely wrong.
    raw = redis.get(cache_key)
    if not raw:
        return None
    payload = loads(raw)
    # Replace usual tokens which represent whitespace with an actual space.
    normalized = sub(r"(\+|_|-)", ' ', team)
    # Collect indices of every team whose name starts with the request.
    matches = [
        position
        for position, name in enumerate(payload["data"])
        if name.lower().startswith(normalized)
    ]
    # Ambiguous or empty matches both mean "not found".
    if len(matches) != 1:
        return None
    # Stored list is 0-based; public team IDs start at 1.
    return 1 + int(matches[0])
def init_job(key, first_page_func):
    """Seed the hash for a blog on first subscription, otherwise diff it."""
    if redis.exists(key):
        # Key already present: this blog was subscribed before; check for updates.
        check_update(key, first_page_func)
        return
    # First time we see this key: cache every entry of the blog's first page.
    for entry in first_page_func():
        redis.hset(key, entry['title'], entry['url'])
def get_content_list(url, key=None):
    """Return the non-empty lines of a page's text, preferring the redis cache.

    :param url: page to fetch when no cached text is available
    :param key: optional redis key holding previously cached text
    :return: list of lines, with blank lines and lone tabs dropped
    """
    if key and redis.exists(key):
        text = redis.get(key).decode()
    else:
        text = get_content_text(url)
    # Keep every line except empty strings and bare tabs.
    return [line for line in text.split('\n') if line != '' and line != '\t']
def is_user_exists(openid):
    """Return True if the user follows the official account (cache, then DB)."""
    cache_key = "wechatpy:user:" + openid
    if redis.exists(cache_key):
        return True
    # Cache miss: fall back to the database record.
    return WechatUser.query.filter_by(openid=openid).first() is not None
def addIPAddress(self, ip_address):
    """Record an IP address for this session, mirroring it into redis."""
    self._data['ip_addresses'].add(ip_address)
    redis_key = f'session::{self.id}::ip_addresses'
    # Remember whether the set existed *before* sadd implicitly creates it.
    is_new_key = redis.exists(redis_key) == 0
    redis.sadd(redis_key, ip_address)
    if is_new_key:
        # Freshly created key: let it expire a while after the session ends.
        redis.expire(redis_key, self.timeToEnd().seconds + (500))
def test_getCharities1(self):
    """Recommendations for an uncached article are computed, cached, and stable."""
    article = 'https://www.idsnews.com/article/2020/06/wildlife-to-look-for-in-monroe-county'
    # Start from a clean slate: drop any stale DB row for this article.
    curr_article = Article.query.filter_by(article_link=article).first()
    if curr_article is not None:
        db.session.delete(curr_article)
        db.session.commit()
    response1 = test_app.post('/get_charities', data=dict(
        article_link=article,
        article_title='',
        article_date='',
        # BUG FIX: this form key was misspelled 'articel_text', so the
        # endpoint never received an article_text value on the first request.
        article_text='',
    ))
    result1 = response1.get_json()
    self.assertEqual(response1.status_code, 200)
    self.assertEqual(len(result1), num_choices)
    self.assertTrue(redis.exists(article))
    # Second request should be served from the cache with identical results.
    response2 = test_app.post('/get_charities', data=dict(
        article_link=article,
        article_title='',
        article_date='',
        article_text='',
    ))
    result2 = response2.get_json()
    # BUG FIX: these assertions previously re-checked response1/result1,
    # leaving the second response completely unverified.
    self.assertEqual(response2.status_code, 200)
    self.assertEqual(len(result2), num_choices)
    self.assertEqual(result1, result2)
    self.assertTrue(redis.exists(article))
    # Delete the article and clean both the cache and the database.
    curr_article = Article.query.filter_by(article_link=article).first()
    redis.delete(curr_article.article_link)
    db.session.delete(curr_article)
    db.session.commit()
def test_getCharities2(self):
    """Cached recommendations are stable even when request metadata differs."""
    article = 'https://www.idsnews.com/article/2020/06/former-iu-mens-soccer-all-american-ken-snow-dies-at-50'
    # Start from a clean slate: drop any stale DB row for this article.
    curr_article = Article.query.filter_by(article_link=article).first()
    if curr_article is not None:
        db.session.delete(curr_article)
        db.session.commit()
    response1 = test_app.post('/get_charities', data=dict(
        article_link=article,
        article_title='blah blah',
        article_date='Jun 29 2020',
        article_text='here is some text',
    ))
    result1 = response1.get_json()
    self.assertEqual(response1.status_code, 200)
    self.assertEqual(len(result1), num_choices)
    self.assertTrue(redis.exists(article))
    # The second request hits the cache, so the (slightly different)
    # metadata must not change the response.
    response2 = test_app.post('/get_charities', data=dict(
        article_link=article,
        article_title='blah bah',
        article_date='Jun 23 2020',
        article_text='here is some text',
    ))
    result2 = response2.get_json()
    # BUG FIX: these assertions previously re-checked response1/result1,
    # leaving the second response completely unverified.
    self.assertEqual(response2.status_code, 200)
    self.assertEqual(len(result2), num_choices)
    self.assertEqual(result1, result2)
    self.assertTrue(redis.exists(article))
    # Delete the article and clean both the cache and the database.
    curr_article = Article.query.filter_by(article_link=article).first()
    redis.delete(curr_article.article_link)
    db.session.delete(curr_article)
    db.session.commit()
def test_default_behaviour(self):
    """First visit greets anonymously; repeat visits count; forgetting resets."""
    resp = self.app.get(self.home_url)
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp.data, 'Hello, visitor!')
    # A second visit includes the view counter in the greeting.
    resp = self.app.get(self.home_url)
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(
        resp.data, 'Hello, visitor!\nThis page viewed 2 time(s).'
    )
    self.assertTrue(redis.exists(app.config['COUNTER_KEY']))
    # "Forget us" redirects home and clears the counter key.
    resp = self.app.get(self.forget_us_url)
    self.assertEqual(resp.status_code, 302)
    self.assertTrue(resp.headers['Location'].endswith(self.home_url))
    self.assertFalse(redis.exists(app.config['COUNTER_KEY']))
    # After forgetting, we are greeted like a brand new visitor again.
    resp = self.app.get(self.home_url)
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp.data, 'Hello, visitor!')
def addPost(self, post):
    """Append a post to this session's post list and mirror it into redis."""
    if self.isEnded():
        raise Exception('This session has ended.')
    self._data['posts'].append(post)
    redis_key = f'session::{self.id}::posts'
    # Remember whether the list existed *before* lpush implicitly creates it.
    is_new_key = redis.exists(redis_key) == 0
    redis.lpush(redis_key, post)
    if is_new_key:
        # Freshly created key: let it expire a while after the session ends.
        redis.expire(redis_key, self.timeToEnd().seconds + (500))
def server_enable(service, resource):
    """Re-enable a disabled resource by clearing its busy flag.

    Aborts with 403 when the caller lacks edit_config for the pool entity,
    with 400 when the resource is unknown or was not disabled.
    """
    pool_entity = service[0:2].upper()
    if not has_ability(flask.g, "edit_config", pool_entity):
        message = "insufficient credentials for edit_config (entity %s)" % pool_entity
        abort(make_response(jsonify(message=message), 403))
    service_module = get_service(service)
    if resource not in service_module.list_resources():
        message = "unknown resource '%s' in '%s'" % (resource, service)
        abort(make_response(jsonify(message=message), 400))
    busy_key = "busy:%s:%s" % (service, resource)
    # Only a currently-disabled (busy) resource can be re-enabled.
    if not redis.exists(busy_key):
        abort(flask.make_response(flask.jsonify(message="resource was not disabled"), 400))
    redis.delete(busy_key)
    return flask.jsonify("ok")
def find_next_apps(edges, next_app=None):
    """Pick an outgoing edge of ``next_app``, skipping edges taken earlier.

    The "<next_app>_sum" redis counter (maintained by the caller) records how
    many times the node has been visited; that many matching edges are
    skipped so each visit follows the next edge in declaration order.

    :param edges: list of edge dicts with "label", "source" and "target"
    :param next_app: uuid of the current node, or None/""
    :return: (label, source, target) of the chosen edge, or None implicitly
             when no edge qualifies.
    """
    num = 0
    for r in edges:
        if next_app:
            key = next_app + "_sum"
            if redis.exists(key) == 1:
                sum = redis.get(key)
            # NOTE(review): if the counter key is absent, ``sum`` is never
            # bound and int(sum) below raises NameError — confirm the caller
            # always sets the key before calling.
            if str(r["source"]) == next_app:
                if num != int(sum):
                    # Skip an edge that was already taken on an earlier visit.
                    num = num + 1
                else:
                    return r["label"], r["source"], r["target"]
        else:
            # NOTE(review): with a falsy next_app this comparison can only
            # match sources equal to None/"" — likely dead code; verify.
            if str(r["source"]) == next_app:
                return r["label"], r["source"], r["target"]
def test_add_project_to_article(self):
    """Adding a project back to an article succeeds and warms the cache."""
    owner = Owner.query.all()[1]
    headers = auth_token_helper(owner.username)
    publisher_name = Owning.query.filter_by(owner_id=owner.username).first().publisher_id
    article = Article.query.filter_by(publisher_id=publisher_name).first()
    article_link = article.article_link
    # Remember the original slots so the article can be restored afterwards.
    old_ids = [article.project_id1, article.project_id2, article.project_id3,
               article.project_id4, article.project_id5, article.project_id6]
    project_id = article.project_id1  # assumes project id is not None
    # Vacate the first slot so the endpoint has something to re-insert.
    article.project_id1 = None
    db.session.commit()
    response = test_app.post(
        '/add_project_to_article',
        headers=headers,
        data=dict(
            article_link=article_link, project_id=project_id
        )
    )
    updated_article = Article.query.filter_by(article_link=article_link).first()
    ids = [updated_article.project_id1, updated_article.project_id2, updated_article.project_id3,
           updated_article.project_id4, updated_article.project_id5, updated_article.project_id6]
    result = response.get_json()
    self.assertEqual(response.status_code, 200)
    self.assertEqual(result, 'Success')
    # The project must have been re-inserted into one of the six slots.
    self.assertTrue(project_id in ids)
    # The endpoint is expected to refresh the article's cache entry.
    self.assertTrue(redis.exists(article_link))
    # Revert back to original state
    updated_article.project_id1 = old_ids[0]
    updated_article.project_id2 = old_ids[1]
    updated_article.project_id3 = old_ids[2]
    updated_article.project_id4 = old_ids[3]
    updated_article.project_id5 = old_ids[4]
    updated_article.project_id6 = old_ids[5]
    redis.delete(article_link)
    db.session.commit()
def before():
    """Stamp the request start time, capture the payload, and gate tokened POSTs."""
    g.start_time = time.time()
    if request.content_type == 'application/json;charset=UTF-8':
        g.payload = request.json
    else:
        # Non-JSON requests: serialize the flattened form/query values.
        form_values = dict(request.values.to_dict().items())
        g.payload = json.dumps(form_values, ensure_ascii=False)
    # Token check applies only to POSTs on protected api/v1 paths.
    if "api/v1" in request.path and request.path not in no_path:
        if request.method == "POST":
            token = request.headers.get("token")
            from app import redis
            if redis.exists(token) == 0:
                return Response.code(ErrToken)
def change_widget_status():
    """Toggle the widget on/off for an article.

    :param: JWT token for an owner; form field 'article_link'
    :return: JSON string: "Success", "Article Not Found", or
             "Account Not Found"
    """
    username = get_jwt_identity()
    owner = Owner.query.filter_by(username=username).first()
    if not owner:
        return jsonify("Account Not Found")
    article_link = request.form.get('article_link')
    article = Article.query.filter_by(article_link=article_link).first()
    if not article:
        return jsonify("Article Not Found")
    # If the widget is currently off (about to be turned on), drop any stale
    # cached recommendations for this article.
    if not article.widget_status and redis.exists(article.article_link):
        redis.delete(article.article_link)
    # Track who performed the edit; the admin account acts for newspark.
    if username == 'admin':
        article.edited_by_newspark = True
    else:
        article.edited_by_publisher = True
    article.widget_status = not article.widget_status
    db.session.commit()
    return jsonify("Success")
def run_game(game_id):
    """Serve the appropriate page for a game: setup form, player view, or TV view.

    Players are tracked as redis keys matching "<game_id>|*"; a game holds at
    most four of them. Submitting the setup form stores the nickname/handle
    in the session, seeds the game's impact map on first join, and redirects
    back into the game.
    """
    tv_mode = request.args.get('mode') == "tv"
    key = "{}|*".format(game_id)
    keys = redis.keys(key)
    if len(keys) >= 4:
        # Room is capped at four players.
        flash("This game is full, start a new one", "errors")
        return redirect(url_for('main.index'))
    form = GameForm()
    if form.validate_on_submit():
        session['nickname'] = form.data["nickname"]
        session['handle'] = form.data["handle"]
        if not redis.exists(form.data["game_id"]):
            # First join of this game: create and persist a fresh impact map.
            impact_map = ImpactMap()
            impact_map.initialize()
            redis.set(form.data["game_id"], json.dumps(impact_map.impact_map))
        return redirect("/{}".format(game_id))
    if tv_mode:
        # Spectator display: no session state required.
        return render_template(
            'tv.html', game_id=game_id
        )
    if 'nickname' in session and len(keys) > 0:
        # Known player in an active game: load the starter code template.
        try:
            with open("initial-code.txt") as f:
                initial_code = f.read()
        except IOError:
            # Fall back to a minimal JS stub when the template file is absent.
            initial_code = "function main() {" + os.linesep + "}"
        return render_template(
            'game.html', game_id=game_id, nickname=session["nickname"],
            handle=session["handle"], initial_code=initial_code
        )
    else:
        return render_template('setup.html', game_id=game_id, form=form)
def addEmail(self, email):
    """Register an address to receive the final results of this session."""
    # Don't allow modifications to data after the session has ended.
    if self.isEnded():
        raise Exception('This session has ended.')
    # Addresses are matched case-insensitively, so store them lowercased.
    normalized = email.lower()
    self._data['emails'].add(normalized)
    redis_key = f'session::{self.id}::emails'
    # Remember whether the set existed *before* sadd implicitly creates it.
    is_new_key = redis.exists(redis_key) == 0
    redis.sadd(redis_key, normalized)
    if is_new_key:
        # Freshly created key: let it expire a while after the session ends.
        redis.expire(redis_key, self.timeToEnd().seconds + (500))
def test_editProject(self):
    """Editing a project succeeds, persists the changes, and purges caches."""
    user = Organization.query.first()
    headers = auth_token_helper(user.email)
    project = Project.query.filter_by(organization_id=user.email).first()
    # Snapshot the project so it can be restored at the end of the test.
    project_data = project.serialize()
    # Check if the cache was updated properly: collect every article that
    # recommends this project in any of its six slots.
    sql_query = '''select article_link from articles where project_id1={} or project_id2={} or project_id3={} or project_id4={} or project_id5={} or project_id6={};'''.format(project.project_id, project.project_id, project.project_id, project.project_id, project.project_id, project.project_id)
    conn = db.engine.connect().connection
    df = pd.read_sql(sql_query, conn)
    conn.close()
    unique_articles = list(df['article_link'].unique())
    data = dict(
        project_id=project.project_id,
        project_name="Diff Donate",
        project_description="We need money",
        project_goal=10000,
        project_city="Briarcliff Manor",
        project_state="New York",
        project_country="United States",
        newspaper_id="The Emory Wheel",
        project_short_description="YADDA YADDA YADDA"
    )
    # Attach a picture upload to exercise the S3 branch of /edit_project.
    my_file = FileStorage(
        stream=open('karthik_headshot.jpg', "rb"),
        filename="karthik.jpg",
        content_type="jpeg",
    )
    data['project_picture_link'] = my_file
    response = test_app.post(
        '/edit_project', headers=headers, data=data, content_type='multipart/form-data'
    )
    result = response.get_json()
    self.assertEqual(response.status_code, 200)
    self.assertEqual(result, "Success")
    # Make sure the correct project was edited.
    new_project = Project.query.filter_by(project_name="Diff Donate").first()
    self.assertEqual(new_project.project_name, "Diff Donate")
    self.assertEqual(new_project.project_goal, 10000)
    self.assertEqual(new_project.newspaper_id, "The Emory Wheel")
    self.assertEqual(new_project.project_city, "Briarcliff Manor")
    # Check the redis cache: every affected article entry must be evicted.
    for i in unique_articles:
        self.assertFalse(redis.exists(i))
    # Change Project Back.
    project.project_name = project_data['project_name']
    project.project_description = project_data['project_description']
    project.project_goal = project_data['project_goal']
    project.project_city = project_data['project_city']
    project.project_state = project_data['project_state']
    project.project_country = project_data['project_country']
    project.newspaper_id = project_data['newspaper_id']
    db.session.commit()
def get_charities():
    """
    Gets the recommended projects for an article.

    :param: request.form params: article link
    :return: Info for each recommended project.
    """
    # NOTE(review): this early return short-circuits the entire endpoint so
    # it always answers with an empty list — everything below is unreachable.
    # Presumably a deliberate kill switch; confirm before removing.
    return jsonify([])
    charity_amount = application.config['CHARITY_AMOUNT']
    num_choices = application.config['NUM_CHOICES']
    # Normalize the URL: strip query string and fragment.
    article_url = request.form.get('article_link').split('?')[0]
    article_url = article_url.split('#')[0]
    if redis.exists(article_url):
        # Get from the redis cache
        charity_match = pickle.loads(redis.get(article_url))
        return jsonify(charity_match)
    else:
        print('checking article')
        # Check the article
        article = Article.query.filter_by(article_link=article_url).first()
        # article exists
        if article:
            print('article exists')
            widget_status = article.widget_status
            print(widget_status)
            if widget_status:
                # get number of ids that are not null and assume nulls follow consecutively
                project_ids = article.get_project_ids()
                if None in project_ids:
                    num_ids = project_ids.index(None)
                else:
                    num_ids = charity_amount
                project_info_list = get_projects_from_article(
                    article_url=article_url, num_ids=num_ids)
                # Save to redis cache
                redis.set(article_url, pickle.dumps(project_info_list))
                return jsonify(project_info_list)
            else:
                # Widget disabled for this article: recommend nothing.
                return jsonify([])
        else:
            # Unknown article: gather its metadata and match projects.
            widget_status = True
            article_title = request.form.get('article_title')
            # testing client on local host
            if article_title == '':
                article_info = indiana_scraper(article_url)
                article_title = article_info['title']
                article_date_time = article_info['date']
            else:
                article_date_time = datetime.strptime(
                    request.form.get('article_date'), "%b %d %Y")
            article_text = request.form.get('article_text').replace(
                u'\xa0', u'')
            # store article data in a json file and upload to a s3 bucket
            save_article_data(s3_client=s3, article_link=article_url, article_title=article_title,
                              article_date_time=article_date_time, article_text=article_text)
            # turn off widget for too old articles
            date_cut_off = datetime.strptime('Jun 28 2020', '%b %d %Y')
            if article_date_time < date_cut_off:
                widget_status = False
            # select projects that are not removed and are from verified charities
            projects_df = get_available_projects()
            project_ids = [None for i in range(charity_amount)]
            matching_project_ids = full_json_with_matching(
                projects_df, article_title, num_choices)
            # relevant project ids
            for i in range(len(matching_project_ids)):
                project_ids[i] = matching_project_ids[i]
            # all_project_ids = list(projects_df['project_id'])
            # project_ids = [None for i in range(charity_amount)]
            # random_project_ids = random.sample(all_project_ids, num_choices)
            # for i in range(len(random_project_ids)):
            #     project_ids[i] = random_project_ids[i]
            # Add to database
            article = Article(
                article_link=article_url,
                article_title=article_title,
                publisher_id="Indiana Daily Student",
                widget_status=widget_status,
                date_published=article_date_time,
                fund_name=None,
                project_id1=project_ids[0],
                project_id2=project_ids[1],
                project_id3=project_ids[2],
                project_id4=project_ids[3],
                project_id5=project_ids[4],
                project_id6=project_ids[5],
                edited_by_publisher=False,
                edited_by_newspark=False,
            )
            db.session.add(article)
            db.session.commit()
            if widget_status:
                project_info_list = get_projects_from_article(
                    article_url=article_url, num_ids=num_choices)
                # Save to redis cache
                redis.set(article_url, pickle.dumps(project_info_list))
                return jsonify(project_info_list)
            else:
                return jsonify([])
def get_redis_string(key):
    """Return the decoded string stored at ``key``, or None if absent."""
    if not redis.exists(key):
        return None
    return redis.get(key).decode()
def chessboard(game_id):
    """Render the chessboard page for a game, creating the game if needed.

    The game's PGN is stored in redis under ``game_id``. The first visitor
    becomes White; the next distinct ``pc_id`` takes the Black seat; further
    unknown players are rejected once both seats are filled.

    :param game_id: identifier of the game (used as the redis key)
    :return: rendered chessboard page, or a short error string
    """
    current_app.logger.error("chessboard, game_ida: " + str(game_id))
    pc_id = request.args.get('pc_id')
    if not pc_id:
        return "No pc"
    # Existing game
    if redis.exists(game_id):
        current_app.logger.error(" Existing game")
        chess_game = load_game(game_id)
        current_app.logger.error(str(chess_game))
        # if new player
        if not get_player_color(chess_game, pc_id):
            current_app.logger.error(" New player")
            # Give the newcomer the Black seat if it is still open.
            if chess_game.headers["Black"] == "?":
                chess_game.headers["Black"] = str(pc_id)
            # if game full
            else:
                return "Game full"
        color = get_player_color(chess_game, pc_id)
        current_app.logger.error(
            str(" Player {} is color {}").format(str(pc_id), str(color)))
    # New game
    else:
        chess_game = chess.pgn.Game()
        current_app.logger.error(" New game")
        chess_game.headers["White"] = str(pc_id)
        chess_game.headers["Event"] = str(game_id)
        color = "White"
        current_app.logger.error(
            str(" Player {} is color {}").format(str(pc_id), str(color)))
        redis.set(game_id, str(chess_game))
    # Replay the mainline to reconstruct the current board position.
    board = chess_game.board()
    for move in chess_game.mainline_moves():
        board.push(move)
    current_app.logger.error(str(board.fen()))
    turn = "White" if board.turn else "Black"
    if board.is_checkmate() or board.is_stalemate():
        # BUG FIX: these branches were two independent `if` statements, so a
        # "1-0" result fell through to the second statement's `else` and the
        # White winner was overwritten with "". Use if/elif/else instead.
        if board.result() == "1-0":
            winner = chess_game.headers["White"]
        elif board.result() == "0-1":
            winner = chess_game.headers["Black"]
        else:
            winner = ""
        return render_template('chessboard.html', color=color,
                               white=chess_game.headers["White"],
                               black=chess_game.headers["Black"],
                               game_id=game_id, pgn=str(chess_game),
                               fen=str(board.fen()), turn=turn,
                               game_over=True, winner=winner)
    else:
        return render_template('chessboard.html', color=color,
                               white=chess_game.headers["White"],
                               black=chess_game.headers["Black"],
                               game_id=game_id, pgn=str(chess_game),
                               fen=str(board.fen()), turn=turn,
                               game_over=False, winner=None)
def get_fund():
    """Return the fund recommended for an article, caching the result in redis.

    Checks the cache first, then the database; for articles without a fund it
    derives the text (from the form or by scraping), picks the best matching
    fund, stores/updates the article row, caches the fund, and returns it.

    NOTE(review): when the article exists with a fund but its widget is off,
    no branch returns, so Flask receives None — confirm this is intended.
    """
    article_url = request.form.get('article_link')
    if redis.exists(article_url):
        # Get from the redis cache
        fund_match = pickle.loads(redis.get(article_url))
        return jsonify(fund=fund_match)
    else:
        print('checking article')
        # Check the article
        article = Article.query.filter_by(article_link=article_url).first()
        # article exists
        if article and article.fund_name is not None:
            print('article exists')
            widget_status = article.widget_status
            print(widget_status)
            if widget_status:
                fund = Fund.query.filter_by(
                    fund_name=article.fund_name).first()
                # Save to redis cache
                redis.set(article_url, pickle.dumps(fund.serialize()))
                return jsonify(fund=fund.serialize())
        else:
            article_title = request.form.get('article_title')
            article_text = request.form.get('article_text')
            # Infer the publisher from the article URL.
            if 'emorywheel' in article_url:
                publisher_id = 'The Emory Wheel'
            else:
                publisher_id = 'Indiana Daily Student'
            if article_title is None:
                # No metadata supplied: scrape it from the publisher's site.
                if publisher_id == 'The Emory Wheel':
                    article_info = emory_scraper(article_url)
                else:
                    article_info = indiana_scraper(article_url)
                article_title = article_info['title']
                article_text = article_info['content']
                # Only the first half of the text is used for matching.
                article_text = article_text[:len(article_text) // 2]
            else:
                article_title = article_title.lower()
                article_text = article_text.lower()[:len(article_text) // 2]
            # store article data in a json file and upload to a s3 bucket
            # save_article_data(s3_client=s3, article_link=article_url, article_title=article_title,
            #                   article_date_time=datetime.now(), article_text=article_text)
            fund = get_best_fund(article_text)
            # fund name is null
            if article:
                article.fund_name = fund.fund_name
            else:
                # Add to database
                article = Article(
                    article_link=article_url,
                    article_title=article_title,
                    publisher_id=publisher_id,
                    widget_status=True,
                    date_published=datetime.now(),
                    fund_name=fund.fund_name,
                    project_id1=None,
                    project_id2=None,
                    project_id3=None,
                    project_id4=None,
                    project_id5=None,
                    project_id6=None,
                    edited_by_publisher=False,
                    edited_by_newspark=False,
                )
                db.session.add(article)
            db.session.commit()
            # Cache the chosen fund for subsequent requests.
            redis.set(article_url, pickle.dumps(fund.serialize()))
            return jsonify(fund=fund.serialize())
def remove_project_from_article():
    """Remove a project from an article's recommendation slots.

    :param: JWT token for an owner; form fields: article link, project id
    :return: JSON string: "Success", "Article does not exist.", or
             "Publisher does not exist."
    """
    username = get_jwt_identity()
    exists = Owner.query.filter_by(username=username).first()
    if exists:
        article_link = request.form.get('article_link')
        project_id = int(request.form.get('project_id'))
        article = Article.query.filter_by(article_link=article_link).first()
        if article:
            ids = article.get_project_ids()
            # Blank out every slot holding the removed project id.
            for i in range(len(ids)):
                if ids[i] == project_id:
                    ids[i] = None
            ids = sorted(ids, key=lambda x: x is None)  # move None's to back
            article.project_id1 = ids[0]
            article.project_id2 = ids[1]
            article.project_id3 = ids[2]
            article.project_id4 = ids[3]
            article.project_id5 = ids[4]
            article.project_id6 = ids[5]
            # Track who performed the edit; the admin account acts for newspark.
            if username == 'admin':
                article.edited_by_newspark = True
            else:
                article.edited_by_publisher = True
            db.session.commit()
            # Candidate replacement projects: live projects of verified orgs.
            sql_query = '''select project_id from projects, organizations where projects.organization_id=organizations.email and projects.removed=FALSE and organizations.verified=TRUE;'''
            conn = db.engine.connect().connection
            # NOTE(review): this raw connection is never closed (cf. the
            # edit-project flow, which calls conn.close()) — verify.
            df = pd.read_sql(sql_query, conn)
            other_ids = list(filter(lambda x: not (x in ids), list(df['project_id'])))
            # store recommendations in a json file and upload to a s3 bucket
            update_recommendations(s3_client=s3, article_link=article_link,
                                   ids=ids, other_ids=other_ids)
            # Re-run the commands to get the right data for the articles in the cache
            # TODO: set up a celery worker to do all of this
            if redis.exists(article_link):
                redis.delete(article_link)
            project_info_list = get_projects_from_article(article_url=article_link,
                                                          num_ids=application.config['NUM_CHOICES'])
            # Save to redis cache
            redis.set(article_link, pickle.dumps(project_info_list))
            return jsonify("Success")
        else:
            return jsonify("Article does not exist.")
    else:
        return jsonify("Publisher does not exist.")
def run_exec(socket, uuid):
    """Execute the workflow identified by ``uuid``, streaming logs to ``socket``.

    Loads the stored flow graph, walks it edge by edge from the start app,
    executes each node via ``get_app_data``, follows conditional edges by
    comparing their label to the node's result, and stops when the end app is
    reached or no executable edge remains.
    """
    workflow_info = (
        db.table("zbn_workflow")
        .select("uuid", "name", "start_app", "end_app", "flow_json", "flow_data")
        .where("uuid", uuid)
        .first()
    )
    if workflow_info:
        start_app = workflow_info["start_app"]
        end_app = workflow_info["end_app"]
        flow_json = json.loads(workflow_info["flow_json"])
        flow_data = json.loads(workflow_info["flow_data"])
        # for r in flow_json["edges"]:
        #     print(r["label"], r["source"], r["target"])
        # Shared state handed from app to app during this run.
        global_data = {}
        target_app = find_start_app(edges=flow_json["edges"], start_app=start_app)
        add_execute_logs(
            socket=socket, uuid=uuid, app_uuid=start_app, app_name="开始", result="剧本开始执行"
        )
        is_while = True
        while is_while:
            try:
                # Fetch the current app's outgoing edge data.
                if_else, source_app, next_app = find_next_app(
                    edges=flow_json["edges"], next_app=target_app
                )
            except Exception:
                # No executable edge remains for this playbook.
                add_execute_logs(
                    socket=socket,
                    uuid=uuid,
                    app_uuid="",
                    app_name="",
                    result="当前剧本不具有可执行条件",
                )
                is_while = False
                break
            # Count visits to this node (used to cycle through its edges);
            # the counter expires after 3 seconds.
            key = target_app + "_sum"
            if redis.exists(key) == 1:
                sum = redis.get(key)
                redis.set(key, int(sum) + 1, ex=3)
            else:
                redis.set(key, 1, ex=3)
            # Execution data of the current app.
            source_info = flow_data[source_app]
            # print(source_app)
            s, ifelse_result = get_app_data(
                socket=socket,
                uuid=uuid,
                app_uuid=source_app,
                app_info=source_info,
                global_data=global_data,
            )
            if not s:
                # App failed: log the error and terminate the run.
                add_execute_logs(
                    socket=socket,
                    uuid=uuid,
                    app_uuid=end_app,
                    app_name=flow_data.get(source_app).get("name"),
                    result="执行错误:{}".format(ifelse_result),
                )
                add_execute_logs(
                    socket=socket,
                    uuid=uuid,
                    app_uuid=end_app,
                    app_name="结束",
                    result="剧本执行结束",
                )
                is_while = False
            if if_else != "":
                # Conditional edge: only advance when the edge label matches
                # the app's result; otherwise stay and try the next edge.
                if if_else == ifelse_result:
                    target_app = next_app
            else:
                # Unconditional edge: always advance.
                target_app = next_app
            if next_app == end_app:
                add_execute_logs(
                    socket=socket,
                    uuid=uuid,
                    app_uuid=end_app,
                    app_name="结束",
                    result="剧本执行结束",
                )
                is_while = False
def teams_helper(sport=None):
    """
    Generic helper function to scrape scoring data from STATS's JavaScript
    file.

    Scrapes the conference/division/team tables for ``sport``, returns them
    either nested (league -> division -> teams) or as a flat list depending
    on the request's flat-list query arg, persists the flat team list to
    redis for later ID lookups, and caches the response for 300 days.
    """
    flat_list = query_string_arg_to_bool(PARAM_FLAT_LIST)
    rv = fetch_cached_data(args=PARAM_FLAT_LIST if flat_list else None)
    if rv is not None:
        return rv
    # STATs does not order NFL teams
    nfl_teams = [
        "Atlanta Falcons", "Buffalo Bills", "Chicago Bears",
        "Cincinnati Bengals", "Cleveland Browns", "Dallas Cowboys",
        "Denver Broncos", "Detroit Lions", "Green Bay Packers",
        "Tennessee Titans", "Indianapolis Colts", "Kansas City Chiefs",
        "Oakland Raiders", "St. Louis Rams", "Miami Dolphins",
        "Minnesota Vikings", "New England Patriots", "New Orleans Saints",
        "New York Giants", "New York Jets", "Philadelphia Eagles",
        "Arizona Cardinals", "Pittsburgh Steelers", "San Diego Chargers",
        "San Francisco 49ers", "Seattle Seahawks", "Tampa Bay Buccaneers",
        "Washington Redskins", "Carolina Panthers", "Jacksonville Jaguars",
        '', '', "Baltimore Ravens", "Houston Texans"
    ]
    soup = help_fetch_soup(url=TEAMS_URL.replace(URL_TOKEN, sport))
    stack = []
    redis_stack = []
    league_stack = []
    division_stack = []
    league = None
    division = None
    # Iterate over each conference
    for table in soup("table"):
        for row in table("tr"):
            # Rows without a class attribute carry no data.
            if row.get("class") is None:
                continue
            cells = row("td")
            # Conference Row
            if "shsTableTtlRow" in row.get("class"):
                if flat_list:
                    continue
                # A new conference begins: flush the accumulated division
                # and league before starting fresh.
                if division_stack and division:
                    league_stack.append({division: division_stack})
                    division_stack = []
                if league_stack and league:
                    stack.append({league: league_stack})
                    league_stack = []
                league = format_division(row)
            # Division Row
            elif "shsTableSubttlRow" in row.get("class"):
                if flat_list:
                    continue
                # A new division begins: flush the previous one.
                if division_stack and division:
                    league_stack.append({division: division_stack})
                    division_stack = []
                division = format_division(row)
            # Team Row
            else:
                team = cells[0].extract().text.strip().encode("utf-8")
                # Save the team as a flat list for persistent storage
                redis_stack.append(team)
                if flat_list:
                    stack.append(team)
                else:
                    division_stack.append(team)
    else:
        # for-else: runs after the table loop completes — flush the last
        # division/league that was still being accumulated.
        if division_stack and division:
            league_stack.append({division: division_stack})
            division_stack = []
        if league_stack and league:
            stack.append({league: league_stack})
            league_stack = []
    out = prepare_json_output(stack)
    del soup, division_stack, league_stack, stack
    redis_key = app.config["REDIS_KEY_TEAMS"].replace(
        app.config["REDIS_KEY_TOKEN_SPORT"], "nfl" if "fb" == sport else sport)
    if not redis.exists(redis_key):
        # NFL team order comes from the hard-coded list, not the scrape.
        if "fb" == sport:
            redis_stack = nfl_teams
        # Convert the object to a JSON string
        redis.set(name=redis_key, value=dumps(prepare_json_output(redis_stack)))
    del redis_key, redis_stack
    cache_data(
        data=out,
        args=PARAM_FLAT_LIST if flat_list else None,
        timeout=60 * 60 * 24 * 300  # Cache for 300 days
    )
    return out