def load_user(user_id):
    session = Session()
    user = session.query(User).filter_by(id=user_id).first()
    if user is None:
        session.close()
        return None
    # Detach the instance so it remains usable after the session is closed.
    session.expunge(user)
    session.close()
    return user
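
# Hedged sketch, not part of the original file: load_user above matches the
# shape Flask-Login expects from a user loader (take an id, return a user or
# None). If that is how it is used, registration would look roughly like the
# following; `app` and `login_manager` are assumed names not defined here.
#
#     from flask_login import LoginManager
#
#     login_manager = LoginManager()
#     login_manager.init_app(app)
#     login_manager.user_loader(load_user)   # or decorate load_user directly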
def admin(site):
    session = Session()
    us_graph = session.query(GraphCache).filter_by(
        country='us', data_type='country').first()
    us_graphjs = us_graph.last_updated
    summary_graph = session.query(GraphCache).filter_by(
        country='us', data_type='summary').first()
    summary_graphjs = summary_graph.last_updated
    page, per_page, offset = get_page_args(page_parameter='page',
                                           per_page_parameter='per_page')
    actsJS = []
    acts = session.query(ActivityTrack).filter_by(site=site).order_by(
        desc(ActivityTrack.created),
        ActivityTrack.id).limit(per_page).offset(offset)
    for act in acts:
        actsJS.append(act.publicJSON(site))
    pagination = Pagination(
        page=page,
        per_page=per_page,
        total=session.query(ActivityTrack).filter_by(site=site).count(),
        css_framework='bootstrap4')
    session.close()
    headers = {'Content-Type': 'text/html'}
    return make_response(
        render_template('admin/admin_panel_home.html',
                        usgraph=us_graphjs,
                        summary=summary_graphjs,
                        activities=actsJS,
                        pagination=pagination,
                        site=site), 200, headers)
def redirect_home():
    session = Session()
    sites = session.query(Site).filter_by(public=True).count()
    session.close()
    page = 'Pages_home'
    if sites > 1:
        page = 'select_screen'
    if ENV_NAME() == 'prod':
        return redirect(
            url_for(page, _scheme='https', _external=True, site='covid-19'))
    else:
        return redirect(url_for(page, site='covid-19'))
def admin_select():
    session = Session()
    sites = session.query(Site).order_by(Site.order, Site.id).all()
    sitesJS = []
    for site in sites:
        sitesJS.append(site.publicJSON())
    session.close()
    headers = {'Content-Type': 'text/html'}
    return make_response(
        render_template('admin/admin_panel_disaster_type.html', sites=sitesJS),
        200, headers)
def get(self):
    session = Session()
    sitesJS = []
    sites = session.query(Site).filter_by(public=True).all()
    for site in sites:
        js = site.publicJSON()
        js['has_literature'] = site.hasLiterature(session)
        js['has_news'] = site.hasNews(session)
        sitesJS.append(js)
    session.close()
    return jsonify({'sites': sitesJS})
def select_screen():
    session = Session()
    sites = session.query(Site).filter_by(public=True).order_by(
        Site.order, Site.title).all()
    private_sites = session.query(Site).filter_by(public=False).order_by(
        Site.order, Site.title).all()
    if len(sites) == 1:
        if ENV_NAME() == 'prod':
            return redirect(
                url_for('Pages_home',
                        _scheme='https',
                        _external=True,
                        site='covid-19'))
        else:
            return redirect(url_for('Pages_home', site='covid-19'))
    # NOTE: sitesJS is built here but the template below still receives the raw
    # ORM objects in `sites`; kept as-is to preserve the original behavior.
    sitesJS = []
    for site in sites:
        sitesJS.append(site.publicJSON())
    info = get_site_info(['how_to_disaster_consult'], 'covid-19', session)
    session.close()
    headers = {'Content-Type': 'text/html'}
    return make_response(
        render_template('pages/select-disaster.html',
                        sites=sites,
                        private_sites=private_sites,
                        info=info,
                        site='covid-19'), 200, headers)
def cache_graph():
    print('Fetching Fresh US Graph Data')
    final_js_str = ''
    try:
        url = 'https://api.covid19api.com/country/us/status/confirmed'
        resp = requests.get(url)
        js = resp.json()
        # Collapse the per-record rows into one confirmed-case total per day.
        day_dict = {}
        for case in js:
            if case['Cases'] > 0 and case['Status'] == 'confirmed':
                date = case['Date'].split('T')[0]
                fixed_date = '%sT00:00:00Z' % date
                if fixed_date not in day_dict:
                    day_dict[fixed_date] = 0
                day_dict[fixed_date] += case['Cases']
        day_js = []
        for key in day_dict.keys():
            small_js = {
                'Date': key,
                'Cases': day_dict[key],
                'Status': 'confirmed'
            }
            day_js.append(small_js)
        final_js_str = json.dumps(day_js)
    except Exception:
        print('Failed Fetching US Graph Data')
    session = Session()
    us_graph = session.query(GraphCache).filter_by(
        country='us', data_type='country').first()
    if us_graph is None:
        us_graph = GraphCache(country='us', data_type='country')
        session.add(us_graph)
    if final_js_str:
        us_graph.json = final_js_str
    session.commit()
    session.close()
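
# Illustration only (not from the original code): the aggregation in
# cache_graph collapses multiple API rows for the same calendar day into a
# single daily total. With hypothetical input rows such as
#
#     [{'Date': '2020-04-01T04:00:00Z', 'Cases': 3, 'Status': 'confirmed'},
#      {'Date': '2020-04-01T16:00:00Z', 'Cases': 2, 'Status': 'confirmed'},
#      {'Date': '2020-04-02T04:00:00Z', 'Cases': 7, 'Status': 'confirmed'}]
#
# day_dict would end up as
#     {'2020-04-01T00:00:00Z': 5, '2020-04-02T00:00:00Z': 7}
# before being serialized back into the Date/Cases/Status list that is cached.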
def cache_summary():
    print('Fetching Fresh Graph Summary Data')
    final_js_str = ''
    try:
        final_dict = {}
        url = 'https://api.covid19api.com/summary'
        resp = requests.get(url)
        js = resp.json()
        final_dict['Global'] = js['Global']
        country_dict = []
        for case in js["Countries"]:
            # Bail out without caching if the US row looks incomplete.
            if case['Slug'] == 'united-states' and case['TotalDeaths'] == 0:
                return
            country_dict.append({
                "Country": case['Country'],
                "Slug": case['Slug'],
                "TotalConfirmed": case['TotalConfirmed'],
                "TotalDeaths": case['TotalDeaths'],
                "TotalRecovered": case['TotalRecovered']
            })
        final_dict['Countries'] = country_dict
        final_js_str = json.dumps(final_dict)
    except Exception:
        print('Failed Fetching Graph Summary Data')
        return
    session = Session()
    us_graph = session.query(GraphCache).filter_by(
        country='us', data_type='summary').first()
    if us_graph is None:
        us_graph = GraphCache(country='us', data_type='summary')
        session.add(us_graph)
    if final_js_str:
        us_graph.json = final_js_str
    session.commit()
    session.close()
def search_reindex():
    print('STARTING Elastic Search Cache Update')
    try:
        session = Session()
        Category.reindex(session)
        session.close()
    except Exception:
        print('FAILED Category SEARCH REINDEX')
    try:
        session = Session()
        Section.reindex(session)
        session.close()
    except Exception:
        print('FAILED Section SEARCH REINDEX')
    try:
        session = Session()
        Post.reindex(session)
        session.close()
    except Exception:
        print('FAILED POST SEARCH REINDEX')
    print('SUCCESSFUL Elastic Search Cache Update')
if scheduler_enabled:
    # Seed the summary cache on startup if it has never been populated.
    session = Session()
    #us_graph = session.query(GraphCache).filter_by(country='us', data_type='country').first()
    #if us_graph is None:
    #    cache_graph()
    summary_graph = session.query(GraphCache).filter_by(
        country='us', data_type='summary').first()
    if summary_graph is None:
        cache_summary()
    session.close()

    sched = BackgroundScheduler(daemon=True)
    #sched.add_job(cache_graph, 'interval', minutes=60)
    sched.add_job(search_reindex, 'interval', minutes=60 * 4)
    sched.add_job(cache_summary, 'interval', minutes=10)
    sched.start()
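
# Orientation note, assumptions only: Session, the ORM models, the Flask and
# flask_paginate helpers, and scheduler_enabled are all defined elsewhere in
# the project. A minimal sketch of the kind of setup these snippets rely on,
# with hypothetical engine/config names:
#
#     from apscheduler.schedulers.background import BackgroundScheduler
#     from flask_paginate import Pagination, get_page_args
#     from sqlalchemy import create_engine
#     from sqlalchemy.orm import sessionmaker
#
#     engine = create_engine(DATABASE_URL)   # DATABASE_URL is assumed config
#     Session = sessionmaker(bind=engine)    # the factory called as Session()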