def drop_all():
    """Drop the User, Role and Mole tables, then recreate all tables."""
    User.__table__.drop(db.get_engine())
    Role.__table__.drop(db.get_engine())
    Mole.__table__.drop(db.get_engine())
    db.create_all()
    db.session.commit()
def teardown():
    app = create_app('test')
    with app.app_context():
        db.session.remove()
        db.drop_all()
        db.engine.execute("drop table alembic_version")
        db.get_engine(app).dispose()
def notify_db(notify_api, worker_id):
    assert 'test_notification_api' in db.engine.url.database, "don't run tests against the main db"

    # create a database for this worker thread
    from flask import current_app
    current_app.config['SQLALCHEMY_DATABASE_URI'] += '_{}'.format(worker_id)
    current_app.config['SQLALCHEMY_DATABASE_READER_URI'] += '_{}'.format(worker_id)
    uri_db_writer = current_app.config['SQLALCHEMY_DATABASE_URI']
    uri_db_reader = current_app.config['SQLALCHEMY_DATABASE_READER_URI']
    current_app.config['SQLALCHEMY_BINDS'] = {
        'reader': uri_db_reader,
        'writer': uri_db_writer
    }
    create_test_db(uri_db_writer)

    BASE_DIR = os.path.dirname(os.path.dirname(__file__))
    ALEMBIC_CONFIG = os.path.join(BASE_DIR, 'migrations')
    config = Config(ALEMBIC_CONFIG + '/alembic.ini')
    config.set_main_option("script_location", ALEMBIC_CONFIG)

    with notify_api.app_context():
        upgrade(config, 'head')
        grant_test_db(uri_db_writer, uri_db_reader)

    yield db

    db.session.remove()
    db.get_engine(notify_api).dispose()
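# A minimal sketch of how the notify_db generator above could be consumed in a
# pytest test, assuming it is registered with @pytest.fixture in conftest.py
# (it is written in fixture style: setup, yield, teardown). The test body is
# illustrative only.
def test_worker_gets_its_own_db(notify_db):
    # each xdist worker gets its own freshly migrated database
    assert notify_db.session.execute("SELECT 1").scalar() == 1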
def teardown_database(self):
    with self.app.app_context():
        db.session.remove()
        for table in reversed(db.metadata.sorted_tables):
            if table.name not in ["lots", "frameworks", "framework_lots"]:
                db.engine.execute(table.delete())
        Framework.query.filter(Framework.id >= 100).delete()
        db.session.commit()
        db.get_engine(self.app).dispose()
def get(self, search_term):
    """Query the asset list grouped by asset type.

    Returns every group matching the search string (for search/autocomplete
    use; only the first 50 records per group are returned).
    """
    ret_dic = {"results": []}
    logger.debug('search term: %s', search_term)
    param_str = "%%" + search_term + '%%'
    # look up portfolios
    sql_str = """SELECT 'portfolio' AS asset_type, pl_id asset_code, name asset_name,
        concat(pl_id, ' : ', pl_info.name) text
        FROM pl_info WHERE pl_info.name LIKE %s LIMIT 50"""
    data_df = pd.read_sql(sql_str, db.engine, params=[param_str])
    logger.debug('%d portfolio records', data_df.shape[0])
    if data_df.shape[0] > 0:
        data_dic = data_df.to_dict('records')
        ret_dic['results'].append({"text": "Portfolio", "children": data_dic})
    # look up indices
    sql_str = """SELECT 'index' AS asset_type, wind_code asset_code, sec_name asset_name,
        concat(wind_code, ' : ', sec_name) text
        FROM wind_index_info WHERE sec_name LIKE %s OR wind_code LIKE %s LIMIT 50"""
    data_df = pd.read_sql(sql_str, db.get_engine(db.get_app(), bind=config.BIND_DB_NAME_MD),
                          params=[param_str, param_str])
    logger.debug('%d index records', data_df.shape[0])
    if data_df.shape[0] > 0:
        data_dic = data_df.to_dict('records')
        ret_dic['results'].append({"text": "Index", "children": data_dic})
    # look up stocks
    sql_str = """SELECT 'stock' AS asset_type, wind_code asset_code, sec_name asset_name,
        concat(wind_code, ' : ', sec_name) text
        FROM wind_stock_info WHERE sec_name LIKE %s OR prename LIKE %s OR wind_code LIKE %s LIMIT 50"""
    data_df = pd.read_sql(sql_str, db.get_engine(db.get_app(), bind=config.BIND_DB_NAME_MD),
                          params=[param_str, param_str, param_str])
    logger.debug('%d stock records', data_df.shape[0])
    if data_df.shape[0] > 0:
        data_dic = data_df.to_dict('records')
        ret_dic['results'].append({"text": "Stock", "children": data_dic})
    # look up futures
    sql_str = """SELECT 'future' AS asset_type, wind_code asset_code, sec_name asset_name,
        concat(wind_code, ' : ', sec_name) text
        FROM wind_future_info WHERE sec_name LIKE %s OR wind_code LIKE %s LIMIT 50"""
    data_df = pd.read_sql(sql_str, db.get_engine(db.get_app(), bind=config.BIND_DB_NAME_MD),
                          params=[param_str, param_str])
    logger.debug('%d future records', data_df.shape[0])
    if data_df.shape[0] > 0:
        data_dic = data_df.to_dict('records')
        ret_dic['results'].append({"text": "Future", "children": data_dic})
    # logger.debug(ret_dic)
    # return jsonify(ret_dic)
    return ret_dic
def teardown():
    app = create_app('test')
    with app.app_context():
        db.session.remove()
        db.drop_all()
        db.engine.execute("drop table alembic_version")
        insp = inspect(db.engine)
        for enum in insp.get_enums():
            db.Enum(name=enum['name']).drop(db.engine)
        db.get_engine(app).dispose()
def teardown():
    app = create_app('test')
    with app.app_context():
        db.session.remove()
        db.engine.execute("drop sequence suppliers_supplier_id_seq cascade")
        db.drop_all()
        db.engine.execute("drop table alembic_version")
        insp = inspect(db.engine)
        for enum in insp.get_enums():
            db.Enum(name=enum['name']).drop(db.engine)
        db.get_engine(app).dispose()
def test_db(test_app):
    assert 'wxd_test' in db.engine.url.database, 'Only run tests against the test database'
    BASE_DIR = os.path.dirname(os.path.dirname(__file__))
    ALEMBIC_CONFIG = os.path.join(BASE_DIR, 'migrations')
    config = Config(ALEMBIC_CONFIG + '/alembic.ini')
    config.set_main_option("script_location", ALEMBIC_CONFIG)
    upgrade(config, 'head')

    yield db

    db.session.remove()
    db.get_engine(test_app).dispose()
def get():
    try:
        db.engine.execute(sql)
    except exc.SQLAlchemyError:
        return {"message": "api is down -- error connecting to colin"}, 500
    try:
        db.get_engine(app, 'db2').execute(sql)
    except exc.SQLAlchemyError:
        return {"message": "api is down -- error connecting to namesU"}, 500
    return {"message": "api is healthy"}, 200
def teardown_database(self):
    with self.app.app_context():
        db.session.remove()
        for table in reversed(db.metadata.sorted_tables):
            if table.name not in [
                "lot", "framework", "framework_lot", "service_category",
                "service_role", "domain", "agency", "council"
            ]:
                db.engine.execute(table.delete())
        FrameworkLot.query.filter(FrameworkLot.framework_id >= 100).delete()
        Framework.query.filter(Framework.id >= 100).delete()
        db.session.commit()
        db.get_engine(self.app).dispose()
def get_gif_ids_by_tags(tag_ids, inactive_tags=None):
    my_db_bind = db.get_engine(app, 'gifs_db')
    tag_ids = [tag_id for tag_id in tag_ids if str(tag_id)]
    result = [gif.id for gif in models.Gif.query.all()]
    if tag_ids:
        print('Active tags passed!')
        ids_string = '(' + ','.join(map(str, tag_ids)) + ')'
        sql = 'SELECT gif_id FROM gif_tags WHERE tag_id IN %s' % ids_string
        if inactive_tags is not None:
            print('Also passing inactive tags!')
            inactive_ids_string = '(' + ','.join(map(str, inactive_tags)) + ')'
            sql += ' AND tag_id NOT IN %s' % inactive_ids_string
        rows = db.session.execute(sql, bind=my_db_bind)
        result = [entry[0] for entry in rows]
    else:
        print('No active tags passed...')
        if inactive_tags is not None:
            print('But passing inactive tags!')
            all_gif_ids = [gif.id for gif in models.Gif.query.all()]
            inactive_ids_string = '(' + ','.join(map(str, inactive_tags)) + ')'
            sql = 'SELECT gif_id FROM gif_tags WHERE tag_id IN %s' % inactive_ids_string
            rows = db.session.execute(sql, bind=my_db_bind)
            inactive_gif_ids = [entry[0] for entry in rows]
            result = filter_inactive_tags(all_gif_ids, inactive_gif_ids)
    return result
def calc_asset_rr(asset_code, date_from, date_to, asset_type) -> pd.Series:
    """Calculate the asset's rate of return over a date range.

    :param asset_code:
    :param date_from:
    :param date_to:
    :param asset_type:
    :return:
    """
    # if asset_type == 'stock':
    #     table_name = 'wind_stock_daily'
    # elif asset_type == 'index':
    #     table_name = 'wind_index_daily'
    # elif asset_type == 'future':
    #     table_name = 'wind_future_daily'
    # else:
    #     raise ValueError("asset_type:%s is invalid" % asset_type)
    bind_db, table_name = ASSET_TYPE_TABLE_NAME_DIC[asset_type]
    # TODO: add handling for the portfolio and value asset types
    sql_str = """select trade_date, close from {table_name}
        where wind_code=%s and trade_date between %s and %s
        order by trade_date""".format(table_name=table_name)
    md_df = pd.read_sql(
        sql_str,
        db.get_engine(bind=bind_db) if bind_db is not None else db.engine,
        params=[asset_code, date_from, date_to],
        index_col='trade_date')
    rr_s = md_df["close"].pct_change().fillna(0).rename(asset_code)
    return rr_s
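# A minimal usage sketch for calc_asset_rr above, assuming an active Flask app
# context and populated market-data tables; the Wind code and date range are
# illustrative only.
def _example_calc_asset_rr():
    rr_s = calc_asset_rr('600000.SH', '2020-01-01', '2020-03-31', 'stock')
    print(rr_s.head())  # daily pct-change series indexed by trade_date, first value 0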
def get_last_14_imported_days(last_import_date) -> pd.DataFrame:
    sql = """
        SELECT incidence."bfsNr", incidence.date, incidence.cases, municipality.population,
               municipality.area, municipality.name, municipality.region, municipality.canton
        FROM public.incidence
        LEFT JOIN public.municipality ON (incidence."bfsNr" = municipality."bfsNr")
        WHERE date BETWEEN '{}' AND '{}' AND incidence.cases IS NOT NULL
        ORDER BY incidence.date ASC""".format(
        last_import_date - timedelta(days=14), last_import_date)
    df_db = pd.read_sql_query(sql, db.get_engine())
    dict_db_cols = {
        'population': 'Einwohner',
        'area': 'Gesamtflaeche_in_km2',
        'name': 'Gemeindename',
        'region': 'Bezirksname',
        'canton': 'Kanton',
        'bfsNr': 'BFS_Nr',
        'date': 'Datum',
        'cases': 'Neue_Faelle_Gemeinde'
    }
    df = df_db[dict_db_cols.keys()].copy()
    df.rename(columns=dict_db_cols, inplace=True)
    return df
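# A short usage sketch, assuming the incidence table is populated; it pairs
# this helper with get_last_import_date(), which appears further below in this
# collection.
def _example_last_14_days():
    last_date = get_last_import_date()
    df = get_last_14_imported_days(last_date)
    print(df[['BFS_Nr', 'Datum', 'Neue_Faelle_Gemeinde']].tail())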
def crypto_data(subreddit):
    if subreddit in SUBREDDITS:
        all_logs = Log.query.filter(Log.subreddit == subreddit)
        df = pandas.read_sql(all_logs.statement, db.get_engine())
        df.drop(['id', 'subreddit', 'currency_sentiment'], axis=1, inplace=True)
        df_dict = df.to_dict(orient="records")
        status_and_results = {
            "subreddit": subreddit,
            "status": "success",
            "code": 200,
            "results": df_dict
        }
        response = make_response(json.dumps(status_and_results), 200)
        response.headers.add('Access-Control-Allow-Origin', '*')
        return response
    else:
        status_and_results = {
            "subreddit": subreddit,
            "status": "bad_request",
            "code": 400
        }
        response = make_response(json.dumps(status_and_results), 400)
        response.headers.add('Access-Control-Allow-Origin', '*')
        return response
def query_friends_list(params):
    user_id = params["user_id"]
    if not user_id:
        return []
    sql = ("SELECT t1.* FROM a_social.follow t1, a_social.follow t2 "
           "WHERE t1.user_id = t2.follow_id AND t1.follow_id = t2.user_id "
           "AND t1.status = 1 AND t2.status = 1 AND t1.user_id = ")
    sql += str(params["user_id"])
    if params["last_id"]:
        sql += " AND t1.id < " + str(params["last_id"])
    sql += " ORDER BY t1.id DESC"
    if params["limit"]:
        sql += " LIMIT " + str(params["limit"])
    from heron import app
    result_dict_list = db.session.execute(
        sql, bind=db.get_engine(app=app, bind='a_social')).fetchall()
    if not result_dict_list:
        result_dict_list = []
    result = []
    for item in result_dict_list:
        model = FollowModel()
        model.id = item[0]
        model.user_id = item[1]
        model.follow_id = item[2]
        model.status = item[3]
        result.append(model)
    return result
def fetch_to_dict(sql, params={}, fetch='all', bind=None):
    """Return query results as dicts.

    :param sql: select * from xxx where name=:name
    :param params: {'name': 'zhangsan'}
    :param fetch: returns all rows by default, formatted as [{}, {}];
        if fetch='one', returns a single row as a dict
    :param bind: the database bind to use; defaults to the configured
        SQLALCHEMY_DATABASE_URI
    :return:
    """
    resultProxy = db.session.execute(sql, params, bind=db.get_engine(bind=bind))
    if fetch == 'one':
        result_tuple = resultProxy.fetchone()
        if result_tuple:
            result = dict(zip(resultProxy.keys(), list(result_tuple)))
        else:
            return None
    else:
        result_tuple_list = resultProxy.fetchall()
        if result_tuple_list:
            result = []
            keys = resultProxy.keys()
            for row in result_tuple_list:
                result_row = dict(zip(keys, row))
                result.append(result_row)
        else:
            return None
    return result
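# A minimal usage sketch for fetch_to_dict above, assuming a `user` table
# exists on the default bind; the table and column names are hypothetical.
def _example_fetch_to_dict():
    one = fetch_to_dict("select id, name from user where name = :name",
                        params={'name': 'zhangsan'}, fetch='one')
    many = fetch_to_dict("select id, name from user")
    print(one)   # {'id': ..., 'name': 'zhangsan'} or None if no match
    print(many)  # [{'id': ..., 'name': ...}, ...] or None if empty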
def notify_db(notify_api):
    assert db.engine.url.database != 'notification_api', "don't run tests against the main db"
    Migrate(notify_api, db)
    Manager(db, MigrateCommand)
    BASE_DIR = os.path.dirname(os.path.dirname(__file__))
    ALEMBIC_CONFIG = os.path.join(BASE_DIR, 'migrations')
    config = Config(ALEMBIC_CONFIG + '/alembic.ini')
    config.set_main_option("script_location", ALEMBIC_CONFIG)
    with notify_api.app_context():
        upgrade(config, 'head')

    yield db

    db.session.remove()
    db.get_engine(notify_api).dispose()
def detection():
    datas = request.json
    detectionType = datas['detectionType']
    imageId = datas['imageid']
    username = datas['username']
    print("detection type %s , imageID %s, username %s." % (detectionType, imageId, username))
    # send a message to the websocket to start detection
    user = User.query.filter_by(username=username).first()
    engine = db.get_engine(bind=user.dbname)
    DBSession = sessionmaker(bind=engine)
    db_session = DBSession()
    existResult = db_session.query(DetectionResult).filter_by(
        pathimage_imageid=imageId, detectionType=detectionType).all()
    if len(existResult) == 0:
        detectionResult = DetectionResult()
        detectionResult.detectionType = detectionType
        detectionResult.pathimage_imageid = imageId
        db_session.add(detectionResult)
        db_session.commit()
        image = db_session.query(PathImage).filter_by(imageid=imageId).first()
        db_session.close()
        detection_task.doDetection.apply_async(
            args=[image.filepath, imageId, detectionType, user.dbname])
        return "detecting"
    elif existResult[0].filepath is None or existResult[0].filepath == "":
        db_session.close()
        return "detecting"
    else:
        db_session.close()
        return existResult[0].filepath
def tearDown(self):
    db.session.remove()
    # Replacement for db.drop_all()
    # http://www.mbeckler.org/blog/?p=218
    engine = db.get_engine(self.app)
    conn = engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()
    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    # drop all foreign key constraints first, so tables can be dropped in any order
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))
    for table in tbs:
        conn.execute(DropTable(table))
    trans.commit()
    engine.dispose()
def app():
    app = get_test_app()
    with app.app_context():
        server_config = ConfigManager.get_instance().get_server_config()
        application_root = ConfigManager.get_instance().get_application_root()
        app.register_blueprint(incidence_controller, url_prefix=application_root)
        app.register_blueprint(municipality_controller, url_prefix=application_root)
        app.register_blueprint(swaggerui_controller, url_prefix=application_root)
        app.config['DEVELOPMENT'] = server_config["development"]
        with app.open_resource(
                "tests/api_blackbox/testdata/dump_municipalities.sql") as f_municipalities:
            with app.open_resource(
                    "tests/api_blackbox/testdata/dump_incidences.sql") as f_incidences:
                engine = db.get_engine()
                with engine.connect() as con:
                    create_incidence = '''
                        CREATE TABLE incidence (
                            "incidencesId" integer NOT NULL,
                            "bfsNr" integer NOT NULL,
                            date date NOT NULL,
                            incidence double precision NOT NULL,
                            cases integer NOT NULL,
                            cases_cumsum_14d integer NOT NULL
                        );
                    '''
                    create_municipality = '''
                        CREATE TABLE municipality (
                            "bfsNr" integer NOT NULL,
                            name character varying(256) NOT NULL,
                            canton character varying(2) NOT NULL,
                            area double precision NOT NULL,
                            population integer NOT NULL,
                            region character varying(256) NOT NULL
                        );
                    '''
                    con.execute(create_municipality)
                    con.execute(create_incidence)
                    query_municipalities = text(f_municipalities.read().decode("utf8"))
                    con.execute(query_municipalities)
                    query_incidences = text(f_incidences.read().decode("utf8"))
                    con.execute(query_incidences)
        yield app
def image(filename):
    user = current_user
    engine = db.get_engine(bind=user.dbname)
    DBSession = sessionmaker(bind=engine)
    db_session = DBSession()
    annotationsInfo = db_session.query(Annotation).filter_by(
        pathimage_imageid=filename).all()
    slug = slugify(filename)
    slidempp = 'slide_mpp'
    imagesize = 'imagesize'
    if not hasattr(app, 'slides'):
        app.slides = {}
        print('app.slides is null')
    if slug not in app.slides:
        print('image not exists ' + slug)
        image = db_session.query(PathImage).filter_by(filename=filename).first()
        slidefile = image.filepath
        config_map = {
            'DEEPZOOM_TILE_SIZE': 'tile_size',
            'DEEPZOOM_OVERLAP': 'overlap',
            'DEEPZOOM_LIMIT_BOUNDS': 'limit_bounds',
        }
        opts = dict((v, app.config[k]) for k, v in config_map.items())
        if 'kfb' in slidefile:
            slide = kfbslide.KfbSlide(slidefile)
            deepzoom = kfb_deepzoom.KfbDeepZoomGenerator(slide, **opts)
        else:
            slide = open_slide(slidefile)
            deepzoom = DeepZoomGenerator(slide, **opts)
        slideinfo = {SLIDE_NAME: deepzoom}
        try:
            mpp_x = slide.properties[openslide.PROPERTY_NAME_MPP_X]
            mpp_y = slide.properties[openslide.PROPERTY_NAME_MPP_Y]
            slideinfo[slidempp] = (float(mpp_x) + float(mpp_y)) / 2
        except (KeyError, ValueError):
            slideinfo[slidempp] = 0
        slideinfo[imagesize] = [int(deepzoom.level_dimensions[-1][0]),
                                int(deepzoom.level_dimensions[-1][1])]
        slideinfo['active'] = True
        app.slides[slug] = slideinfo
    annotations = getannotations(annotationsInfo, app.slides[slug][imagesize], db_session, user)
    db_session.close()
    slidename = SLIDE_NAME + slug
    slide_url = url_for('dzi', slug=slidename)
    return render_template("display.html",
                           user=user,
                           slide_url=slide_url,
                           slide_mpp=app.slides[slug][slidempp],
                           canlogout=True,
                           image_name=filename,
                           annotations=annotations,
                           imagesize=app.slides[slug][imagesize])
def test_user_add():
    # u = UserTest(name='fire')
    # db.session.add(u)
    # db.session.commit()
    name = 'fire'
    sql = ("insert into test_user(name) values('%s'),('feizi')") % name
    query = db.get_engine(app, bind='kakou').execute(sql)
    query.close()
def teardown(self):
    db.session.remove()
    for table in reversed(db.metadata.sorted_tables):
        if table.name not in ["lots", "frameworks", "framework_lots"]:
            db.engine.execute(table.delete())
    FrameworkLot.query.filter(FrameworkLot.framework_id >= 100).delete()
    Framework.query.filter(Framework.id >= 100).delete()
    # Remove any framework variation details
    frameworks = db.session.query(Framework).filter(
        Framework.framework_agreement_details.isnot(None))
    for framework in frameworks.all():
        framework.framework_agreement_details = None
        db.session.add(framework)
    db.session.commit()
    db.get_engine(self.app).dispose()
    self.app_context.pop()
def get_last_import_date():
    max_date = None
    sql = "SELECT max(date) AS max_date FROM public.incidence WHERE cases IS NOT NULL"
    with db.get_engine().connect() as connection:
        result = connection.execute(sql)
        for row in result:
            max_date = row['max_date']
    return max_date
def test_stat():
    st = '2017-03-20 12:00:00'
    et = '2017-03-20 13:00:00'
    kkdd = 441302001
    sql = ("select count(*) from traffic_vehicle_pass "
           "where pass_time >='{0}' and pass_time <='{1}' and crossing_id = {2}"
           .format(st, et, kkdd))
    query = db.get_engine(app, bind='kakou').execute(sql)
    r = query.fetchone()[0]
    print(r)
def db(app):
    assert _db.engine.url.database.endswith('_test'), "don't run tests against the main db"
    create_test_db_if_does_not_exist(_db)
    Migrate(app, _db)
    Manager(_db, MigrateCommand)
    BASE_DIR = os.path.dirname(os.path.dirname(__file__))
    ALEMBIC_CONFIG = os.path.join(BASE_DIR, 'migrations')
    config = Config(ALEMBIC_CONFIG + '/alembic.ini')
    config.set_main_option("script_location", ALEMBIC_CONFIG)
    with app.app_context():
        upgrade(config, 'head')

    yield _db

    _db.session.remove()
    _db.get_engine(app).dispose()
def get_tag_gif_counts():
    my_db_bind = db.get_engine(app, 'gifs_db')
    all_tags = models.Tag.query.all()
    for tag in all_tags:
        sql = 'SELECT count(gif_id) FROM gif_tags WHERE tag_id = %s' % str(tag.id)
        rows = db.session.execute(sql, bind=my_db_bind)
        result = [entry[0] for entry in rows]
        tag.gif_count = int(result[0])
    return all_tags
def test_traffic_add2():
    vals = [
        u"('441302123', 1, 'IN', '粤L12345', '', '2015-12-13 01:23:45', '0')",
        u"('441302123', 1, 'IN', '粤L12345', '', '2015-12-13 01:23:45', '0')"
    ]
    print(vals)
    # print(','.join(vals))
    sql = ("insert into traffic(crossing_id, lane_no, direction_index, plate_no, "
           "plate_type, pass_time, plate_color) values %s") % ','.join(vals)
    query = db.get_engine(app, bind='kakou').execute(sql)
    query.close()
def allowed_table_name(table_schema=None):
    table_name = []
    if table_schema:
        data = db.get_engine(bind="information_schema").execute(
            "select distinct t.TABLE_NAME from columns t where t.TABLE_SCHEMA='{0}'"
            .format(table_schema)).fetchall()
        for value, row in enumerate(data):
            # each row is a 1-tuple holding the table name
            table_name.append((value, row[0]))
        return table_name
    # columns = db.get_engine(bind="information_schema").execute(
    #     "select t.TABLE_SCHEMA,t.TABLE_NAME,t.COLUMN_NAME,t.ORDINAL_POSITION,"
    #     "t.IS_NULLABLE,t.DATA_TYPE,t.COLUMN_KEY,t.EXTRA from columns t "
    #     "where t.TABLE_SCHEMA='report'").fetchall()
    return None
def get(self):
    df1 = pd.read_sql("select * from ttt", db.engine)
    # df1["registerdate"] = df1["registerdate"].apply(
    #     lambda x: x.strftime("%Y-%m-%d %H:%M:%S") if x is not pd.NaT else None)
    df1 = df1.replace({np.nan: None})
    for dtype in df1.dtypes:
        print(dtype)
    df1["birthday"] = df1["birthday"].apply(
        lambda x: x.strftime("%Y-%m-%d") if x is not None else None)
    df1["registerdate"] = df1["registerdate"].apply(
        lambda x: x.strftime("%Y-%m-%d %H:%M:%S") if x is not None else None)
    df2 = pd.read_sql("select * from clinic_accrual_reg_rate limit 12",
                      db.get_engine(bind="xiaonuan")).replace({np.nan: None})
    # df3 = pd.read_sql("select brand,brand_code,clinic_name "
    #                   "from data_xiaonuan_final.hospital_base_information",
    #                   db.get_engine(bind="impala"))
    # print(df3)
    df4 = pd.read_sql(
        "select part_dt, sum(price) as total_selled, "
        "count(distinct seller_id) as sellers from kylin_sales "
        "group by part_dt order by part_dt",
        db.get_engine(bind="kylin"))
    # print(df4)
    # data = {
    #     "local": df1.to_json(orient="records"),
    #     "xiaonuan": df2.to_json(orient="records"),
    #     "impala": df3.to_json(orient="records"),
    #     "kylin": df4.to_json(orient="records"),
    #     "testing": "I'm 中文测试"
    # }
    data = {
        "local": df1.to_dict("records"),
        "xiaonuan": df2.to_dict("records"),
        # "impala": df3[:12].to_dict("records"),
        "kylin": df4[:4].to_dict("records"),
        "testing": "I'm 中文测试"
    }
    return data
def detection_result(imageid, detectionType, username):
    user = User.query.filter_by(username=username).first()
    engine = db.get_engine(bind=user.dbname)
    DBSession = sessionmaker(bind=engine)
    db_session = DBSession()
    existResult = db_session.query(DetectionResult).filter_by(
        pathimage_imageid=imageid, detectionType=detectionType).first()
    if existResult is not None and existResult.filepath is not None:
        with open(existResult.filepath, 'rb') as f:
            resp = base64.b64encode(f.read())
        return resp
    return ''
def delete_where_cases_is_null(new_dates):
    if len(new_dates) >= 1:
        string_dates = [
            f"'{new_date.strftime('%Y-%m-%d')}'" for new_date in new_dates
        ]
        sep = ', '
        date_filter = sep.join(string_dates)
        sql = f'DELETE FROM public.incidence WHERE cases IS NULL AND date IN ({date_filter});'
        with db.get_engine().connect() as connection:
            result = connection.execute(sql)
            return result
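# A small sketch tying the incidence helpers together, assuming an active app
# context; the dates are illustrative only.
def _example_incidence_cleanup():
    from datetime import date
    delete_where_cases_is_null([date(2021, 3, 1), date(2021, 3, 2)])
    print(get_last_import_date())  # latest date that still has case counts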
def fetch_usage_data_from_db():
    """Fetch the usage data for further processing."""
    # big_data = FlaskUsage.query.all()
    pd_db = pd.read_sql_table('flask_usage', db.get_engine(bind='trackusage'))
    usage_dict = {}
    col_names = []
    for col_name in pd_db.columns:
        usage_dict[col_name] = pd_db[col_name]  # a Series, one per column
        col_names.append(col_name)
    return usage_dict, col_names, pd_db
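# A minimal sketch of consuming fetch_usage_data_from_db above, assuming the
# flask_usage table exists on the 'trackusage' bind.
def _example_usage_report():
    usage_dict, col_names, pd_db = fetch_usage_data_from_db()
    print(col_names)         # columns of the flask_usage table
    print(len(pd_db.index))  # number of recorded requests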
def get_gif_ids_by_tags(tags):
    tag_ids = []
    for tag in tags:
        tag_ids.append(models.Tag.query.filter_by(name=tag).first().id)
    ids_string = '(' + ','.join(map(str, tag_ids)) + ')'
    sql = 'SELECT gif_id FROM gif_tags WHERE tag_id IN %s' % ids_string
    my_db_bind = db.get_engine(app, 'gifs_db')
    rows = db.session.execute(sql, bind=my_db_bind)
    gifs_ids = [entry[0] for entry in rows]
    return gifs_ids
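# A quick usage sketch, assuming the named Tag rows exist (first() returns
# None for an unknown tag, which would raise AttributeError here); the tag
# names are illustrative.
def _example_gifs_by_tags():
    gif_ids = get_gif_ids_by_tags(['cats', 'dogs'])
    print(gif_ids)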
def getUser(userId):
    logger.info('getUser start')
    result = ()
    try:
        engine = db.get_engine()
        conn = engine.connect()
        sql = ("select userId id, userName name, photo avatar "
               "from vw_user where userId = %(userId)s and status != 'D'")
        rs = conn.execute(sql, {'userId': userId})
        result = rs.fetchall()
        result = [dict(row.items()) for row in result]
    except Exception as e:
        logger.error("getUser fail")
        logger.error(e)
    logger.info(result)
    return result[0]
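# A quick usage sketch for getUser above, assuming vw_user contains the id;
# the userId value is illustrative. Note the helper raises IndexError when no
# row matches, since it returns result[0] unconditionally.
def _example_get_user():
    user = getUser(42)
    print(user)  # {'id': ..., 'name': ..., 'avatar': ...}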
def getNF(cname):
    nF = '['
    try:
        resultset = db.session.execute(
            "select * from " + cname + " order by 1",
            bind=db.get_engine(app, 'data'))
    except Exception:
        resultset = ''
    for r in resultset:
        nF += '[new Date("' + str(r[0]) + '"),' + ','.join(str(x) for x in list(r)[1:]) + '],\n'
    config = Control_container.query.filter_by(name=cname).first()
    if config and str(config.g_config) != 'None':
        config = str(config.g_config)
    else:
        if resultset != '':
            config = 'labels: ["' + '","'.join(resultset.keys()) + '"],'
        else:
            config = ''
    return nF + '],{' + config
def main(app=create_app(), options=[]):
    with app.app_context():
        engine = db.get_engine(app)
        conn = engine.connect()
        trans = conn.begin()
        inspector = reflection.Inspector.from_engine(engine)
        metadata = MetaData()
        tbs = []
        all_fks = []
        for table_name in inspector.get_table_names():
            if table_name != 'alembic_version':
                fks = []
                for fk in inspector.get_foreign_keys(table_name):
                    if not fk['name']:
                        continue
                    fks.append(ForeignKeyConstraint((), (), name=fk['name']))
                t = Table(table_name, metadata, *fks)
                tbs.append(t)
                all_fks.extend(fks)
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))
        for table in tbs:
            conn.execute(DropTable(table))
        trans.commit()
        db.create_all()
        engine.dispose()
        print("Deleted all existing data")

        # Audit triggers aren't in the SQLAlchemy schema definition, so create_all
        # won't recreate them. Run the separate script to add them.
        if '-local' in options:
            os.system('psql -d {} -a -f app/audit_triggers.sql'.format(engine.url.database))
            print("Added audit triggers")
def tearDownClass(cls):
    db.session.remove()
    db.drop_all()
    db.get_engine(cls.app).dispose()
def index():
    """Display the initial landing page, which lists patients in the network
    and allows users to search and filter them.
    """
    form = SearchPatientForm()
    if request.method == 'POST':
        session['first_name'] = form.search_patient_first_name.data
        session['last_name'] = form.search_patient_last_name.data
        # session['dob'] = form.search_patient_dob.data
        session['ssn'] = form.search_patient_ssn.data
        return redirect(url_for('screener.new_patient'))

    all_patients = Patient.query.all()

    # ORGANIZATION-BASED QUERIES
    org_users = [user.id for user in AppUser.query.filter(
        AppUser.service_id == current_user.service_id
    )]

    # Get patients that this organization referred out who have results entered
    org_completed_referrals_outgoing = Patient.query.join(Patient.referrals).filter(
        Patient.referrals.any(
            and_(
                PatientReferral.from_app_user_id.in_(org_users),
                PatientReferral.status == 'COMPLETED'
            )
        )
    ).order_by(func.coalesce(PatientReferral.last_modified, PatientReferral.created))

    # Get patients that this organization referred out who are waiting for results
    org_open_referrals_outgoing = Patient.query.join(Patient.referrals).filter(
        Patient.referrals.any(
            and_(
                PatientReferral.from_app_user_id.in_(org_users),
                PatientReferral.status.in_(('SENT', 'RECEIVED'))
            )
        )
    ).order_by(func.coalesce(PatientReferral.last_modified, PatientReferral.created))

    # Get patients with open referrals at this user's organization
    org_open_referrals_incoming = Patient.query.join(Patient.referrals).filter(
        Patient.referrals.any(and_(
            PatientReferral.to_service_id == current_user.service_id,
            PatientReferral.status.in_(('SENT', 'RECEIVED'))
        ))
    ).order_by(func.coalesce(PatientReferral.last_modified, PatientReferral.created))

    # Get patients with completed referrals at this user's organization
    org_completed_referrals_incoming = Patient.query.join(Patient.referrals).filter(
        Patient.referrals.any(and_(
            PatientReferral.to_service_id == current_user.service_id,
            PatientReferral.status == 'COMPLETED'
        ))
    ).order_by(func.coalesce(PatientReferral.last_modified, PatientReferral.created))

    # Get patients who were most recently screened and found eligible for this
    # organization more than 11 months ago
    # No idea how to do this in SQLAlchemy
    query = text(
        "select * from ( "
        "select "
        "patient.id, "
        "patient.first_name, "
        "patient.middle_name, "
        "patient.last_name, "
        "patient.dob, "
        "max(patient_screening_result.created) as most_recent_result "
        "from "
        "patient, "
        "patient_screening_result "
        "where "
        "patient.id = patient_screening_result.patient_id "
        "and patient_screening_result.eligible_yn = 'Y' "
        "and patient_screening_result.service_id = :service_id "
        "group by "
        "patient.id, "
        "patient.first_name, "
        "patient.middle_name, "
        "patient.last_name, "
        "patient.dob "
        ") subquery where most_recent_result < :eleven_months_ago "
        "order by subquery.most_recent_result "
    )
    conn = db.get_engine(current_app).connect()
    org_need_renewal = conn.execute(
        query,
        service_id=current_user.service_id,
        eleven_months_ago=datetime.date.today() - relativedelta(months=11)
    ).fetchall()

    # USER-BASED QUERIES

    # Get patients this user created or updated in the last week
    your_recently_updated = Patient.query.filter(or_(
        and_(
            Patient.last_modified > datetime.date.today() - datetime.timedelta(days=7),
            Patient.last_modified_by_id == current_user.id
        ),
        and_(
            Patient.created > datetime.date.today() - datetime.timedelta(days=7),
            Patient.created_by_id == current_user.id
        )
    )).order_by(func.coalesce(Patient.last_modified, Patient.created))

    # Get patients this user referred out who have results entered
    your_completed_referrals_outgoing = Patient.query.join(Patient.referrals).filter(
        Patient.referrals.any(
            and_(
                PatientReferral.from_app_user_id == current_user.id,
                PatientReferral.status == 'COMPLETED'
            )
        )
    ).order_by(func.coalesce(PatientReferral.last_modified, PatientReferral.created))

    # Get patients this user referred out who are waiting for results
    your_open_referrals_outgoing = Patient.query.join(Patient.referrals).filter(
        Patient.referrals.any(
            and_(
                PatientReferral.from_app_user_id == current_user.id,
                PatientReferral.status.in_(('SENT', 'RECEIVED'))
            )
        )
    ).order_by(func.coalesce(PatientReferral.last_modified, PatientReferral.created))

    # Queries to maybe add later:
    # Your starred patients
    # Applications that are inactive/patient not responding

    return render_template(
        'index.html',
        user=current_user,
        all_patients=all_patients,
        org_completed_referrals_outgoing=org_completed_referrals_outgoing,
        org_open_referrals_outgoing=org_open_referrals_outgoing,
        org_open_referrals_incoming=org_open_referrals_incoming,
        org_completed_referrals_incoming=org_completed_referrals_incoming,
        org_need_renewal=org_need_renewal,
        your_recently_updated=your_recently_updated,
        your_completed_referrals_outgoing=your_completed_referrals_outgoing,
        your_open_referrals_outgoing=your_open_referrals_outgoing,
        form=form
    )
def test_final_find():
    sql = "select max(id) from final_lm"
    q = db.get_engine(app, bind='kakou').execute(sql).fetchone()
    print(q[0])
def teardown():
    db.session.remove()
    db.drop_all()
    db.engine.execute("drop table alembic_version")
    db.get_engine(notify_api).dispose()
def index():
    """Display the initial landing page, which lists patients in the network
    and allows users to search and filter them.
    """
    form = SearchPatientForm()
    if request.method == 'POST':
        session['first_name'] = form.search_patient_first_name.data
        session['last_name'] = form.search_patient_last_name.data
        # session['dob'] = form.search_patient_dob.data
        session['ssn'] = form.search_patient_ssn.data
        return redirect(url_for('screener.new_patient'))

    all_patients = Patient.query.all()

    # ORGANIZATION-BASED QUERIES
    org_users = [user.id for user in AppUser.query.filter(
        AppUser.service_id == current_user.service_id
    )]

    # Get patients that this organization referred out who have open referrals or
    # referrals closed in the last month
    org_referrals_outgoing = db.session.query(
        Patient.id,
        Patient.first_name,
        Patient.last_name,
        Patient.dob,
        func.max(func.coalesce(PatientReferral.last_modified, PatientReferral.created)).label(
            "referral_last_modified"
        )
    ).join(Patient.referrals).filter(
        and_(
            or_(
                and_(
                    and_(
                        PatientReferral.from_app_user_id.in_(org_users),
                        PatientReferral.status == 'COMPLETED'
                    ),
                    func.coalesce(
                        PatientReferral.last_modified,
                        PatientReferral.created
                    ) > datetime.date.today() - relativedelta(months=1)
                ),
                and_(
                    PatientReferral.from_app_user_id.in_(org_users),
                    PatientReferral.status == 'SENT'
                )
            ),
            Patient.deleted == None
        )
    ).group_by(
        Patient.id,
        Patient.first_name,
        Patient.last_name
    ).order_by(
        desc(func.max(func.coalesce(PatientReferral.last_modified, PatientReferral.created)))
    )

    # Get patients with open referrals or referrals closed in the last month at
    # this user's organization
    org_referrals_incoming = db.session.query(
        Patient.id,
        Patient.first_name,
        Patient.last_name,
        Patient.dob,
        func.max(func.coalesce(PatientReferral.last_modified, PatientReferral.created)).label(
            "referral_last_modified"
        )
    ).join(Patient.referrals).filter(
        and_(
            or_(
                and_(
                    PatientReferral.to_service_id == current_user.service_id,
                    PatientReferral.status == 'SENT'
                ),
                and_(
                    and_(
                        PatientReferral.to_service_id == current_user.service_id,
                        PatientReferral.status == 'COMPLETED'
                    ),
                    func.coalesce(
                        PatientReferral.last_modified,
                        PatientReferral.created
                    ) > datetime.date.today() - relativedelta(months=1)
                )
            ),
            Patient.deleted == None
        )
    ).group_by(
        Patient.id,
        Patient.first_name,
        Patient.last_name
    ).order_by(
        desc(func.max(func.coalesce(PatientReferral.last_modified, PatientReferral.created)))
    )

    # Get patients who were most recently screened and found eligible for this
    # organization more than 11 months ago
    # No idea how to do this in SQLAlchemy
    query = text(
        "select * from ( "
        "select "
        "patient.id, "
        "patient.first_name, "
        "patient.middle_name, "
        "patient.last_name, "
        "patient.dob, "
        "max(patient_screening_result.created) as most_recent_result "
        "from "
        "patient, "
        "patient_screening_result "
        "where "
        "patient.id = patient_screening_result.patient_id "
        "and patient_screening_result.eligible_yn = 'Y' "
        "and patient_screening_result.service_id = :service_id "
        "and patient.deleted is null "
        "group by "
        "patient.id, "
        "patient.first_name, "
        "patient.middle_name, "
        "patient.last_name, "
        "patient.dob "
        ") subquery where most_recent_result < :eleven_months_ago "
        "order by subquery.most_recent_result "
    )
    conn = db.get_engine(current_app).connect()
    org_need_renewal = conn.execute(
        query,
        service_id=current_user.service_id,
        eleven_months_ago=datetime.date.today() - relativedelta(months=11)
    ).fetchall()

    # USER-BASED QUERIES

    # Get patients this user created or updated in the last week
    your_recently_updated = Patient.query.filter(or_(
        and_(
            Patient.last_modified > datetime.date.today() - datetime.timedelta(days=7),
            Patient.last_modified_by_id == current_user.id,
            Patient.deleted == None
        ),
        and_(
            Patient.created > datetime.date.today() - datetime.timedelta(days=7),
            Patient.created_by_id == current_user.id,
            Patient.deleted == None
        )
    )).order_by(desc(func.coalesce(Patient.last_modified, Patient.created)))

    # Get patients this user referred out who have open referrals or referrals
    # closed in the last month
    your_referrals_outgoing = db.session.query(
        Patient.id,
        Patient.first_name,
        Patient.last_name,
        Patient.dob,
        func.max(func.coalesce(PatientReferral.last_modified, PatientReferral.created)).label(
            "referral_last_modified"
        )
    ).join(Patient.referrals).filter(
        and_(
            or_(
                and_(
                    and_(
                        PatientReferral.from_app_user_id == current_user.id,
                        PatientReferral.status == 'COMPLETED'
                    ),
                    func.coalesce(
                        PatientReferral.last_modified,
                        PatientReferral.created
                    ) > datetime.date.today() - relativedelta(months=1)
                ),
                and_(
                    PatientReferral.from_app_user_id == current_user.id,
                    PatientReferral.status == 'SENT'
                )
            ),
            Patient.deleted == None
        )
    ).group_by(
        Patient.id,
        Patient.first_name,
        Patient.last_name
    ).order_by(
        desc(func.max(func.coalesce(PatientReferral.last_modified, PatientReferral.created)))
    )

    patient_ids = []
    for patient_list in [
        org_referrals_outgoing,
        org_referrals_incoming,
        org_need_renewal,
        your_recently_updated,
        your_referrals_outgoing
    ]:
        patient_ids += [patient.id for patient in patient_list]
    patients = Patient.query.filter(Patient.id.in_(patient_ids))
    patient_dict = {patient.id: patient for patient in patients}

    return render_template(
        'index.html',
        user=current_user,
        all_patients=all_patients,
        patient_dict=patient_dict,
        org_referrals_outgoing=org_referrals_outgoing,
        org_referrals_incoming=org_referrals_incoming,
        org_need_renewal=org_need_renewal,
        your_recently_updated=your_recently_updated,
        your_referrals_outgoing=your_referrals_outgoing,
        one_month_ago=datetime.datetime.today() - relativedelta(months=1),
        form=form
    )
def del_post():
    engine = db.get_engine(db.get_app())
    post_up.drop(engine)
    return 'post_up is del'