def main() -> None:
    """Purge Prediction rows created more than 30 days ago."""
    session = Session()
    try:
        cutoff = datetime.datetime.today() - datetime.timedelta(days=30)
        # NOTE(review): no commit after delete() — confirm the Session is
        # configured to autocommit, otherwise the purge may roll back.
        session.query(Prediction).filter(
            Prediction.created_on < cutoff).delete()
    finally:
        session.close()
def main():
    """Sync lift statuses from the lift service and notify about any changes."""
    session = Session()
    try:
        changed = []
        for lift in LiftService().fetch_lifts():
            record = session.query(Lift).filter(
                Lift.season.like(SEASON),
                Lift.name.like(lift['name'])).first()
            # Guard clause: skip unknown lifts and lifts whose status is unchanged.
            if record is None or record.status == lift['status']:
                continue
            record.status = lift['status']
            record.last_updated = lift['last_updated']
            session.commit()  # commit per changed lift, as the original did
            changed.append(lift)
        if any(changed):
            NotificationService(len(changed)).send_notifications()
    finally:
        session.close()
def dataset_index(*, db: Session = Depends(get_db), model_id: int, image_id: int):
    """List dataset images most similar to *image_id* under model *model_id*."""
    query_file = db.query(DBDatasetFile).filter(
        DBDatasetFile.id == image_id).first()
    model_file = db.query(DBAnalysisModel).filter(
        DBAnalysisModel.id == model_id).first()
    distance, raw_ids = find_similar_images(query_file, model_file)
    # Drop the query image itself from its own neighbour list.
    neighbour_ids = [int(raw) for raw in raw_ids[0] if int(raw) != image_id]
    neighbours = db.query(DBDatasetFile).filter(
        DBDatasetFile.id.in_(neighbour_ids)).all()
    by_id = {f.id: f for f in neighbours}
    # Restore the similarity ordering that the IN() query does not preserve.
    ordered = [by_id[each_id] for each_id in neighbour_ids]
    return {
        'datasets': [query_file.dataset],
        'items': _get_dataset_model_out(ordered, query_file.dataset.id),
        'models': [],
    }
def get_region_list(*, db: Session):
    """Return the distinct country names present in the Covid19 table."""
    try:
        return db.query(distinct(Covid19.country_en)).all()
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
def get_user(*, db: Session, condition: dict):
    """Return all CovidUser rows matching the keyword filters in *condition*."""
    try:
        return db.query(CovidUser).filter_by(**condition).all()
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
def get_last_update_date(*, db: Session):
    """Return the most recent Covid19.update_date value."""
    try:
        rows = db.query(
            func.max(Covid19.update_date).label("max_update_date")).all()
        # .all() yields a single one-column row; unwrap the scalar.
        return rows[0][0]
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
def read_items(
    db: Session = Depends(get_db), skip: int = 0, limit: int = 100
):
    """Retrieve one page of datasets using skip/limit pagination."""
    return db.query(DBDataset).offset(skip).limit(limit).all()
def main() -> None:
    """For each spot gathering data, fetch forecast and swell info from
    surfline and persist one Forecast row per spot."""
    session = Session()
    try:
        timestamp = datetime.utcnow()
        spots = session.query(Spot).filter(Spot.gathering_data == True).all()
        requests_session = requests.Session()
        access_token = login(requests_session)
        for spot in spots:
            forecast_info = fetch_forecast_info(
                requests_session, spot.surfline_spot_id, access_token)
            swell_info = fetch_swell_info(
                requests_session, spot.surfline_spot_id, access_token)
            # Flatten the six swell components into swellN_* keyword args.
            # Indexing 0..5 directly preserves the original IndexError if
            # fewer than six swells come back.
            swell_kwargs = {}
            for n in range(6):
                component = swell_info['swells'][n]
                swell_kwargs[f'swell{n + 1}_height'] = component['height']
                swell_kwargs[f'swell{n + 1}_period'] = component['period']
                swell_kwargs[f'swell{n + 1}_direction'] = component['direction']
            session.add(Forecast(
                spot_id=spot.id,
                timestamp=timestamp,
                am_min_height=forecast_info['am']['minHeight'],
                am_max_height=forecast_info['am']['maxHeight'],
                am_rating=forecast_info['am']['rating'],
                pm_min_height=forecast_info['pm']['minHeight'],
                pm_max_height=forecast_info['pm']['maxHeight'],
                pm_rating=forecast_info['pm']['rating'],
                **swell_kwargs))
            session.commit()
    finally:
        session.close()
def update_user(*, db: Session, condition: dict, data: dict):
    """Update CovidUser rows matching *condition* with the values in *data*.

    Returns the number of rows updated (the value of Query.update()).
    """
    try:
        # BUG FIX: the original called Session.query — the sessionmaker
        # class/factory — instead of the injected `db` session instance,
        # so the update never ran against the request's session.
        result = db.query(CovidUser).filter_by(**condition).update(data)
        db.commit()
        return result
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
async def root(request):
    """Render the lifts index page for the requested (or current) season."""
    session = Session()
    try:
        season = request.query_params.get('season', SEASON)
        rows = (session.query(Lift)
                .filter(Lift.season == season)
                .order_by(Lift.last_updated.desc())
                .all())
        return templates.TemplateResponse(
            'lifts/index.html.j2',
            {'request': request,
             'season': season,
             'lifts': [row._for_html() for row in rows]})
    finally:
        session.close()
def get_population(*, db: Session, country: str):
    """Return Population rows, optionally narrowed to a single country.

    An empty/falsy *country* returns every row.
    """
    try:
        query = db.query(Population)
        # FIX: the original passed and_(1 == 1) — i.e. the plain Python
        # bool True — as a filter for the "no country" case, which
        # SQLAlchemy deprecates. Apply the filter only when needed.
        if country:
            query = query.filter(Population.country_en == country)
        return query.all()
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
async def lifts(request):
    """JSON list of lifts for the requested (or current) season."""
    session = Session()
    try:
        season = request.query_params.get('season', SEASON)
        rows = (session.query(Lift)
                .filter(Lift.season == season)
                .order_by(Lift.last_updated.desc())
                .all())
        return JSONResponse({'lifts': [row._for_json() for row in rows]})
    finally:
        session.close()
def get_captcha_by_session(
    *,
    db: Session,
    session: str,
):
    """Return the newest Captcha row for *session*, or None if absent."""
    try:
        return (db.query(Captcha)
                .filter_by(session_id=session)
                .order_by(Captcha.id.desc())
                .first())
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
def dataset_browse(
    *,
    db: Session = Depends(get_db),
    dataset_id: int,
    skip: int = 0,
    limit: int = 100
):
    """Return one page of a dataset's files plus its analysis models.

    Raises HTTPException(404) when the dataset does not exist.
    """
    dataset = crud.dataset.get(db_session=db, id=dataset_id)
    if not dataset:
        raise HTTPException(status_code=404,
                            detail=f"dataset {dataset_id} not found")
    models = db.query(DBAnalysisModel).filter(
        DBAnalysisModel.dataset_id == dataset_id).all()
    files = (db.query(DBDatasetFile)
             .filter(DBDatasetFile.dataset_id == dataset_id)
             .offset(skip)
             .limit(limit)
             .all())
    return {
        'datasets': [dataset],
        'items': _get_dataset_model_out(files, dataset_id),
        'models': [DatasetModel(id=m.id, name=m.name) for m in models],
    }
def get_infection_city_data(*, db: Session, city: str, stime: str or None,
                            etime: str or None, country: str):
    """Sum daily-added confirmed/deaths/recovered counts for one city.

    When both *stime* and *etime* are given, sums over that date range;
    otherwise sums for the latest update_date only.
    NOTE(review): the `str or None` annotations evaluate to plain `str`;
    `Optional[str]` was probably intended — confirm before changing.
    """
    try:
        if country:  # a country was supplied in the query
            filters = and_(Covid19.province_en == city,
                           Covid19.country_en == country)
        else:  # no country in the query
            filters = and_(Covid19.province_en == city)
        if stime and etime:
            result = db.query(
                func.sum(Covid19.confirmed_add).label("confirmed_add"),
                func.sum(Covid19.deaths_add).label("deaths_add"),
                func.sum(Covid19.recovered_add).label("recovered_add"),
            ).filter(and_(Covid19.update_date.between(stime, etime)),
                     filters).all()
            return result
        else:
            # fetch the most recent update date on record
            max_update_date = db.query(
                func.max(
                    Covid19.update_date).label("max_update_date")).all()
            result = db.query(
                func.sum(Covid19.confirmed_add).label("confirmed_add"),
                func.sum(Covid19.deaths_add).label("deaths_add"),
                func.sum(Covid19.recovered_add).label("recovered_add"),
            ).filter(
                and_(Covid19.update_date == str(max_update_date[0][0])),
                filters).all()
            return result
    except Exception as _:
        db.rollback()
        raise
    finally:
        db.close()
def get_infection_global_data(*, db: Session):
    """Sum daily-added confirmed/deaths/recovered counts per country."""
    try:
        return (db.query(
                    Covid19.country_en,
                    func.sum(Covid19.confirmed_add).label("confirmed_add"),
                    func.sum(Covid19.deaths_add).label("deaths_add"),
                    func.sum(Covid19.recovered_add).label("recovered_add"))
                .group_by(Covid19.country_en)
                .all())
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
def add_static_routes():
    """Mount per-dataset static routes for original images and thumbnails."""
    from app.db_models.dataset import Dataset as DBDataset
    db = Session()
    try:
        for dataset in db.query(DBDataset).all():
            app.mount(
                config.DATASET_STATIC_ORIG_TEMPLATE.format(
                    dataset_id=dataset.id),
                StaticFiles(directory=dataset.base_dir),
                name="static")
            app.mount(
                config.DATASET_STATIC_THUMB_TEMPLATE.format(
                    dataset_id=dataset.id),
                StaticFiles(directory=dataset.thumbnail_dir),
                name="static")
    finally:
        # FIX: the original leaked the session when a mount raised
        # (e.g. a missing directory); always close it.
        db.close()
def get_area_list(*, db: Session, region: str, hmt: bool):
    """List distinct provinces of *region*.

    With hmt=False, Hong Kong / Macao / Taiwan provinces are excluded.
    """
    conditions = [Covid19.continents_en != "", Covid19.country_en == region]
    if not hmt:
        # exclude Hong Kong / Macao / Taiwan (keep the original filter order)
        conditions.insert(1, Covid19.province_en.notin_(HMT))
    filters = and_(*conditions)
    try:
        return db.query(distinct(Covid19.province_en)).filter(filters).all()
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
async def predictions(request):
    """JSON list of predictions, filterable by spot_ids / created_on, paginated."""
    session = Session()
    try:
        query = session.query(Prediction)

        spot_ids_param = request.query_params.get('spot_ids', None)
        if spot_ids_param:
            # FIX: removed a bare try/except around str.split — split on a
            # str never raises, so the fallback was dead code.
            query = query.filter(
                Prediction.spot_id.in_(spot_ids_param.split(',')))

        created_on_param = request.query_params.get('created_on', None)
        if created_on_param:
            try:
                created_on_date = datetime.fromisoformat(
                    created_on_param).date()
            except ValueError:
                # FIX: was a bare except; only a malformed ISO string can
                # raise here. Fall back to today (UTC), as before.
                created_on_date = datetime.utcnow().date()
            query = query.filter(
                cast(Prediction.created_on, Date) == created_on_date)

        query = query.order_by(Prediction.id.desc())

        try:
            page = int(request.query_params.get('page', '1'))
        except ValueError:
            # FIX: a non-numeric ?page= previously escaped as an
            # unhandled ValueError (HTTP 500); default to the first page.
            page = 1
        query = query.limit(PAGE_SIZE).offset((page - 1) * PAGE_SIZE)

        return JSONResponse(
            {'predictions': [p._asdict() for p in query.all()]})
    finally:
        session.close()
async def forecasts(request):
    """JSON list of forecasts, filterable by spot_ids / after-timestamp, paginated."""
    session = Session()
    try:
        query = session.query(Forecast)

        spot_ids_param = request.query_params.get('spot_ids', None)
        if spot_ids_param:
            # FIX: removed a bare try/except around str.split — split on a
            # str never raises, so the fallback was dead code.
            query = query.filter(
                Forecast.spot_id.in_(spot_ids_param.split(',')))

        after_param = request.query_params.get('after', None)
        if after_param:
            try:
                after_datetime = datetime.fromisoformat(after_param)
            except ValueError:
                # FIX: was a bare except; only a malformed ISO string can
                # raise here. Fall back to "now" (UTC), as before.
                after_datetime = datetime.utcnow()
            query = query.filter(Forecast.timestamp >= after_datetime)

        query = query.order_by(Forecast.id.desc())

        try:
            page = int(request.query_params.get('page', '1'))
        except ValueError:
            # FIX: a non-numeric ?page= previously escaped as an
            # unhandled ValueError (HTTP 500); default to the first page.
            page = 1
        query = query.limit(PAGE_SIZE).offset((page - 1) * PAGE_SIZE)

        return JSONResponse(
            {'forecasts': [f._asdict() for f in query.all()]})
    finally:
        session.close()
async def spots(request):
    """JSON list of spots, optionally filtered by surfline_spot_ids."""
    session = Session()
    try:
        query = session.query(Spot)
        ids_param = request.query_params.get('surfline_spot_ids', None)
        if ids_param:
            # FIX: removed a bare try/except around str.split — split on a
            # str never raises, so the empty-list fallback was dead code.
            query = query.filter(
                Spot.surfline_spot_id.in_(ids_param.split(',')))
        return JSONResponse({'spots': [s._asdict() for s in query.all()]})
    finally:
        session.close()
def find_by_user_id(self, user_id: UUID, db: Session) -> List[Transaction]:
    """Return every Transaction belonging to *user_id*."""
    query = db.query(self.model)
    return query.filter(Transaction.user_id == user_id).all()
def find_by_transaction_id(self, transaction_id: UUID,
                           db: Session) -> List[TransactionItem]:
    """Return every TransactionItem belonging to *transaction_id*."""
    query = db.query(self.model)
    return query.filter(
        TransactionItem.transaction_id == transaction_id).all()
def main() -> None:
    """For each spot, fetch swells, predict onshore swell height, and store
    one Prediction row (surfline vs. ML model) per forecast day."""
    session = Session()
    try:
        created_on = datetime.utcnow()
        spots = session.query(Spot).all()
        requests_session = requests.Session()
        access_token = login(requests_session)
        for spot in spots:
            forecasts = fetch_forecasts(requests_session,
                                        spot.surfline_spot_id, access_token)
            swells = fetch_swells(requests_session, spot.surfline_spot_id,
                                  access_token)
            predicted_heights = fetch_predictions(swells)
            for day in range(0, (FORECAST_DAYS - 1)):
                forecast = forecasts[day]
                swell = swells[day]
                # Renamed from `prediction`: the original shadowed this
                # value with the ORM row built below.
                predicted = predicted_heights[day]
                # Flatten the six swell components into swellN_* kwargs;
                # indexing 0..5 preserves the original IndexError if fewer
                # than six swells come back.
                swell_kwargs = {}
                for n in range(6):
                    component = swell['swells'][n]
                    swell_kwargs[f'swell{n + 1}_height'] = component['height']
                    swell_kwargs[f'swell{n + 1}_period'] = component['period']
                    swell_kwargs[f'swell{n + 1}_direction'] = (
                        component['direction'])
                session.add(Prediction(
                    spot_id=spot.id,
                    created_on=created_on,
                    forecasted_for=created_on + timedelta(days=(day + 1)),
                    surfline_height=humanized_height_round(
                        average_forecast_height(forecast)),
                    stoke_height=humanized_height_round(predicted),
                    **swell_kwargs))
                session.commit()
    finally:
        session.close()
def find_by_user_id(self, user_id: UUID, db: Session) -> List[Candidate]:
    """Return every Candidate belonging to *user_id*."""
    query = db.query(self.model)
    return query.filter(Candidate.user_id == user_id).all()
def get_infection_country_area_data(*, db: Session, country: str,
                                    start_date: str or None,
                                    end_date: str or None, hmt: bool):
    """Per-province daily infection figures for *country*.

    With a date range, returns one row per (update_date, province) in the
    range; otherwise only rows for the latest update_date. *hmt* controls
    whether Hong Kong / Macao / Taiwan provinces are included.
    NOTE(review): the `str or None` annotations evaluate to plain `str`;
    `Optional[str]` was probably intended — confirm before changing.
    """
    try:
        if hmt:  # include Hong Kong / Macao / Taiwan
            filters = and_(Covid19.continents_en != "")
        else:  # exclude Hong Kong / Macao / Taiwan
            filters = and_(Covid19.continents_en != "",
                           Covid19.province_en.notin_(HMT))
        if start_date and not end_date:
            # default the end of the range to the newest date on record
            max_update_date = db.query(
                func.max(
                    Covid19.update_date).label("max_update_date")).all()
            end_date = str(max_update_date[0][0])
        # validate the (possibly defaulted) date range
        check_date_filter(
            DateFilters(**{
                'start_date': start_date,
                'end_date': end_date
            }))
        if start_date and end_date:
            result = db.query(
                Covid19.update_date,
                Covid19.province_en,
                Covid19.confirmed_add,
                Covid19.deaths_add,
                Covid19.recovered_add,
                Covid19.confirmed,
                Covid19.deaths,
                Covid19.recovered,
            ).filter(
                and_(Covid19.country_en == country,
                     Covid19.update_date.between(start_date, end_date)),
                filters).group_by(Covid19.update_date,
                                  Covid19.province_en).all()
            return result
        else:
            # no range requested: use only the most recent update date
            max_update_date = db.query(
                func.max(
                    Covid19.update_date).label("max_update_date")).all()
            result = db.query(
                Covid19.update_date,
                Covid19.province_en,
                Covid19.confirmed_add,
                Covid19.deaths_add,
                Covid19.recovered_add,
                Covid19.confirmed,
                Covid19.deaths,
                Covid19.recovered,
            ).filter(
                and_(Covid19.country_en == country,
                     Covid19.update_date == str(max_update_date[0][0])),
                filters).group_by(Covid19.update_date,
                                  Covid19.province_en).all()
            return result
    except Exception as _:
        db.rollback()
        raise
    finally:
        db.close()