def main():
    """Sync lift statuses from the lift service into the DB and notify.

    For each lift reported by the service, update the stored row for the
    current SEASON when its status changed, then send one notification
    batch covering every changed lift.
    """
    session = Session()
    try:
        lift_service = LiftService()
        lifts = lift_service.fetch_lifts()
        updated = []
        for lift in lifts:
            stored_lift = session.query(Lift).filter(
                Lift.season.like(SEASON),
                Lift.name.like(lift['name'])).first()
            if stored_lift and stored_lift.status != lift['status']:
                stored_lift.status = lift['status']
                stored_lift.last_updated = lift['last_updated']
                session.commit()
                updated.append(lift)
        # Fix: `any(updated)` tested the truthiness of each dict rather than
        # whether the list is non-empty; plain truthiness is the right check.
        if updated:
            notification_service = NotificationService(len(updated))
            notification_service.send_notifications()
    finally:
        session.close()
def create(db_session: Session, *, creator: DBUser) -> Game:
    """Create a game with *creator* as its first player and persist it."""
    new_game = Game(players=[creator], status=GameStatus.CREATED)
    # Register the game with the game manager before persisting the row.
    game_manager.create_game(new_game.id)
    db_session.add(new_game)
    db_session.commit()
    # Reload DB-generated fields onto the instance before returning it.
    db_session.refresh(new_game)
    return new_game
def add_user(*, db: Session, email: str, token: str):
    """Insert a new CovidUser row; roll back and re-raise on any failure.

    The session is always closed, whether the insert succeeds or not.
    """
    try:
        db.add(CovidUser(email=email, token=token))
        db.commit()
    except Exception:
        # Undo the partial transaction, then propagate the original error.
        db.rollback()
        raise
    finally:
        db.close()
def main() -> None:
    """Gather forecast and swell data for every data-gathering spot.

    For each spot flagged ``gathering_data``: fetch the forecast and swell
    info, flatten them into a ``Forecast`` row, and commit the whole batch.
    """
    session = Session()
    try:
        timestamp = datetime.utcnow()
        # SQLAlchemy needs `== True` here: it builds a SQL expression,
        # so `is True` / bare truthiness would not work.
        spots = session.query(Spot).filter(Spot.gathering_data == True).all()
        requests_session = requests.Session()
        access_token = login(requests_session)
        for spot in spots:
            forecast_info = fetch_forecast_info(
                requests_session, spot.surfline_spot_id, access_token)
            swell_info = fetch_swell_info(
                requests_session, spot.surfline_spot_id, access_token)
            # Flatten the six swell readings into the swellN_* columns
            # instead of spelling out all 18 keyword arguments by hand.
            # Explicit indexing keeps the original IndexError behaviour
            # if fewer than six swells come back.
            swell_fields = {}
            for i in range(6):
                swell = swell_info['swells'][i]
                swell_fields[f'swell{i + 1}_height'] = swell['height']
                swell_fields[f'swell{i + 1}_period'] = swell['period']
                swell_fields[f'swell{i + 1}_direction'] = swell['direction']
            forecast = Forecast(
                spot_id=spot.id,
                timestamp=timestamp,
                am_min_height=forecast_info['am']['minHeight'],
                am_max_height=forecast_info['am']['maxHeight'],
                am_rating=forecast_info['am']['rating'],
                pm_min_height=forecast_info['pm']['minHeight'],
                pm_max_height=forecast_info['pm']['maxHeight'],
                pm_rating=forecast_info['pm']['rating'],
                **swell_fields)
            session.add(forecast)
        session.commit()
    finally:
        session.close()
def update_user(*, db: Session, condition: dict, data: dict):
    """Update CovidUser rows matching *condition* with *data*.

    Returns the number of rows updated. Rolls back and re-raises on
    failure; the session is always closed.
    """
    try:
        # Bug fix: the query must run on the `db` session instance — the
        # original called `Session.query(...)` on the class itself, which
        # fails at runtime (no bound session).
        result = db.query(CovidUser).filter_by(**condition).update(data)
        db.commit()
        return result
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
def main():
    """Seed the database with the NEW_SPOTS records."""
    session = Session()
    # Fix: wrap in try/finally so the session is released even when an
    # insert fails (the original leaked it on error); this also matches
    # the pattern used by the other seeding scripts in this file.
    try:
        for spot_attributes in NEW_SPOTS:
            spot = Spot(surfline_id=spot_attributes['surfline_id'],
                        surfline_spot_id=spot_attributes['surfline_spot_id'],
                        name=spot_attributes['name'],
                        favorable_swells=spot_attributes['favorable_swells'])
            session.add(spot)
        session.commit()
    finally:
        session.close()
def add_captcha(*, db: Session, captcha: str, session_id: str, expiration: str):
    """Persist a captcha challenge; roll back and re-raise on failure.

    The session is always closed, whether the insert succeeds or not.
    """
    try:
        db.add(Captcha(captcha=captcha,
                       session_id=session_id,
                       expiration=expiration))
        db.commit()
    except Exception:
        # Undo the partial transaction, then propagate the original error.
        db.rollback()
        raise
    finally:
        db.close()
def create_by_user_id(
    self, user_id: UUID, obj_in: CandidateCreate, db: Session
) -> Candidate:
    """Persist a new candidate owned by *user_id* and return the saved row."""
    candidate = self.model(
        name=obj_in.name,
        email=obj_in.email,
        linkedin_url=obj_in.linkedin_url,
        user_id=user_id,
    )
    db.add(candidate)
    db.commit()
    # Pull back DB-generated fields (id, timestamps) before returning.
    db.refresh(candidate)
    return candidate
def main() -> None:
    """Insert every entry of SPOTS into the database."""
    session = Session()
    try:
        for attrs in SPOTS:
            session.add(Spot(surfline_id=attrs['surfline_id'],
                             surfline_spot_id=attrs['surfline_spot_id'],
                             name=attrs['name'],
                             favorable_swells=attrs['favorable_swells'],
                             gathering_data=attrs['gathering_data']))
        session.commit()
    finally:
        # Always release the session, even if an insert fails.
        session.close()
async def get_auth_user_id(
    credentials: JWTAuthorizationCredentials = Depends(auth),
    db: Session = Depends(get_db),
) -> str:
    """Resolve the authenticated user's id from JWT claims, creating the
    User row on first sight.

    Raises:
        HTTPException: 403 when the expected claims are missing.
    """
    try:
        user_id = credentials.claims["sub"]
        username = credentials.claims["username"]
    except KeyError:
        # Bug fix: the exception was constructed but never raised, so a
        # request with missing claims silently returned None instead of 403.
        # The try body is also narrowed to just the claim lookups so
        # unrelated KeyErrors from repo code are not misreported.
        raise HTTPException(status_code=HTTP_403_FORBIDDEN,
                            detail="Username missing")
    user = user_repo.find(db=db, model_id=user_id)
    if not user:
        db.add(User(id=user_id, username=username))
        db.commit()
    return user_id
def update_metadata(*, db: Session = Depends(get_db), dataset_id: int, ):
    """Commit any pending metadata changes for *dataset_id*.

    NOTE(review): the original body consisted entirely of commented-out
    experiments (pydicom header reads and a pneumothorax-label CSV join)
    plus unused local imports; the only live statements were the commit
    and the return. The dead code has been removed — recover it from
    version control if the label-import path is still needed.
    """
    db.commit()
    return "OK"
def main():
    """Fetch the current lift list from the service and insert a row per lift."""
    session = Session()
    try:
        for item in LiftService().fetch_lifts():
            session.add(Lift(
                name=item['name'],
                status=item['status'],
                kind=item['kind'],
                season=item['season'],
                last_updated=item['last_updated'],
            ))
        session.commit()
    finally:
        # Always release the session, even on failure.
        session.close()
def update_metadata(*, db: Session = Depends(get_db), dataset_id: int, ):
    """Re-read the DICOM headers of every file in the dataset and store
    them on each file record as JSON metadata."""
    import pydicom
    dataset = crud.dataset.get(db_session=db, id=dataset_id)
    if not dataset:
        raise HTTPException(status_code=404,
                            detail=f"dataset {dataset_id} not found")
    dicom_attrs = ("PatientID", "PatientName", "PatientAge", "PatientSex",
                   "ViewPosition", "BodyPartExamined", "Modality",
                   "StudyDate", "StudyTime")
    for record in dataset.files:
        full_path = os.path.join(dataset.base_dir, record.path)
        headers = pydicom.dcmread(full_path)
        # Missing attributes become empty strings rather than raising.
        record.meta = json.dumps(
            {name: str(getattr(headers, name, '')) for name in dicom_attrs})
    db.commit()
    return "OK"
def dataset_index(
    *, db: Session = Depends(get_db), analysis_model_in: AnalysisModelIn
):
    """Create an analysis model for a dataset, build its index, and return
    the stored model."""
    dataset = crud.dataset.get(db_session=db, id=analysis_model_in.dataset_id)
    if not dataset:
        raise HTTPException(
            status_code=404,
            detail=f"dataset {analysis_model_in.dataset_id} not found")
    # Persist first (with an empty index) so the row has an id that the
    # index builder can use.
    db_model = DBAnalysisModel(index="",
                               name=analysis_model_in.name,
                               dataset_id=analysis_model_in.dataset_id)
    db.add(db_model)
    db.commit()
    db.refresh(db_model)
    db_model.index = create_index(dataset, analysis_model_in.name,
                                  ANALYSIS_INDEX_DIR, db_model.id)
    db.commit()
    return AnalysisModel(id=db_model.id,
                         index=db_model.index,
                         name=db_model.name,
                         dataset_id=db_model.dataset_id)
def create_item(
    *, db: Session = Depends(get_db), dataset_in: DatasetCreate,
):
    """
    Create new item.
    """
    base_dir = Path(dataset_in.base_dir)
    thumbnail_dir = Path(dataset_in.thumbnail_dir)
    if (not base_dir.exists()) or (not thumbnail_dir.exists()):
        raise HTTPException(status_code=400,
                            detail=f"Invalid path {base_dir} or {thumbnail_dir}")
    dataset = crud.dataset.create(db_session=db, dataset_in=dataset_in)
    rows = []
    for entry in base_dir.glob("**/*"):
        if entry.is_dir():
            continue
        rel_path = os.path.relpath(entry, base_dir)
        thumbnail = thumbnail_dir.joinpath(rel_path)
        # Accept either an exact-name thumbnail or a .jpg variant.
        if thumbnail.exists():
            found = thumbnail
        elif thumbnail.with_suffix(".jpg").exists():
            found = thumbnail.with_suffix(".jpg")
        else:
            # No thumbnail for this file: undo the dataset row and abort.
            crud.dataset.remove(db_session=db, id=dataset.id)
            raise HTTPException(status_code=400,
                                detail=f"Thumbnail {thumbnail} doesn't exist")
        rows.append(DBDatasetFile(
            name=entry.name,
            dataset_id=dataset.id,
            path=rel_path,
            thumbnail=os.path.relpath(found, thumbnail_dir)))
    db.bulk_save_objects(rows)
    db.commit()
    return dataset
def create_with_items(self, db: Session,
                      transaction: TransactionCreate) -> Transaction:
    """Persist a transaction and its line items; return the refreshed row.

    The transaction is committed first so its generated id is available to
    attach to each item; the items are then added and committed together.
    """
    db_trx = Transaction(
        type=TransactionTypes(transaction.type),
        description=transaction.description,
        amount=transaction.amount,
        user_id=transaction.user_id,
    )
    db.add(db_trx)
    db.commit()
    db.refresh(db_trx)
    for item in transaction.items:
        # product_id arrives via the encoded payload; the original also had
        # a no-op `db_item.product_id = db_item.product_id`, removed here.
        db_item = TransactionItem(**jsonable_encoder(item))
        db_item.transaction_id = db_trx.id
        db.add(db_item)
    # Single commit so the items land atomically (the original committed
    # and refreshed once per item).
    db.commit()
    db.refresh(db_trx)
    return db_trx
def main() -> None:
    """For each spot: fetch forecasts and swells, run the prediction model,
    and record both the Surfline and model height predictions per day."""
    session = Session()
    try:
        created_on = datetime.utcnow()
        spots = session.query(Spot).all()
        requests_session = requests.Session()
        access_token = login(requests_session)
        for spot in spots:
            forecasts = fetch_forecasts(requests_session,
                                        spot.surfline_spot_id, access_token)
            swells = fetch_swells(requests_session,
                                  spot.surfline_spot_id, access_token)
            predicted_heights = fetch_predictions(swells)
            # NOTE(review): this covers FORECAST_DAYS - 1 days, skipping the
            # last forecast day — confirm that is intentional.
            for day in range(FORECAST_DAYS - 1):
                forecast = forecasts[day]
                swell = swells[day]
                # Fix: the original reused the name `prediction` for both the
                # model output and the ORM row, shadowing the former; the two
                # are now clearly distinguished.
                predicted_height = predicted_heights[day]
                # Flatten the six swell readings into the swellN_* columns
                # instead of 18 hand-written keyword arguments. Explicit
                # indexing keeps the original IndexError on short input.
                swell_fields = {}
                for i in range(6):
                    reading = swell['swells'][i]
                    swell_fields[f'swell{i + 1}_height'] = reading['height']
                    swell_fields[f'swell{i + 1}_period'] = reading['period']
                    swell_fields[f'swell{i + 1}_direction'] = reading['direction']
                session.add(Prediction(
                    spot_id=spot.id,
                    created_on=created_on,
                    forecasted_for=created_on + timedelta(days=(day + 1)),
                    surfline_height=humanized_height_round(
                        average_forecast_height(forecast)),
                    stoke_height=humanized_height_round(predicted_height),
                    **swell_fields))
        session.commit()
    finally:
        session.close()