def post_ensemble(
    *, db: Session = Depends(get_db), ens_in: js.EnsembleIn, experiment_id: UUID
) -> ds.Ensemble:
    """
    Create a new ensemble attached to the given experiment.

    If ``ens_in.active_realizations`` is empty or unset, all realizations
    ``0..size-1`` are considered active.  An explicitly given list is
    validated for range and uniqueness.  When ``ens_in.update_id`` is set,
    the new ensemble is also linked as that update's result.

    Raises:
        exc.ExpectationError: if an active realization index is negative,
            exceeds ``size - 1``, or the list contains duplicates.
    """
    experiment = db.query(ds.Experiment).filter_by(id=experiment_id).one()
    active_reals = (
        ens_in.active_realizations
        if ens_in.active_realizations
        else list(range(ens_in.size))
    )

    # BUGFIX: validate whenever an explicit (non-empty) list is provided,
    # not only when size > 0 — previously a size-0 ensemble silently
    # accepted arbitrary indices.  Also reject negative indices.
    if active_reals:
        bad = [i for i in active_reals if i < 0 or i > ens_in.size - 1]
        if bad:
            raise exc.ExpectationError(
                f"Ensemble active realization index {bad[0]}"
                f" out of realization range [0,{ens_in.size - 1}]"
            )
        if len(set(active_reals)) != len(active_reals):
            raise exc.ExpectationError(
                f"Non unique active realization index list not allowed {active_reals}"
            )

    ens = ds.Ensemble(
        parameter_names=ens_in.parameter_names,
        response_names=ens_in.response_names,
        experiment=experiment,
        size=ens_in.size,
        userdata=ens_in.userdata,
        active_realizations=active_reals,
    )
    db.add(ens)

    # Optionally link this ensemble as the result of a prior update.
    if ens_in.update_id:
        update_obj = db.query(ds.Update).filter_by(id=ens_in.update_id).one()
        update_obj.ensemble_result = ens
    db.commit()
    return ens
async def post_record_observations(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    name: str,
    realization_index: Optional[int] = None,
    observation_ids: List[UUID] = Body(...),
) -> None:
    """
    Attach observations to a record.

    The target is the ensemble-wide record, or the forward-model record for
    one realization when ``realization_index`` is given.  Responds with 422
    if none of the supplied observation ids exist.
    """
    target = (
        _get_ensemble_record(db, ensemble_id, name)
        if realization_index is None
        else _get_forward_model_record(db, ensemble_id, name, realization_index)
    )

    matched = (
        db.query(ds.Observation)
        .filter(ds.Observation.id.in_(observation_ids))
        .all()
    )
    if not matched:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "error": f"Observations {observation_ids} not found!",
                "name": name,
                "ensemble_id": str(ensemble_id),
            },
        )

    target.observations = matched
    flag_modified(target, "observations")
    db.commit()
async def replace_record_userdata(
    *,
    db: Session = Depends(get_db),
    record: ds.Record = Depends(get_record_by_name),
    body: Any = Body(...),
) -> None:
    """
    Replace the record's userdata wholesale with the posted JSON body.
    """
    record.userdata = body
    db.commit()
async def replace_ensemble_userdata(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    body: Any = Body(...),
) -> None:
    """
    Replace the ensemble's userdata wholesale with the posted JSON body.
    """
    ens = db.query(ds.Ensemble).filter_by(id=ensemble_id).one()
    ens.userdata = body
    db.commit()
async def replace_experiment_userdata(
    *,
    db: Session = Depends(get_db),
    experiment_id: UUID,
    body: Any = Body(...),
) -> None:
    """
    Replace the experiment's userdata wholesale with the posted JSON body.
    """
    exp = db.query(ds.Experiment).filter_by(id=experiment_id).one()
    exp.userdata = body
    db.commit()
async def patch_record_userdata(
    *,
    db: Session = Depends(get_db),
    record: ds.Record = Depends(get_record_by_name),
    body: Any = Body(...),
) -> None:
    """
    Merge the posted JSON body into the record's existing userdata.
    """
    record.userdata.update(body)
    # In-place mutation of a JSON column is invisible to SQLAlchemy's
    # change tracking; mark it dirty explicitly.
    flag_modified(record, "userdata")
    db.commit()
async def replace_observation_userdata(
    *,
    db: Session = Depends(get_db),
    obs_id: UUID,
    body: Any = Body(...),
) -> None:
    """
    Replace the observation's userdata wholesale with the posted JSON body.
    """
    observation = db.query(ds.Observation).filter_by(id=obs_id).one()
    observation.userdata = body
    db.commit()
async def patch_observation_userdata(
    *,
    db: Session = Depends(get_db),
    obs_id: UUID,
    body: Any = Body(...),
) -> None:
    """
    Merge the posted JSON body into the observation's existing userdata.
    """
    observation = db.query(ds.Observation).filter_by(id=obs_id).one()
    observation.userdata.update(body)
    # In-place mutation of a JSON column is invisible to SQLAlchemy's
    # change tracking; mark it dirty explicitly.
    flag_modified(observation, "userdata")
    db.commit()
async def patch_ensemble_userdata(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    body: Any = Body(...),
) -> None:
    """
    Merge the posted JSON body into the ensemble's existing userdata.
    """
    ens = db.query(ds.Ensemble).filter_by(id=ensemble_id).one()
    ens.userdata.update(body)
    # In-place mutation of a JSON column is invisible to SQLAlchemy's
    # change tracking; mark it dirty explicitly.
    flag_modified(ens, "userdata")
    db.commit()
async def patch_experiment_userdata(
    *,
    db: Session = Depends(get_db),
    experiment_id: UUID,
    body: Any = Body(...),
) -> None:
    """
    Merge the posted JSON body into the experiment's existing userdata.
    """
    exp = db.query(ds.Experiment).filter_by(id=experiment_id).one()
    exp.userdata.update(body)
    # In-place mutation of a JSON column is invisible to SQLAlchemy's
    # change tracking; mark it dirty explicitly.
    flag_modified(exp, "userdata")
    db.commit()
async def post_record_observations(
    *,
    db: Session = Depends(get_db),
    record: ds.Record = Depends(get_record_by_name),
    observation_ids: List[UUID] = Body(...),
) -> None:
    """
    Link the given observations to *record*.

    Raises ``exc.UnprocessableError`` when none of the ids match an
    existing observation.
    """
    matched = (
        db.query(ds.Observation)
        .filter(ds.Observation.id.in_(observation_ids))
        .all()
    )
    if not matched:
        raise exc.UnprocessableError(f"Observations {observation_ids} not found!")
    record.observations = matched
    db.commit()
async def replace_record_metadata(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    name: str,
    realization_index: Optional[int] = None,
    body: Any = Body(...),
) -> None:
    """
    Replace the record's metadata wholesale with the posted JSON body.

    The target is the ensemble-wide record, or the forward-model record for
    one realization when ``realization_index`` is given.
    """
    target = (
        _get_ensemble_record(db, ensemble_id, name)
        if realization_index is None
        else _get_forward_model_record(db, ensemble_id, name, realization_index)
    )
    target._metadata = body
    db.commit()
def _create_record(
    db: Session,
    record: ds.Record,
) -> ds.Record:
    """
    Persist *record*, tolerating a pre-existing record info of the same name.

    The insert runs inside a SAVEPOINT so that a UNIQUE-constraint failure
    can be rolled back without aborting the outer transaction.  On conflict,
    the existing RecordInfo for the same ensemble/name is fetched and, if its
    class and type match the new record's, a fresh Record is created against
    it; otherwise an exc.ConflictError is raised.

    Returns the persisted Record (the original, or the re-created one on the
    conflict path).
    """
    # SAVEPOINT: lets us recover from an IntegrityError without losing the
    # enclosing transaction.
    nested = db.begin_nested()
    try:
        db.add(record)
        db.commit()
    except IntegrityError:
        # Assuming this is a UNIQUE constraint failure due to an existing
        # record_info with the same name and ensemble. Try to fetch the
        # record_info
        nested.rollback()
        record_info = record.record_info
        old_record_info = (
            db.query(ds.RecordInfo)
            .filter_by(ensemble=record_info.ensemble, name=record_info.name)
            .one()
        )

        # Check that the parameters match
        if record_info.record_class != old_record_info.record_class:
            raise exc.ConflictError(
                "Record class of new record does not match previous record class",
                new_record_class=record_info.record_class,
                old_record_class=old_record_info.record_class,
            )
        if record_info.record_type != old_record_info.record_type:
            raise exc.ConflictError(
                "Record type of new record does not match previous record type",
                new_record_type=record_info.record_type,
                old_record_type=old_record_info.record_type,
            )

        # Re-create the record against the already-persisted record_info;
        # the rolled-back instance cannot be reused.
        record = ds.Record(
            record_info=old_record_info,
            f64_matrix=record.f64_matrix,
            file=record.file,
            realization_index=record.realization_index,
        )
        db.add(record)
        db.commit()
    return record
def create_update(
    *,
    db: Session = Depends(get_db),
    update: js.UpdateIn,
) -> js.UpdateOut:
    """
    Create an Update referencing an existing ensemble.

    Any supplied observation transformations are matched to observations by
    id and stored alongside the update.
    """
    ensemble = (
        db.query(ds.Ensemble).filter_by(id=update.ensemble_reference_id).one()
    )
    update_obj = ds.Update(
        algorithm=update.algorithm,
        ensemble_reference_pk=ensemble.pk,
    )
    db.add(update_obj)

    if update.observation_transformations:
        # Index transformations by observation name for lookup below.
        by_name = {t.name: t for t in update.observation_transformations}
        wanted_ids = [t.observation_id for t in by_name.values()]
        matched_observations = (
            db.query(ds.Observation)
            .filter(ds.Observation.id.in_(wanted_ids))
            .all()
        )
        db.add_all(
            ds.ObservationTransformation(
                active_list=by_name[obs.name].active,
                scale_list=by_name[obs.name].scale,
                observation=obs,
                update=update_obj,
            )
            for obs in matched_observations
        )

    db.commit()
    return js.UpdateOut(
        id=update_obj.id,
        experiment_id=ensemble.experiment.id,
        algorithm=update_obj.algorithm,
        ensemble_reference_id=ensemble.id,
    )
async def patch_record_metadata(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    name: str,
    realization_index: Optional[int] = None,
    body: Any = Body(...),
) -> None:
    """
    Merge the posted JSON body into the record's existing metadata.

    The target is the ensemble-wide record, or the forward-model record for
    one realization when ``realization_index`` is given.
    """
    target = (
        _get_ensemble_record(db, ensemble_id, name)
        if realization_index is None
        else _get_forward_model_record(db, ensemble_id, name, realization_index)
    )
    target._metadata.update(body)
    # In-place mutation of a JSON column is invisible to SQLAlchemy's
    # change tracking; mark it dirty explicitly.
    flag_modified(target, "_metadata")
    db.commit()
def post_ensemble(
    *, db: Session = Depends(get_db), ens_in: js.EnsembleIn, experiment_id: UUID
) -> ds.Ensemble:
    """
    Create a new ensemble under the given experiment.

    When ``ens_in.update_id`` is set, the new ensemble is additionally
    recorded as that update's result.
    """
    parent_experiment = db.query(ds.Experiment).filter_by(id=experiment_id).one()
    new_ensemble = ds.Ensemble(
        parameter_names=ens_in.parameter_names,
        response_names=ens_in.response_names,
        experiment=parent_experiment,
        size=ens_in.size,
        _metadata=ens_in.metadata,
    )
    db.add(new_ensemble)

    if ens_in.update_id:
        source_update = db.query(ds.Update).filter_by(id=ens_in.update_id).one()
        source_update.ensemble_result = new_ensemble

    db.commit()
    return new_ensemble
def post_observation(
    *, db: Session = Depends(get_db), obs_in: js.ObservationIn, experiment_id: UUID
) -> js.ObservationOut:
    """
    Create an observation under the given experiment.

    Any record ids supplied in ``obs_in.records`` are resolved and linked;
    a missing id makes the underlying query raise.
    """
    experiment = db.query(ds.Experiment).filter_by(id=experiment_id).one()

    if obs_in.records is not None:
        linked_records = [
            db.query(ds.Record).filter_by(id=rec_id).one()
            for rec_id in obs_in.records
        ]
    else:
        linked_records = []

    obs = ds.Observation(
        name=obs_in.name,
        x_axis=obs_in.x_axis,
        errors=obs_in.errors,
        values=obs_in.values,
        experiment=experiment,
        records=linked_records,
    )
    db.add(obs)
    db.commit()
    return _observation_from_db(obs)
def post_experiments(
    *,
    db: Session = Depends(get_db),
    ens_in: js.ExperimentIn,
) -> js.ExperimentOut:
    """
    Create a new experiment, optionally with named priors.

    Only numeric (int/float) fields of each prior model are stored as the
    prior's argument names/values, in field order.
    """
    experiment = ds.Experiment(name=ens_in.name)

    if ens_in.priors:
        priors = []
        for name, prior in ens_in.priors.items():
            # Iterate each prior model once instead of twice, so the numeric
            # filter cannot drift between the names and values lists.
            numeric_args = [(k, v) for k, v in prior if isinstance(v, (float, int))]
            priors.append(
                ds.Prior(
                    function=ds.PriorFunction.__members__[prior.function],
                    experiment=experiment,
                    name=name,
                    argument_names=[k for k, _ in numeric_args],
                    argument_values=[v for _, v in numeric_args],
                )
            )
        db.add_all(priors)

    db.add(experiment)
    db.commit()
    return _experiment_from_db(experiment)
async def add_block(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    name: str,
    realization_index: Optional[int] = None,
    request: Request,
    block_index: int,
) -> None:
    """
    Stage blocks to an existing azure blob record.
    """
    ensemble = db.query(ds.Ensemble).filter_by(id=ensemble_id).one()
    block_id = str(uuid4())

    file_block_obj = ds.FileBlock(
        ensemble=ensemble,
        block_id=block_id,
        block_index=block_index,
        record_name=name,
        realization_index=realization_index,
    )

    # Resolve the record this block belongs to (also validates it exists).
    record_obj = (
        db.query(ds.Record)
        .filter_by(realization_index=realization_index)
        .join(ds.RecordInfo)
        .filter_by(ensemble_pk=ensemble.pk, name=name)
        .one()
    )

    payload = await request.body()
    if HAS_AZURE_BLOB_STORAGE:
        # Stage the chunk remotely; only bookkeeping is kept in the DB.
        blob = azure_blob_container.get_blob_client(record_obj.file.az_blob)
        await blob.stage_block(block_id, payload)
    else:
        # No blob storage available: keep the raw bytes in the DB row.
        file_block_obj.content = payload

    db.add(file_block_obj)
    db.commit()
def delete_experiment(*, db: Session = Depends(get_db), experiment_id: UUID) -> None:
    """
    Delete the experiment with the given id (cascades per ORM config).
    """
    doomed = db.query(ds.Experiment).filter_by(id=experiment_id).one()
    db.delete(doomed)
    db.commit()
def _create_record(
    db: Session,
    ensemble: ds.Ensemble,
    name: str,
    record_type: ds.RecordType,
    record_class: ds.RecordClass = ds.RecordClass.other,
    prior: Optional[ds.Prior] = None,
    **kwargs: Any,
) -> ds.Record:
    """
    Create and persist a Record (and its RecordInfo) on *ensemble*.

    The insert runs inside a SAVEPOINT so that a UNIQUE-constraint failure
    (a RecordInfo with the same ensemble/name already exists) can be rolled
    back without aborting the outer transaction.  On conflict, the existing
    RecordInfo is fetched and, if its class and type match the requested
    ones, the Record is re-created against it; otherwise a 409 HTTPException
    is raised.

    Extra **kwargs are forwarded to the ds.Record constructor (e.g. payload
    columns).  Returns the persisted Record.
    """
    record_info = ds.RecordInfo(
        ensemble=ensemble,
        name=name,
        record_class=record_class,
        record_type=record_type,
        prior=prior,
    )
    record = ds.Record(record_info=record_info, **kwargs)

    # SAVEPOINT: lets us recover from an IntegrityError without losing the
    # enclosing transaction.
    nested = db.begin_nested()
    try:
        db.add(record)
        db.commit()
    except IntegrityError:
        # Assuming this is a UNIQUE constraint failure due to an existing
        # record_info with the same name and ensemble. Try to fetch the
        # record_info
        nested.rollback()
        old_record_info = (
            db.query(ds.RecordInfo).filter_by(ensemble=ensemble, name=name).one()
        )

        # Check that the parameters match
        if record_info.record_class != old_record_info.record_class:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail={
                    "error": "Record class of new record does not match previous record class",
                    "new_record_class": str(record_info.record_class),
                    "old_record_class": str(old_record_info.record_class),
                    "name": name,
                    "ensemble_id": str(ensemble.id),
                },
            )
        if record_info.record_type != old_record_info.record_type:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail={
                    "error": "Record type of new record does not match previous record type",
                    "new_record_type": str(record_info.record_type),
                    "old_record_type": str(old_record_info.record_type),
                    "name": name,
                    "ensemble_id": str(ensemble.id),
                },
            )

        # Re-create the record against the already-persisted record_info;
        # the rolled-back instance cannot be reused.
        record = ds.Record(record_info=old_record_info, **kwargs)
        db.add(record)
        db.commit()
    return record