Example #1
def post_ensemble(*,
                  db: Session = Depends(get_db),
                  ens_in: js.EnsembleIn,
                  experiment_id: UUID) -> ds.Ensemble:

    experiment = db.query(ds.Experiment).filter_by(id=experiment_id).one()
    active_reals = (ens_in.active_realizations if ens_in.active_realizations
                    else list(range(ens_in.size)))

    if ens_in.size > 0:
        if max(active_reals) > ens_in.size - 1:
            raise exc.ExpectationError(
                f"Ensemble active realization index {max(active_reals)} out of realization range [0,{ ens_in.size - 1}]"
            )
        if len(set(active_reals)) != len(active_reals):
            raise exc.ExpectationError(
                f"Non unique active realization index list not allowed {active_reals}"
            )

    ens = ds.Ensemble(
        parameter_names=ens_in.parameter_names,
        response_names=ens_in.response_names,
        experiment=experiment,
        size=ens_in.size,
        userdata=ens_in.userdata,
        active_realizations=active_reals,
    )
    db.add(ens)

    if ens_in.update_id:
        update_obj = db.query(ds.Update).filter_by(id=ens_in.update_id).one()
        update_obj.ensemble_result = ens
    db.commit()

    return ens
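
If you want to exercise the endpoint above from a client, a minimal sketch might look like this. The route path, host, and concrete field values are assumptions inferred from the js.EnsembleIn fields the handler reads, not taken from the example itself.

import requests

# Hypothetical client call; the route and host are assumptions.
payload = {
    "parameter_names": ["COEFF_A", "COEFF_B"],
    "response_names": ["POLY_RES"],
    "size": 3,
    "active_realizations": [0, 2],   # must be unique and within [0, size - 1]
    "userdata": {},
}
resp = requests.post(
    "http://localhost:8000/experiments/<experiment_id>/ensembles",
    json=payload,
)
resp.raise_for_status()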
Example #2
async def create_blob(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    name: str,
    realization_index: Optional[int] = None,
) -> None:
    """
    Create a record which points to a blob on Azure Blob Storage.
    """

    ensemble = db.query(ds.Ensemble).filter_by(id=ensemble_id).one()
    file_obj = ds.File(
        filename="test",
        mimetype="mime/type",
    )
    if HAS_AZURE_BLOB_STORAGE:
        key = f"{name}@{realization_index}@{uuid4()}"
        blob = azure_blob_container.get_blob_client(key)
        file_obj.az_container = azure_blob_container.container_name
        file_obj.az_blob = key
    else:
        pass

    db.add(file_obj)

    _create_record(
        db,
        ensemble,
        name,
        ds.RecordType.file,
        realization_index=realization_index,
        file=file_obj,
    )
Example #3
async def add_block(
    *,
    db: Session = Depends(get_db),
    bh: BlobHandler = Depends(get_blob_handler),
    record: ds.Record = Depends(get_record_by_name),
    request: Request,
    block_index: int,
) -> None:
    """
    Stage blocks to an existing azure blob record.
    """
    db.add(await bh.stage_blob(record, request, block_index))
Example #4
def _create_record(
    db: Session,
    record: ds.Record,
) -> ds.Record:
    nested = db.begin_nested()
    try:
        db.add(record)
        db.commit()
    except IntegrityError:
        # Assuming this is a UNIQUE constraint failure due to an existing
        # record_info with the same name and ensemble. Try to fetch the
        # record_info
        nested.rollback()
        record_info = record.record_info
        old_record_info = (
            db.query(ds.RecordInfo)
            .filter_by(ensemble=record_info.ensemble, name=record_info.name)
            .one()
        )

        # Check that the parameters match
        if record_info.record_class != old_record_info.record_class:
            raise exc.ConflictError(
                "Record class of new record does not match previous record class",
                new_record_class=record_info.record_class,
                old_record_class=old_record_info.record_class,
            )
        if record_info.record_type != old_record_info.record_type:
            raise exc.ConflictError(
                "Record type of new record does not match previous record type",
                new_record_type=record_info.record_type,
                old_record_type=old_record_info.record_type,
            )

        record = ds.Record(
            record_info=old_record_info,
            f64_matrix=record.f64_matrix,
            file=record.file,
            realization_index=record.realization_index,
        )
        db.add(record)
        db.commit()

    return record
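
The begin_nested()/rollback() dance above is SQLAlchemy's SAVEPOINT mechanism: the failed INSERT is undone without invalidating the rest of the session, so the existing row can be fetched and reused. Below is a self-contained sketch of the same get-or-create pattern, using a hypothetical Thing model and the context-manager form of begin_nested(); it is an illustration under those assumptions, not the project's code.

from sqlalchemy import Column, Integer, String
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Thing(Base):                        # hypothetical model for illustration
    __tablename__ = "thing"
    pk = Column(Integer, primary_key=True)
    name = Column(String, unique=True)    # the UNIQUE constraint we may violate

def get_or_create_thing(db: Session, name: str) -> Thing:
    """Insert a Thing, falling back to the existing row on conflict."""
    thing = Thing(name=name)
    try:
        # begin_nested() opens a SAVEPOINT; if the flush fails, only the
        # SAVEPOINT is rolled back and the enclosing session stays usable.
        with db.begin_nested():
            db.add(thing)
    except IntegrityError:
        # UNIQUE violation: reuse the existing row, mirroring how
        # _create_record falls back to the old RecordInfo.
        thing = db.query(Thing).filter_by(name=name).one()
    return thing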
Example #5
def create_update(
        *,
        db: Session = Depends(get_db),
        update: js.UpdateIn,
) -> js.UpdateOut:

    ensemble = db.query(
        ds.Ensemble).filter_by(id=update.ensemble_reference_id).one()
    update_obj = ds.Update(
        algorithm=update.algorithm,
        ensemble_reference_pk=ensemble.pk,
    )
    db.add(update_obj)

    if update.observation_transformations:
        transformations = {
            t.name: t
            for t in update.observation_transformations
        }

        observation_ids = [t.observation_id for t in transformations.values()]
        observations = (db.query(ds.Observation).filter(
            ds.Observation.id.in_(observation_ids)).all())

        observation_transformations = [
            ds.ObservationTransformation(
                active_list=transformations[observation.name].active,
                scale_list=transformations[observation.name].scale,
                observation=observation,
                update=update_obj,
            ) for observation in observations
        ]

        db.add_all(observation_transformations)

    db.commit()
    return js.UpdateOut(
        id=update_obj.id,
        experiment_id=ensemble.experiment.id,
        algorithm=update_obj.algorithm,
        ensemble_reference_id=ensemble.id,
    )
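
A client payload for this endpoint could be sketched as below. The route path, host, and ids are assumptions; the field names (algorithm, ensemble_reference_id, and observation_transformations entries with name/observation_id/active/scale) follow the attributes the handler reads from js.UpdateIn.

import requests

# Hypothetical client call; route, host and the ids are assumptions.
payload = {
    "ensemble_reference_id": "<ensemble_id>",
    "algorithm": "ensemble_smoother",
    "observation_transformations": [
        {
            "name": "OBS_1",                   # must match an Observation.name
            "observation_id": "<observation_id>",
            "active": [True, True, False],
            "scale": [1.0, 1.0, 1.0],
        },
    ],
}
resp = requests.post("http://localhost:8000/updates", json=payload)
resp.raise_for_status()
print(resp.json()["id"])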
Example #6
def post_ensemble(*,
                  db: Session = Depends(get_db),
                  ens_in: js.EnsembleIn,
                  experiment_id: UUID) -> ds.Ensemble:

    experiment = db.query(ds.Experiment).filter_by(id=experiment_id).one()
    ens = ds.Ensemble(
        parameter_names=ens_in.parameter_names,
        response_names=ens_in.response_names,
        experiment=experiment,
        size=ens_in.size,
        _metadata=ens_in.metadata,
    )
    db.add(ens)

    if ens_in.update_id:
        update_obj = db.query(ds.Update).filter_by(id=ens_in.update_id).one()
        update_obj.ensemble_result = ens
    db.commit()

    return ens
Example #7
def post_observation(*,
                     db: Session = Depends(get_db),
                     obs_in: js.ObservationIn,
                     experiment_id: UUID) -> js.ObservationOut:
    experiment = db.query(ds.Experiment).filter_by(id=experiment_id).one()
    records = ([
        db.query(ds.Record).filter_by(id=rec_id).one()
        for rec_id in obs_in.records
    ] if obs_in.records is not None else [])
    obs = ds.Observation(
        name=obs_in.name,
        x_axis=obs_in.x_axis,
        errors=obs_in.errors,
        values=obs_in.values,
        experiment=experiment,
        records=records,
    )

    db.add(obs)
    db.commit()

    return _observation_from_db(obs)
Example #8
async def post_ensemble_record_file(
        *,
        db: Session = Depends(get_db),
        ensemble_id: UUID,
        name: str,
        realization_index: Optional[int] = None,
        file: UploadFile = File(...),
) -> None:
    """
    Assign an arbitrary file to the given `name` record.
    """
    ensemble = _get_and_assert_ensemble(db, ensemble_id, name,
                                        realization_index)

    file_obj = ds.File(
        filename=file.filename,
        mimetype=file.content_type,
    )
    if HAS_AZURE_BLOB_STORAGE:
        key = f"{name}@{realization_index}@{uuid4()}"
        blob = azure_blob_container.get_blob_client(key)
        await blob.upload_blob(file.file)

        file_obj.az_container = azure_blob_container.container_name
        file_obj.az_blob = key
    else:
        file_obj.content = await file.read()

    db.add(file_obj)
    _create_record(
        db,
        ensemble,
        name,
        ds.RecordType.file,
        realization_index=realization_index,
        file=file_obj,
    )
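
Uploading a file to this endpoint from a client could look like the sketch below; the route path and host are assumptions inferred from the handler's parameters.

import requests

# Hypothetical multipart upload; route and host are assumptions.
with open("summary.csv", "rb") as fh:
    resp = requests.post(
        "http://localhost:8000/ensembles/<ensemble_id>/records/<name>/file",
        params={"realization_index": 0},
        files={"file": ("summary.csv", fh, "text/csv")},
    )
resp.raise_for_status()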
Example #9
def post_experiments(
    *,
    db: Session = Depends(get_db),
    ens_in: js.ExperimentIn,
) -> js.ExperimentOut:
    experiment = ds.Experiment(name=ens_in.name)

    if ens_in.priors:
        db.add_all(
            ds.Prior(
                function=ds.PriorFunction.__members__[prior.function],
                experiment=experiment,
                name=name,
                argument_names=[
                    x[0] for x in prior if isinstance(x[1], (float, int))
                ],
                argument_values=[
                    x[1] for x in prior if isinstance(x[1], (float, int))
                ],
            ) for name, prior in ens_in.priors.items())

    db.add(experiment)
    db.commit()
    return _experiment_from_db(experiment)
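
The argument_names/argument_values comprehensions rely on pydantic models being iterable as (field_name, value) pairs, so every numeric field of a prior becomes one argument. A minimal sketch of that behaviour with a hypothetical UniformPrior model:

from pydantic import BaseModel

class UniformPrior(BaseModel):            # hypothetical prior schema
    function: str = "uniform"
    min: float = 0.0
    max: float = 1.0

prior = UniformPrior(min=-1.0, max=2.5)

# Iterating a pydantic model yields (field_name, value) tuples.
argument_names = [name for name, value in prior if isinstance(value, (float, int))]
argument_values = [value for name, value in prior if isinstance(value, (float, int))]

print(argument_names)    # ['min', 'max']
print(argument_values)   # [-1.0, 2.5]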
Example #10
async def add_block(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    name: str,
    realization_index: Optional[int] = None,
    request: Request,
    block_index: int,
) -> None:
    """
    Stage blocks to an existing azure blob record.
    """

    ensemble = db.query(ds.Ensemble).filter_by(id=ensemble_id).one()
    block_id = str(uuid4())

    file_block_obj = ds.FileBlock(
        ensemble=ensemble,
        block_id=block_id,
        block_index=block_index,
        record_name=name,
        realization_index=realization_index,
    )

    record_obj = (db.query(
        ds.Record).filter_by(realization_index=realization_index).join(
            ds.RecordInfo).filter_by(ensemble_pk=ensemble.pk, name=name).one())
    if HAS_AZURE_BLOB_STORAGE:
        key = record_obj.file.az_blob
        blob = azure_blob_container.get_blob_client(key)
        await blob.stage_block(block_id, await request.body())
    else:
        file_block_obj.content = await request.body()

    db.add(file_block_obj)
    db.commit()
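
A client drives this endpoint once per chunk, passing the chunk's position as block_index. The sketch below splits a local file into 1 MiB blocks; the route path, HTTP method, host, and chunk size are all assumptions.

import requests

CHUNK_SIZE = 1024 * 1024  # 1 MiB per staged block (arbitrary choice)

# Hypothetical chunked upload; route, method and host are assumptions.
with open("big_record.bin", "rb") as fh:
    block_index = 0
    while True:
        chunk = fh.read(CHUNK_SIZE)
        if not chunk:
            break
        resp = requests.put(
            "http://localhost:8000/ensembles/<ensemble_id>/records/<name>/blob",
            params={"block_index": block_index, "realization_index": 0},
            data=chunk,
        )
        resp.raise_for_status()
        block_index += 1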
Example #11
def _create_record(
    db: Session,
    ensemble: ds.Ensemble,
    name: str,
    record_type: ds.RecordType,
    record_class: ds.RecordClass = ds.RecordClass.other,
    prior: Optional[ds.Prior] = None,
    **kwargs: Any,
) -> ds.Record:
    record_info = ds.RecordInfo(
        ensemble=ensemble,
        name=name,
        record_class=record_class,
        record_type=record_type,
        prior=prior,
    )
    record = ds.Record(record_info=record_info, **kwargs)

    nested = db.begin_nested()
    try:
        db.add(record)
        db.commit()
    except IntegrityError:
        # Assuming this is a UNIQUE constraint failure due to an existing
        # record_info with the same name and ensemble. Try to fetch the
        # record_info
        nested.rollback()
        old_record_info = (db.query(ds.RecordInfo).filter_by(ensemble=ensemble,
                                                             name=name).one())

        # Check that the parameters match
        if record_info.record_class != old_record_info.record_class:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail={
                    "error": "Record class of new record does not match previous record class",
                    "new_record_class": str(record_info.record_class),
                    "old_record_class": str(old_record_info.record_class),
                    "name": name,
                    "ensemble_id": str(ensemble.id),
                },
            )
        if record_info.record_type != old_record_info.record_type:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail={
                    "error": "Record type of new record does not match previous record type",
                    "new_record_type": str(record_info.record_type),
                    "old_record_type": str(old_record_info.record_type),
                    "name": name,
                    "ensemble_id": str(ensemble.id),
                },
            )

        record = ds.Record(record_info=old_record_info, **kwargs)
        db.add(record)
        db.commit()

    return record
Example #12
async def post_ensemble_record_matrix(
    *,
    db: Session = Depends(get_db),
    ensemble_id: UUID,
    name: str,
    realization_index: Optional[int] = None,
    content_type: str = Header("application/json"),
    prior_id: Optional[UUID] = None,
    request: Request,
) -> js.RecordOut:
    """
    Assign an n-dimensional float matrix, encoded in JSON, to the given `name` record.
    """
    if content_type == "application/x-dataframe":
        logger.warning(
            "Content-Type with 'application/x-dataframe' is deprecated. Use 'text/csv' instead."
        )
        content_type = "text/csv"

    ensemble = _get_and_assert_ensemble(db, ensemble_id, name,
                                        realization_index)
    is_parameter = name in ensemble.parameter_names
    is_response = name in ensemble.response_names
    if prior_id is not None and not is_parameter:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "error": "Priors can only be specified for parameter records",
                "name": name,
                "ensemble_id": str(ensemble_id),
                "realization_index": realization_index,
                "prior_id": str(prior_id),
            },
        )

    labels = None
    prior = ((db.query(ds.Prior).filter_by(
        id=prior_id, experiment_pk=ensemble.experiment_pk).one())
             if prior_id else None)

    try:
        if content_type == "application/json":
            content = np.array(await request.json(), dtype=np.float64)
        elif content_type == "application/x-numpy":
            from numpy.lib.format import read_array

            stream = io.BytesIO(await request.body())
            content = read_array(stream)
        elif content_type == "text/csv":
            stream = io.BytesIO(await request.body())
            df = pd.read_csv(stream, index_col=0, float_precision="round_trip")
            content = df.values
            labels = [
                [str(v) for v in df.columns.values],
                [str(v) for v in df.index.values],
            ]
        else:
            raise ValueError()
    except ValueError:
        if realization_index is None:
            message = f"Ensemble-wide record '{name}' for ensemble '{ensemble_id}' needs to be a matrix"
        else:
            message = f"Forward-model record '{name}' for ensemble '{ensemble_id}', realization {realization_index} needs to be a matrix"

        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "error": message,
                "name": name,
                "ensemble_id": str(ensemble_id),
                "realization_index": realization_index,
            },
        )

    # Require that the dimensionality of an ensemble-wide parameter matrix is at least 2
    if realization_index is None and is_parameter:
        if content.ndim <= 1:
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail={
                    "error": (
                        f"Ensemble-wide parameter record '{name}' for ensemble "
                        f"'{ensemble_id}' must have dimensionality of at least 2"
                    ),
                    "name": name,
                    "ensemble_id": str(ensemble_id),
                    "realization_index": realization_index,
                },
            )

    matrix_obj = ds.F64Matrix(content=content.tolist(), labels=labels)
    db.add(matrix_obj)

    record_class = ds.RecordClass.other
    if is_parameter:
        record_class = ds.RecordClass.parameter
    if is_response:
        record_class = ds.RecordClass.response

    return _create_record(
        db,
        ensemble,
        name,
        ds.RecordType.f64_matrix,
        record_class,
        prior,
        f64_matrix=matrix_obj,
        realization_index=realization_index,
    )
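
On the client side, the same record can be posted in any of the three accepted encodings. The sketch below sends a NumPy array as application/x-numpy (the .npy format that read_array() parses); the route path and host are assumptions inferred from the handler's parameters.

import io

import numpy as np
import requests

matrix = np.random.rand(3, 2)             # e.g. realizations x parameters

buf = io.BytesIO()
np.save(buf, matrix)                       # .npy bytes, parsed by read_array()

# Hypothetical client call; route and host are assumptions.
resp = requests.post(
    "http://localhost:8000/ensembles/<ensemble_id>/records/<name>/matrix",
    data=buf.getvalue(),
    headers={"Content-Type": "application/x-numpy"},
)
resp.raise_for_status()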