Example #1
    def submit(request):
        """Submit a follow-up request to SEDMv2.

        Parameters
        ----------
        request: skyportal.models.FollowupRequest
            The request to submit.
        """

        from ..models import FacilityTransaction, DBSession

        validate_request_to_sedmv2(request)

        if cfg['app.sedmv2_endpoint'] is not None:
            altdata = request.allocation.altdata

            if not altdata:
                raise ValueError('Missing allocation information.')

            payload = {
                'obj_id': request.obj_id,
                'allocation_id': request.allocation.id,
                'payload': request.payload,
            }

            r = requests.post(
                cfg['app.sedmv2_endpoint'],
                json=payload,
                headers={"Authorization": f"token {altdata['api_token']}"},
            )

            if r.status_code == 200:
                request.status = 'submitted'
            else:
                request.status = f'rejected: {r.content}'

            transaction = FacilityTransaction(
                request=http.serialize_requests_request(r.request),
                response=http.serialize_requests_response(r),
                followup_request=request,
                initiator_id=request.last_modified_by_id,
            )
        else:
            request.status = 'submitted'

            transaction = FacilityTransaction(
                request=None,
                response=None,
                followup_request=request,
                initiator_id=request.last_modified_by_id,
            )

        DBSession().add(transaction)

        flow = Flow()
        flow.push(
            '*',
            'skyportal/REFRESH_SOURCE',
            payload={'obj_key': request.obj.internal_key},
        )
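All of these snippets finish with the same step: a Flow().push() call that tells connected SkyPortal frontends to refresh their state. A minimal sketch of that pattern (the import path assumes SkyPortal's baselayer framework; the obj_key value is a placeholder):

from baselayer.app.flow import Flow

flow = Flow()
flow.push(
    '*',                               # '*' broadcasts to all connected users
    'skyportal/REFRESH_SOURCE',        # frontend action to dispatch
    payload={'obj_key': 'placeholder-internal-key'},
)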
Example #2
def post_observing_run(data, user_id, session):
    """Post ObservingRun to database.

    Parameters
    ----------
    data: dict
        Observing run dictionary
    user_id : int
        SkyPortal ID of the User posting the ObservingRun
    session: sqlalchemy.Session
        Database session for this transaction
    """

    user = session.query(User).get(user_id)

    try:
        rund = ObservingRunPost.load(data)
    except ValidationError as exc:
        raise ValidationError(
            f"Invalid/missing parameters: {exc.normalized_messages()}"
        )

    run = ObservingRun(**rund)
    run.owner_id = user.id

    session.add(run)
    session.commit()

    flow = Flow()
    flow.push('*', "skyportal/FETCH_OBSERVING_RUNS")

    return run.id
Example #3
    def submit(request):
        """Submit a follow-up request to SEDM.

        Parameters
        ----------
        request: skyportal.models.FollowupRequest
            The request to submit.
        """

        from ..models import FacilityTransaction, DBSession

        payload = convert_request_to_sedm(request, method_value='new')
        content = json.dumps(payload)
        r = requests.post(
            cfg['app.sedm_endpoint'],
            files={'jsonfile': ('jsonfile', content)},
        )

        if r.status_code == 200:
            request.status = 'submitted'
        else:
            request.status = f'rejected: {r.content}'

        transaction = FacilityTransaction(
            request=http.serialize_requests_request(r.request),
            response=http.serialize_requests_response(r),
            followup_request=request,
            initiator_id=request.last_modified_by_id,
        )

        DBSession().add(transaction)

        flow = Flow()
        flow.push(
            '*',
            'skyportal/REFRESH_SOURCE',
            payload={'obj_key': request.obj.internal_key},
        )
Example #4
    def update(request):
        """Update a request in the SEDM queue.

        Parameters
        ----------
        request: skyportal.models.FollowupRequest
            The updated request.
        """

        from ..models import FacilityTransaction, DBSession

        payload = convert_request_to_sedm(request, method_value='edit')
        content = json.dumps(payload)
        r = requests.post(
            cfg['app.sedm_endpoint'],
            files={'jsonfile': ('jsonfile', content)},
        )

        if r.status_code == 200:
            request.status = 'submitted'
        else:
            request.status = f'rejected: {r.content}'

        transaction = FacilityTransaction(
            request=http.serialize_requests_request(r.request),
            response=http.serialize_requests_response(r),
            followup_request=request,
            initiator_id=request.last_modified_by_id,
        )

        DBSession().add(transaction)

        flow = Flow()
        flow.push(
            '*',
            "skyportal/REFRESH_FOLLOWUP_REQUESTS",
            payload={"obj_key": request.obj.internal_key},
        )
Example #5
    def receive_after_flush(session, context):
        listing_subquery = (Listing.query.filter(
            Listing.list_name == "favorites").filter(
                Listing.obj_id == target.obj_id).distinct(
                    Listing.user_id).subquery())
        users = (User.query.join(
            listing_subquery, User.id == listing_subquery.c.user_id).filter(
                User.preferences["favorite_sources_activity_notifications"][
                    target.__tablename__].astext.cast(
                        sa.Boolean).is_(True)).all())
        ws_flow = Flow()
        for user in users:
            # Only notify users who have read access to the new record in question
            if target.__class__.get_if_accessible_by(target.id, user) is not None:
                session.add(
                    UserNotification(
                        user=user,
                        text=
                        f"New {target.__class__.__name__.lower()} on your favorite source *{target.obj_id}*",
                        url=f"/source/{target.obj_id}",
                    ))
                ws_flow.push(user.id, "skyportal/FETCH_NOTIFICATIONS")
Example #6
    def delete(request):
        """Delete a follow-up request from SEDM queue.

        Parameters
        ----------
        request: skyportal.models.FollowupRequest
            The request to delete from the queue and the SkyPortal database.
        """

        from ..models import FacilityTransaction, DBSession

        payload = convert_request_to_sedm(request, method_value='delete')
        content = json.dumps(payload)
        r = requests.post(
            cfg['app.sedm_endpoint'],
            files={'jsonfile': ('jsonfile', content)},
        )

        r.raise_for_status()
        request.status = "deleted"

        transaction = FacilityTransaction(
            request=http.serialize_requests_request(r.request),
            response=http.serialize_requests_response(r),
            followup_request=request,
            initiator_id=request.last_modified_by_id,
        )

        DBSession().add(transaction)

        flow = Flow()
        flow.push(
            '*',
            'skyportal/REFRESH_SOURCE',
            payload={'obj_key': request.obj.internal_key},
        )
Example #7
def post_assignment(data, user_id, session):
    """Post assignment to database.

    Parameters
    ----------
    data: dict
        Assignment dictionary
    user_id : int
        SkyPortal ID of the User posting the assignment
    session: sqlalchemy.Session
        Database session for this transaction
    """

    user = session.query(User).get(user_id)

    try:
        assignment = ClassicalAssignment(**AssignmentSchema.load(data=data))
    except ValidationError as e:
        raise ValidationError('Error parsing followup request: '
                              f'"{e.normalized_messages()}"')

    run_id = assignment.run_id
    data['priority'] = assignment.priority.name
    run = ObservingRun.get_if_accessible_by(run_id, user, raise_if_none=False)
    if run is None:
        raise ValueError('Observing run is not accessible.')

    predecessor = (
        ClassicalAssignment.query_records_accessible_by(user).filter(
            ClassicalAssignment.obj_id == assignment.obj_id,
            ClassicalAssignment.run_id == run_id,
        ).first())

    if predecessor is not None:
        raise ValueError('Object is already assigned to this run.')

    assignment = ClassicalAssignment(**data)

    assignment.requester_id = user.id
    session.add(assignment)
    session.commit()

    flow = Flow()
    flow.push(
        '*',
        "skyportal/REFRESH_SOURCE",
        payload={"obj_key": assignment.obj.internal_key},
    )
    flow.push(
        '*',
        "skyportal/REFRESH_OBSERVING_RUN",
        payload={"run_id": assignment.run_id},
    )
    return assignment.id
Example #8
def add_linked_thumbnails_and_push_ws_msg(obj_id, user):
    try:
        obj = Obj.get_if_accessible_by(obj_id, user)
        obj.add_linked_thumbnails()
        flow = Flow()
        flow.push('*',
                  "skyportal/REFRESH_SOURCE",
                  payload={"obj_key": obj.internal_key})
        flow.push('*',
                  "skyportal/REFRESH_CANDIDATE",
                  payload={"id": obj.internal_key})
    except Exception as e:
        log(f"Unable to add linked thumbnails to {obj_id}: {e}")
    finally:
        DBSession.remove()
Example #9
def add_linked_thumbnails_and_push_ws_msg(obj_id, user_id):
    with Session() as session:
        try:
            user = session.query(User).get(user_id)
            if Obj.get_if_accessible_by(obj_id, user) is None:
                raise AccessError(
                    f"Insufficient permissions for User {user_id} to read Obj {obj_id}"
                )
            obj = session.query(Obj).get(obj_id)
            obj.add_linked_thumbnails(session=session)
            flow = Flow()
            flow.push(
                '*', "skyportal/REFRESH_SOURCE", payload={"obj_key": obj.internal_key}
            )
            flow.push(
                '*', "skyportal/REFRESH_CANDIDATE", payload={"id": obj.internal_key}
            )
        except Exception as e:
            log(f"Unable to add linked thumbnails to {obj_id}: {e}")
            session.rollback()
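Example #8 and Example #9 are two revisions of the same helper: the first relies on the module-level scoped DBSession and calls DBSession.remove() in a finally block, while the second opens its own session as a context manager, checks access explicitly, and rolls back on failure. A minimal sketch of the context-managed pattern (the in-memory engine is only a stand-in; SkyPortal binds its sessions to the application database):

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite://")    # stand-in engine for illustration
Session = sessionmaker(bind=engine)

with Session() as session:             # session is closed automatically on exit
    try:
        ...                            # ORM work goes here
        session.commit()
    except Exception:
        session.rollback()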
Example #10
    def submit(request):
        """Generate an observation plan.

        Parameters
        ----------
        request: skyportal.models.ObservationPlanRequest
            The request to generate the observation plan.
        """

        from tornado.ioloop import IOLoop
        from ..models import DBSession, EventObservationPlan

        plan = EventObservationPlan.query.filter_by(
            plan_name=request.payload["queue_name"]).first()
        if plan is None:

            # check payload
            required_parameters = {
                'start_date',
                'end_date',
                'schedule_type',
                'schedule_strategy',
                'filter_strategy',
                'exposure_time',
                'filters',
                'maximum_airmass',
                'integrated_probability',
                'minimum_time_difference',
            }

            if not required_parameters.issubset(set(request.payload.keys())):
                raise ValueError('Missing required planning parameter')

            if request.payload["schedule_type"] not in [
                    "greedy",
                    "greedy_slew",
                    "sear",
                    "airmass_weighted",
            ]:
                raise ValueError(
                    'schedule_type must be one of greedy, greedy_slew, sear, or airmass_weighted'
                )

            if (request.payload["integrated_probability"] < 0
                    or request.payload["integrated_probability"] > 100):
                raise ValueError(
                    'integrated_probability must be between 0 and 100')

            if request.payload["filter_strategy"] not in [
                    "block", "integrated"
            ]:
                raise ValueError(
                    'filter_strategy must be either block or integrated')

            start_time = Time(request.payload["start_date"],
                              format='iso',
                              scale='utc')
            end_time = Time(request.payload["end_date"],
                            format='iso',
                            scale='utc')

            plan = EventObservationPlan(
                observation_plan_request_id=request.id,
                dateobs=request.gcnevent.dateobs,
                plan_name=request.payload['queue_name'],
                instrument_id=request.instrument.id,
                validity_window_start=start_time.datetime,
                validity_window_end=end_time.datetime,
            )

            DBSession().add(plan)
            DBSession().commit()

            request.status = 'running'
            DBSession().merge(request)
            DBSession().commit()

            flow = Flow()
            flow.push(
                '*',
                "skyportal/REFRESH_GCNEVENT",
                payload={"gcnEvent_dateobs": request.gcnevent.dateobs},
            )

            log(f"Generating schedule for observation plan {plan.id}")
            IOLoop.current().run_in_executor(
                None,
                lambda: generate_plan(plan.id, request.id, request.requester.id),
            )
        else:
            raise ValueError(
                f'plan_name {request.payload["queue_name"]} already exists.')
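The submit handler above returns quickly because the expensive plan generation is handed off to a thread pool through Tornado's IOLoop. A minimal sketch of that hand-off (heavy_work and the ids are stand-ins; this assumes it runs inside a Tornado application):

from tornado.ioloop import IOLoop

def heavy_work(plan_id, request_id, user_id):
    ...  # long-running scheduling work would go here

IOLoop.current().run_in_executor(
    None,                            # None -> use the default thread pool executor
    lambda: heavy_work(1, 2, 3),     # placeholder ids
)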
Example #11
def generate_plan(observation_plan_id, request_id, user_id):
    """Use gwemopt to construct observing plan."""

    from ..models import DBSession
    from skyportal.handlers.api.instrument import add_tiles

    Session = scoped_session(
        sessionmaker(bind=DBSession.session_factory.kw["bind"]))

    import gwemopt
    import gwemopt.utils
    import gwemopt.segments
    import gwemopt.skyportal

    from ..models import (
        EventObservationPlan,
        Galaxy,
        InstrumentField,
        ObservationPlanRequest,
        PlannedObservation,
        User,
    )

    session = Session()
    try:
        plan = session.query(EventObservationPlan).get(observation_plan_id)
        request = session.query(ObservationPlanRequest).get(request_id)
        user = session.query(User).get(user_id)

        event_time = Time(request.gcnevent.dateobs,
                          format='datetime',
                          scale='utc')
        start_time = Time(request.payload["start_date"],
                          format='iso',
                          scale='utc')
        end_time = Time(request.payload["end_date"], format='iso', scale='utc')

        params = {
            'config': {
                request.instrument.name: {
                    # field list from skyportal
                    'tesselation': request.instrument.fields,
                    # telescope longitude [deg]
                    'longitude': request.instrument.telescope.lon,
                    # telescope latitude [deg]
                    'latitude': request.instrument.telescope.lat,
                    # telescope elevation [m]
                    'elevation': request.instrument.telescope.elevation,
                    # telescope name
                    'telescope': request.instrument.name,
                    # telescope horizon
                    'horizon': -12.0,
                    # time in seconds to change the filter
                    'filt_change_time': 0.0,
                    # extra overhead in seconds
                    'overhead_per_exposure': 0.0,
                    # slew rate for the telescope [deg/s]
                    'slew_rate': 2.6,
                    # camera readout time
                    'readout': 0.0,
                    # telescope field of view
                    'FOV': 0.0,
                    # exposure time for the given limiting magnitude
                    'exposuretime': 1.0,
                    # limiting magnitude given telescope time
                    'magnitude': 0.0,
                },
            },
            # gwemopt filter strategy
            # options: block (blocks of single filters), integrated (series of alternating filters)
            'doAlternativeFilters': request.payload["filter_strategy"] == "block",
            # flag to indicate fields come from DB
            'doDatabase': True,
            # only keep tiles within powerlaw_cl
            'doMinimalTiling': True,
            # single set of scheduled observations
            'doSingleExposure': True,
            # gwemopt scheduling algorithms
            # options: greedy, greedy_slew, sear, airmass_weighted
            'scheduleType': request.payload["schedule_type"],
            # list of filters to use for observations
            'filters': request.payload["filters"].split(","),
            # GPS time for event
            'gpstime': event_time.gps,
            # Healpix nside for the skymap
            'nside': 512,
            # maximum integrated probability of the skymap to consider
            'powerlaw_cl': request.payload["integrated_probability"],
            'telescopes': [request.instrument.name],
            # minimum difference between observations of the same field
            'mindiff': request.payload["minimum_time_difference"],
            # maximum airmass with which to observe
            'airmass': request.payload["maximum_airmass"],
            # array of exposure times (same length as filter array)
            'exposuretimes': np.array(
                [int(request.payload["exposure_time"])] *
                len(request.payload["filters"].split(","))),
        }

        if request.payload["schedule_strategy"] == "galaxy":
            params = {
                **params,
                'tilesType': 'galaxy',
                'galaxy_catalog': request.payload["galaxy_catalog"],
                'galaxy_grade': 'S',
                'writeCatalog': False,
                'catalog_n': 1.0,
                'powerlaw_dist_exp': 1.0,
            }
        elif request.payload["schedule_strategy"] == "tiling":
            params = {**params, 'tilesType': 'moc'}
        else:
            raise AttributeError(
                'schedule_strategy should be either tiling or galaxy')

        params = gwemopt.utils.params_checker(params)
        params = gwemopt.segments.get_telescope_segments(params)

        params["Tobs"] = [
            start_time.mjd - event_time.mjd,
            end_time.mjd - event_time.mjd,
        ]

        params['map_struct'] = dict(
            zip(['prob', 'distmu', 'distsigma', 'distnorm'],
                request.localization.flat))

        params['is3D'] = request.localization.is_3d

        # Function to read maps
        map_struct = gwemopt.utils.read_skymap(params,
                                               is3D=params["do3D"],
                                               map_struct=params['map_struct'])

        if params["tilesType"] == "galaxy":
            query = Galaxy.query_records_accessible_by(user, mode="read")
            query = query.filter(
                Galaxy.catalog_name == params["galaxy_catalog"])
            galaxies = query.all()
            catalog_struct = {}
            catalog_struct["ra"] = np.array([g.ra for g in galaxies])
            catalog_struct["dec"] = np.array([g.dec for g in galaxies])
            catalog_struct["S"] = np.array([1.0 for g in galaxies])
            catalog_struct["Sloc"] = np.array([1.0 for g in galaxies])
            catalog_struct["Smass"] = np.array([1.0 for g in galaxies])

        if params["tilesType"] == "moc":
            moc_structs = gwemopt.skyportal.create_moc_from_skyportal(
                params, map_struct=map_struct)
            tile_structs = gwemopt.tiles.moc(params, map_struct, moc_structs)
        elif params["tilesType"] == "galaxy":
            if request.instrument.region is None:
                raise ValueError(
                    'Must define the instrument region in the case of galaxy requests'
                )
            regions = Regions.parse(request.instrument.region, format='ds9')
            tile_structs = gwemopt.skyportal.create_galaxy_from_skyportal(
                params, map_struct, catalog_struct, regions=regions)

        tile_structs, coverage_struct = gwemopt.coverage.timeallocation(
            params, map_struct, tile_structs)

        # if the fields do not yet exist, we need to add them
        if params["tilesType"] == "galaxy":
            regions = Regions.parse(request.instrument.region, format='ds9')
            data = {
                'RA': coverage_struct["data"][:, 0],
                'Dec': coverage_struct["data"][:, 1],
            }
            field_data = pd.DataFrame.from_dict(data)
            field_ids = add_tiles(
                request.instrument.id,
                request.instrument.name,
                regions,
                field_data,
                session=session,
            )

        planned_observations = []
        for ii in range(len(coverage_struct["ipix"])):
            data = coverage_struct["data"][ii, :]
            filt = coverage_struct["filters"][ii]
            mjd = data[2]
            tt = Time(mjd, format='mjd')

            overhead_per_exposure = params["config"][
                request.instrument.name]["overhead_per_exposure"]

            exposure_time, prob = data[4], data[6]
            if params["tilesType"] == "galaxy":
                field_id = field_ids[ii]
            else:
                field_id = data[5]

            field = InstrumentField.query.filter(
                InstrumentField.instrument_id == request.instrument.id,
                InstrumentField.field_id == field_id,
            ).first()
            if field is None:
                return log(f"Missing field {field_id} from list")

            planned_observation = PlannedObservation(
                obstime=tt.datetime,
                dateobs=request.gcnevent.dateobs,
                field_id=field.id,
                exposure_time=exposure_time,
                weight=prob,
                filt=filt,
                instrument_id=request.instrument.id,
                planned_observation_id=ii,
                observation_plan_id=plan.id,
                overhead_per_exposure=overhead_per_exposure,
            )
            planned_observations.append(planned_observation)

        session.add_all(planned_observations)
        plan.status = 'complete'
        session.merge(plan)
        session.commit()

        request.status = 'complete'
        session.merge(request)
        session.commit()

        flow = Flow()
        flow.push(
            '*',
            "skyportal/REFRESH_GCNEVENT",
            payload={"gcnEvent_dateobs": request.gcnevent.dateobs},
        )

        return log(
            f"Generated plan for observation plan {observation_plan_id}")

    except Exception as e:
        return log(
            f"Unable to generate plan for observation plan {observation_plan_id}: {e}"
        )
    finally:
        Session.remove()
Example #12
def commit_photometry(json_response, altdata, request_id, instrument_id,
                      user_id):
    """
    Commits ATLAS photometry to the database

    Parameters
    ----------
    json_response : dict
        response.json() from call to ATLAS photometry service.
    altdata: dict
        Contains ATLAS photometry api_token for the user
    request_id : int
        FollowupRequest SkyPortal ID
    instrument_id : int
        Instrument SkyPortal ID
    user_id : int
        User SkyPortal ID
    """

    from ..models import (
        DBSession,
        FollowupRequest,
        Instrument,
        User,
    )

    Session = scoped_session(
        sessionmaker(bind=DBSession.session_factory.kw["bind"]))
    session = Session()

    try:
        request = session.query(FollowupRequest).get(request_id)
        instrument = session.query(Instrument).get(instrument_id)
        user = session.query(User).get(user_id)

        result_url = json_response['result_url']
        request.status = f"Task is complete with results available at {result_url}"

        s = requests.get(
            result_url,
            headers={
                'Authorization': f"Token {altdata['api_token']}",
                'Accept': 'application/json',
            },
        )
        s.raise_for_status()

        # ATLAS response looks like
        """
     ###MJD          m      dm   uJy   duJy F err chi/N     RA       Dec        x        y     maj  min   phi  apfit mag5sig Sky   Obs
     59226.235875  16.177  0.012  1228   15 c  0  54.64 342.45960  51.26340  7768.79  7767.00 2.53 2.39 -63.4 -0.375 19.58 21.54 01a59226o0051c
     59228.242600  16.258  0.017  1140   20 c  0   7.87 342.45960  51.26340  2179.59  9252.78 3.41 3.09 -51.0 -0.396 19.28 21.28 02a59228o0102c
     59228.246262  16.582  0.021   846   18 c  0  28.37 342.45960  51.26340  2162.23  9213.32 3.53 3.25 -52.3 -0.366 19.14 21.26 02a59228o0110c
     59228.252679  16.451  0.019   954   18 c  0  13.76 342.45960  51.26340  2218.02  9291.76 3.34 3.03 -49.8 -0.389 19.17 21.24 02a59228o0124c
     59228.265532  17.223  0.049   469   23 c  0   3.90 342.45960  51.26340  2237.25  9167.94 4.31 3.88 -43.7 -0.473 18.95 21.20 02a59228o0152c
         """

        try:
            df = pd.read_csv(StringIO(s.text.replace("###MJD", "mjd")),
                             delim_whitespace=True)
        except Exception as e:
            raise ValueError(f'Format of response not understood: {e}')

        desired_columns = {'mjd', 'RA', 'Dec', 'm', 'dm', 'mag5sig', 'F'}
        if not desired_columns.issubset(set(df.columns)):
            raise ValueError('Missing expected column')

        df.rename(
            columns={
                'RA': 'ra',
                'Dec': 'dec',
                'm': 'mag',
                'dm': 'magerr',
                'mag5sig': 'limiting_mag',
                'F': 'filter',
            },
            inplace=True,
        )
        cyan = df['filter'] == 'c'
        orange = df['filter'] == 'o'

        snr = df['uJy'] / df['duJy'] < 5

        df.loc[cyan, 'filter'] = 'atlasc'
        df.loc[orange, 'filter'] = 'atlaso'
        df.loc[snr, 'mag'] = None
        df.loc[snr, 'magerr'] = None

        iszero = df['duJy'] == 0.0
        df.loc[iszero, 'mag'] = None
        df.loc[iszero, 'magerr'] = None

        isnan = np.isnan(df['uJy'])
        df.loc[isnan, 'mag'] = None
        df.loc[isnan, 'magerr'] = None

        df = df.replace({np.nan: None})

        drop_columns = list(
            set(df.columns.values) -
            {'mjd', 'ra', 'dec', 'mag', 'magerr', 'limiting_mag', 'filter'})

        df.drop(
            columns=drop_columns,
            inplace=True,
        )
        df['magsys'] = 'ab'

        data_out = {
            'obj_id': request.obj_id,
            'instrument_id': instrument.id,
            'group_ids': [g.id for g in user.accessible_groups],
            **df.to_dict(orient='list'),
        }

        from skyportal.handlers.api.photometry import add_external_photometry

        if len(df.index) > 0:
            add_external_photometry(data_out, request.requester)
            request.status = "Photometry committed to database"
        else:
            request.status = "No photometry to commit to database"

        session.add(request)
        session.commit()

        flow = Flow()
        flow.push(
            '*',
            "skyportal/REFRESH_SOURCE",
            payload={"obj_key": request.obj.internal_key},
        )

    except Exception as e:
        return log(f"Unable to commit photometry for {request_id}: {e}")
Example #13
def add_observations(instrument_id, obstable):
    """Post executed observations for a given instrument.
    obstable is a pandas DataFrame of the form:

   observation_id  field_id       obstime   seeing    limmag  exposure_time  \
0        84434604         1  2.458599e+06  1.57415  20.40705             30
1        84434651         1  2.458599e+06  1.58120  20.49405             30
2        84434696         1  2.458599e+06  1.64995  20.56030             30
3        84434741         1  2.458599e+06  1.54945  20.57400             30
4        84434788         1  2.458599e+06  1.62870  20.60385             30

  filter  processed_fraction airmass
0   ztfr                 1.0    None
1   ztfr                 1.0    None
2   ztfr                 1.0    None
3   ztfr                 1.0    None
4   ztfr                 1.0    None
     """

    session = Session()
    # if the fields do not yet exist, we need to add them
    if ('RA' in obstable) and ('Dec' in obstable) and ('field_id' not in obstable):
        instrument = session.query(Instrument).get(instrument_id)
        regions = Regions.parse(instrument.region, format='ds9')
        field_data = obstable[['RA', 'Dec']]
        field_ids = add_tiles(instrument.id,
                              instrument.name,
                              regions,
                              field_data,
                              session=session)
        obstable['field_id'] = field_ids

    try:
        observations = []
        for index, row in obstable.iterrows():
            field_id = int(row["field_id"])
            field = (session.query(InstrumentField).filter(
                InstrumentField.instrument_id == instrument_id,
                InstrumentField.field_id == field_id,
            ).first())
            if field is None:
                return log(
                    f"Unable to add observations for instrument {instrument_id}: Missing field {field_id}"
                )

            observation = (session.query(ExecutedObservation).filter_by(
                instrument_id=instrument_id,
                observation_id=row["observation_id"]).first())
            if observation is not None:
                log(f"Observation {row['observation_id']} for instrument {instrument_id} already exists... continuing."
                    )
                continue

            # enable multiple obstime formats
            try:
                # can catch iso and isot this way
                obstime = Time(row["obstime"])
            except ValueError:
                # otherwise catch jd as the numerical example
                obstime = Time(row["obstime"], format='jd')

            observations.append(
                ExecutedObservation(
                    instrument_id=instrument_id,
                    observation_id=row["observation_id"],
                    instrument_field_id=field.id,
                    obstime=obstime.datetime,
                    seeing=row["seeing"],
                    limmag=row["limmag"],
                    exposure_time=row["exposure_time"],
                    filt=row["filter"],
                    processed_fraction=row["processed_fraction"],
                ))
        session.add_all(observations)
        session.commit()

        flow = Flow()
        flow.push('*', "skyportal/REFRESH_OBSERVATIONS")

        return log(
            f"Successfully added observations for instrument {instrument_id}")
    except Exception as e:
        return log(
            f"Unable to add observations for instrument {instrument_id}: {e}")
    finally:
        Session.remove()
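The obstime handling above accepts either ISO/ISOT strings or numerical Julian dates. A compact sketch of that fallback (parse_obstime is a hypothetical helper name):

from astropy.time import Time

def parse_obstime(value):
    # Time() parses iso/isot strings directly; bare numbers need format='jd'
    try:
        return Time(value)
    except ValueError:
        return Time(value, format='jd')

parse_obstime("2019-04-25T08:18:05")   # ISOT string
parse_obstime(2458598.846)             # Julian date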
Example #14
def commit_photometry(text_response, request_id, instrument_id, user_id):
    """
    Commits PS1 DR2 photometry to the database

    Parameters
    ----------
    text_response : dict
        response.text from call to PS1 DR2 photometry service.
    request_id : int
        FollowupRequest SkyPortal ID
    instrument_id : int
        Instrument SkyPortal ID
    user_id : int
        User SkyPortal ID
    """

    from ..models import (
        DBSession,
        FollowupRequest,
        Instrument,
        User,
    )

    Session = scoped_session(
        sessionmaker(bind=DBSession.session_factory.kw["bind"]))
    session = Session()

    try:
        request = session.query(FollowupRequest).get(request_id)
        instrument = session.query(Instrument).get(instrument_id)
        user = session.query(User).get(user_id)

        tab = astropy.io.ascii.read(text_response)
        # good data only
        tab = tab[tab['psfQfPerfect'] > 0.9]
        id2filter = np.array(
            ['ps1::g', 'ps1::r', 'ps1::i', 'ps1::z', 'ps1::y'])
        tab['filter'] = id2filter[(tab['filterID'] - 1).data.astype(int)]
        df = tab.to_pandas()

        df.rename(
            columns={
                'obsTime': 'mjd',
                'psfFlux': 'flux',
                'psfFluxerr': 'fluxerr',
            },
            inplace=True,
        )
        df = df.replace({np.nan: None})

        df.drop(
            columns=[
                'detectID',
                'filterID',
                'psfQfPerfect',
            ],
            inplace=True,
        )
        df['magsys'] = 'ab'
        df['zp'] = 8.90

        data_out = {
            'obj_id': request.obj_id,
            'instrument_id': instrument.id,
            'group_ids': [g.id for g in user.accessible_groups],
            **df.to_dict(orient='list'),
        }

        from skyportal.handlers.api.photometry import add_external_photometry

        if len(df.index) > 0:
            add_external_photometry(data_out, request.requester)
            request.status = "Photometry committed to database"
        else:
            request.status = "No photometry to commit to database"

        session.add(request)
        session.commit()

        flow = Flow()
        flow.push(
            '*',
            "skyportal/REFRESH_SOURCE",
            payload={"obj_key": request.obj.internal_key},
        )

    except Exception as e:
        return log(f"Unable to commit photometry for {request_id}: {e}")
Example #15
    def delete(request):
        """Delete a follow-up request from SEDMv2 queue.

        Parameters
        ----------
        request: skyportal.models.FollowupRequest
            The request to delete from the queue and the SkyPortal database.
        """

        from ..models import DBSession, FollowupRequest, FacilityTransaction

        if cfg['app.sedmv2_endpoint'] is not None:
            altdata = request.allocation.altdata

            req = (
                DBSession()
                .query(FollowupRequest)
                .filter(FollowupRequest.id == request.id)
                .one()
            )

            if not altdata:
                raise ValueError('Missing allocation information.')

            content = req.transactions[0].response["content"]
            content = json.loads(content)

            uid = content["data"]["id"]

            r = requests.delete(
                f"{cfg['app.sedmv2_endpoint']}/{uid}",
                headers={"Authorization": f"token {altdata['api_token']}"},
            )
            r.raise_for_status()
            request.status = "deleted"

            transaction = FacilityTransaction(
                request=http.serialize_requests_request(r.request),
                response=http.serialize_requests_response(r),
                followup_request=request,
                initiator_id=request.last_modified_by_id,
            )
        else:
            request.status = 'deleted'

            transaction = FacilityTransaction(
                request=None,
                response=None,
                followup_request=request,
                initiator_id=request.last_modified_by_id,
            )

        DBSession().add(transaction)

        flow = Flow()
        flow.push(
            '*',
            'skyportal/REFRESH_SOURCE',
            payload={'obj_key': request.obj.internal_key},
        )
Example #16
    async def put(self):
        """
        ---
        description: |
          Reprioritize followup requests schedule automatically based on
          location within skymap.
        tags:
            - followup_requests
        parameters:
        - in: body
          name: localizationId
          schema:
            type: integer
          description: Filter by localization ID
        - in: body
          name: requestIds
          schema:
            type: array
            items:
              type: integer
          description: List of follow-up request IDs
        - in: body
          name: minimumPriority
          schema:
            type: integer
          description: Minimum priority for the instrument. Defaults to 1.
        - in: body
          name: maximumPriority
          schema:
            type: integer
          description: Maximum priority for the instrument. Defaults to 5.
        responses:
          200:
            content:
              application/json:
                schema: Success
          400:
            content:
              application/json:
                schema: Error
        """

        data = self.get_json()
        localization_id = data.get('localizationId', None)
        request_ids = data.get('requestIds', None)
        minimum_priority = data.get('minimumPriority', 1)
        maximum_priority = data.get('maximumPriority', 5)

        if localization_id is None:
            return self.error('localizationId is required')
        if request_ids is None:
            return self.error('requestIds is required')

        localization = (Localization.query_records_accessible_by(
            self.current_user).filter(
                Localization.id == localization_id, ).first())
        if localization is None:
            return self.error(
                message=f"Missing localization with id {localization_id}")

        followup_requests = []
        for request_id in request_ids:
            # get owned assignments
            followup_request = FollowupRequest.get_if_accessible_by(
                request_id, self.current_user, mode="update")
            if followup_request is None:
                return self.error(
                    message=f"Missing FollowUpRequest with id {request_id}")
            followup_requests.append(followup_request)

        if len(followup_requests) == 0:
            return self.error('Need at least one observation to modify.')

        ras = np.array([
            followup_request.obj.ra for followup_request in followup_requests
        ])
        decs = np.array([
            followup_request.obj.dec for followup_request in followup_requests
        ])
        dists = np.array([
            cosmo.luminosity_distance(followup_request.obj.redshift).value
            if followup_request.obj.redshift is not None else -1
            for followup_request in followup_requests
        ])

        tab = localization.flat
        ipix = hp.ang2pix(Localization.nside, ras, decs, lonlat=True)
        if localization.is_3d:
            prob, distmu, distsigma, distnorm = tab
            if not all([dist > 0 for dist in dists]):
                weights = prob[ipix]
            else:
                weights = prob[ipix] * (distnorm[ipix] * norm(
                    distmu[ipix], distsigma[ipix]).pdf(dists))
        else:
            # 2D localization: flat holds only the probability column
            (prob,) = tab
            weights = prob[ipix]
        weights = weights / np.max(weights)
        priorities = [
            int(
                np.round(weight * (maximum_priority - minimum_priority) +
                         minimum_priority)) for weight in weights
        ]

        with DBSession() as session:
            for request_id, priority in zip(request_ids, priorities):
                # get owned assignments
                followup_request = session.query(FollowupRequest).get(
                    request_id)
                api = followup_request.instrument.api_class
                if not api.implements()['update']:
                    return self.error(
                        'Cannot update requests on this instrument.')
                payload = followup_request.payload
                payload["priority"] = priority
                session.query(FollowupRequest).filter(
                    FollowupRequest.id == request_id).update(
                        {'payload': payload})
                session.commit()

                followup_request.payload = payload
                followup_request.instrument.api_class.update(followup_request)

        flow = Flow()
        flow.push(
            '*',
            "skyportal/REFRESH_FOLLOWUP_REQUESTS",
        )

        return self.success()
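The priority assignment above rescales the skymap weights (already normalized to a maximum of 1) onto the integer range [minimumPriority, maximumPriority]. A short worked sketch with made-up weights:

import numpy as np

weights = np.array([0.2, 0.9, 1.0])        # already divided by np.max(weights)
minimum_priority, maximum_priority = 1, 5
priorities = [
    int(np.round(w * (maximum_priority - minimum_priority) + minimum_priority))
    for w in weights
]
# priorities == [2, 5, 5]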
Example #17
def commit_photometry(url, altdata, df_request, request_id, instrument_id,
                      user_id):
    """
    Commits ZTF forced photometry to the database

    Parameters
    ----------
    url : str
        ZTF forced photometry service data file location.
    altdata: dict
        Contains ZTF photometry api_token for the user
    df_request: pandas.DataFrame
        DataFrame containing request parameters (ra, dec, start jd, end jd)
    request_id : int
        FollowupRequest SkyPortal ID
    instrument_id : int
        Instrument SkyPortal ID
    user_id: int
        User SkyPortal ID
    """

    from ..models import (
        DBSession,
        FollowupRequest,
        Instrument,
        User,
    )

    Session = scoped_session(
        sessionmaker(bind=DBSession.session_factory.kw["bind"]))
    session = Session()

    try:
        request = session.query(FollowupRequest).get(request_id)
        instrument = session.query(Instrument).get(instrument_id)
        user = session.query(User).get(user_id)

        r = requests.get(
            url,
            auth=HTTPBasicAuth(altdata['ipac_http_user'],
                               altdata['ipac_http_password']),
        )
        df = ascii.read(r.content.decode(),
                        header_start=0,
                        data_start=1,
                        comment='#').to_pandas()

        df.columns = df.columns.str.replace(',', '')
        desired_columns = {
            'jd',
            'forcediffimflux',
            'forcediffimfluxunc',
            'diffmaglim',
            'zpdiff',
            'filter',
        }
        if not desired_columns.issubset(set(df.columns)):
            raise ValueError('Missing expected column')
        df['ra'] = df_request['ra']
        df['dec'] = df_request['dec']
        df.rename(
            columns={'diffmaglim': 'limiting_mag'},
            inplace=True,
        )
        df = df.replace({"null": np.nan})
        df['mjd'] = astropy.time.Time(df['jd'], format='jd').mjd
        df['filter'] = df['filter'].str.replace('_', '')
        df['filter'] = df['filter'].str.lower()
        df = df.astype({
            'forcediffimflux': 'float64',
            'forcediffimfluxunc': 'float64'
        })

        df['mag'] = df['zpdiff'] - 2.5 * np.log10(df['forcediffimflux'])
        df['magerr'] = 1.0857 * df['forcediffimfluxunc'] / df['forcediffimflux']

        snr = df['forcediffimflux'] / df['forcediffimfluxunc'] < 5
        df.loc[snr, 'mag'] = None
        df.loc[snr, 'magerr'] = None

        iszero = df['forcediffimfluxunc'] == 0.0
        df.loc[iszero, 'mag'] = None
        df.loc[iszero, 'magerr'] = None

        isnan = np.isnan(df['forcediffimflux'])
        df.loc[isnan, 'mag'] = None
        df.loc[isnan, 'magerr'] = None

        df = df.replace({np.nan: None})

        drop_columns = list(
            set(df.columns.values) - set([
                'mjd', 'ra', 'dec', 'mag', 'magerr', 'limiting_mag', 'filter'
            ]))

        df.drop(
            columns=drop_columns,
            inplace=True,
        )
        df['magsys'] = 'ab'

        data_out = {
            'obj_id': request.obj_id,
            'instrument_id': instrument.id,
            'group_ids': [g.id for g in user.accessible_groups],
            **df.to_dict(orient='list'),
        }

        from skyportal.handlers.api.photometry import add_external_photometry

        add_external_photometry(data_out, request.requester)

        request.status = "Photometry committed to database"
        session.add(request)
        session.commit()

        flow = Flow()
        flow.push(
            '*',
            "skyportal/REFRESH_SOURCE",
            payload={"obj_key": request.obj.internal_key},
        )
    except Exception as e:
        return log(f"Unable to commit photometry for {request_id}: {e}")
    finally:
        Session.remove()
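The magnitude columns above come from the standard difference-flux conversion: mag = zpdiff - 2.5 * log10(flux) and magerr ≈ 1.0857 * fluxerr / flux (1.0857 ≈ 2.5 / ln 10). A worked check with illustrative numbers:

import numpy as np

zpdiff = 26.0
flux, fluxerr = 1000.0, 50.0              # forcediffimflux, forcediffimfluxunc
mag = zpdiff - 2.5 * np.log10(flux)       # 26.0 - 7.5 = 18.5
magerr = 1.0857 * fluxerr / flux          # ~0.054 mag at S/N = 20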
Example #18
    def receive_after_flush(session, context):

        is_gcnnotice = "dateobs" in target.to_dict()
        is_facility_transaction = "initiator_id" in target.to_dict()

        if is_gcnnotice:
            users = User.query.filter(User.preferences["slack_integration"]
                                      ["gcnnotices"].astext.cast(
                                          sa.Boolean).is_(True)).all()
        elif is_facility_transaction:
            users = User.query.filter(User.preferences["slack_integration"]
                                      ["facilitytransactions"].astext.cast(
                                          sa.Boolean).is_(True)).all()
        else:
            listing_subquery = (Listing.query.filter(
                Listing.list_name == "favorites").filter(
                    Listing.obj_id == target.obj_id).distinct(
                        Listing.user_id).subquery())
            users = (User.query.join(
                listing_subquery,
                User.id == listing_subquery.c.user_id).filter(
                    User.preferences["favorite_sources_activity_notifications"]
                    [target.__tablename__].astext.cast(
                        sa.Boolean).is_(True)).all())
        ws_flow = Flow()
        for user in users:
            # Only notify users who have read access to the new record in question
            if target.__class__.get_if_accessible_by(target.id,
                                                     user) is not None:
                if is_gcnnotice:
                    session.add(
                        UserNotification(
                            user=user,
                            text=
                            f"New {target.__class__.__name__.lower()} on GcnEvent *{target.dateobs}*",
                            url=
                            f"/gcn_events/{str(target.dateobs).replace(' ','T')}",
                        ))
                elif is_facility_transaction:
                    if "observation_plan_request" in target.to_dict():
                        allocation_id = target.observation_plan_request.allocation_id
                        allocation = session.query(Allocation).get(
                            allocation_id)
                        instrument = allocation.instrument
                        localization_id = (
                            target.observation_plan_request.localization_id)
                        localization = session.query(Localization).get(
                            localization_id)
                        session.add(
                            UserNotification(
                                user=user,
                                text=
                                f"New observation plan submission for GcnEvent *{localization.dateobs}* by *{instrument.name}*",
                                url=
                                f"/gcn_events/{str(localization.dateobs).replace(' ','T')}",
                            ))
                    elif "followup_request" in target.to_dict():
                        allocation_id = target.followup_request.allocation_id
                        allocation = session.query(Allocation).get(
                            allocation_id)
                        instrument = allocation.instrument
                        session.add(
                            UserNotification(
                                user=user,
                                text=
                                f"New follow-up submission for object *{target.followup_request.obj_id}* by *{instrument.name}*",
                                url=f"/source/{target.followup_request.obj_id}",
                            ))

                else:
                    session.add(
                        UserNotification(
                            user=user,
                            text=
                            f"New {target.__class__.__name__.lower()} on your favorite source *{target.obj_id}*",
                            url=f"/source/{target.obj_id}",
                        ))
                ws_flow.push(user.id, "skyportal/FETCH_NOTIFICATIONS")