Example #1
import gcn  # pygcn: client for receiving GCN/TAN VOEvent notices

from baselayer.log import make_log

# `api` and `get_token` are SkyPortal helpers defined elsewhere in the project
admin_token = get_token()


@gcn.include_notice_types(
    gcn.NoticeType.FERMI_GBM_FLT_POS,
    gcn.NoticeType.FERMI_GBM_GND_POS,
    gcn.NoticeType.FERMI_GBM_FIN_POS,
    gcn.NoticeType.FERMI_GBM_SUBTHRESH,
    gcn.NoticeType.LVC_PRELIMINARY,
    gcn.NoticeType.LVC_INITIAL,
    gcn.NoticeType.LVC_UPDATE,
    gcn.NoticeType.LVC_RETRACTION,
    gcn.NoticeType.LVC_TEST,
    gcn.NoticeType.AMON_ICECUBE_COINC,
    gcn.NoticeType.AMON_ICECUBE_HESE,
    gcn.NoticeType.ICECUBE_ASTROTRACK_GOLD,
    gcn.NoticeType.ICECUBE_ASTROTRACK_BRONZE,
)
def handle(payload, root):
    response_status, data = api('POST',
                                'gcn_event',
                                data={'xml': payload},
                                token=admin_token)


if __name__ == "__main__":
    log = make_log("gcnserver")

    gcn.listen(handler=handle)
Example #2
from sqlalchemy import desc
from sqlalchemy.orm import joinedload
from collections import defaultdict
from baselayer.app.access import auth_or_token
from baselayer.log import make_log
from ...base import BaseHandler
from ....models import Obj, Source
from .source_views import t_index

default_prefs = {'maxNumSources': 5}

log = make_log('api/recent_sources')


class RecentSourcesHandler(BaseHandler):
    @classmethod
    def get_recent_source_ids(cls, current_user):
        user_prefs = getattr(current_user, 'preferences', None) or {}
        recent_sources_prefs = user_prefs.get('recentSources', {})
        recent_sources_prefs = {**default_prefs, **recent_sources_prefs}

        max_num_sources = int(recent_sources_prefs['maxNumSources'])
        query_results = (
            Source.query_records_accessible_by(current_user).filter(
                Source.active.is_(True)).order_by(desc(
                    Source.created_at)).distinct(
                        Source.obj_id,
                        Source.created_at).limit(max_num_sources).all())
        ids = map(lambda src: src.obj_id, query_results)
        return ids
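A hedged sketch (not from the original file) of a `get` endpoint on this handler that consumes the helper above; the response shape and the follow-up `Obj` query are illustrative assumptions:

    @auth_or_token
    def get(self):
        # Illustrative only: fetch the recent source ids for the authenticated
        # user and load the corresponding Obj rows.
        recent_ids = list(self.get_recent_source_ids(self.current_user))
        objs = Obj.query.filter(Obj.id.in_(recent_ids)).all()
        return self.success(data={'recentSources': [o.id for o in objs]})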
Example #3
import pandas as pd
from regions import Regions
from datetime import datetime, timedelta
import numpy as np

import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.orm import joinedload

from baselayer.log import make_log
from baselayer.app.flow import Flow
from baselayer.app.env import load_env

from . import FollowUpAPI

log = make_log('api/observation_plan')

env, cfg = load_env()

default_filters = cfg.get('app.observation_plan.default_filters',
                          ['g', 'r', 'i'])


def generate_plan(observation_plan_id, request_id, user_id):
    """Use gwemopt to construct observing plan."""

    from ..models import DBSession
    from skyportal.handlers.api.instrument import add_tiles

    Session = scoped_session(
        sessionmaker(bind=DBSession.session_factory.kw["bind"]))
Example #4
import os
import ssl

from aiosmtpd.controller import Controller

from baselayer.app.env import load_env
from baselayer.log import make_log


class CustomSMTPHandler:
    async def handle_DATA(self, server, session, envelope):
        peer = session.peer
        mail_from = envelope.mail_from
        rcpt_tos = envelope.rcpt_tos
        data = envelope.content  # type: bytes
        log(f"Receiving message from: {peer}")
        log(f"Message addressed from: {mail_from}")
        log(f"Message addressed to  : {rcpt_tos}")
        log(f"Message length        : {len(data)}")
        return '250 OK'


if __name__ == "__main__":
    env, cfg = load_env()
    log = make_log("testsmtpserver")

    if "test_server" in cfg:
        smtp_port = cfg["test_server.smtp_port"]

        log(f"Listening for test SMTP requests on port {smtp_port}")
        # SMTP TLS context
        context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        cert_file = os.path.join(os.path.dirname(__file__), "cert.pem")
        key_file = os.path.join(os.path.dirname(__file__), "key.pem")

        context.load_cert_chain(cert_file, key_file)
        handler = CustomSMTPHandler()

        controller = Controller(
            handler,
Example #5
import re
import subprocess

from baselayer.log import make_log

log = make_log('gitlog')


def get_gitlog(
    cwd='.',
    name=None,
    pr_url_base='https://github.com/skyportal/skyportal/pull',
    commit_url_base='https://github.com/skyportal/skyportal/commit',
    N=1000,
):
    """Return git log for a given directory.

    Parameters
    ----------
    cwd : str
        Where to gather logs.
    name : str
        Name of these logs. Value is propagated into the output dictionary.
    pr_url_base : str
        Base URL for pull request links.
    commit_url_base : str
        Base URL for commit links.
    N : int
        Number of log entries to return.

    Returns
    -------
    dict
        The output dictionary has the following keys:
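A hedged usage sketch for `get_gitlog`; the argument values are illustrative, and the exact keys of the returned dictionary depend on the rest of the implementation:

# Illustrative call (not from the original file): collect the 50 most recent
# commits of the current checkout and log which keys the result carries.
gitlog = get_gitlog(cwd='.', name='skyportal', N=50)
log(f"gitlog keys: {list(gitlog.keys())}")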
Example #6
File: obj.py Project: AlexGKim/skyportal
from baselayer.app.env import load_env
from baselayer.app.models import (
    Base,
    DBSession,
    public,
    restricted,
    CustomUserAccessControl,
)
from baselayer.log import make_log

from .photometry import Photometry
from .spectrum import Spectrum
from .candidate import Candidate
from .thumbnail import Thumbnail
from .cosmo import cosmo

_, cfg = load_env()
log = make_log('models.obj')

# The minimum signal-to-noise ratio to consider a photometry point as a detection
PHOT_DETECTION_THRESHOLD = cfg["misc.photometry_detection_threshold_nsigma"]


def delete_obj_if_all_data_owned(cls, user_or_token):
    from .source import Source

    allow_nonadmins = cfg["misc.allow_nonadmins_delete_objs"] or False

    deletable_photometry = Photometry.query_records_accessible_by(
        user_or_token, mode="delete"
    ).subquery()
    nondeletable_photometry = (
        DBSession()
Example #7
from penquins import Kowalski
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from typing import Mapping, Optional
import uuid

from baselayer.log import make_log
from baselayer.app.access import auth_or_token, permissions
from baselayer.app.env import load_env
from ..base import BaseHandler
from ...models import Instrument, Source, Stream
from skyportal.model_util import create_token, delete_token

env, cfg = load_env()
log = make_log("archive")

# A (dedicated) Kowalski instance, referred to as Gloria, that holds the ZTF light curve data
try:
    gloria = Kowalski(
        token=cfg["app.gloria.token"],
        protocol=cfg["app.gloria.protocol"],
        host=cfg["app.gloria.host"],
        port=int(cfg["app.gloria.port"]),
        timeout=10,
    )
    connection_ok = gloria.ping()
    log(f"Gloria connection OK: {connection_ok}")
    if not connection_ok:
        gloria = None
except Exception as e:
Example #8
import matplotlib
import numpy as np
import arviz as az
import requests

from tornado.ioloop import IOLoop
import tornado.web
import tornado.escape

from astropy.table import Table
import sncosmo

from baselayer.log import make_log
from baselayer.app.env import load_env

_, cfg = load_env()
log = make_log('sn_analysis_service')

# we need to set the backend here to ensure we
# can render the plot headlessly
matplotlib.use('Agg')
rng = np.random.default_rng()

default_analysis_parameters = {"source": "nugent-sn2p", "fix_z": False}


def upload_analysis_results(results, data_dict, request_timeout=60):
    """
    Upload the results to the webhook.
    """

    log("Uploading results to webhook")
Example #9
from baselayer.app.env import load_env
from baselayer.app.flow import Flow
from baselayer.log import make_log

from ..utils import http

env, cfg = load_env()

if cfg.get('app.atlas.port') is None:
    ATLAS_URL = f"{cfg['app.atlas.protocol']}://{cfg['app.atlas.host']}"
else:
    ATLAS_URL = (
        f"{cfg['app.atlas.protocol']}://{cfg['app.atlas.host']}:{cfg['app.atlas.port']}"
    )

log = make_log('facility_apis/atlas')


class ATLASRequest:
    """A dictionary structure for ATLAS forced photometry requests."""
    def _build_payload(self, request):
        """Payload json for ATLAS forced photometry requests.

        Parameters
        ----------

        request : skyportal.models.FollowupRequest
            The request to add to the queue and the SkyPortal database.

        Returns
        -------
Example #10
                self.write(response["body"]["string"])

            else:
                self.set_status(500)
                self.write("Could not find test route redirect")


def make_app():
    return tornado.web.Application([
        (".*", TestRouteHandler),
    ])


if __name__ == "__main__":
    env, cfg = load_env()
    log = make_log("testapiserver")
    my_vcr = vcr.VCR()
    my_vcr.register_matcher("atlas", atlas_request_matcher)
    my_vcr.register_matcher("lt", lt_request_matcher)
    my_vcr.register_matcher("lco", lco_request_matcher)
    my_vcr.register_matcher("ps1", ps1_request_matcher)
    my_vcr.register_matcher("ztf", ztf_request_matcher)
    my_vcr.register_matcher("kait", kait_request_matcher)
    my_vcr.register_matcher("treasuremap", treasuremap_request_matcher)
    my_vcr.register_matcher("swift", swift_request_matcher)
    if "test_server" in cfg:
        app = make_app()
        server = tornado.httpserver.HTTPServer(app)
        port = cfg["test_server.port"]
        server.listen(port)
Example #11
import os

import sqlalchemy as sa
from sqlalchemy import event
from sqlalchemy.orm import relationship

import requests

from baselayer.app.models import Base, AccessibleIfRelatedRowsAreAccessible
from baselayer.log import make_log

from ..utils.thumbnail import image_is_grayscale
from ..enum_types import thumbnail_types

log = make_log('models.thumbnail')


class Thumbnail(Base):
    """Thumbnail image centered on the location of an Obj."""

    create = read = AccessibleIfRelatedRowsAreAccessible(obj='read')

    # TODO delete file after deleting row
    type = sa.Column(thumbnail_types,
                     doc='Thumbnail type (e.g., ref, new, sub, dr8, ps1, ...)')
    file_uri = sa.Column(
        sa.String(),
        nullable=True,
        index=False,
        unique=False,
Example #12
import os
import shutil
import subprocess
import time

import tornado.ioloop
import tornado.web

from baselayer.app.env import load_env
from baselayer.log import make_log

env, cfg = load_env()
log = make_log("migration_manager")


conf_files = env.config
conf_flags = ["-x", f'config={":".join(conf_files)}'] if conf_files else []


class timeout_cache:
    def __init__(self, timeout):
        self.timeout = timeout
        self.lastrun = 0
        self.cache = None
        self.func = None

    def __call__(self, f):
        self.func = f
        return self.wrapped

    def wrapped(self, *args, **kwargs):
Example #13
import datetime

from sqlalchemy.orm import sessionmaker, scoped_session

from baselayer.log import make_log
from baselayer.app.env import load_env

from ..base import BaseHandler

from ...models import (
    DBSession,
    ObjAnalysis,
)

log = make_log('app/webhook')

_, cfg = load_env()

Session = scoped_session(sessionmaker(bind=DBSession.session_factory.kw["bind"]))


class AnalysisWebhookHandler(BaseHandler):
    def post(self, analysis_resource_type, token):
        """
        ---
        description: Return the results of an analysis
        tags:
          - webhook
        parameters:
          - in: path
            name: analysis_resource_type
Example #14
                    if not (k == "Transfer-Encoding" and "chunked" in v):
                        self.set_header(k, v[0])
                self.write(response["body"]["string"])

            else:
                self.set_status(500)
                self.write("Could not find test route redirect")


def make_app():
    return tornado.web.Application([
        (".*", TestRouteHandler),
    ])


if __name__ == "__main__":
    env, cfg = load_env()
    log = make_log("testserver")
    my_vcr = vcr.VCR()
    my_vcr.register_matcher("lt", lt_request_matcher)
    if "test_server" in cfg:
        app = make_app()
        server = tornado.httpserver.HTTPServer(app)
        port = cfg["test_server.port"]
        server.listen(port)

        refresh_cache_days = cfg["test_server.refresh_cache_days"]

        log(f"Listening for test HTTP requests on port {port}")
        tornado.ioloop.IOLoop.current().start()
Example #15
import gzip
import io

import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
import tornado.httpclient

from baselayer.app.access import auth_or_token
from baselayer.log import make_log
from ..base import BaseHandler
from ...models import (
    DBSession,
    Group,
    GroupStream,
    Obj,
    Stream,
    StreamUser,
    Source,
)
from .photometry import PhotometryHandler
from .thumbnail import ThumbnailHandler

log = make_log("alert")

c = tornado.httpclient.AsyncHTTPClient()


def make_thumbnail(a, ttype, ztftype):

    cutout_data = a[f'cutout{ztftype}']['stampData']
    with gzip.open(io.BytesIO(cutout_data), 'rb') as f:
        with fits.open(io.BytesIO(f.read())) as hdu:
            # header = hdu[0].header
            data_flipped_y = np.flipud(hdu[0].data)
    # fixme: png, switch to fits eventually
    buff = io.BytesIO()
    plt.close('all')
    fig = plt.figure()
Example #16
from urllib.parse import urlparse

from sqlalchemy.orm import scoped_session, sessionmaker

from baselayer.app.env import load_env
from baselayer.log import make_log

from ...models import (
    DBSession,
    AnalysisService,
    Group,
    Photometry,
    Spectrum,
    Annotation,
    Classification,
    Obj,
    Comment,
    ObjAnalysis,
)
from .photometry import serialize

log = make_log('app/analysis')

_, cfg = load_env()

Session = scoped_session(
    sessionmaker(bind=DBSession.session_factory.kw["bind"]))


def valid_url(trial_url):
    """
    determine if the URL is valid
    """
    try:
        rez = urlparse(trial_url)
        return all([rez.scheme, rez.netloc])
    except ValueError:
Example #17
    PlotObjTelAirmassHandler,
    PlotHoursBelowAirmassHandler,
    AnnotationsInfoHandler,
    EphemerisHandler,
    StandardsHandler,
    NotificationHandler,
    BulkNotificationHandler,
    RecentGcnEventsHandler,
    FilterWavelengthHandler,
)

from . import model_util, openapi
from .models import init_db


log = make_log('app_server')


class CustomApplication(tornado.web.Application):
    def log_request(self, handler):
        # We don't want to log expected exceptions intentionally raised
        # during the auth pipeline; such exceptions will have "google-oauth2" in
        # their request route
        if "google-oauth2" in str(handler.request.uri):
            return
        return super().log_request(handler)


skyportal_handlers = [
    # API endpoints
    (r'/api/acls', ACLHandler),
Example #18
import arrow
import sqlalchemy as sa
from sqlalchemy import func
from sqlalchemy.orm import joinedload

from ..base import BaseHandler
from ...models import (
    DBSession,
    Obj,
    Photometry,
    PhotStat,
)
from baselayer.app.access import permissions, auth_or_token
from baselayer.log import make_log

log = make_log('api/source')

DEFAULT_SOURCES_PER_PAGE = 100
MAX_SOURCES_PER_PAGE = 500


class PhotStatHandler(BaseHandler):
    @auth_or_token
    def get(self, obj_id=None):
        """
        ---
        description: retrieve the PhotStat associated with the obj_id.
        tags:
          - photometry
        parameters:
          - in: path
Example #19
    Token,
    UserNotification,
)

_, cfg = load_env()


def has_admin_access_for_group(user, group_id):
    groupuser = (GroupUser.query.filter(GroupUser.group_id == group_id).filter(
        GroupUser.user_id == user.id).first())
    return len({"System admin", "Manage groups", "Manage users"}.intersection(
        set(user.permissions))) > 0 or (groupuser is not None
                                        and groupuser.admin)


log = make_log('api/group')


class GroupHandler(BaseHandler):
    @auth_or_token
    def get(self, group_id=None):
        """
        ---
        single:
          description: Retrieve a group
          tags:
            - groups
          parameters:
            - in: path
              name: group_id
              required: true
Example #20
    StreamPhotometry,
    PhotStat,
)

from ...models.schema import (
    PhotometryMag,
    PhotometryFlux,
    PhotFluxFlexible,
    PhotMagFlexible,
    PhotometryRangeQuery,
)
from ...enum_types import ALLOWED_MAGSYSTEMS

_, cfg = load_env()

log = make_log('api/photometry')

MAX_NUMBER_ROWS = 10000


def save_data_using_copy(rows, table, columns):
    # Prepare data
    output = StringIO()
    df = pd.DataFrame.from_records(rows)
    # Coerce missing non-numbers and numbers, respectively, for SQLAlchemy
    df.replace("NaN", "null", inplace=True)
    df.replace(np.nan, "NaN", inplace=True)

    df.to_csv(
        output,
        index=False,
Example #21
import bson.json_util as bj
import os
import requests

from baselayer.log import make_log
from baselayer.app.access import auth_or_token
from ..base import BaseHandler
from ...models import DBSession, Filter, Stream

log = make_log("kowalski_filter")

s = requests.Session()


class KowalskiFilterHandler(BaseHandler):
    @auth_or_token
    def get(self, filter_id):
        """
        ---
        single:
          description: Retrieve a filter as stored on Kowalski
          parameters:
            - in: path
              name: filter_id
              required: true
              schema:
                type: integer
          responses:
            200:
              content:
                application/json:
Example #22
from astropy import cosmology
from astropy import units as u

from baselayer.log import make_log
from baselayer.app.env import load_env

log = make_log('cosmology')
_, cfg = load_env()


def establish_cosmology(cfg=cfg):
    user_cosmo = cfg['misc']['cosmology']

    if user_cosmo in cosmology.realizations.available:
        cosmo = cosmology.default_cosmology.get_cosmology_from_string(
            user_cosmo)

    elif isinstance(user_cosmo, dict):
        try:
            if user_cosmo.get('flat'):
                cosmo = cosmology.FlatLambdaCDM(
                    user_cosmo['H0'],
                    user_cosmo['Om0'],
                    Tcmb0=user_cosmo.get('Tcmb0', 2.725),
                    Neff=user_cosmo.get('Neff', 3.04),
                    m_nu=u.Quantity(user_cosmo.get('m_nu', [0.0, 0.0, 0.0]),
                                    u.eV),
                    name=user_cosmo.get("name", "user_cosmology"),
                    Ob0=user_cosmo.get('Ob0', 0.0455),
                )
            else:
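A hedged sketch of the two configuration shapes `establish_cosmology` handles, based on the branches shown above and assuming the function returns the constructed cosmology; the numerical values and the `my_cosmology` name are illustrative only:

# Illustrative configuration values (not from the original file).
# 1. The name of a built-in astropy realization:
cfg_named = {'misc': {'cosmology': 'Planck15'}}

# 2. A custom flat Lambda-CDM specification, using the keys read in the dict branch:
cfg_custom = {
    'misc': {
        'cosmology': {
            'flat': True,
            'H0': 70.0,  # Hubble constant [km/s/Mpc]
            'Om0': 0.3,  # matter density
            'name': 'my_cosmology',
        }
    }
}

cosmo = establish_cosmology(cfg=cfg_custom)
log(f"Using cosmology {cosmo.name} with H0 = {cosmo.H0}")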
Example #23
from baselayer.app.env import load_env
from baselayer.log import make_log

import requests
import time
import subprocess

env, cfg = load_env()
log = make_log('health')

SECONDS_BETWEEN_CHECKS = cfg['health_monitor.seconds_between_checks']
ALLOWED_DOWNTIME_SECONDS = cfg['health_monitor.allowed_downtime_seconds']
ALLOWED_TIMES_DOWN = cfg['health_monitor.allowed_times_down']
REQUEST_TIMEOUT_SECONDS = cfg['health_monitor.request_timeout_seconds']
STARTUP_GRACE_SECONDS = cfg['health_monitor.startup_grace_seconds']


class DownStatus:
    def __init__(self, nr_times=0, timestamp=None):
        self.nr_times = nr_times
        self.timestamp = time.time() if timestamp is None else timestamp

    def increase(self):
        self.nr_times += 1
        return self


def migrated():
    try:
        r = requests.get(f'http://localhost:{cfg["ports.migration_manager"]}',
                         timeout=10)
Example #24
import json
import traceback

import tornado.ioloop
import tornado.web
import tornado.escape
from tornado.httpclient import AsyncHTTPClient

from baselayer.app.env import load_env
from baselayer.log import make_log

env, cfg = load_env()
log = make_log('slack')


class MainHandler(tornado.web.RequestHandler):
    def set_default_headers(self):
        self.set_header('Content-Type', 'application/json')

    def error(self, code, message):
        self.set_status(code)
        self.write({'message': message})

    def get(self):
        self.write({'status': 'active'})

    async def post(self):
        """
        Handles the error checking and posting to the Slack webhook.
        The Slack Webhook API is described here:
            https://api.slack.com/messaging/webhooks#posting_with_webhooks
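For context, a minimal hedged sketch of posting a message to a Slack incoming webhook with Tornado's `AsyncHTTPClient`; the `webhook_url` is a placeholder and this is not the handler's actual body:

# Hedged sketch (placeholder webhook_url, not the original handler body).
# Slack incoming webhooks expect a JSON payload with at least a "text" field.
async def post_to_slack(webhook_url, text):
    client = AsyncHTTPClient()
    response = await client.fetch(
        webhook_url,
        method='POST',
        headers={'Content-Type': 'application/json'},
        body=json.dumps({'text': text}),
    )
    log(f'Slack webhook responded with HTTP {response.code}')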
Example #25
import subprocess
import os
import time

import tornado.ioloop
import tornado.web

from baselayer.app.env import load_env
from baselayer.log import make_log

env, cfg = load_env()
log = make_log('migration_manager')


conf_file = env.config[0]
conf_flags = ['-x', f'config={conf_file}'] if conf_file else []


class timeout_cache:
    def __init__(self, timeout):
        self.timeout = timeout
        self.lastrun = 0
        self.cache = None
        self.func = None

    def __call__(self, f):
        self.func = f
        return self.wrapped

    def wrapped(self, *args, **kwargs):
        tic = self.lastrun
Example #26
import os
from os.path import join as pjoin
import pathlib
import requests
import sys
import signal
import subprocess
import time

sys.path.insert(0, pjoin(os.path.dirname(__file__), '../..'))  # noqa

from baselayer.tools.supervisor_status import supervisor_status
from baselayer.app.model_util import clear_tables
from baselayer.log import make_log

log = make_log('test_frontend')

try:
    import pytest_randomly  # noqa

    RAND_ARGS = '--randomly-seed=1'
except ImportError:
    RAND_ARGS = ''

TEST_CONFIG = 'test_config.yaml'


def all_services_running():
    """Check that all webservices were started successfully.

    All webservices controlled by `supervisor` must be currently running
Example #27
from astropy.wcs.wcs import FITSFixedWarning
from astropy.wcs import WCS
from astropy.wcs.utils import pixel_to_skycoord
from astropy.io import fits
from astropy.visualization import ImageNormalize, ZScaleInterval
from astropy import units as u
from reproject import reproject_adaptive
import pyvo as vo
from pyvo.dal.exceptions import DALQueryError

from .cache import Cache

from baselayer.log import make_log
from baselayer.app.env import load_env

log = make_log('finder-chart')

_, cfg = load_env()


class GaiaQuery:

    alt_tap = 'https://gaia.aip.de/tap'
    alt_main_db = 'gaiaedr3'

    # conversion for units in VO tables to astropy units
    unit_conversion = {
        'Dimensionless': None,
        'Angle[deg]': u.deg,
        'Time[Julian Years]': u.yr,
        'Magnitude[mag]': u.mag,
Example #28
File: app.py Project: dmitryduev/baselayer
import tornado.ioloop
import requests

from baselayer.app.env import load_env, parser
from baselayer.log import make_log

parser.description = 'Launch app microservice'
parser.add_argument(
    '-p',
    '--process',
    type=int,
    help='Process number, when multiple server processes are used.'
    ' This number gets added to the app port.')
env, cfg = load_env()

log = make_log(f'app_{env.process or 0}')

# We import these later; otherwise their call to load_env interferes
# with argument parsing
from baselayer.app.app_server import (
    handlers as baselayer_handlers,
    settings as baselayer_settings,
)  # noqa: E402

app_factory = cfg['app.factory']
baselayer_settings['cookie_secret'] = cfg['app.secret_key']
baselayer_settings['autoreload'] = env.debug
# if env.debug:
#     import logging
#     logging.basicConfig()
#     logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
Example #29
    Annotation,
    Group,
    Classification,
    Listing,
    Comment,
)
from ...utils.cache import Cache, array_to_bytes


_, cfg = load_env()
cache_dir = "cache/candidates_queries"
cache = Cache(
    cache_dir=cache_dir,
    max_age=cfg["misc.minutes_to_keep_candidate_query_cache"] * 60,
)
log = make_log('api/candidate')

Session = scoped_session(sessionmaker(bind=DBSession.session_factory.kw["bind"]))


def add_linked_thumbnails_and_push_ws_msg(obj_id, user_id):
    with Session() as session:
        try:
            user = session.query(User).get(user_id)
            if Obj.get_if_accessible_by(obj_id, user) is None:
                raise AccessError(
                    f"Insufficient permissions for User {user_id} to read Obj {obj_id}"
                )
            obj = session.query(Obj).get(obj_id)
            obj.add_linked_thumbnails(session=session)
            flow = Flow()
Example #30
from io import StringIO

from ..base import BaseHandler
from ...models import (
    DBSession,
    GcnEvent,
    Instrument,
    InstrumentField,
    InstrumentFieldTile,
    Localization,
    LocalizationTile,
    Telescope,
)
from ...enum_types import ALLOWED_BANDPASSES

log = make_log('api/instrument')

Session = scoped_session(sessionmaker(bind=DBSession.session_factory.kw["bind"]))


class InstrumentHandler(BaseHandler):
    @permissions(['System admin'])
    def post(self):
        # See bottom of this file for redoc docstring -- moved it there so that
        # it could be made an f-string.

        data = self.get_json()
        telescope_id = data.get('telescope_id')
        telescope = Telescope.get_if_accessible_by(
            telescope_id, self.current_user, raise_if_none=True, mode="read"
        )