def post(self):
    """Store an uploaded observation file.

    Expects the raw file bytes as the request body with Content-Type
    ``application/octet`` and an optional ``filename`` query parameter.
    Persists a metadata record in the database and the raw bytes in the
    object store, committing the DB record only after the object store
    write succeeds.

    Returns:
        tuple: ``({"status": "ok", "obs_id": ...}, 201)`` on success.

    Raises:
        BadRequest: wrong Content-Type, or the payload could not be
            parsed as an observation.
        InternalServerError: the object store rejected the upload.
    """
    try:
        if request.headers.get("Content-Type", None) != "application/octet":
            raise BadRequest("Content-Type should be application/octet")
        filename = request.args.get("filename", "", type=str)
        # Database and object-store connections
        db = get_session(DB_URL)
        ds = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET)
        # Read data from request
        data = BytesIO(request.data)
        obs = ObservationDAO(data, filename)
        obs_rec = obs.observation_record()
        db.add(obs_rec)
        # ObservationDAO presumably reads the stream while parsing, which
        # leaves the position at EOF; rewind so the object store receives
        # the full payload (nbytes is position-independent, the read is not).
        data.seek(0)
        ds.put(obs_rec.obs_id, data, data.getbuffer().nbytes)
        app.logger.info("Stored {} as {}".format(obs_rec.filename, obs_rec.obs_id))
        db.commit()
        return {"status": "ok", "obs_id": obs_rec.obs_id}, status.HTTP_201_CREATED
    except ObservationDAOError as e:
        # Pass the message, not the exception object, as the HTTP description
        raise BadRequest(str(e))
    except DatastoreError as e:
        raise InternalServerError("Datastore Error: {}".format(e))
def get(self, obs_id):
    """Return a downsampled view of an observation, suitable for
    graphical rendering.

    Args:
        obs_id: identifier of the stored observation object.

    Raises:
        InternalServerError: the object store could not supply the data.
    """
    try:
        store = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET)
        dao = ObservationDAO(store.get(obs_id))
        return dao.view()
    except DatastoreError as err:
        raise InternalServerError(err)
def get(self, obs_id):
    """Return an observation's view, logging it at debug level first.

    Args:
        obs_id: identifier of the stored observation object.

    Raises:
        InternalServerError: the object store could not supply the data.
    """
    try:
        ds = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET)
        o = ObservationDAO(ds.get(obs_id))
        ov = o.view()
        app.logger.debug(ov)
        # Return the already-computed view rather than rebuilding it
        # with a second o.view() call.
        return ov
    except DatastoreError as e:
        raise InternalServerError(e)
def get(self, evt_id):
    """Return SAX data for a detected Event.

    Looks up the event, fetches its parent observation from the object
    store, slices out the requested window, optionally bandpasses and
    rectifies it, then reduces it with PAA and encodes the result as a
    SAX word.

    Query args:
        offset: ms shift of the window start from the event start (default 0).
        length: window length in ms (required).
        absolute / bandpass: truthy-string flags enabling rectification
            and bandpass filtering respectively.
        bandpassLow / bandpassHigh: filter corner frequencies.
        paaInt: PAA interval (required).
        alphabet: SAX alphabet (required).
        sax: parsed but never used below — NOTE(review): dead parameter?

    Raises:
        InternalServerError: PAA/SAX failure, datastore failure, or
            duplicate event rows.
        NotFound: no event with this id.
    """
    try:
        parser = reqparse.RequestParser()
        parser.add_argument("offset", type=int, default=0)
        parser.add_argument("length", type=int, required=True)
        parser.add_argument("absolute", type=str)
        parser.add_argument("bandpass", type=str)
        parser.add_argument("bandpassLow", type=int)
        parser.add_argument("bandpassHigh", type=int)
        parser.add_argument("sax", type=str)
        parser.add_argument("paaInt", type=int, required=True)
        parser.add_argument("alphabet", type=str, required=True)
        p = parser.parse_args()
        ds = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET)
        db = get_session(DB_URL)
        evt = db.query(EventRecord).filter_by(evt_id=evt_id).one()
        # The raw observation data lives under the event's parent obs_id
        raw = ds.get(evt.obs_id)
        obs = ObservationDAO(raw)
        # Window: [event start + offset, +length] in milliseconds
        start = (evt.start + timedelta(milliseconds=p["offset"]))
        end = (start + timedelta(milliseconds=p["length"]))
        obs.slice(start=start.timestamp(), end=end.timestamp())
        obs.normalise()
        if is_true(p["bandpass"]):
            obs.bandpass(p["bandpassLow"], p["bandpassHigh"])
        if is_true(p["absolute"]):
            obs.absolute()
        # Piecewise Aggregate Approximation over the sliced series
        paa = Paa(obs.series())
        paa_data = paa(p["paaInt"])
        # Round-trip through JSON to get {index -> value} pairs, then
        # sort by the (integer) index for a stable x-ordering.
        paa_results = [{ "x": int(k), "y": v } for k, v in json.loads(paa_data.to_json(orient="index")).items()]
        paa_results = sorted(paa_results, key=lambda x: x["x"])
        sax = Sax(paa_data)
        return { "original": obs.view(), "paa": paa_results, "sax": "".join([i for i in sax(p["alphabet"])]) }
    except (PaaError, SaxError) as e:
        raise InternalServerError(e)
    except NoResultFound:
        raise NotFound
    except MultipleResultsFound:
        raise InternalServerError("Multiple results found, this is bad")
    except DatastoreError as e:
        raise InternalServerError(e)
def delete(self, obs_id):
    """Delete an Observation, or only its detected events.

    Query args:
        eventsOnly: when the literal string "true", delete only the
            EventRecord rows and keep both the ObservationRecord and
            the stored object.

    Returns:
        tuple: ``("", 204)`` on success.

    Raises:
        InternalServerError: the object store failed to delete the data.
    """
    try:
        ds = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET)
        db = get_session(DB_URL)
        events_only = request.args.get("eventsOnly") == "true"
        # Events are always removed
        db.query(EventRecord).filter(EventRecord.obs_id == obs_id).delete()
        if not events_only:
            db.query(ObservationRecord).filter(ObservationRecord.obs_id == obs_id).delete()
            # Only remove the stored object when the observation itself is
            # being deleted; eventsOnly must leave the raw data (and its
            # surviving ObservationRecord) intact.
            ds.delete(obs_id)
        # Commit DB if all went well
        db.commit()
        return "", status.HTTP_204_NO_CONTENT
    except DatastoreError as e:
        raise InternalServerError(e)
def get(self, obs_id):
    """Download the raw observation file as an attachment.

    The download filename is the one recorded at upload time, falling
    back to the observation id when no filename was stored.

    Raises:
        NotFound: no ObservationRecord exists for this id.
        InternalServerError: datastore failure or duplicate records.
    """
    store = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET)
    session = get_session(DB_URL)
    try:
        record = session.query(ObservationRecord).filter_by(obs_id=obs_id).one()
        download_name = record.filename if len(record.filename) > 0 else obs_id
        payload = store.get(obs_id)
        return send_file(
            payload,
            as_attachment=True,
            attachment_filename=download_name,
            mimetype="application/octet"
        )
    except (DatastoreError, MultipleResultsFound) as err:
        raise InternalServerError(err)
    except NoResultFound:
        raise NotFound("{} not found".format(obs_id))
def get(self, obs_id):
    """Return the trigger data for Event Detection on an Observation.

    Loads pre-computed trigger data (JSON, stored alongside the raw
    object under ``trigger_data/``), rebases its time axis onto the
    observation's absolute start time, and downsamples it to roughly
    1000 points for plotting.

    Returns:
        list: records with a millisecond-epoch ``t`` column plus the
        resampled means of the trigger columns.
    """
    try:
        ds = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET)
        db = get_session(DB_URL)
        obs_rec = db.query(ObservationRecord).filter_by(obs_id=obs_id).one()
        raw = ds.get("trigger_data/{}.json".format(obs_id))
        df = pd.read_json(raw)
        # Observation bounds as millisecond epoch timestamps
        start_ms = obs_rec.start.timestamp() * 1000
        end_ms = obs_rec.end.timestamp() * 1000
        app.logger.info("Start: {}".format(start_ms))
        # "t" is presumably stored as ms offsets relative to the
        # observation start — rebase to absolute time. TODO confirm
        # against the producer of trigger_data/*.json.
        df["t"] = df["t"].add(start_ms)
        df["t"] = pd.to_datetime(df["t"], unit="ms")
        # Bucket size in ms chosen so the span yields ~1000 samples
        downsample_freq = int((end_ms - start_ms) / 1000)
        df.set_index("t", inplace=True)
        # "L" is the pandas millisecond offset alias (deprecated in
        # newer pandas in favour of "ms")
        downsample = df.resample("{}L".format(downsample_freq)).mean()
        # Datetime index back to ms epoch (ns // 1e6) for the client
        downsample["t"] = downsample.index.astype(np.int64) // 10 ** 6
        return json.loads(downsample.to_json(orient="records"))
    except DatastoreError as e:
        # NOTE(review): returns a plain (body, 404) tuple instead of
        # raising NotFound like the other endpoints — confirm intended.
        return str(e), status.HTTP_404_NOT_FOUND
def get(self, obs_id, evt_id):
    """Download an event's waveform as a SAC file.

    Slices the event window out of the parent observation and streams
    it back as an attachment.

    Query args:
        offset: ms shift of the window start from the event start (default 0).
        length: window length in ms; when omitted, the event's own end is used.
        filename: download name; defaults to "<evt_id>.SAC".

    Raises:
        NotFound: no matching event.
        InternalServerError: duplicate events or datastore failure.
    """
    try:
        argp = reqparse.RequestParser()
        argp.add_argument("offset", type=int, default=0)
        argp.add_argument("length", type=int)
        argp.add_argument("filename", type=str)
        args = argp.parse_args()
        # Look up the event and fetch its parent observation
        store = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET)
        session = get_session(DB_URL)
        event = session.query(EventRecord).filter_by(obs_id=obs_id, evt_id=evt_id).one()
        observation = ObservationDAO(store.get(obs_id))
        # Window to slice out of the observation
        window_start = event.start + timedelta(milliseconds=args["offset"])
        if args["length"]:
            window_end = window_start + timedelta(milliseconds=args["length"])
        else:
            window_end = event.end
        observation.slice(window_start.timestamp(), window_end.timestamp())
        # Serialize the slice to SAC in memory and send it
        download_name = args["filename"] if args["filename"] else "{}.SAC".format(evt_id)
        buf = BytesIO()
        observation.stream.write(buf, format="SAC")
        buf.seek(0)
        return send_file(
            buf,
            as_attachment=True,
            attachment_filename=download_name,
            mimetype="application/octet"
        )
    except NoResultFound:
        raise NotFound
    except MultipleResultsFound:
        raise InternalServerError("Multiple results found, this is bad")
    except DatastoreError as e:
        raise InternalServerError(e)
from seismic.observations import ObservationDAO, ObservationDAOError from seismic.datastore import Datastore, DatastoreError from seismic.metadb import get_session, ObservationRecord, EventRecord from seismic.detector import StaLtaDetect, SaxDetect, DetectorError MINIO_HOST = getenv("MINIO_HOST", "localhost:9000") MINIO_ACCESS_KEY = getenv("MINIO_ACCESS_KEY", "dev-TKE8KC10YL") MINIO_SECRET_KEY = getenv("MINIO_SECRET_KEY", "dev-ALUP1N7WUO") MINIO_BUCKET = getenv("MINIO_BUCKET", "raw") DB_URL = getenv("DB_URL", "postgresql://*****:*****@localhost:5432/seismic") BROKER_URL = getenv("BROKER_URL", "redis://localhost:6379") capp = Celery('tasks', broker=BROKER_URL) ds = Datastore(MINIO_HOST, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_BUCKET) logger = logging.getLogger() logger.setLevel(logging.DEBUG) @capp.task() def stalta_detector(obs_id, trace, bp_low, bp_high, short_window, long_window, nstds, trigger_len): """ Run detector to find events and store in metadb Args: obs_id (string): Observation ID trace (int): Number of trace in file (probably 0) bp_low (int): Low frequency for bandpass