Example #1
    def get(self):
        # With no query args, return the expected-parameter string; also
        # reset the JSON formatting (indent) settings outside debug mode
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
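        # 'op=time' asks only for the most recent HMM timestamp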
        if 'op' in args and args['op'] == 'time':
            t = db.session.query(func.max(HMM.timestamp)).one()[0]
            hst = (args['timezone'].lower() == 'hst')
            d = j2k_to_date(t, hst).strftime(FFMT)
            return {'HMM': d}, 200
        else:
            starttime = args['starttime']
            endtime = args['endtime']
            tz = (args['timezone'].lower() == 'hst')
            sd, ed = create_date_from_input(starttime, endtime, tz)
            jsd = date_to_j2k(sd, tz)
            jed = date_to_j2k(ed, tz)
            data = (HMM
                    .query.filter(HMM.timestamp.between(jsd, jed))
                    .order_by(HMM.timestamp.desc()).all())
            output = []
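            # Bind helpers to local names to keep the loop tight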
            jtod = j2k_to_date
            append = output.append
            for d in data:
                append({'date': jtod(d.timestamp, tz).strftime(FFMT),
                        'accumrate': d.accumrate,
                        'percentjuvenile': d.percentjuvenile})
            return {'nr': len(data),
                    'records': output}, 200
Example #2
    def get(self):
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
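        # Restrict the query to the requested time window and rank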
        w_items = [
            LavaLevel.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)),
            LavaLevel.rid == args['rank']
        ]
        data = LavaLevel.query.filter(*w_items).all()

        output = []
        append = output.append
        jtod = j2k_to_date
        for d in data:
            append({
                'date': jtod(d.timestamp, tz).strftime(FFMT),
                'rank': d.rank.name,
                'lavalevel': d.lavalevel
            })
        return {'nr': len(data), 'records': output}, 200
Example #3
    def get(self):
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        channels = args['channel'].split(',')
        unknown = {x.upper() for x in channels}.difference(triggers._tablenames)
        if unknown:
            return {'Error': 'unknown channel(s): %s' % ','.join(unknown)}

        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        output = {}
        count = 0

        for channel in channels:
            queryclauses = []
            orderby = []
            cname = getattr(triggers, channel.upper())

            # Set up query filters
            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))

            # Set up order by values
            orderby.append(cname.timestamp.asc())

            q = cname.query.filter(*queryclauses).order_by(*orderby)
            try:
                q = q.limit(MAX_LINES['TRIGGERS'])
            except KeyError:
                pass
            data = q.all()

            output[channel] = []
            jtod = j2k_to_date
            append = output[channel].append

            for d in data:
                append({
                    'date': jtod(d.timestamp, tz).strftime(SFMT),
                    'triggers': d.triggers
                })
            count += len(data)
        return {'nr': count, 'records': output}, 200
Example #4
    def post(self):
        lf = logging.getLogger('file')
        try:
            args = json.loads(request.data)
            for arg in args:
                arg = clean_input(arg)
                d = date_to_j2k(arg['date'], False)
                item = HMM.query.filter_by(timestamp=d).first()
                if item:
                    # Update the existing row for this j2ksec in place
                    lf.debug('Updating item for j2ksec: ' + str(d))
                    if arg['ar'] != '':
                        item.accumrate = '%.3f' % float(arg['ar'])
                    else:
                        item.accumrate = None
                    if arg['pj'] != '':
                        item.percentjuvenile = '%.2f' % float(arg['pj'])
                    else:
                        item.percentjuvenile = None
                else:
                    item = HMM(time=arg['date'], ar=arg['ar'], pj=arg['pj'])
                    lf.debug("Attempting to insert ash observation for "
                             "date=%s, accumrate=%s, percentjuvenile=%s" %
                             (arg['date'], arg['ar'], arg['pj']))
                    db.session.add(item)
            db.session.commit()
            lf.debug("Item added/updated")
            return {'status': 'ok'}, 201
        except exc.SQLAlchemyError as e:
            lf.debug(f"Insert failed: {e}")
            return {'status': 'error inserting item, check logs'}, 200
Example #5
    def get(self):
        # With no query args, return the expected-parameter string; also
        # reset the JSON formatting (indent) settings outside debug mode
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
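        # 'op=time' asks only for the most recent timestamp in each table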
        if 'op' in args and args['op'] == 'time':
            kierz = db.session.query(func.max(edxrf.KIERZ.timestamp)).one()[0]
            kisum = db.session.query(func.max(edxrf.KISUM.timestamp)).one()[0]
            return {
                'kierz': j2k_to_date(kierz, False).strftime(FFMT),
                'kisum': j2k_to_date(kisum, False).strftime(FFMT)
            }, 200
        else:
            channels = args['channel'].split(',')
            starttime = args['starttime']
            endtime = args['endtime']
            tz = (args['timezone'].lower() == 'hst')
            sd, ed = create_date_from_input(starttime, endtime, tz)
            jsd = date_to_j2k(sd, tz)
            jed = date_to_j2k(ed, tz)
            output = {}
            count = 0
            for channel in channels:
                stn = getattr(edxrf, channel.upper())
                ob = [stn.timestamp.desc()]
                data = (stn.query.filter(stn.timestamp.between(
                    jsd, jed)).order_by(*ob).all())
                out = []
                jtod = j2k_to_date
                append = out.append
                count += len(data)
                for d in data:
                    append({
                        'date': jtod(d.timestamp, tz).strftime(FFMT),
                        'rb': d.rb,
                        'sr': d.sr,
                        'y': d.y,
                        'zr': d.zr,
                        'nb': d.nb
                    })
                output[channel] = out
            return {'nr': count, 'records': output}, 200
Example #6
    def __init__(self, time='', rb='', sr='', y='', zr='', nb=''):
        self.timestamp = date_to_j2k(time, False)
        # Empty strings become NULL; otherwise store 2-decimal strings
        self.rb = '%.2f' % float(rb) if rb != '' else None
        self.sr = '%.2f' % float(sr) if sr != '' else None
        self.y = '%.2f' % float(y) if y != '' else None
        self.zr = '%.2f' % float(zr) if zr != '' else None
        self.nb = '%.2f' % float(nb) if nb != '' else None
        self.tid = 2
        self.rid = 1
Example #7
    def get(self):
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        w_items = [
            LaserLavaLevel.timestamp.between(date_to_j2k(start, tz),
                                             date_to_j2k(end, tz))
        ]
        data = LaserLavaLevel.query.filter(*w_items).all()

        output = []
        append = output.append
        jtod = j2k_to_date
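        # Apply the channel translation only when a raw sea-level reading
        # exists; otherwise pass the null values through untranslated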
        for d in data:
            if d.sealevel is None:
                append({
                    'date': jtod(d.timestamp, tz).strftime(FFMT),
                    'rank': d.rank.name,
                    'sealevel': d.sealevel,
                    'overlook': d.overlook
                })
            else:
                append({
                    'date': jtod(d.timestamp, tz).strftime(FFMT),
                    'rank': d.rank.name,
                    'sealevel': (d.sealevel * d.translation.csealevel +
                                 d.translation.dsealevel),
                    'overlook': (d.overlook * d.translation.coverlook +
                                 d.translation.doverlook)
                })
        return {'nr': len(data), 'records': output}, 200
Example #8
    def post(self):
        lf = logging.getLogger('file')
        try:
            args = json.loads(request.data)
            for arg in args:
                arg = clean_input(arg)
                tbl = arg['region']
                cname = getattr(edxrf, tbl.upper())
                d = date_to_j2k(arg['date'], False)
                item = cname.query.filter_by(timestamp=d).first()
                if item:
                    lf.debug('EDXRF::Updating item for j2ksec: ' + str(d))
                    # Empty strings clear the value; otherwise store
                    # 2-decimal strings, matching the constructor
                    for field in ('rb', 'sr', 'y', 'zr', 'nb'):
                        value = arg[field]
                        setattr(item, field,
                                '%.2f' % float(value) if value != '' else None)
                else:
                    item = cname(time=arg['date'],
                                 rb=arg['rb'],
                                 sr=arg['sr'],
                                 y=arg['y'],
                                 zr=arg['zr'],
                                 nb=arg['nb'])
                    lf.debug((f"EDXRF::Attempting to insert edxrf observation "
                              f"for region={arg['region']}, date={arg['date']}"
                              f", rb={arg['rb']}, sr={arg['sr']}, y={arg['y']}"
                              f", zr={arg['zr']}, nb={arg['nb']}"))
                    db.session.add(item)
            db.session.commit()
            lf.debug('EDXRF::Item added')
            return {'status': 'ok'}, 201
        except exc.SQLAlchemyError as e:
            lf.error(e)
            return {'status': 'error'}, 400
Example #9
    def get(self):
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        channels = args['channel'].split(',')
        unknown = {x.upper()
                   for x in channels}.difference(so2emissions._tablenames)
        if unknown:
            return {'Error': f"unknown channel(s): {','.join(unknown)}"}

        series = args['series'].split(',')
        unknown = {x.lower() for x in series}.difference(_series_options)
        if unknown:
            return {'Error': f"unknown series: {','.join(unknown)}"}

        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        output = {}
        count = 0

        for channel in channels:
            queryclauses = []
            orderby = []
            cname = getattr(so2emissions, channel.upper())

            # Set up query filters
            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))

            # Set up order by values
            orderby.append(cname.timestamp.asc())

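            # A rank of 0 means 'best available': order by rank id instead
            # of filtering on a specific rank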
            if args['rank'] == 0:
                orderby.append(cname.rid.desc())
            else:
                queryclauses.append(cname.rid == args['rank'])

            q = cname.query.filter(*queryclauses).order_by(*orderby)
            try:
                q = q.limit(MAX_LINES['SO2EMISSIONS'])
            except KeyError:
                pass
            data = q.all()

            output[channel] = []
            jtod = j2k_to_date
            append = output[channel].append

            for d in data:
                a = {
                    'date': jtod(d.timestamp, tz).strftime(SFMT),
                    'rank': d.rank.name
                }

                for i in series:
                    a[i] = getattr(d, i.lower())
                append(a)
            count += len(data)
        return {'nr': count, 'records': output}, 200
Example #10
    def __init__(self, time='', ar='', pj=''):
        self.timestamp = date_to_j2k(time, False)
        # Empty strings become NULL; otherwise store fixed-precision strings
        self.accumrate = '%.3f' % float(ar) if ar != '' else None
        self.percentjuvenile = '%.2f' % float(pj) if pj != '' else None
        self.tid = 2
        self.rid = 1
Example #11
    def get(self):
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        channels = args['channel'].split(',')
        unknown = {x.upper() for x in channels}.difference(strain._tablenames)
        if unknown:
            return {'Error': 'unknown channel(s): %s' % ','.join(unknown)}

        series = args['series'].split(',')
        unknown = {x.lower() for x in series}.difference(_series_options)
        if unknown:
            return {'Error': 'unknown series: %s' % ','.join(unknown)}

        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        output = {}
        count = 0

        for channel in channels:
            queryclauses = []
            orderby = []
            cname = getattr(strain, channel.upper())

            # Set up query filters
            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))

            # Set up order by values
            orderby.append(cname.timestamp.asc())

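            # A rank of 0 means 'best available': order by rank id instead
            # of filtering on a specific rank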
            if args['rank'] == 0:
                orderby.append(cname.rid.desc())
            else:
                queryclauses.append(cname.rid == args['rank'])

            q = cname.query.filter(*queryclauses).order_by(*orderby)
            try:
                q = q.limit(MAX_LINES['STRAIN'])
            except KeyError:
                pass
            data = q.all()

            output[channel] = []
            jtod = j2k_to_date
            append = output[channel].append

            # Means for optional debiasing (guard against an empty result set)
            if args['debias'] == 'mean' and data:
                m01 = (sum(x.dt01 for x in data) /
                       float(len(data)) if 'dt01' in series else 0)
                m02 = (sum(x.dt02 for x in data) /
                       float(len(data)) if 'dt02' in series else 0)
                mbarometer = (sum(x.barometer for x in data) /
                              float(len(data)) if 'barometer' in series else 0)
            else:
                m01 = 0
                m02 = 0
                mbarometer = 0

            for d in data:
                a = {
                    'date': jtod(d.timestamp, tz).strftime(SFMT),
                    'rank': d.rank.name
                }

                if 'dt01' in series:
                    a['dt01'] = ((d.dt01 - m01) * d.translation.cdt01 +
                                 d.translation.ddt01)
                if 'dt02' in series:
                    a['dt02'] = ((d.dt02 - m02) * d.translation.cdt02 +
                                 d.translation.ddt02)
                if 'barometer' in series:
                    # Presumably follows the c*/d* (scale/offset) naming used
                    # by the other series translations
                    a['barometer'] = ((d.barometer - mbarometer) *
                                      d.translation.cbarometer +
                                      d.translation.dbarometer)
                append(a)
            count += len(data)
        return {'nr': count, 'records': output}, 200
Example #12
    def get(self):
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        # Check that valid channels were queried
        channels = args['channel'].split(',')
        unknown = {x.upper() for x in channels}.difference(tilt._tablenames)
        if unknown:
            return {'Error': 'unknown channel: %s' % ','.join(unknown)}

        # Timezone
        tz = (args['timezone'].lower() == 'hst')

        # Start by getting all the data
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        raw_output = {}
        count = 0

        # If we're downsampling, we need to create the query by hand
        for channel in channels:
            cname = getattr(tilt, channel.upper())
            queryclauses = []
            orderby = []

            # Set up query filters
            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))

            # Set up orderby clauses
            orderby.append(cname.timestamp.asc())

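            # A rank of 0 means 'best available': order by rank id instead
            # of filtering on a specific rank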
            if args['rank'] == 0:
                orderby.append(cname.rid.desc())
            else:
                queryclauses.append(cname.rid == args['rank'])

            if args['downsample'] == 'none':
                # Query the data and map it to the raw_tilt_fields structure
                tt = tilt.TiltTranslation
                q_items = []
                q_items.extend([
                    cname.timestamp.label('timestamp'),
                    cname.rid.label('rid')
                ])
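                # Rotate raw x/y tilt into east/north components using the
                # station azimuth from the translation table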
                q_items.append(
                    (func.cos(func.radians(tt.azimuth)) *
                     (cname.xTilt * tt.cxTilt + tt.dxTilt).self_group() +
                     func.sin(func.radians(tt.azimuth)) *
                     (cname.yTilt * tt.cyTilt + tt.dyTilt).self_group()
                     ).label('east'))
                q_items.append(
                    (-func.sin(func.radians(tt.azimuth)) *
                     (cname.xTilt * tt.cxTilt + tt.dxTilt).self_group() +
                     func.cos(func.radians(tt.azimuth)) *
                     (cname.yTilt * tt.cyTilt + tt.dyTilt).self_group()
                     ).label('north'))
                q_items.extend([tilt.TiltRank.name, tt])

                # Add optional parameters
                if any(x in args['series'] for x in ['holeTemp', 'all']):
                    q_items.append((cname.holeTemp * tt.choleTemp +
                                    tt.dholeTemp).label('holeTemp'))
                if any(x in args['series'] for x in ['boxTemp', 'all']):
                    q_items.append((cname.boxTemp * tt.cboxTemp +
                                    tt.dboxTemp).label('boxTemp'))
                if any(x in args['series'] for x in ['instVolt', 'all']):
                    q_items.append((cname.instVolt * tt.cinstVolt +
                                    tt.dinstVolt).label('instVolt'))
                if any(x in args['series'] for x in ['rainfall', 'all']):
                    q_items.append((cname.rainfall * tt.crainfall +
                                    tt.drainfall).label('rainfall'))

                q = db.session.query(*q_items).join(tt, tilt.TiltRank)
                q = q.filter(*queryclauses).order_by(*orderby)
                try:
                    q = q.limit(MAX_LINES['TILT'])
                except KeyError:
                    pass
                data = q.all()

                data = self.filter_nulls(data)

                raw_output[channel] = [*map(self.create_initial_output, data)]

                # Adjust dates from j2ksec to actual datetime
                for d in raw_output[channel]:
                    d['date'] = j2k_to_date(d['date'], tz).strftime(FFMT)
            elif args['downsample'] == 'decimate':
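                # Decimate in SQL: number rows with a MySQL user variable
                # and keep every :dsint-th row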
                interval = int(args['dsint'])
                dbname = 'v3_hvo_deformation_tilt$tilt'
                s = ("SELECT * FROM(SELECT fullquery.*, @row := @row+1 AS "
                     "rownum FROM (SELECT j2ksec as timestamp, c.rid, c.name, "
                     "COS(RADIANS(b.azimuth)) * (xTilt * cxTilt + dxTilt) "
                     "+ SIN(RADIANS(b.azimuth)) * (yTilt * cyTilt + dyTilt) "
                     "as east, (-SIN(RADIANS(b.azimuth))) * (xTilt * cxTilt "
                     "+ dxTilt) + COS(RADIANS(b.azimuth)) * (yTilt * cyTilt "
                     "+ dyTilt) as north, holeTemp * cHoleTemp + dHoleTemp as "
                     "holeTemp, boxTemp * cboxTemp + dboxTemp as boxTemp, "
                     "instVolt * cinstVolt + dinstVolt as instVolt, rainfall "
                     "* crainfall + drainfall as rainfall FROM "
                     f"{dbname}.{cname.__tablename__} a INNER JOIN "
                     f"{dbname}.translations b on a.tid = b.tid INNER JOIN "
                     f"{dbname}.ranks c ON a.rid = c.rid WHERE "
                     "j2ksec BETWEEN :st AND :et ")
                if args['rank'] != 0:
                    s += "AND c.rid = :rid ORDER BY j2ksec ASC"
                else:
                    s += "ORDER BY j2ksec ASC, a.rid DESC"
                s += (") fullquery, (SELECT @row:=0) r) ranked WHERE "
                      "rownum % :dsint = 1")
                try:
                    s += ' LIMIT ' + str(MAX_LINES['TILT'])
                except KeyError:
                    pass
                data = (db.session.execute(text(s),
                                           params=dict(
                                               dsint=interval,
                                               st=date_to_j2k(start, tz),
                                               et=date_to_j2k(end, tz),
                                               rid=args['rank'])).fetchall())
                data = self.filter_nulls(data)
                raw_output[channel] = [*map(self.create_initial_output, data)]
                for d in raw_output[channel]:
                    d['date'] = j2k_to_date(d['date'], tz).strftime(FFMT)
            elif args['downsample'] == 'mean':
                # Mean downsampling is not implemented for tilt; return no
                # rows rather than leaking data from the previous channel
                data = []
                raw_output[channel] = []

            # Calculate rainfall
            if 'rainfall' in args['series'] or 'all' in args['series']:
                lastval = -1
                total = 0
                for d in raw_output[channel]:
                    if lastval == -1:
                        lastval = d['rainfall']
                        d['rainfall'] = 0
                    elif d['rainfall'] == lastval:
                        d['rainfall'] = total
                    else:
                        total += d['rainfall'] - lastval
                        lastval = d['rainfall']
                        d['rainfall'] = total
            count += len(data)

        # Now go through and compute things like radial, tangential,
        # azimuth, magnitude if requested by the user
        azimuth = 0  # default reported azimuth if no rotated series is asked
        if set(args['series'].split(',')).intersection([
                'all', 'radial', 'tangential', 'magnitude', 'azimuth', 'east',
                'north'
        ]):
            tc = tilt.TiltChannel
            for channel in channels:
                data = raw_output[channel]
                if not data:
                    continue

                if args['azimuth'] == 'nominal':
                    azimuth = (tc.query.filter(
                        tc.code == channel.upper()).first().azimuth % 360.0)
                elif args['azimuth'] == 'optimal':
                    azimuth = self.get_optimal_azimuth(data) % 360.0
                else:
                    azimuth = args['azimuth'] % 360.0

                # Subtract means to get zero-based values
                em = sum([x['east'] for x in data]) / len(data)
                nm = sum([x['north'] for x in data]) / len(data)
                for i in data:
                    i['east'] -= em
                    i['north'] -= nm

                tr = radians(azimuth)
                rotation_matrix = matrix([[cos(tr), sin(tr)],
                                          [-sin(tr), cos(tr)]])

                # Add radial, tangential, magnitude, and azimuth vals to output
                ox = data[0]['east']
                oy = data[0]['north']
                for i in data:
                    e, n = i['east'], i['north']
                    m = matrix([[e, n]]) * rotation_matrix
                    if any(x in args['series'] for x in ['radial', 'all']):
                        i['radial'] = m.A[0][1]
                    if any(x in args['series'] for x in ['tangential', 'all']):
                        i['tangential'] = m.A[0][0]
                    if any(x in args['series'] for x in ['magnitude', 'all']):
                        i['magnitude'] = sqrt((e - ox) * (e - ox) + (n - oy) *
                                              (n - oy))
                    if any(x in args['series'] for x in ['azimuth', 'all']):
                        i['azimuth'] = atan2(n - oy, e - ox)

                # If east and/or north aren't in the series list,
                # remove them from output
                if not any(x in args['series'] for x in ['east', 'all']):
                    for d in data:
                        del d['east']
                if not any(x in args['series'] for x in ['north', 'all']):
                    for d in data:
                        del d['north']

        return {
            'nr': count,
            'used_azimuth': azimuth,
            'tangential_azimuth': (azimuth + 90) % 360,
            'records': raw_output
        }, 200
Example #13
    def get(self):
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        queryclauses = []
        orderby = []

        if not args['north']:
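            # No explicit bounds given; fall back to the preset bounding box
            # for the requested geographic region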
            args['north'] = _hawaii_coords[args['geo']][0]
            args['south'] = _hawaii_coords[args['geo']][1]
            args['east'] = _hawaii_coords[args['geo']][2]
            args['west'] = _hawaii_coords[args['geo']][3]

        queryclauses.append(
            Hypocenter.lat.between(args['south'], args['north']))
        queryclauses.append(
            Hypocenter.timestamp.between(date_to_j2k(start, tz),
                                         date_to_j2k(end, tz)))

        # Handle crossing dateline
        if args['west'] <= args['east']:
            queryclauses.append(
                Hypocenter.lon.between(args['west'], args['east']))
        else:
            queryclauses.append((Hypocenter.lon >= args['west'])
                                | (Hypocenter.lon <= args['east']))

        queryclauses.append(
            Hypocenter.depth.between(args['depthmin'], args['depthmax']))
        queryclauses.append(
            Hypocenter.prefmag.between(args['magmin'], args['magmax']))
        queryclauses.append(
            Hypocenter.nphases.between(args['nphasesmin'], args['nphasesmax']))
        queryclauses.append(
            Hypocenter.rms.between(args['rmsmin'], args['rmsmax']))
        queryclauses.append(
            Hypocenter.herr.between(args['herrmin'], args['herrmax']))
        queryclauses.append(
            Hypocenter.verr.between(args['verrmin'], args['verrmax']))

        # Remarks
        if args['remarks']:
            queryclauses.append(Hypocenter.rmk == args['remarks'])

        orderby.append(Hypocenter.eid.asc())

        # Ranks - both order and filter
        if args['rank'] != 0:
            queryclauses.append(Hypocenter.rid == args['rank'])
        else:
            orderby.append(Hypocenter.rid.desc())

        q = Hypocenter.query.filter(*queryclauses).order_by(*orderby)
        try:
            q = q.limit(MAX_LINES['HYPOCENTER'])
        except KeyError:
            pass
        data = q.all()

        output = []
        jtod = j2k_to_date
        append = output.append
        for d in data:
            append({
                'eid': d.eid,
                'date': jtod(d.timestamp, tz).strftime(FFMT),
                'rank': d.rank.name,
                'depth': d.depth,
                'lat': d.lat,
                'lon': d.lon,
                'prefMag': d.prefmag
            })
        return {
            'nr': len(data),
            'location': ', '.join([str(args['north']), str(args['south']),
                                   str(args['east']), str(args['west'])]),
            'records': output
        }, 200
Example #14
    def get(self):
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        channels = args['channel'].split(',')
        if {x.upper() for x in channels}.difference(rsam._tablenames):
            return {'Error': 'unknown channel'}

        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        output = {}
        count = 0
        for channel in channels:
            queryclauses = []
            orderby = []
            cname = getattr(rsam, channel.upper())

            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))
            orderby.append(cname.timestamp.asc())

            if args['downsample'] == 'none':
                q = cname.query.filter(*queryclauses).order_by(*orderby)
                try:
                    q = q.limit(MAX_LINES['RSAM'])
                except KeyError:
                    pass
                data = q.all()
            elif args['downsample'] == 'decimate':
                # Decimate in SQL: number rows with a MySQL user variable
                # and keep every :dsint-th row
                interval = args['dsint']
                dbname = 'v3_hvo_seismic_rsam$rsam'
                s = ("SELECT * FROM(SELECT fullquery.*, @row := @row+1 AS "
                     "rownum FROM(SELECT j2ksec as timestamp, rsam FROM "
                     f"{dbname}.{cname.__tablename__} WHERE j2ksec BETWEEN "
                     ":st AND :et ORDER BY j2ksec ASC) fullquery, "
                     "(SELECT @row := 0) r) ranked WHERE rownum % :dsint = 1")
                try:
                    s += ' LIMIT ' + str(MAX_LINES['RSAM'])
                except KeyError:
                    pass
                data = (db.session.execute(text(s),
                                           params=dict(
                                               st=date_to_j2k(start, tz),
                                               et=date_to_j2k(end, tz),
                                               dsint=interval)).fetchall())
            elif args['downsample'] == 'mean':
                # Average samples within each dsint-second bucket; the bucket
                # index is the integer division of the offset from start
                q_items = []
                interval = args['dsint']
                groupby = ['intNum']
                q_items.append(func.min(cname.timestamp).label('timestamp'))
                q_items.append(func.avg(cname.rsam).label('rsam'))
                q_items.append(
                    ((cname.timestamp) - date_to_j2k(start, tz)
                     ).self_group().op('div')(interval).label('intNum'))
                q = (db.session.query(*q_items).filter(*queryclauses).order_by(
                    *orderby).group_by(*groupby))
                try:
                    q = q.limit(MAX_LINES['RSAM'])
                except KeyError:
                    pass
                data = q.all()

            output[channel] = []
            jtod = j2k_to_date
            append = output[channel].append
            for d in data:
                append({
                    'date': jtod(d.timestamp, tz).strftime(FFMT),
                    'rsam': d.rsam
                })

            count += len(data)
        return {'nr': count, 'records': output}, 200
Example #15
    def get_gps_data(channel, args, dates):
        """Given a channel and input args, query the database for the gps data
        and then perform the various calculations on it prior to sending it
        back to the user.

        Parameters:
        channel -- the channel to be queried
        args    -- the args dictionary passed into this request
        dates   -- three-item list containing start and end timestamps and tz

        Returns:
        An array of dictionaries containing all computed GPS data
        """
        queryclauses = []
        orderby = []

        cid = (GPSChannel.query.filter(
            GPSChannel.code == channel.upper()).one().cid)

        tz = dates[2]
        queryclauses.append(GPSChannel.cid == cid)
        queryclauses.append(
            GPSSource.avgdate.between(date_to_j2k(dates[0], tz),
                                      date_to_j2k(dates[1], tz)))

        if args['rank'] != 0:
            queryclauses.append(GPSRank.rid == args['rank'])

        orderby.append(GPSSource.avgdate.asc())
        orderby.append(GPSRank.rank.desc())

        if args['downsample'] == 'none':
            q = (Solution.query.join(GPSSource).join(GPSChannel).join(
                GPSRank).filter(*queryclauses).order_by(*orderby))

            try:
                q = q.limit(MAX_LINES['GPS'])
            except KeyError:
                pass
            data = q.all()
        elif args['downsample'] == 'decimate':
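            # Decimate in SQL: number rows with a MySQL user variable and
            # keep every :dsint-th row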
            interval = args['dsint']
            dbname = 'v3_hvo_deformation_gps$gps'
            s = ("SELECT * FROM(SELECT fullquery.*, @row := @row+1 as rownum "
                 "FROM (SELECT (j2ksec0 + j2ksec1) / 2 as t, d.rid as r, x, "
                 f"y, z, sxx, syy, szz, sxy, sxz, syz FROM {dbname}.solutions "
                 f"a INNER JOIN {dbname}.channels b on a.cid = b.cid "
                 "INNER JOIN {dbname}.sources c on a.sid = c.sid INNER JOIN "
                 f"{dbname}.ranks d on c.rid = d.rid WHERE b.cid = :cid AND "
                 "(c.j2ksec0 + c.j2ksec1) / 2 BETWEEN :st AND :et ")
            if args['rank'] != 0:
                s += "AND d.rid = :rid ORDER BY 1 ASC"
            else:
                s += "ORDER BY 1 ASC, d.rid DESC"
            s += (") fullquery, (SELECT @row:=0) r) ranked WHERE "
                  "rownum % :dsint = 1")
            try:
                s += ' LIMIT ' + str(MAX_LINES['GPS'])
            except KeyError:
                pass
            data = db.session.execute(text(s),
                                      params=dict(dsint=interval,
                                                  st=date_to_j2k(dates[0], tz),
                                                  et=date_to_j2k(dates[1], tz),
                                                  rid=args['rank'],
                                                  cid=cid)).fetchall()
        elif args['downsample'] == 'mean':
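            # Average solutions within each dsint-second bucket; 'intNum' is
            # the integer division of the offset from the start time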
            q_items = []
            interval = args['dsint']
            q_items.extend([
                func.min(GPSSource.avgdate).label('t'),
                func.min(GPSRank.rid).label('r')
            ])
            q_items.extend([
                func.avg(Solution.x).label('x'),
                func.avg(Solution.y).label('y'),
                func.avg(Solution.z).label('z'),
                func.avg(Solution.sxx).label('sxx'),
                func.avg(Solution.syy).label('syy'),
                func.avg(Solution.szz).label('szz'),
                func.avg(Solution.sxy).label('sxy'),
                func.avg(Solution.sxz).label('sxz'),
                func.avg(Solution.syz).label('syz')
            ])
            q_items.append(((GPSSource.avgdate) - date_to_j2k(dates[0], tz)
                            ).self_group().op('div')(interval).label('intNum'))
            q = (db.session.query(*q_items).select_from(Solution).join(
                GPSChannel, GPSSource, GPSRank))
            q = q.filter(*queryclauses).order_by(*orderby).group_by('intNum')
            try:
                q = q.limit(MAX_LINES['GPS'])
            except KeyError:
                pass
            data = q.all()

        # Loop through and remove data with duplicate timestamps.
        # Note: should this only be needed in the 'best rank' (rank == 0) case?
        if args['rank'] == 0:
            res = []
            tmpj2k = 0
            for d in data:
                if d.source.avgdate != tmpj2k:
                    res.append(d)
                    tmpj2k = d.source.avgdate
            return GPSAPI.set_to_list(res)
        else:
            return GPSAPI.set_to_list(data)