Example #1
0
    def get(self):
        """Serve HMM records, or the latest-data timestamp when ``op=time``.

        Returns a ``(payload, 200)`` tuple: either the parameter help
        string (no query args), the most recent HMM timestamp, or the
        records in the requested time span.
        """
        # Return expected parameter output, also set indent settings
        if not request.args:
            return self.create_param_string(), 200

        # Outside debug mode, emit compact (non-indented) JSON.
        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        if 'op' in args and args['op'] == 'time':
            # Most recent j2ksec timestamp in the HMM table.
            t = db.session.query(func.max(HMM.timestamp)).one()[0]
            # Case-insensitive timezone test, consistent with the data
            # branch below (was a case-sensitive == 'HST' comparison).
            hst = (args['timezone'].lower() == 'hst')
            d = j2k_to_date(t, hst).strftime(FFMT)
            return {'HMM': d}, 200
        else:
            starttime = args['starttime']
            endtime = args['endtime']
            tz = (args['timezone'].lower() == 'hst')
            sd, ed = create_date_from_input(starttime, endtime, tz)
            jsd = date_to_j2k(sd, tz)
            jed = date_to_j2k(ed, tz)
            data = (HMM
                    .query.filter(HMM.timestamp.between(jsd, jed))
                    .order_by(HMM.timestamp.desc()).all())
            output = []
            # Bind hot-loop names locally (matches file convention).
            Date = j2k_to_date
            List = output.append
            for d in data:
                List({'date': Date(d.timestamp, tz).strftime(FFMT),
                      'accumrate': d.accumrate,
                      'percentjuvenile': d.percentjuvenile})
            return {'nr': len(data),
                    'records': output}, 200
    def get(self):
        """Return lava-level records for the requested time span and rank."""
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        hst = args['timezone'].lower() == 'hst'
        begin, finish = create_date_from_input(args['starttime'],
                                               args['endtime'], hst)

        # Time-window plus rank filter.
        clauses = [
            LavaLevel.timestamp.between(date_to_j2k(begin, hst),
                                        date_to_j2k(finish, hst)),
            LavaLevel.rid == args['rank'],
        ]
        rows = LavaLevel.query.filter(*clauses).all()

        records = [{
            'date': j2k_to_date(row.timestamp, hst).strftime(FFMT),
            'rank': row.rank.name,
            'lavalevel': row.lavalevel,
        } for row in rows]
        return {'nr': len(rows), 'records': records}, 200
Example #3
0
    def get(self):
        """Return trigger records per channel for the requested time span."""
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        # Reject any channel name not backed by a triggers table.
        channels = args['channel'].split(',')
        unknown = {c.upper() for c in channels} - set(triggers._tablenames)
        if unknown:
            return {'Error': 'unknown channel(s): %s' % ','.join(unknown)}

        hst = args['timezone'].lower() == 'hst'
        begin, finish = create_date_from_input(args['starttime'],
                                               args['endtime'], hst)
        jbegin = date_to_j2k(begin, hst)
        jfinish = date_to_j2k(finish, hst)

        records = {}
        total = 0
        for channel in channels:
            table = getattr(triggers, channel.upper())
            query = (table.query
                     .filter(table.timestamp.between(jbegin, jfinish))
                     .order_by(table.timestamp.asc()))
            try:
                query = query.limit(MAX_LINES['TRIGGERS'])
            except KeyError:
                pass
            rows = query.all()

            records[channel] = [{
                'date': j2k_to_date(row.timestamp, hst).strftime(SFMT),
                'triggers': row.triggers,
            } for row in rows]
            total += len(rows)
        return {'nr': total, 'records': records}, 200
Example #4
0
    def get(self):
        """Serve EDXRF records, or the latest-data times when ``op=time``."""
        # Return expected parameter output, also set indent settings
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        if 'op' in args and args['op'] == 'time':
            # Most recent timestamps in the two EDXRF tables (always UTC).
            latest_kierz = db.session.query(
                func.max(edxrf.KIERZ.timestamp)).one()[0]
            latest_kisum = db.session.query(
                func.max(edxrf.KISUM.timestamp)).one()[0]
            return {
                'kierz': j2k_to_date(latest_kierz, False).strftime(FFMT),
                'kisum': j2k_to_date(latest_kisum, False).strftime(FFMT)
            }, 200

        hst = args['timezone'].lower() == 'hst'
        begin, finish = create_date_from_input(args['starttime'],
                                               args['endtime'], hst)
        jbegin = date_to_j2k(begin, hst)
        jfinish = date_to_j2k(finish, hst)

        records = {}
        total = 0
        for channel in args['channel'].split(','):
            table = getattr(edxrf, channel.upper())
            rows = (table.query
                    .filter(table.timestamp.between(jbegin, jfinish))
                    .order_by(table.timestamp.desc())
                    .all())
            total += len(rows)
            records[channel] = [{
                'date': j2k_to_date(row.timestamp, hst).strftime(FFMT),
                'rb': row.rb,
                'sr': row.sr,
                'y': row.y,
                'zr': row.zr,
                'nb': row.nb
            } for row in rows]
        return {'nr': total, 'records': records}, 200
Example #5
0
    def get(self):
        """Return laser lava-level records for the requested time span.

        Each record carries the sealevel and overlook readings; when a
        sealevel reading is present, both values are run through the
        record's linear translation (value * coefficient + offset).
        """
        if not request.args:
            return self.create_param_string(), 200

        # Outside debug mode, emit compact (non-indented) JSON.
        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        w_items = [
            LaserLavaLevel.timestamp.between(date_to_j2k(start, tz),
                                             date_to_j2k(end, tz))
        ]
        data = LaserLavaLevel.query.filter(*w_items).all()

        output = []
        # Bind hot-loop names locally (matches file convention).
        append = output.append
        jtod = j2k_to_date
        for d in data:
            if d.sealevel is None:
                # No sealevel reading: pass raw values through (sealevel is
                # None here).
                # NOTE(review): overlook bypasses its translation in this
                # branch even when present — confirm this is intentional.
                append({
                    'date': jtod(d.timestamp, tz).strftime(FFMT),
                    'rank': d.rank.name,
                    'sealevel': d.sealevel,
                    'overlook': d.overlook
                })
            else:
                # Apply linear translation: value * c<field> + d<field>.
                append({
                    'date':
                    jtod(d.timestamp, tz).strftime(FFMT),
                    'rank':
                    d.rank.name,
                    'sealevel': (d.sealevel * d.translation.csealevel +
                                 d.translation.dsealevel),
                    'overlook': (d.overlook * d.translation.coverlook +
                                 d.translation.doverlook)
                })
        return {'nr': len(data), 'records': output}, 200
    def get(self):
        """Return SO2 emission records per channel for the requested span.

        Validates the requested channels and series against the
        ``so2emissions`` module, then queries each channel table
        (optionally filtered by rank) and maps the requested series
        onto each output record.
        """
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        channels = args['channel'].split(',')
        unknown = (set([x.upper() for x in channels
                        ]).difference(so2emissions._tablenames))
        if len(unknown) > 0:
            return {'Error': f"unknown channel(s): {','.join(unknown)}"}

        series = args['series'].split(',')
        unknown = set([x.lower() for x in series]).difference(_series_options)
        if len(unknown) > 0:
            return {'Error': f"unknown series: {','.join(unknown)}"}

        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        output = {}
        count = 0

        for channel in channels:
            queryclauses = []
            orderby = []
            cname = getattr(so2emissions, channel.upper())

            # Set up query filters
            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))

            # Set up order by values
            orderby.append(cname.timestamp.asc())

            # rank == 0 means "best available": sort highest rid first;
            # otherwise restrict to the requested rank only.
            if args['rank'] == 0:
                orderby.append(cname.rid.desc())
            else:
                queryclauses.append(cname.rid == args['rank'])

            q = cname.query.filter(*queryclauses).order_by(*orderby)
            try:
                q = q.limit(MAX_LINES['SO2EMISSIONS'])
            except KeyError:
                pass
            data = q.all()

            output[channel] = []
            Date = j2k_to_date
            List = output[channel].append

            for d in data:
                a = {
                    'date': Date(d.timestamp, tz).strftime(SFMT),
                    'rank': d.rank.name
                }

                # Reuse the already-split (and validated) series list
                # instead of re-splitting args['series'] on every record.
                for i in series:
                    a[i] = getattr(d, i.lower())
                List(a)
            count += len(data)
        return {'nr': count, 'records': output}, 200
Example #7
0
    def get(self):
        """Return strain records per channel for the requested time span.

        Optionally removes the per-series mean ("debias") before applying
        each record's linear translation (value * coefficient + offset).
        """
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        channels = args['channel'].split(',')
        unknown = (set([x.upper()
                        for x in channels]).difference(strain._tablenames))
        if len(unknown) > 0:
            return {'Error': 'unknown channel(s): %s' % ','.join(unknown)}

        series = args['series'].split(',')
        unknown = set([x.lower() for x in series]).difference(_series_options)
        if len(unknown) > 0:
            return {'Error': 'unknown series: %s' % ','.join(unknown)}

        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        output = {}
        count = 0

        for channel in channels:
            queryclauses = []
            orderby = []
            cname = getattr(strain, channel.upper())

            # Set up query filters
            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))

            # Set up order by values
            orderby.append(cname.timestamp.asc())

            if args['rank'] == 0:
                orderby.append(cname.rid.desc())
            else:
                queryclauses.append(cname.rid == args['rank'])

            q = cname.query.filter(*queryclauses).order_by(*orderby)
            try:
                q = q.limit(MAX_LINES['STRAIN'])
            except KeyError:
                pass
            data = q.all()

            output[channel] = []
            Date = j2k_to_date
            List = output[channel].append

            # Means (guard against an empty result set, which previously
            # raised ZeroDivisionError when debias == 'mean').
            if args['debias'] == 'mean' and data:
                m01 = (sum([x.dt01 for x in data]) /
                       float(len(data)) if 'dt01' in series else 0)
                m02 = (sum([x.dt02 for x in data]) /
                       float(len(data)) if 'dt02' in series else 0)
                mbarometer = (sum([x.barometer for x in data]) /
                              float(len(data)) if 'barometer' in series else 0)
            else:
                m01 = 0
                m02 = 0
                mbarometer = 0

            for d in data:
                a = {
                    'date': Date(d.timestamp, tz).strftime(SFMT),
                    'rank': d.rank.name
                }

                if 'dt01' in series:
                    a['dt01'] = ((d.dt01 - m01) * d.translation.cdt01 +
                                 d.translation.ddt01)
                if 'dt02' in series:
                    a['dt02'] = ((d.dt02 - m02) * d.translation.cdt02 +
                                 d.translation.ddt02)
                if 'barometer' in series:
                    # NOTE(review): both coefficient and offset read
                    # d.translation.barometer here, unlike dt01/dt02 which
                    # use c*/d* pairs — looks like a copy-paste slip;
                    # confirm whether cbarometer/dbarometer were intended.
                    a['barometer'] = (
                        (d.barometer - mbarometer) * d.translation.barometer +
                        d.translation.barometer)
                List(a)
            count += len(data)
        return {'nr': count, 'records': output}, 200
Example #8
0
    def get(self):
        """Return tilt records for the requested channels and time span.

        Supports optional downsampling ('none', 'decimate'; 'mean' is not
        implemented), a cumulative rainfall series, and derived series
        (radial, tangential, magnitude, azimuth) rotated by a nominal,
        optimal, or user-supplied azimuth.
        """
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        # Check that valid channels were queried
        channels = args['channel'].split(',')
        unknown = (set([x.upper()
                        for x in channels]).difference(tilt._tablenames))
        if len(unknown) > 0:
            return {'Error': 'unknown channel: %s' % ','.join(unknown)}

        # Timezone (case-insensitive, consistent with the other endpoints)
        tz = (args['timezone'].lower() == 'hst')

        # Start by getting all the data
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        raw_output = {}
        count = 0

        # If we're downsampling, we need to create the query by hand
        for channel in channels:
            cname = getattr(tilt, channel.upper())
            queryclauses = []
            orderby = []

            # Set up query filters
            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))

            # Set up orderby clauses
            orderby.append(cname.timestamp.asc())

            if args['rank'] == 0:
                orderby.append(cname.rid.desc())
            else:
                queryclauses.append(cname.rid == args['rank'])

            if args['downsample'] == 'none':
                # Query the data and map it to the raw_tilt_fields structure
                tt = tilt.TiltTranslation
                q_items = []
                q_items.extend([
                    cname.timestamp.label('timestamp'),
                    cname.rid.label('rid')
                ])
                # Rotate (xTilt, yTilt) by the station azimuth to produce
                # east/north components, applying the translation first.
                q_items.append(
                    (func.cos(func.radians(tt.azimuth)) *
                     (cname.xTilt * tt.cxTilt + tt.dxTilt).self_group() +
                     func.sin(func.radians(tt.azimuth)) *
                     (cname.yTilt * tt.cyTilt + tt.dyTilt).self_group()
                     ).label('east'))
                q_items.append(
                    (-func.sin(func.radians(tt.azimuth)) *
                     (cname.xTilt * tt.cxTilt + tt.dxTilt).self_group() +
                     func.cos(func.radians(tt.azimuth)) *
                     (cname.yTilt * tt.cyTilt + tt.dyTilt).self_group()
                     ).label('north'))
                q_items.extend([tilt.TiltRank.name, tt])

                # Add optional parameters
                if any(x in args['series'] for x in ['holeTemp', 'all']):
                    q_items.append((cname.holeTemp * tt.choleTemp +
                                    tt.dholeTemp).label('holeTemp'))
                if any(x in args['series'] for x in ['boxTemp', 'all']):
                    q_items.append((cname.boxTemp * tt.cboxTemp +
                                    tt.dboxTemp).label('boxTemp'))
                if any(x in args['series'] for x in ['instVolt', 'all']):
                    q_items.append((cname.instVolt * tt.cinstVolt +
                                    tt.dinstVolt).label('instVolt'))
                if any(x in args['series'] for x in ['rainfall', 'all']):
                    q_items.append((cname.rainfall * tt.crainfall +
                                    tt.drainfall).label('rainfall'))

                q = db.session.query(*q_items).join(tt, tilt.TiltRank)
                q = q.filter(*queryclauses).order_by(*orderby)
                try:
                    q = q.limit(MAX_LINES['TILT'])
                except KeyError:
                    pass
                data = q.all()

                data = self.filter_nulls(data)

                raw_output[channel] = [*map(self.create_initial_output, data)]

                # Adjust dates from j2ksec to actual datetime
                for d in raw_output[channel]:
                    d['date'] = j2k_to_date(d['date'], tz).strftime(FFMT)
            elif args['downsample'] == 'decimate':
                interval = int(args['dsint'])
                dbname = 'v3_hvo_deformation_tilt$tilt'
                s = ("SELECT * FROM(SELECT fullquery.*, @row := @row+1 AS "
                     "rownum FROM (SELECT j2ksec as timestamp, c.rid, c.name, "
                     "COS(RADIANS(b.azimuth)) * (xTilt * cxTilt + dxTilt) "
                     "+ SIN(RADIANS(b.azimuth)) * (yTilt * cyTilt + dyTilt) "
                     "as east, (-SIN(RADIANS(b.azimuth))) * (xTilt * cxTilt "
                     "+ dxTilt) + COS(RADIANS(b.azimuth)) * (yTilt * cyTilt "
                     "+ dyTilt) as north, holeTemp * cHoleTemp + dHoleTemp as "
                     "holeTemp, boxTemp * cboxTemp + dboxTemp as boxTemp, "
                     "instVolt * cinstVolt + dinstVolt as instVolt, rainfall "
                     "* crainfall + drainfall as rainfall FROM "
                     f"{dbname}.{cname.__tablename__} a INNER JOIN "
                     f"{dbname}.translations b on a.tid = b.tid INNER JOIN "
                     f"{dbname}.ranks c ON a.rid = c.rid WHERE "
                     "j2ksec BETWEEN :st AND :et ")
                if args['rank'] != 0:
                    s += "AND c.rid = :rid ORDER BY j2ksec ASC"
                else:
                    # ORDER BY takes a comma-separated list; the previous
                    # "ASC AND a.rid DESC" was a MySQL syntax error.
                    s += "ORDER BY j2ksec ASC, a.rid DESC"
                s += (") fullquery, (SELECT @row:=0) r) ranked WHERE "
                      "rownum % :dsint = 1")
                try:
                    s += ' LIMIT ' + str(MAX_LINES['TILT'])
                except KeyError:
                    pass
                data = (db.session.execute(text(s),
                                           params=dict(
                                               dsint=interval,
                                               st=date_to_j2k(start, tz),
                                               et=date_to_j2k(end, tz),
                                               rid=args['rank'])).fetchall())
                data = self.filter_nulls(data)
                raw_output[channel] = [*map(self.create_initial_output, data)]
                for d in raw_output[channel]:
                    d['date'] = j2k_to_date(d['date'], tz).strftime(FFMT)
            elif args['downsample'] == 'mean':
                # TODO: not implemented. Previously fell through with 'data'
                # and raw_output[channel] unbound, raising NameError below.
                data = []
                raw_output[channel] = []

            # Calculate rainfall as a cumulative total; an unchanged raw
            # counter value contributes nothing to the running total.
            if 'rainfall' in args['series'] or 'all' in args['series']:
                lastval = -1
                total = 0
                for d in raw_output[channel]:
                    if lastval == -1:
                        lastval = d['rainfall']
                        d['rainfall'] = 0
                    elif d['rainfall'] == lastval:
                        d['rainfall'] = total
                    else:
                        total += d['rainfall'] - lastval
                        lastval = d['rainfall']
                        d['rainfall'] = total
            count += len(data)

        # Now go through and compute things like radial, tangential,
        # azimuth, magnitude if requested by the user
        azimuth = None  # stays None when no directional series is requested
        if set(args['series'].split(',')).intersection([
                'all', 'radial', 'tangential', 'magnitude', 'azimuth', 'east',
                'north'
        ]):
            tc = tilt.TiltChannel
            for channel in channels:
                data = raw_output[channel]
                # NOTE(review): azimuth is recomputed per channel, but only
                # the last channel's value is reported in the response.
                if args['azimuth'] == 'nominal':
                    azimuth = (tc.query.filter(
                        tc.code == channel.upper()).first().azimuth % 360.0)
                elif args['azimuth'] == 'optimal':
                    azimuth = self.get_optimal_azimuth(data) % 360.0
                else:
                    azimuth = args['azimuth'] % 360.0

                if len(data) == 0:
                    continue

                # Subtract means to get zero-based values
                em = sum([x['east'] for x in data]) / len(data)
                nm = sum([x['north'] for x in data]) / len(data)
                for i in data:
                    i['east'] -= em
                    i['north'] -= nm

                tr = radians(azimuth)
                rotation_matrix = matrix([[cos(tr), sin(tr)],
                                          [-sin(tr), cos(tr)]])

                # Add radial, tangential, magnitude, and azimuth vals to output
                ox = data[0]['east']
                oy = data[0]['north']
                for i in data:
                    e, n = i['east'], i['north']
                    m = matrix([[e, n]]) * rotation_matrix
                    if any(x in args['series'] for x in ['radial', 'all']):
                        i['radial'] = m.A[0][1]
                    if any(x in args['series'] for x in ['tangential', 'all']):
                        i['tangential'] = m.A[0][0]
                    if any(x in args['series'] for x in ['magnitude', 'all']):
                        i['magnitude'] = sqrt((e - ox) * (e - ox) + (n - oy) *
                                              (n - oy))
                    if any(x in args['series'] for x in ['azimuth', 'all']):
                        i['azimuth'] = atan2(n - oy, e - ox)

                # If east and/or north aren't in the series list,
                # remove them from output
                if not any(x in args['series'] for x in ['east', 'all']):
                    for d in data:
                        del d['east']
                if not any(x in args['series'] for x in ['north', 'all']):
                    for d in data:
                        del d['north']

        # azimuth is None when no directional series was requested; the
        # original raised NameError here in that case.
        return {
            'nr': count,
            'used_azimuth': azimuth,
            'tangential_azimuth': ((azimuth + 90) % 360
                                   if azimuth is not None else None),
            'records': raw_output
        }, 200
    def get(self):
        """Return hypocenters matching geographic, time, depth, magnitude,
        and quality-of-solution filters.

        When no explicit bounding box is supplied, the named region in
        args['geo'] selects a preset (north, south, east, west) box.
        """
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        queryclauses = []
        orderby = []

        # No explicit bounding box: fall back to the named region's preset
        # (north, south, east, west) coordinates.
        if not args['north']:
            args['north'] = _hawaii_coords[args['geo']][0]
            args['south'] = _hawaii_coords[args['geo']][1]
            args['east'] = _hawaii_coords[args['geo']][2]
            args['west'] = _hawaii_coords[args['geo']][3]

        queryclauses.append(
            Hypocenter.lat.between(args['south'], args['north']))
        queryclauses.append(
            Hypocenter.timestamp.between(date_to_j2k(start, tz),
                                         date_to_j2k(end, tz)))

        # Handle crossing dateline
        if args['west'] <= args['east']:
            queryclauses.append(
                Hypocenter.lon.between(args['west'], args['east']))
        else:
            # Parenthesize the comparisons: '|' binds tighter than '>=' /
            # '<=' in Python, so the unparenthesized form evaluated
            # args['west'] | Hypocenter.lon first and built the wrong
            # filter expression.
            queryclauses.append((Hypocenter.lon >= args['west'])
                                | (Hypocenter.lon <= args['east']))

        queryclauses.append(
            Hypocenter.depth.between(args['depthmin'], args['depthmax']))
        queryclauses.append(
            Hypocenter.prefmag.between(args['magmin'], args['magmax']))
        queryclauses.append(
            Hypocenter.nphases.between(args['nphasesmin'], args['nphasesmax']))
        queryclauses.append(
            Hypocenter.rms.between(args['rmsmin'], args['rmsmax']))
        queryclauses.append(
            Hypocenter.herr.between(args['herrmin'], args['herrmax']))
        queryclauses.append(
            Hypocenter.verr.between(args['verrmin'], args['verrmax']))

        # Remarks
        if args['remarks']:
            queryclauses.append(Hypocenter.rmk == args['remarks'])

        orderby.append(Hypocenter.eid.asc())

        # Ranks - both order and filter
        if args['rank'] != 0:
            queryclauses.append(Hypocenter.rid == args['rank'])
        else:
            orderby.append(Hypocenter.rid.desc())

        q = Hypocenter.query.filter(*queryclauses).order_by(*orderby)
        try:
            q = q.limit(MAX_LINES['HYPOCENTER'])
        except KeyError:
            pass
        data = q.all()

        output = []
        Date = j2k_to_date
        List = output.append
        for d in data:
            List({
                'eid': d.eid,
                'date': Date(d.timestamp, tz).strftime(FFMT),
                'rank': d.rank.name,
                'depth': d.depth,
                'lat': d.lat,
                'lon': d.lon,
                'prefMag': d.prefmag
            })
        return {
            'nr':
            len(data),
            'location':
            ', '.join([
                str(args['north']),
                str(args['south']),
                str(args['east']),
                str(args['west'])
            ]),
            'records':
            output
        }, 200
Example #10
0
    def get(self):
        """Return RSAM records per channel, with optional downsampling.

        downsample='none' returns raw rows, 'decimate' keeps every
        dsint-th row (raw SQL with a row counter), and 'mean' averages
        rows grouped into dsint-second intervals.
        """
        if not request.args:
            return self.create_param_string(), 200

        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()
        channels = args['channel'].split(',')
        if set([x.upper() for x in channels]).difference(rsam._tablenames):
            return {'Error': 'unknown channel'}

        tz = (args['timezone'].lower() == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        output = {}
        count = 0
        for channel in channels:
            queryclauses = []
            orderby = []
            cname = getattr(rsam, channel.upper())

            queryclauses.append(
                cname.timestamp.between(date_to_j2k(start, tz),
                                        date_to_j2k(end, tz)))
            orderby.append(cname.timestamp.asc())

            # Defensive default so an unexpected downsample value cannot
            # leave 'data' unbound below.
            data = []
            if args['downsample'] == 'none':
                q = cname.query.filter(*queryclauses).order_by(*orderby)
                try:
                    q = q.limit(MAX_LINES['RSAM'])
                except KeyError:
                    pass
                data = q.all()
            elif args['downsample'] == 'decimate':
                interval = args['dsint']
                dbname = 'v3_hvo_seismic_rsam$rsam'
                # Fixed derived-table alias placement: the alias ('ranked')
                # must come before WHERE; the previous "r) WHERE ranked
                # rownum" was a MySQL syntax error.
                s = ("SELECT * FROM(SELECT fullquery.*, @row := @row+1 AS "
                     "rownum FROM(SELECT j2ksec as timestamp, rsam FROM "
                     f"{dbname}.{cname.__tablename__} WHERE j2ksec BETWEEN "
                     ":st AND :et ORDER BY j2ksec ASC) fullquery, "
                     "(SELECT @row := 0) r) ranked WHERE rownum % :dsint = 1")
                try:
                    s += ' LIMIT ' + str(MAX_LINES['RSAM'])
                except KeyError:
                    pass
                data = (db.session.execute(text(s),
                                           params=dict(
                                               st=date_to_j2k(start, tz),
                                               et=date_to_j2k(end, tz),
                                               dsint=interval)).fetchall())
            elif args['downsample'] == 'mean':
                q_items = []
                interval = args['dsint']
                # NOTE(review): grouping by the raw timestamp as well as
                # intNum makes every row its own group, so avg() is over a
                # single row — confirm whether intNum alone was intended.
                groupby = ['intNum', cname.timestamp]
                q_items.append(func.min(cname.timestamp).label('timestamp'))
                q_items.append(func.avg(cname.rsam).label('rsam'))
                q_items.append(
                    ((cname.timestamp) - date_to_j2k(start, tz)
                     ).self_group().op('div')(interval).label('intNum'))
                q = (db.session.query(*q_items).filter(*queryclauses).order_by(
                    *orderby).group_by(*groupby))
                try:
                    q = q.limit(MAX_LINES['RSAM'])
                except KeyError:
                    pass
                data = q.all()

            output[channel] = []
            Date = j2k_to_date
            List = output[channel].append
            for d in data:
                List({
                    'date': Date(d.timestamp, tz).strftime(FFMT),
                    'rsam': d.rsam
                })

            count += len(data)
        return {'nr': count, 'records': output}, 200
Example #11
0
    def get(self):
        """Return GPS time series (east/north/up/length) per channel.

        For each channel the XYZ solutions are converted to a local ENU
        frame at the first epoch's position; an optional baseline channel
        is subtracted first, and unless noadjust == 'T' the per-component
        means are removed so the series are zero-centered.
        """
        if not request.args:
            return self.create_param_string(), 200

        # Outside debug mode, emit compact (non-indented) JSON.
        if not current_app.debug:
            current_app.config['RESTFUL_JSON'] = {}

        args = self.reqparse.parse_args()

        # Validate channels against the GPSChannel table.
        channels = args['channel'].split(',')
        okch = [x.code for x in GPSChannel.query.all()]
        if set([x.upper() for x in channels]).difference(okch):
            return {'Error': 'unknown channel'}

        # NOTE(review): case-sensitive 'hst' test, unlike the endpoints
        # that use .lower() — confirm reqparse constrains the case.
        tz = (args['timezone'] == 'hst')
        start, end = create_date_from_input(args['starttime'], args['endtime'],
                                            tz)
        q_data = {}
        count = 0
        for channel in channels:
            data = self.get_gps_data(channel, args, [start, end, tz])
            if len(data) == 0:
                q_data[channel] = []
            else:
                cdata = []

                # Origin for the ENU conversion: the first XYZ solution.
                originllh = self.xyz_to_llh(data['xyz'][0], data['xyz'][1],
                                            data['xyz'][2])
                # Optional baseline channel: subtract its solutions first.
                if args['baseline']:
                    bchannel = args['baseline']
                    baseline = self.get_gps_data(bchannel, args,
                                                 [start, end, tz])
                    data = self.apply_baseline(baseline, data)

                # Convert the stacked XYZ column (3 values per epoch) to
                # ENU rows at the origin lat/lon.
                xyz, cov = self.to_enu(originllh[0], originllh[1],
                                       len(data['xyz']) // 3, data['xyz'],
                                       data['cov'])
                enurows = self.column_3N_to_rows(xyz)

                # Unless adjustment is disabled, remove the per-component
                # means so each series is centered on zero.
                if args['noadjust'] == 'F':
                    em = (sum(enurows[x, 0] for x in range(len(enurows))) /
                          float(len(enurows)))
                    nm = (sum(enurows[x, 1] for x in range(len(enurows))) /
                          float(len(enurows)))
                    um = (sum(enurows[x, 2] for x in range(len(enurows))) /
                          float(len(enurows)))
                    for i in range(len(enurows)):
                        enurows[i, 0] -= em
                        enurows[i, 1] -= nm
                        enurows[i, 2] -= um

                    # Same centering for the baseline-length series.
                    lm = (sum(data['lendata'][x]
                              for x in range(len(data['lendata']))) /
                          float(len(data['lendata'])))
                    for i in range(len(data['lendata'])):
                        data['lendata'][i] -= lm

                # Emit only the series the caller asked for.
                List = cdata.append
                for i in range(len(enurows)):
                    dt = j2k_to_date(data['t'][i], tz).strftime(FFMT)
                    current_res = {'date': dt, 'rank': data['r'][i]}
                    if 'east' in args['series']:
                        current_res['east'] = enurows[i, 0]
                    if 'north' in args['series']:
                        current_res['north'] = enurows[i, 1]
                    if 'up' in args['series']:
                        current_res['up'] = enurows[i, 2]
                    if 'length' in args['series']:
                        current_res['length'] = data['lendata'][i]
                    List(current_res)

                q_data[channel] = cdata
                count += len(cdata)

        return {'nr': count, 'records': q_data}, 200