def _get(self, version, project, freqmode, scanno):
    product = get_args.get_string('product')
    db = level2db.Level2DB(project)
    L2 = db.get_L2(freqmode, scanno, product=product)
    if not L2:
        abort(404)
    return L2
def _fetch(self, version, project, freqmode):
    limit = get_args.get_int('limit') or DEFAULT_LIMIT
    offset = get_args.get_int('offset') or DEFAULT_OFFSET
    db = level2db.Level2DB(project)
    comments = db.get_comments(freqmode, offset=offset, limit=limit)
    base_url = get_base_url(version)
    info = {
        'Comments': [{
            'Comment': comment,
            'URLS': {
                'URL-scans': '{}/{}/{}/scans?{}'.format(
                    base_url, project, freqmode,
                    urllib.parse.urlencode([('comment', comment)])),
                'URL-failed': '{}/{}/{}/failed?{}'.format(
                    base_url, project, freqmode,
                    urllib.parse.urlencode([('comment', comment)])),
            },
        } for comment in comments],
    }
    count = db.count_comments(freqmode)
    data = {
        'info': info,
        'count': count,
    }
    headers = {
        'Link': make_rfc5988_pagination_header(
            offset, limit, count, self._get_endpoint(),
            version=version, project=project, freqmode=freqmode,
        ),
    }
    return data, HTTPStatus.OK, headers
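# A minimal sketch of a client paging through the comments listed by the
# endpoint above. It assumes that make_rfc5988_pagination_header advertises a
# rel="next" link while more pages remain and that the response body is the
# {'info': ..., 'count': ...} dict returned by _fetch serialized as-is; the
# base URL below is a placeholder. The requests library parses RFC 5988 Link
# headers into response.links, so the client never builds offsets by hand.
def iter_level2_comments(base_url):
    import requests

    url = base_url  # e.g. '<host>/rest_api/v5/level2/<project>/<freqmode>/comments?limit=100'
    while url:
        response = requests.get(url)
        response.raise_for_status()
        for entry in response.json()['info']['Comments']:
            yield entry['Comment']
        # follow the rel="next" link, if any, from the Link header
        url = response.links.get('next', {}).get('url')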
def _get(self, version, project, freqmode, scanno):
    db = level2db.Level2DB(project)
    L2i = db.get_L2i(freqmode, scanno)
    if not L2i:
        abort(404)
    L2i['URLS'] = get_scan_urls(version, project, freqmode, scanno)
    return L2i
def _fetch(self, version, project, freqmode):
    start_time = get_args.get_datetime('start_time')
    end_time = get_args.get_datetime('end_time')
    limit = get_args.get_int('limit') or DEFAULT_LIMIT
    offset = get_args.get_int('offset') or DEFAULT_OFFSET
    if start_time and end_time and start_time > end_time:
        abort(400)
    param = {
        'start_time': start_time,
        'end_time': end_time,
        'comment': get_args.get_string('comment')}
    db = level2db.Level2DB(project)
    scans = list(
        db.get_failed_scans(freqmode, offset=offset, limit=limit, **param))
    for scan in scans:
        scan['URLS'] = get_scan_urls(
            version, project, freqmode, scan['ScanID'])
        scan['Error'] = scan.pop('Comments')[0]
        scan['Date'] = time_util.stw2datetime(
            scan['ScanID']).date().isoformat()
    count = db.count_failed_scans(freqmode, **param)
    data = {
        'scans': scans,
        'count': count,
    }
    headers = {
        'Link': make_rfc5988_pagination_header(
            offset, limit, count, self._get_endpoint(),
            version=version, project=project, freqmode=freqmode, **param
        ),
    }
    return data, HTTPStatus.OK, headers
def _get(self, version, project, freqmode, scanno):
    product = get_args.get_string('product')
    db = level2db.Level2DB(project)
    L2 = db.get_L2(freqmode, scanno, product=product)
    if not L2:
        abort(404)
    L2anc = get_ancillary_data(DatabaseConnector(), L2)
    return L2anc
def cli(argv: List = []) -> None:
    parser = argparse.ArgumentParser(
        description="Generates Odin/SMR level2 monthly product files.",
    )
    parser.add_argument(
        "project",
        type=str,
        help="project name",
    )
    parser.add_argument(
        "products",
        type=str,
        nargs='+',
        help="product name(s), can be more than one name",
    )
    parser.add_argument(
        "freqmode",
        type=int,
        help="frequency mode",
    )
    parser.add_argument(
        "date_start",
        type=str,
        help="start date, format: YYYY-MM-DD",
    )
    parser.add_argument(
        "date_end",
        type=str,
        help="end date, format: YYYY-MM-DD",
    )
    parser.add_argument(
        '-q', '--outdir',
        dest='outdir',
        type=str,
        default='/tmp',
        help='data directory for saving output, default is /tmp',
    )
    parser.add_argument(
        "-f", "--force",
        action="store_true",
        help="flag for overwriting existing files",
    )
    args = parser.parse_args(argv)
    date_start = dt.datetime.strptime(args.date_start, '%Y-%m-%d')
    date_end = dt.datetime.strptime(args.date_end, '%Y-%m-%d')
    db1 = DatabaseConnector()
    db2 = level2db.Level2DB(args.project)
    for product in args.products:
        process_period(
            db1, db2, args.project, args.freqmode, product,
            date_start, date_end, datamodel.L2FILE.parameters,
            args.outdir, args.force)
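# A hedged example of how the CLI above might be invoked; the project and
# product names below are hypothetical placeholders, not values taken from
# this repository.
if __name__ == '__main__':
    cli([
        'example-project',
        'O3-example-product',
        '13',
        '2015-01-01', '2015-01-31',
        '--outdir', '/tmp/level2-monthly',
        '--force',
    ])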
def delete(self, version):
    """Delete level2 data for a scan id and freq mode"""
    msg = request.args.get('d')
    if not msg:
        abort(400)
    try:
        scanid, freqmode, project = decode_level2_target_parameter(msg)
    except:  # noqa
        abort(400)
    db = level2db.Level2DB(project)
    db.delete(scanid, freqmode)
    return '', HTTPStatus.NO_CONTENT
def _fetch(self, version, project, freqmode, scanno):
    db = level2db.Level2DB(project)
    L2i, L2, L2c = db.get_scan(freqmode, scanno)
    if not L2i:
        abort(404)
    urls = get_scan_urls(version, project, freqmode, scanno)
    info = {'L2': L2, 'L2i': L2i, 'L2c': L2c, 'URLS': urls}
    if version <= 'v4':
        collocations = get_L2_collocations(
            request.url_root, version, freqmode, scanno)
        info['Collocations'] = collocations
    if version >= 'v5':
        if not L2:
            abort(404)
        info['L2anc'] = get_ancillary_data(
            DatabaseConnector(), info['L2'])
    return info
def _get_freqmodes(self, version, project):
    db = level2db.Level2DB(project)
    freqmodes = db.get_freqmodes()
    base_url = get_base_url(version)
    info = {
        'Name': project,
        'FreqModes': [{
            'FreqMode': freqmode,
            'URLS': {
                'URL-scans': '{}/{}/{}/scans'.format(
                    base_url, project, freqmode),
                'URL-failed': '{}/{}/{}/failed'.format(
                    base_url, project, freqmode),
                'URL-comments': '{}/{}/{}/comments'.format(
                    base_url, project, freqmode),
            }} for freqmode in freqmodes]}
    return info
def _fetch(self, version, project):
    try:
        param = parse_parameters()
    except ValueError as e:
        raise BadRequest(str(e))
    db = level2db.Level2DB(project)
    limit = param.pop('document_limit')
    meas_iter = db.get_measurements(param.pop('products'), limit, **param)
    if version == 'v4':
        return meas_iter
    scans, next_min_scanid = level2db.get_valid_collapsed_products(
        list(meas_iter), limit)
    headers = {}
    if next_min_scanid is not None:
        link = get_level2view_paging_links(
            request.url, param['min_scanid'], next_min_scanid)
        headers = {'link': link}
    return scans, HTTPStatus.OK, headers
def _fetch(self, version, project, date):
    try:
        start_time = get_args.get_datetime(val=date)
    except ValueError:
        abort(400)
    end_time = start_time + timedelta(hours=24)
    try:
        param = parse_parameters(start_time=start_time, end_time=end_time)
    except ValueError as e:
        # invalid query parameters answer 400, as in the project-level view
        return jsonify({'Error': str(e)}), HTTPStatus.BAD_REQUEST
    db = level2db.Level2DB(project)
    limit = param.pop('document_limit')
    meas_iter = db.get_measurements(
        param.pop('products'), limit, **param)
    if version == 'v4':
        return meas_iter
    scans, next_min_scanid = level2db.get_valid_collapsed_products(
        list(meas_iter), limit)
    headers = {}
    if next_min_scanid is not None:
        link = get_level2view_paging_links(
            request.url, param['min_scanid'], next_min_scanid)
        headers = {'link': link}
    return scans, HTTPStatus.OK, headers
def _get_v5(self, version, project, freqmode):
    db = level2db.Level2DB(project)
    return db.get_products(freqmode=int(freqmode))
def _get(self, version, project):
    db = level2db.Level2DB(project)
    return db.get_product_count()
def _get(self, version, project, freqmode, scanno):
    db = level2db.Level2DB(project)
    L2c = db.get_L2c(freqmode, scanno)
    if not L2c:
        abort(404)
    return L2c
def post(self, version):
    """Insert level2 data for a scan id and freq mode"""
    msg = request.args.get('d')
    if not msg:
        logging.warning('Level2Write.post: request message is empty')
        abort(400)
    try:
        scanid, freqmode, project = decode_level2_target_parameter(msg)
    except:  # noqa
        logging.warning('Level2Write.post: data can not be decoded')
        abort(400)
    data = request.json
    if not data:
        logging.warning('Level2Write.post: no json data')
        abort(400)
    if any(k not in data for k in ('L2', 'L2I', 'L2C')):
        logging.warning(
            "Level2Write.post: at least one of L2, L2I, "
            "or L2C is missing")
        abort(400)
    L2c = data.pop('L2C') or ''
    if not isinstance(L2c, str):
        logging.warning('Level2Write.post: L2c is not a string')
        abort(400)
    L2 = data.pop('L2') or []
    if isinstance(L2, dict):
        L2 = [L2]
    if not isinstance(L2, list):
        logging.warning('Level2Write.post: L2 is not a list')
        abort(400)
    for nr, species in enumerate(L2):
        try:
            check_json(species, prototype=l2_prototype)
        except JsonModelError as e:
            return (
                jsonify({'error': 'L2 species %d: %s' % (nr, e)}),
                HTTPStatus.BAD_REQUEST)
    L2i = data.pop('L2I') or {}
    if not isinstance(L2i, dict):
        logging.warning('Level2Write.post: L2I is not a dict')
        abort(400)
    if L2i:
        try:
            check_json(L2i, prototype=l2i_prototype)
        except JsonModelError as e:
            return jsonify(
                {'error': 'L2i: %s' % e}), HTTPStatus.BAD_REQUEST
        L2i['ProcessingError'] = False
    else:
        # Processing error: L2i is empty, so we have to trust the provided
        # scanid and freqmode.
        L2i['ScanID'] = scanid
        L2i['FreqMode'] = freqmode
        L2i['ProcessingError'] = True
    if scanid != L2i['ScanID']:
        logging.warning('Level2Write.post: scanid mismatch')
        return jsonify(
            {'error': 'ScanID mismatch (%r != %r)' % (
                scanid, L2i['ScanID'])}), HTTPStatus.BAD_REQUEST
    if freqmode != L2i['FreqMode']:
        logging.warning('Level2Write.post: freqmode mismatch')
        return jsonify(
            {'error': 'FreqMode mismatch (%r != %r)' % (
                freqmode, L2i['FreqMode'])}), HTTPStatus.BAD_REQUEST
    projects = level2db.ProjectsDB()
    projects.add_project_if_not_exists(project)
    db = level2db.Level2DB(project)
    try:
        db.store(L2, L2i, L2c)
    except DuplicateKeyError:
        # A DuplicateKeyError should not return an error: posted level2 data
        # may be overwritten, since anyone reprocessing scans is expected to
        # have a good reason for it.
        db.delete(L2i['ScanID'], L2i['FreqMode'])
        db.store(L2, L2i, L2c)
        logging.warning(
            "Level2Write.post: DuplicateKeyError, "
            "scan data already existed in database "
            "for project={0}, FreqMode={1}, and ScanID={2} "
            "but has now been replaced".format(
                project, L2i['FreqMode'], L2i['ScanID']))
    return '', HTTPStatus.CREATED
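# A minimal sketch of a JSON payload accepted by the post handler above for a
# scan whose processing failed: all three keys must be present, but L2 and L2I
# may be empty, in which case the handler trusts the scan id and freq mode
# encoded in the 'd' query parameter and stores the entry with
# ProcessingError=True. The comment text is a hypothetical example.
FAILED_SCAN_PAYLOAD_EXAMPLE = {
    'L2': [],       # no retrieved species
    'L2I': {},      # no auxiliary retrieval data available
    'L2C': 'Example: processing crashed before the retrieval step',
}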