def request_fits_by_tw():
    """Return FITS file metadata for a product group in a time window (POST).

    Expects form values 'prodgroup', 'startutc', 'endutc'; responds with a
    JSON document {'data': [...], 'status': 'OK'/'FAILED', ...}.
    Fix: removed the unused local `group_types` and commented-out debug code.
    """
    auth.log_visit()
    msg = {'error': 'Invalid request', 'data': [], 'status': 'FAILED'}
    if request.method == 'POST':
        try:
            group = request.values['prodgroup']
            start_utc = request.values['startutc']
            end_utc = request.values['endutc']
            start_unix = parse(start_utc).timestamp()
            end_unix = parse(end_utc).timestamp()
            types = get_product_types(group)
            rows = list(
                STIX_MDB.get_fits_info_by_time_range(start_unix,
                                                     end_unix,
                                                     product_groups=types[0],
                                                     product_types=types[1],
                                                     complete='any'))
            for x in rows:
                x['path'] = ''  # don't show them on client side
            return json_util.dumps({'data': rows, 'status': 'OK'})
        except Exception as e:
            msg = {'error': str(e), 'data': [], 'status': 'FAILED'}
    return json_util.dumps(msg)
def request_fits_of_file(file_id):
    """Return JSON metadata of all FITS products derived from one raw file."""
    auth.log_visit()
    rows = list(STIX_MDB.get_fits_info_by_file_id(file_id))
    for row in rows:
        # don't expose server-side filesystem paths to the client
        row['path'] = ''
    return json_util.dumps(rows)
def request_ql_flareloc():
    """Return quick-look flare locations within a time window (max 48 h).

    Reads 'begin' and 'end' UTC strings from the request; responds with
    {'flare_locations': [...]} or {'error': ...} as JSON.
    Fix: removed the unused local `dlt`, the unused exception binding, and
    commented-out dead code.
    """
    auth.log_visit()
    try:
        begin_utc = request.values['begin']
        end_utc = request.values['end']
        start_unix = sdt.utc2unix(begin_utc)
        end_unix = sdt.utc2unix(end_utc)
        duration = end_unix - start_unix
        if start_unix < 0 or duration <= 0:
            result = {'error': 'Begin time or end time invalid'}
        elif duration > 48 * 3600:
            result = {
                'error':
                'The duration was too long. The maximum allowed duration is 48 hours.'
            }
        else:
            rows = STIX_MDB.get_ql_flare_loc_in_timewindow(
                start_unix, duration)
            result = {'flare_locations': list(rows)}
    except Exception:
        # missing/unparsable request parameters
        result = {'error': 'Invalid request'}
    return json_util.dumps(result)
def view_filelist(file_id, start_unix, duration):
    """Render the FITS listing page for one raw file and time span."""
    auth.log_visit()
    context = {
        'file_id': file_id,
        'start_unix': int(start_unix),
        'duration': int(duration),
        'host_url': request.host_url,
    }
    return render_template('list-fits.html', **context)
def retrieve_bulk_science_data():
    """Return bulk science data for a data-request record id (request value 'id').

    Serves a cached level-1 JSON file when one exists; otherwise reprocesses
    the packets, streaming the response for 'yield'-type products.

    Fixes: (1) when the record does not exist, return immediately — the old
    code fell through to `doc[0]['SPID']`, raised IndexError and replaced the
    "does not exist" message with a generic error; (2) removed the misspelled
    dead assignment `load_from_cache`.
    """
    auth.log_visit()
    result = {}
    try:
        record_id = int(request.values['id'])
        doc = STIX_MDB.select_data_request_info_by_id(-1, record_id)
        if not doc:
            return json_util.dumps(
                {'status': 'The request data does not exist!'})
        loaded_from_cache = False
        if 'level1' in doc[0]:
            # load preprocessed data if the cached level-1 file still exists
            level1_file = doc[0]['level1']
            if os.path.isfile(level1_file):
                with open(level1_file) as f:
                    loaded_from_cache = True
                    result = json.load(f)
                    result['status'] = 'SUCCESS'
        if not loaded_from_cache:
            spid = doc[0]['SPID']
            if bsd_analyzer_stream.get_process_method(spid) != 'yield':
                result = bsd_analyzer_stream.to_dict(STIX_MDB, spid, record_id)
            else:
                # stream large products instead of materializing them
                cursor = STIX_MDB.get_packets_of_bsd_request(record_id,
                                                             header_only=False)
                analyzer = bsd_analyzer_stream.StixBulkL1L2AnalyzerStream()
                return Response(analyzer.merge(cursor),
                                content_type='application/json')
        result['data_type'] = doc[0].get('name', 'UNKNOWN')
    except Exception as e:
        result = {'error': str(e)}
    return json_util.dumps(result)
def create_fits_by_tw(utc_begin, utc_end, product_type):
    """Create and send a FITS file for a time window and product type.

    Rejects requests longer than MAX_DURATION seconds; on success streams the
    in-memory FITS file as an attachment, otherwise returns a JSON error.
    """
    auth.log_visit()
    result = {'error': 'No packet found'}
    try:
        start_unix = stix_datetime.utc2unix(utc_begin)
        duration = stix_datetime.utc2unix(utc_end) - start_unix
        if duration > MAX_DURATION:
            return {
                'error':
                f'Requested data time range not satisfiable. Max duration: {MAX_DURATION} sec'
            }
        config = {
            'folder': TEMP_FITS_FOLDER,
            'type': 'time',
            'memory_file': True,
            'spid': PRODUCT_SPID[product_type],
            'conditions': {
                'start_unix_time': start_unix,
                'duration': duration
            }
        }
        info = fits_creator.create_fits(config)
        if info['success']:
            return send_file(info['data'],
                             as_attachment=True,
                             attachment_filename=info['filename'],
                             mimetype='binary/x-fits')
        result['error'] = info['message']
    except Exception as e:
        result = {'error': str(e)}
    return json_util.dumps(result)
def get_fits_info_by_calibration_id(calibration_id):
    """Describe the FITS product generated for one calibration run as JSON."""
    auth.log_visit()
    out = []
    rows = list(STIX_MDB.get_calibration_run_fits_info(calibration_id))
    if rows:
        row = rows[0]
        out.append({
            'calibration_run_id': calibration_id,
            'raw_file_id': row['file_id'],
            'fits_filename': row['filename'],
            'fits_file_id': row['_id'],
            'packet_start_id': row['packet_id_start'],
            'packet_end_id': row['packet_id_end'],
            'is_complete': row['complete'],
            'meas_start_utc': stix_datetime.unix2utc(row['data_start_unix']),
            'meas_end_utc': stix_datetime.unix2utc(row['data_end_unix']),
            'duration_seconds':
            row['data_end_unix'] - row['data_start_unix'],
            'fits_creation_time': row['creation_time'],
        })
    return json_util.dumps(out)
def download_bsd_fits_file(request_id):
    """Download the FITS file associated with a bulk-science-data request."""
    auth.log_visit()
    matches = list(STIX_MDB.get_bsd_fits_info_by_request_id(request_id))
    if not matches:
        return json_util.dumps(
            {'error': 'The requested fits file is not available!'})
    return download_one(matches[0])
def download_fits_by_filename(filename):
    """Download a FITS file by filename; rejects names without 'fits'."""
    auth.log_visit()
    if 'fits' not in filename:
        return json_util.dumps({'error': 'invalid request'})
    matches = list(STIX_MDB.get_fits_info_by_filename(filename))
    return download_fits(matches)
def data_to_fits():
    """Convert posted plot JSON ('jsonstring' form field) into a FITS download.

    The payload is {'name': ..., 'data': [trace, ...]}; each trace carries
    'x' and 'y' arrays. The first trace's x axis becomes either a numeric 'x'
    column or a 'time' column of seconds since the first timestamp.
    """
    auth.log_visit()
    if request.method != 'POST':
        abort(404, description="Failed to create fits file")
    payload = json.loads(request.form['jsonstring'])
    traces = payload['data']
    plot_name = payload['name']
    primary_hdr = fits.Header()
    table_hdr = fits.Header()
    primary_hdr['source'] = 'STIX pub023 server'
    primary_hdr['name'] = plot_name
    columns = []
    try:
        for i, trace in enumerate(traces):
            if i == 0:
                if not is_date(trace['x'][0]):
                    # plain numeric abscissa
                    columns.append(
                        fits.Column(name='x',
                                    array=np.array(trace['x']),
                                    format='D'))
                else:
                    # convert ISO timestamps to seconds since the first sample
                    time_zero = trace['x'][0]
                    table_hdr['timezero'] = time_zero
                    time_zero_unix = stix_datetime.utc2unix(time_zero)
                    time_array = np.array([
                        stix_datetime.utc2unix(x) - time_zero_unix
                        for x in trace['x']
                    ])
                    columns.append(
                        fits.Column(name='time',
                                    array=time_array,
                                    unit='s',
                                    format='D'))
            columns.append(
                fits.Column(name=trace.get('name', 'y' + str(i)),
                            array=np.array(trace['y']),
                            format='D'))
        hdulist = fits.HDUList([
            fits.PrimaryHDU(header=primary_hdr),
            fits.BinTableHDU.from_columns(columns,
                                          header=table_hdr,
                                          name='DATA')
        ])
        mem = io.BytesIO()
        hdulist.writeto(mem)
        mem.seek(0)
        return send_file(mem,
                         as_attachment=True,
                         attachment_filename=plot_name + '.fits',
                         mimetype='binary/x-fits')
    except Exception:
        abort(404, description="Failed to create fits file")
def log_page_visits():
    """Record a page visit; always responds with {'status': 'OK'}."""
    try:
        auth.log_visit(request.values['page'])
    except Exception:
        # best-effort logging: a missing 'page' value is silently ignored
        pass
    return json_util.dumps({'status': 'OK'})
def download_multiple_fits():
    """Bundle and download several FITS files selected by id (POST)."""
    auth.log_visit()
    if request.method == 'POST':
        payload = json.loads(request.form['jsonstring'])
        ids = [int(i) for i in payload['data']]
        if ids:
            return download_fits(STIX_MDB.get_fits_info_by_id(ids))
    return jsonify(
        {'error': 'An error occurred when processing the request.'})
def get_eluts():
    """Return the energy lookup table (ELUT) valid at the requested UTC time."""
    auth.log_visit()
    data = {}
    try:
        elut = stix_config.Elut(request.values['utc'])
        data = elut.get_data()
    except Exception as e:
        data['error'] = str(e)
    return json_util.dumps(data)
def view_bulk_science_data(fid, uid, record_id, reqform_id, start, end):
    """Render the bulk-science-data browser page with the given identifiers."""
    auth.log_visit()
    parameters = {
        'fid': fid,
        'uid': uid,
        'id': record_id,
        'reqformId': reqform_id,
        'start': start,
        'end': end,
    }
    return render_template('bsd.html', parameters=parameters)
def request_fits_file_info(direction, start_id, num):
    """Return one page of FITS metadata (page size capped at 1000 rows)."""
    auth.log_visit()
    num = min(num, 1000)  # cap the page size
    result = STIX_MDB.get_fits_info_page(direction, start_id, num)
    if direction == -1:
        # backwards pagination: restore ascending order for the client
        result = list(result)[::-1]
    return json_util.dumps(result)
def view_housekeeping():
    """Render the housekeeping view, optionally pre-zoomed via GET args.

    Fix: narrowed the bare `except:` to (KeyError, ValueError), matching the
    error handling used by the other view functions in this file.
    """
    auth.log_visit()
    start_unix = 0
    span_sec = 0
    if request.method == 'GET':
        try:
            start_unix = float(request.args['start_unix'])
            span_sec = float(request.args['span_sec'])
        except (KeyError, ValueError):
            # missing/invalid args: fall back to the defaults above
            pass
    return create_housekeeping_view(start_unix, span_sec)
def get_emphemeris():
    """Return Solar Orbiter ephemeris between two UTC times (max 1000 steps)."""
    auth.log_visit()
    data = {}
    try:
        start_utc = request.values['start_utc']
        end_utc = request.values['end_utc']
        steps = min(int(request.values['steps']), 1000)  # hard cap
        data = solo.get_solo_ephemeris(start_utc, end_utc, num_steps=steps)
    except Exception as e:
        data['error'] = str(e)
    return json_util.dumps(data)
def view_lightcurves_for_time_range(start_utc, duration):
    """Render the light-curve plot page for a start time and duration."""
    auth.log_visit()
    start_unix = 0
    try:
        start_unix = sdt.utc2unix(start_utc)
    except (KeyError, ValueError):
        # unparsable start time: keep the default of 0
        pass
    return render_template('plot-lightcurves.html',
                           start_unix=start_unix,
                           duration=duration,
                           run=-1)
def view_qlspectra(file_id, packet_id):
    """Render the quick-look spectra page for a file or a single packet."""
    auth.log_visit()
    message = ''
    if file_id >= 0:
        message = 'Requesting quicklook spectra of file: {}'.format(file_id)
    elif packet_id >= 0:
        message = 'Requesting packet # {}'.format(packet_id)
    return render_template('plot-qlspectra.html',
                           packet_id=packet_id,
                           file_id=file_id,
                           message=message)
def request_ql_light_curves():
    """Return quick-look light curves, optionally light-time corrected.

    Reads 'begin', 'end' and 'ltc' from the request values; delegates the
    actual data retrieval to get_light_curve_data.
    """
    auth.log_visit()
    try:
        begin_utc = request.values['begin']
        end_utc = request.values['end']
        # the 'ltc' flag arrives as a string from the client
        light_time_correction = request.values['ltc'] in ['true', 'True', True]
    except Exception:
        return jsonify({'error': 'Invalid request'})
    return get_light_curve_data(begin_utc, end_utc, light_time_correction)
def view_housekeeping_file_url():
    """Render the housekeeping plot page for a file id given as a GET arg.

    Fix: narrowed the bare `except:` to (KeyError, ValueError) so only
    missing or non-integer 'file_id' values produce the error message.
    """
    auth.log_visit()
    file_id = -1
    message = ""
    if request.method == 'GET':
        try:
            file_id = int(request.args['file_id'])
            message = 'Requesting data of file # {}'.format(file_id)
        except (KeyError, ValueError):
            message = 'Invalid request'
    return render_template('plot-housekeeping.html',
                           file_id=file_id,
                           message=message)
def view_qlspec_file():
    """Render the quick-look spectra page for a file id from the request.

    Fix: narrowed the bare `except:` to (KeyError, ValueError); the defaults
    (file_id=-1, empty message) are kept on missing/invalid input.
    """
    auth.log_visit()
    file_id = -1
    packet_id = -1
    message = ''
    try:
        file_id = int(request.values['file_id'])
        message = 'Requesting quicklook spectra of file: {}'.format(file_id)
    except (KeyError, ValueError):
        pass
    return render_template('plot-qlspectra.html',
                           packet_id=packet_id,
                           file_id=file_id,
                           message=message)
def view_lightcurves():
    """Render the light-curve plot page; start/span/run come from the request."""
    auth.log_visit()
    start_unix, duration, run = 0, 0, -1
    try:
        start_unix = float(request.values['start'])
        duration = float(request.values['span'])
    except (KeyError, ValueError):
        # if 'start' fails, 'span' is never read — both keep their defaults
        pass
    try:
        run = int(request.values['run'])
    except (KeyError, ValueError):
        pass
    return render_template('plot-lightcurves.html',
                           start_unix=start_unix,
                           duration=duration,
                           run=run)
def view_background():
    """Render the background plot page; start/span/run come from the request.

    Fix: narrowed both bare `except:` clauses to (KeyError, ValueError),
    matching view_lightcurves.
    """
    auth.log_visit()
    start_unix = 0
    duration = 0
    run = -1
    try:
        start_unix = float(request.values['start'])
        duration = float(request.values['span'])
    except (KeyError, ValueError):
        # missing/invalid args: keep the defaults above
        pass
    try:
        # NOTE(review): 'run' is parsed as float here but as int in
        # view_lightcurves — confirm which form the template expects
        run = float(request.values['run'])
    except (KeyError, ValueError):
        pass
    return render_template('plot-background.html',
                           start_unix=start_unix,
                           duration=duration,
                           run=run)
def query_fits_by_tw(utc_begin, utc_end, product_type):
    """List download URLs of FITS products of one type within a time window.

    Rejects spans longer than MAX_FITS_QUERY_SPAN seconds; on success returns
    a JSON list of {url, observation_time_range, creation_time, fits_id}.
    """
    auth.log_visit()
    try:
        types = get_product_types(product_type)
        if not types:
            result = {'error': 'Invalid product filter!'}
        else:
            start_unix = parse(utc_begin).timestamp()
            end_unix = parse(utc_end).timestamp()
            if end_unix - start_unix > MAX_FITS_QUERY_SPAN:
                return json_util.dumps({
                    'error':
                    f'Time span not satisfiable. Time span must be < {MAX_FITS_QUERY_SPAN/86400.} days'
                })
            rows = STIX_MDB.get_fits_info_by_time_range(
                start_unix,
                end_unix,
                product_groups=types[0],
                product_types=types[1],
                complete='any')
            result = []
            for row in rows:
                try:
                    creation_time = stix_datetime.format_datetime(
                        row['creation_time'])
                except Exception:
                    # fall back to the raw stored value
                    creation_time = row['creation_time']
                result.append({
                    'url':
                    '{}download/fits/filename/{}'.format(
                        request.host_url, row['filename']),
                    'observation_time_range': [
                        stix_datetime.unix2utc(row['data_start_unix']),
                        stix_datetime.unix2utc(row['data_end_unix'])
                    ],
                    'creation_time': creation_time,
                    'fits_id': row['_id']
                })
    except Exception as e:
        result = {'error': str(e)}
    return json_util.dumps(result)
def get_calibration_info_by_fits_id(fits_id):
    """Return calibration-run metadata associated with a FITS file, as JSON."""
    auth.log_visit()
    out = []
    data = STIX_MDB.get_calibration_info_by_fits_id(fits_id)
    if data:
        row = data[0]
        if 'error' in row:
            # propagate the database-layer error document as-is
            out = row
        else:
            out = [{
                'fits_file_id': fits_id,
                'calibration_run_id': row['_id'],
                'raw_file_id': row['run_id'],
                'meas_start_utc':
                stix_datetime.unix2utc(row['start_unix_time']),
                'duration_seconds': row['duration'],
            }]
    return json_util.dumps(out)
def get_fits_info(fits_id):
    """Return metadata of a single FITS file by its id, as JSON."""
    auth.log_visit()
    return json_util.dumps(list(STIX_MDB.get_fits_info_by_id(fits_id)))
def view_filelist(utc, hours):
    """Render the raw-file listing page around a UTC time.

    Fix: the 'hours' parameter was accepted but a hard-coded 0 was passed to
    the template; forward the actual value instead.
    """
    auth.log_visit()
    # NOTE(review): this function name duplicates the view_filelist defined
    # earlier in this file (different signature) — the later definition
    # shadows the earlier one at module level; confirm the route wiring.
    return render_template('list-files.html', utc=utc, hours=hours)
def view_file_housekeeping(file_id):
    """Render the housekeeping plot page for a given raw file id."""
    auth.log_visit()
    message = 'Requesting data of file # {}'.format(file_id)
    return render_template('plot-housekeeping.html',
                           file_id=file_id,
                           message=message)
def load_joint_obs_page(flare_id):
    """Render the joint-observation page for one flare."""
    auth.log_visit()
    return render_template('joint-obs.html', flare_id=flare_id)