def get_descript(campaign_name, system, parameter):
    """Serve the plain-text description of one parameter.

    Args:
        campaign_name: campaign to connect to.
        system: instrument/system identifier.
        parameter: parameter name within that system.

    Returns:
        flask ``Response`` carrying the description as UTF-8 ``text/plain``.
    """
    # fixed log-message typo: "request fori description" -> "request for description"
    app.logger.info("got request for description {} {} {}".format(
        campaign_name, system, parameter))
    # build_lists=False: only metadata is needed here, skip the slow file indexing
    larda = pyLARDA.LARDA().connect(campaign_name, build_lists=False)
    app.logger.warning('request.args {}'.format(dict(request.args)))
    text_string = larda.description(system, parameter)
    # descriptions are always delivered as plain text; no rformat switching
    # (cbor/msgpack/json) is needed for this endpoint
    resp = Response(text_string.encode('utf-8'), status=200,
                    mimetype='text/plain')
    return resp
def api_entry():
    """Top-level API entry point.

    Returns:
        campaign list
    """
    available_campaigns = pyLARDA.LARDA().campaign_list
    return jsonify(campaign_list=available_campaigns)
def get_campaign_info(campaign_name):
    """Assemble the campaign metadata.

    Returns:
        json object with all the campaign info
    """
    larda = pyLARDA.LARDA().connect(campaign_name, build_lists=False)
    # flatten every connector into a plain, json-serializable dict
    connectors = {}
    for system, conn in larda.connectors.items():
        connectors[system] = conn.get_as_plain_dict()
    campaign_info = {
        'config_file': larda.camp.info_dict,
        'connectors': connectors,
    }
    return jsonify(**campaign_info)
# Case-study script preamble: make local modules importable and force the
# non-interactive Agg backend (must be set before pyplot is imported anywhere).
sys.path.append('.')
import matplotlib
matplotlib.use('Agg')
import pyLARDA
import datetime
import logging

# route pyLARDA log output to stderr at INFO level
log = logging.getLogger('pyLARDA')
log.setLevel(logging.INFO)
log.addHandler(logging.StreamHandler())

# Load LARDA
larda = pyLARDA.LARDA().connect('lacros_dacapo')
#case_prefix = 'plots/scatter_case_studies/comparison_cloud_'
case_prefix = 'plots/'
c_info = [larda.camp.LOCATION, larda.camp.VALID_DATES]
print(larda.days_with_data())

# begin_dt=datetime.datetime(2018,12,6,0,1)
# case-study window: 2018-12-06 00:00-00:30, altitudes 0-12000 m
begin_dt = datetime.datetime(2018, 12, 6, 0, 0, 0)
end_dt = datetime.datetime(2018, 12, 6, 0, 30, 0)
plot_range = [0, 12000]

# load the reflectivity data
MIRA_Zg = larda.read("MIRA", "Zg", [begin_dt, end_dt], [0, 'max'])
def get_param(campaign_name, system, param):
    """Read data for one system/parameter and return a serialized container.

    Query args:
        interval: ``'ts0-ts1[,lo-hi,...]'`` — first element is the unix
            timestamp range, optional further elements are additional
            slices whose bounds are numbers or the literal ``'max'``.
        rformat: ``'bin'`` (cbor), ``'msgpack'`` or ``'json'`` (default).

    Returns:
        flask ``Response`` with the data container serialized in the
        requested format.
    """
    app.logger.info("got request for {} {} {}".format(campaign_name, system, param))
    starttime = time.time()
    larda = pyLARDA.LARDA().connect(campaign_name, build_lists=False)
    app.logger.debug("{:5.3f}s load larda".format(time.time() - starttime))

    # choose the response serialization; anything unrecognized falls back to json
    rformat = request.args.get('rformat', 'json')
    if rformat not in ('bin', 'msgpack'):
        rformat = 'json'

    intervals = request.args.get('interval').split(',')
    time_interval = [h.ts_to_dt(float(t)) for t in intervals[0].split('-')]
    # further slices may mix numbers with the literal 'max'
    further_slices = [[float(e) if e not in ['max'] else e
                       for e in s.split('-')]
                      for s in intervals[1:]]

    app.logger.warning('request.args {}'.format(dict(request.args)))
    app.logger.info("time request {}".format(time_interval))
    starttime = time.time()
    data_container = larda.read(system, param, time_interval, *further_slices,
                                **dict(request.args))
    app.logger.debug("{:5.3f}s read data".format(time.time() - starttime))

    starttime = time.time()
    for k in ['ts', 'rg', 'vel', 'var', 'mask']:
        if k in data_container and hasattr(data_container[k], 'tolist'):
            # compare dtypes with `!=` instead of the original identity
            # check (`is not`) — dtype object identity is not a guaranteed
            # contract, equality is
            if data_container[k].dtype != np.dtype('object'):
                # NaN/inf are not representable in json; zero them out
                data_container[k][~np.isfinite(data_container[k])] = 0
            data_container[k] = data_container[k].tolist()
    app.logger.debug("{:5.3f}s convert data".format(time.time() - starttime))

    starttime = time.time()
    if rformat == 'bin':
        resp = Response(cbor.dumps(data_container), status=200,
                        mimetype='application/cbor')
    elif rformat == 'msgpack':
        resp = Response(msgpack.packb(data_container), status=200,
                        mimetype='application/msgpack')
    elif rformat == 'json':
        resp = Response(json.dumps(data_container), status=200,
                        mimetype='application/json')
    app.logger.debug("{:5.3f}s dumps {}".format(time.time() - starttime, rformat))
    # for some reason the manual Response is faster than jsonify(data_container)
    return resp
# ListCollector entry script: rebuild the file lists for the selected
# (or all) campaigns.

# route pyLARDA log output to stderr at INFO level
log = logging.getLogger('pyLARDA')
log.setLevel(logging.INFO)
log.addHandler(logging.StreamHandler())

# directory of this script; the campaign config lives one level up
ROOT_DIR = Path(__file__).resolve().parent

parser = argparse.ArgumentParser(description='''
    Example `python3 ListCollector.py -c oceanet_pascal lacros_accept`.
    As default all campaigns are collected.''')
parser.add_argument('-c', '--campaign', nargs='+',
                    help='just run for a defined campaign(s)')
args = parser.parse_args()

# load the campaign definitions from the shared config directory
camp = pyLARDA.LARDA_campaign(ROOT_DIR.parent / "larda-cfg", "campaigns.toml")
camp_list = camp.get_campaign_list()
if args.campaign:
    # restrict to the campaigns requested on the command line
    assert set(args.campaign).issubset(camp_list), 'campaign not in list'
    camp_list = args.campaign

#larda=pyLARDA.LARDA().connect('test_filepatterns', build_lists=True)
# build_lists=True triggers the actual (slow) file indexing per campaign
for cname in camp_list:
    larda = pyLARDA.LARDA().connect(cname, build_lists=True)
    print(larda.connectors.keys())

#larda=pyLARDA.LARDA('COLRAWI')
#larda=pyLARDA.LARDA('LACROS_at_Leipzig', build_lists=True)
# NOTE(review): the first two statements are the tail of a function whose
# definition starts before this chunk — presumably building the explorer
# link string `s`; confirm against the full file.
        *range_interval, ",".join(params))
    return s


if __name__ == '__main__':
    # select and load the case-study configuration
    case_name = '20190801-01'
    config_case_studies = toml.load('dacapo_case_studies.toml')
    case_study = config_case_studies['case'][case_name]
    dt_interval = [
        datetime.datetime.strptime(t, '%Y%m%d-%H%M')
        for t in case_study['time_interval']
    ]
    # pprint.pprint(config_case_studies)
    larda = pyLARDA.LARDA().connect('lacros_dacapo_gpu', build_lists=True)
    data, savenames = plot_case_study(case_study, contour_lines='T')
    # attach a web-explorer link for the same time/range window
    case_study['link'] = get_explorer_link(
        'lacros_dacapo', dt_interval, case_study['range_interval'],
        ["CLOUDNET|CLASS", "CLOUDNET|Z", "POLLY|attbsc1064", "POLLY|depol"])
    case_study['location'] = larda.camp.LOCATION
    case_study['coordinates'] = larda.camp.COORDINATES
    make_html_overview(case_name, case_study, data, savenames)
    print('\n ...Done...\n')
# | | | |_____| | | \ | |_____] |_____/ | | | ____ |_____/ |_____| | | | # | | | | | __|__ | \_| | | \_ |_____| |_____| | \_ | | | | | # # if __name__ == '__main__': start_time = time.time() log = logging.getLogger('pyLARDA') log.setLevel(logging.INFO) log.addHandler(logging.StreamHandler()) # Load LARDA # larda = pyLARDA.LARDA('remote', uri='http://larda.tropos.de/larda3').connect('lacros_dacapo', build_lists=False) larda = pyLARDA.LARDA().connect('lacros_dacapo_catalpa') c_info = [larda.camp.LOCATION, larda.camp.VALID_DATES] # print('available systems:', larda.connectors.keys()) # print("available parameters: ", [(k, larda.connectors[k].params_list) for k in larda.connectors.keys()]) print('days with data', larda.days_with_data()) # gather command line arguments method_name, args, kwargs = h._method_info_from_argv(sys.argv) # gather argument if 'date' in kwargs: date = str(kwargs['date']) begin_dt = datetime.datetime.strptime(date + ' 00:00:05', '%Y%m%d %H:%M:%S') end_dt = datetime.datetime.strptime(date + ' 23:59:55',
# Scatter-comparison script preamble: force the non-interactive Agg backend
# before pyplot is imported.
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pyLARDA
import pyLARDA.helpers as h
import datetime
import numpy as np
import scipy.ndimage as spn
from scipy import stats

#Load LARDA
#larda=pyLARDA.LARDA('lacros_dacapo')
larda = pyLARDA.LARDA().connect_local('lacros_dacapo')
#c_info = [larda.camp.LOCATION, larda.camp.VALID_DATES]
print(larda.days_with_data())
#print("array_avail()", larda.array_avail(2015, 6))
#print("single month with new interface ", larda.instr_status(2015, 6))

#begin_dt=datetime.datetime(2018,12,6,0,1)
# case-study window: 2018-12-06 01:40-04:00, altitudes 300-10000 m
begin_dt = datetime.datetime(2018, 12, 6, 1, 40)
end_dt = datetime.datetime(2018, 12, 6, 4, 0, 0)
plot_range = [300, 10000]
case_prefix = 'plots/scatter_case_studies/comparison_cloud_'

# load the reflectivity data
MIRA_Zg = larda.read("MIRA", "Zg", [begin_dt, end_dt], [0, 'max'])