def get_times():
    """Return the [start, stop) time span covered by a range of bins.

    Reads ``start`` and ``stop`` bin indices from the request query string
    and responds with JSON containing the NTP time of the start of the
    first bin and the NTP time of the start of the bin after ``stop``
    (i.e. an exclusive upper bound), plus ISO-formatted renderings.

    Returns:
        A JSON ``Response`` on success, or a plain error string when the
        query parameters are missing or non-integer.
    """
    sb = request.args.get('start')
    eb = request.args.get('stop')
    if sb is None or eb is None:
        return "Need bin and end to compute bin range"
    try:
        sb = int(sb)
        eb = int(eb)
    except ValueError:
        # Previously a non-numeric query parameter raised an unhandled
        # ValueError and surfaced as a 500; report the bad input instead.
        return "start and stop must be integer bin numbers"
    ret = {
        'startDT': ntp_to_ISO_date(bin_to_time(sb)),
        'start': bin_to_time(sb),
        # stop is exclusive: report the start of the bin after `eb`.
        'stopDT': ntp_to_ISO_date(bin_to_time(eb + 1)),
        'stop': bin_to_time(eb + 1),
    }
    return Response(json.dumps(ret), mimetype='application/json')
def offload_bin(stream, data_bin, san_dir_string):
    """Offload one time bin of *stream* to per-deployment netCDF files.

    Fetches all data for the half-open time range
    [bin_to_time(data_bin), bin_to_time(data_bin + 1)), builds a sanitized
    xarray dataset, and writes one netCDF file per deployment under the
    directory produced by ``san_dir_string.format(data_bin)``.

    Args:
        stream: stream identifier understood by ``fetch_all_data`` /
            ``get_nc_filename``.
        data_bin: integer bin index selecting the time range to offload.
        san_dir_string: format string with one ``{}`` placeholder for the
            bin index, yielding the target directory path.

    Returns:
        Tuple ``(True, "")`` on completion.
    """
    # NOTE(review): the original comment claimed duplicates are dropped
    # here, but no dedup is visible — presumably handled inside
    # fetch_all_data or to_xray_dataset; confirm before relying on it.
    cols, data = fetch_all_data(
        stream, TimeRange(bin_to_time(data_bin), bin_to_time(data_bin + 1))
    )
    dataset = to_xray_dataset(cols, data, stream, san=True)

    nc_directory = san_dir_string.format(data_bin)
    # exist_ok avoids the check-then-create race the old
    # `if not os.path.exists(...)` guard had when several workers
    # offload bins concurrently.
    os.makedirs(nc_directory, exist_ok=True)

    for deployment, deployment_ds in dataset.groupby("deployment"):
        # One file per deployment inside the bin directory.
        nc_file_name = get_nc_filename(stream, nc_directory, deployment)
        log.info(
            "Offloading %s deployment %d to %s - There are %d particles",
            str(stream),
            deployment,
            nc_file_name,
            len(deployment_ds["index"]),
        )
        # create netCDF file
        deployment_ds.to_netcdf(path=nc_file_name)
    return True, ""