def main():
    """Request, download and process PCO2A air and water data from the most
    recent deployment of the CE02SHSM buoy, saving both datasets to a single
    NetCDF file using separate 'air' and 'water' groups.
    """
    # Setup needed parameters for the request, the user would need to vary these to suit their own needs and
    # sites/instruments of interest. Site, node, sensor, stream and delivery method names can be obtained from the
    # Ocean Observatories Initiative web site. The last two parameters (level and instrmt) will set path and naming
    # conventions to save the data to the local disk.
    site = 'CE02SHSM'           # OOI Net site designator
    node = 'SBD12'              # OOI Net node designator
    sensor = '04-PCO2AA000'     # OOI Net sensor designator
    stream = 'pco2a_a_dcl_instrument_air_recovered'  # OOI Net stream name
    method = 'recovered_host'   # OOI Net data delivery method
    level = 'buoy'              # local directory name, level below site
    instrmt = 'pco2a'           # local directory name, instrument below level

    # We are after recovered_host data. Determine list of deployments and use the last, presumably currently active,
    # deployment to determine the start and end dates for our request.
    vocab = get_vocabulary(site, node, sensor)[0]
    deployments = list_deployments(site, node, sensor)
    deploy = deployments[-1]
    start, stop = get_deployment_dates(site, node, sensor, deploy)

    # request and download the data -- air measurements
    r = m2m_request(site, node, sensor, method, stream, start, stop)
    air = m2m_collect(r, ('.*deployment%04d.*PCO2A.*air.*\\.nc$' % deploy))
    air = air.where(air.deployment == deploy, drop=True)  # limit to the deployment of interest

    # request and download the data -- water measurements
    r = m2m_request(site, node, sensor, method, 'pco2a_a_dcl_instrument_water_recovered', start, stop)
    water = m2m_collect(r, ('.*deployment%04d.*PCO2A.*water.*\\.nc$' % deploy))
    water = water.where(water.deployment == deploy, drop=True)  # limit to the deployment of interest

    # clean-up and reorganize the air and water datasets
    air = pco2a_datalogger(air, True)
    air = update_dataset(air, vocab['maxdepth'])
    water = pco2a_datalogger(water, True)
    water = update_dataset(water, vocab['maxdepth'])

    # save the data -- utilize groups for the air and water datasets
    out_path = os.path.join(CONFIG['base_dir']['m2m_base'], site.lower(), level, instrmt)
    out_path = os.path.abspath(out_path)
    # exist_ok avoids the check-then-create race of the exists()/makedirs() pair
    os.makedirs(out_path, exist_ok=True)

    out_file = ('%s.%s.%s.deploy%02d.%s.%s.nc' % (site.lower(), level, instrmt, deploy, method, stream))
    nc_out = os.path.join(out_path, out_file)

    # write the air group first ('w' truncates any prior file), then append the water group
    air.to_netcdf(nc_out, mode='w', format='NETCDF4', engine='netcdf4', group='air')
    water.to_netcdf(nc_out, mode='a', format='NETCDF4', engine='netcdf4', group='water')
def main(argv=None): args = inputs(argv) site = args.site node = args.node sensor = args.sensor method = args.method stream = args.stream deploy = args.deploy start = args.start stop = args.stop burst = args.burst # determine the start and stop times for the data request based on either the deployment number or user entered # beginning and ending dates. if not deploy or (start and stop): return SyntaxError('You must specify either a deployment number or beginning and end dates of interest.') else: if deploy: # Determine start and end dates based on the deployment number start, stop = get_deployment_dates(site, node, sensor, deploy) if not start or not stop: exit_text = ('Deployment dates are unavailable for %s-%s-%s, deployment %02d.' % (site, node, sensor, deploy)) raise SystemExit(exit_text) # Request the data for download r = m2m_request(site, node, sensor, method, stream, start, stop) if not r: exit_text = ('Request failed for %s-%s-%s. Check request.' % (site, node, sensor)) raise SystemExit(exit_text) # Valid request, start downloading the data if deploy: flort = m2m_collect(r, '.*deployment%04d.*FLORT.*\\.nc$') else: flort = m2m_collect(r, '.*FLORT.*\\.nc$') if not flort: exit_text = ('Data unavailable for %s-%s-%s. Check request.' % (site, node, sensor)) raise SystemExit(exit_text) # clean-up and reorganize if method in ['telemetered', 'recovered_host']: flort = flort_datalogger(flort, burst) else: flort = flort_instrument(flort) vocab = get_vocabulary(site, node, sensor)[0] flort = update_dataset(flort, vocab['maxdepth']) # save the data to disk out_file = os.path.abspath(os.path.join(CONFIG['base_dir']['m2m_base'], args.outfile)) if not os.path.exists(os.path.dirname(out_file)): os.makedirs(os.path.dirname(out_file)) flort.to_netcdf(out_file, mode='w', format='NETCDF4', engine='netcdf4')
def main(): # Setup needed parameters for the request, the user would need to vary these to suit their own needs and # sites/instruments of interest. Site, node, sensor, stream and delivery method names can be obtained from the # Ocean Observatories Initiative web site. The last two will set path and naming conventions to save the data # to the local disk site = 'CE02SHSM' # OOI Net site designator node = 'RID26' # OOI Net node designator sensor = '06-PHSEND000' # OOI Net sensor designator stream = 'phsen_abcdef_dcl_instrument' # OOI Net stream name method = 'telemetered' # OOI Net data delivery method level = 'nsif' # local directory name, level below site instrmt = 'phsen' # local directory name, instrument below level # We are after telemetered data. Determine list of deployments and use the last, presumably currently active, # deployment to determine the start and end dates for our request. vocab = get_vocabulary(site, node, sensor)[0] deployments = list_deployments(site, node, sensor) deploy = deployments[-1] start, stop = get_deployment_dates(site, node, sensor, deploy) # request and download the data r = m2m_request(site, node, sensor, method, stream, start, stop) phsen = m2m_collect(r, '.*PHSEN.*\\.nc$') phsen = phsen.where(phsen.deployment == deploy, drop=True) # limit to the deployment of interest # clean-up and reorganize phsen = phsen_datalogger(phsen) phsen = update_dataset(phsen, vocab['maxdepth']) # save the data out_path = os.path.join(CONFIG['base_dir']['m2m_base'], site.lower(), level, instrmt) out_path = os.path.abspath(out_path) if not os.path.exists(out_path): os.makedirs(out_path) out_file = ('%s.%s.%s.deploy%02d.%s.%s.nc' % (site.lower(), level, instrmt, deploy, method, stream)) nc_out = os.path.join(out_path, out_file) phsen.to_netcdf(nc_out, mode='w', format='NETCDF4', engine='netcdf4')
def main(): # Setup needed parameters for the request, the user would need to vary these to suit their own needs and # sites/instruments of interest. Site, node, sensor, stream and delivery method names can be obtained from the # Ocean Observatories Initiative web site. The last two will set path and naming conventions to save the data # to the local disk site = 'CE01ISSM' # OOI Net site designator node = 'SBD17' # OOI Net node designator sensor = '06-FLORTD000' # OOI Net sensor designator stream = 'flort_sample' # OOI Net stream name method = 'recovered_inst' # OOI Net data delivery method level = 'buoy' # local directory name, level below site instrmt = 'flort' # local directory name, instrument below level # We are after recovered instrument data. Determine list of deployments and use a more recent deployment to # determine the start and end dates for our request. vocab = get_vocabulary(site, node, sensor)[0] deployments = list_deployments(site, node, sensor) deploy = deployments[5] start, stop = get_deployment_dates(site, node, sensor, deploy) # request and download the data r = m2m_request(site, node, sensor, method, stream, start, stop) flort = m2m_collect(r, '.*FLORT.*\\.nc$') flort = flort.where(flort.deployment == deploy, drop=True) # limit to the deployment of interest # clean-up and reorganize flort = flort_instrument(flort) flort = update_dataset(flort, vocab['maxdepth']) # save the data out_path = os.path.join(CONFIG['base_dir']['m2m_base'], site.lower(), level, instrmt) out_path = os.path.abspath(out_path) if not os.path.exists(out_path): os.makedirs(out_path) out_file = ('%s.%s.%s.deploy%02d.%s.%s.nc' % (site.lower(), level, instrmt, deploy, method, stream)) nc_out = os.path.join(out_path, out_file) flort.to_netcdf(nc_out, mode='w', format='NETCDF4', engine='netcdf4')
def main(): # Setup needed parameters for the request, the user would need to vary these to suit their own needs and # sites/instruments of interest. Site, node, sensor and stream names can be obtained from the Ocean Observatories # Initiative web site site = 'CE02SHSM' # OOI Net site designator node = 'RID27' # OOI Net node designator sensor = '03-CTDBPC000' # OOI Net sensor designator stream = 'ctdbp_cdef_instrument_recovered' # OOI Net stream name method = 'recovered_inst' # OOI Net data delivery method level = 'nsif' # local directory name, level below site instrmt = 'ctdbp' # local directory name, instrument below level # We are after recovered instrument data. Determine list of deployments and use the first deployment to determine # the start and end dates for our request. vocab = get_vocabulary(site, node, sensor)[0] deployments = list_deployments(site, node, sensor) deploy = deployments[0] start, stop = get_deployment_dates(site, node, sensor, deploy) # request and download the data r = m2m_request(site, node, sensor, method, stream, start, stop) ctdbp = m2m_collect(r, '.*ctdbp.*\\.nc$') ctdbp = ctdbp.where(ctdbp.deployment == deploy, drop=True) # limit to the deployment of interest # clean-up and reorganize ctdbp = ctdbp_instrument(ctdbp, burst=True) ctdbp = update_dataset(ctdbp, vocab['maxdepth']) # save the data out_path = os.path.join(CONFIG['base_dir']['m2m_base'], site.lower(), level, instrmt) out_path = os.path.abspath(out_path) if not os.path.exists(out_path): os.makedirs(out_path) out_file = ('%s.%s.%s.deploy%02d.%s.%s.nc' % (site.lower(), level, instrmt, deploy, method, stream)) nc_out = os.path.join(out_path, out_file) ctdbp.to_netcdf(nc_out, mode='w', format='NETCDF4', engine='netcdf4')
def main(): # Setup needed parameters for the request, the user would need to vary these to suit their own needs and # sites/instruments of interest. Site, node, sensor, stream and delivery method names can be obtained from the # Ocean Observatories Initiative web site. The last two parameters (level and instrmt) will set path and naming # conventions to save the data to the local disk. site = 'CE04OSSM' # OOI Net site designator node = 'SBD11' # OOI Net node designator sensor = '06-METBKA000' # OOI Net sensor designator stream = 'metbk_hourly' # OOI Net stream name method = 'telemetered' # OOI Net data delivery method level = 'buoy' # local directory name, level below site instrmt = 'metbk' # local directory name, instrument below level # We are after telemetered data. Determine list of deployments and use the last, presumably currently active, # deployment to determine the start and end dates for our request. vocab = get_vocabulary(site, node, sensor)[0] deployments = list_deployments(site, node, sensor) deploy = deployments[-1] start, stop = get_deployment_dates(site, node, sensor, deploy) # request and download the data r = m2m_request(site, node, sensor, method, stream, start, stop) metbk = m2m_collect(r, ('.*deployment%04d.*METBK.*hourly.*\\.nc$' % deploy)) # clean-up and reorganize the METBK hourly bulk flux data metbk = metbk_hourly(metbk) metbk = update_dataset(metbk, vocab['maxdepth']) # save the data -- utilize groups for the metbk and water datasets out_path = os.path.join(CONFIG['base_dir']['m2m_base'], site.lower(), level, instrmt) out_path = os.path.abspath(out_path) if not os.path.exists(out_path): os.makedirs(out_path) out_file = ('%s.%s.%s.deploy%02d.%s.%s.nc' % (site.lower(), level, instrmt, deploy, method, stream)) nc_out = os.path.join(out_path, out_file) metbk.to_netcdf(nc_out, mode='w', format='NETCDF4', engine='netcdf4')
def main(argv=None): # setup the input arguments args = inputs(argv) site = args.site node = args.node sensor = args.sensor method = args.method stream = args.stream deploy = args.deploy start = args.start stop = args.stop # determine the start and stop times for the data request based on either the deployment number or user entered # beginning and ending dates. if not deploy or (start and stop): return SyntaxError( 'You must specify either a deployment number or beginning and end dates of interest.' ) else: if deploy: # Determine start and end dates based on the deployment number start, stop = get_deployment_dates(site, node, sensor, deploy) if not start or not stop: exit_text = ( 'Deployment dates are unavailable for %s-%s-%s, deployment %02d.' % (site, node, sensor, deploy)) raise SystemExit(exit_text) # Request the data r = m2m_request(site, node, sensor, method, stream, start, stop) if not r: exit_text = ( 'Data unavailable for %s-%s-%s, deployment %02d. Check request.' % (site, node, sensor, deploy)) raise SystemExit(exit_text) # Valid request, start downloading the data phsen = m2m_collect(r, '.*PHSEN.*\\.nc$') # If limiting to a specific deployment, apply the filter if deploy: phsen = phsen.where(phsen.deployment == deploy, drop=True) # limit to the deployment of interest # check to see if there is any data after limiting to this specific deployment if len(phsen.time) == 0: exit_text = ('Data unavailable for %s-%s-%s, deployment %02d.' % (site, node, sensor, deploy)) raise SystemExit(exit_text) # clean-up and reorganize phsen = phsen_streamed(phsen) vocab = get_vocabulary(site, node, sensor)[0] phsen = update_dataset(phsen, vocab['maxdepth']) # save the data to disk out_file = os.path.abspath( os.path.join(CONFIG['base_dir']['m2m_base'], args.outfile)) if not os.path.exists(os.path.dirname(out_file)): os.makedirs(os.path.dirname(out_file)) phsen.to_netcdf(out_file, mode='w', format='NETCDF4', engine='netcdf4')
def main(argv=None):
    """Request, download and process PCO2A data from the OOI M2M system. The
    stream name selects the air or water measurement set, and the result is
    saved to the matching 'air' or 'water' group of the output NetCDF file,
    appending when the file already exists.

    :param argv: command line arguments handed to the inputs parser (uses
        sys.argv when None)
    """
    # setup the input arguments
    args = inputs(argv)
    site = args.site
    node = args.node
    sensor = args.sensor
    method = args.method
    stream = args.stream
    deploy = args.deploy
    start = args.start
    stop = args.stop
    burst = args.burst

    # determine the start and stop times for the data request based on either the deployment number or user entered
    # beginning and ending dates.
    if not deploy and not (start and stop):
        # bug fix: the original test (`not deploy or (start and stop)`) rejected every valid input combination,
        # and the resulting SyntaxError was returned rather than raised, so the script silently did nothing.
        raise SystemExit('You must specify either a deployment number or beginning and end dates of interest.')

    if deploy:
        # Determine start and end dates based on the deployment number
        start, stop = get_deployment_dates(site, node, sensor, deploy)
        if not start or not stop:
            exit_text = ('Deployment dates are unavailable for %s-%s-%s, deployment %02d.' % (site, node, sensor,
                                                                                              deploy))
            raise SystemExit(exit_text)

    # Request the data for download
    r = m2m_request(site, node, sensor, method, stream, start, stop)
    if not r:
        exit_text = ('Request failed for %s-%s-%s. Check request.' % (site, node, sensor))
        raise SystemExit(exit_text)

    # Valid request, start downloading the data. The stream name determines whether this is the air or the water
    # measurement set, which in turn selects the NetCDF group the data is saved to.
    if re.match(r'.*_air.*', stream):
        if deploy:
            pco2a = m2m_collect(r, ('.*deployment%04d.*PCO2A.*air.*\\.nc$' % deploy))
        else:
            pco2a = m2m_collect(r, '.*PCO2A.*air.*\\.nc$')
        nc_group = 'air'
    else:
        if deploy:
            pco2a = m2m_collect(r, ('.*deployment%04d.*PCO2A.*water.*\\.nc$' % deploy))
        else:
            pco2a = m2m_collect(r, '.*PCO2A.*water.*\\.nc$')
        nc_group = 'water'

    if not pco2a:
        exit_text = ('Data unavailable for %s-%s-%s. Check request.' % (site, node, sensor))
        raise SystemExit(exit_text)

    # clean-up and reorganize
    pco2a = pco2a_datalogger(pco2a, burst)
    vocab = get_vocabulary(site, node, sensor)[0]
    pco2a = update_dataset(pco2a, vocab['maxdepth'])

    # save the data to disk, appending when the file already exists so the air and water groups can
    # accumulate in a single NetCDF file
    out_file = os.path.abspath(os.path.join(CONFIG['base_dir']['m2m_base'], args.outfile))
    # exist_ok avoids the check-then-create race of the exists()/makedirs() pair
    os.makedirs(os.path.dirname(out_file), exist_ok=True)
    mode = 'a' if os.path.isfile(out_file) else 'w'
    pco2a.to_netcdf(out_file, mode=mode, format='NETCDF4', engine='netcdf4', group=nc_group)