# Example #1 -- PHSEN
def main(argv=None):
    """Request, download and process PHSEN data, saving the result to disk
    as a NetCDF file.

    :param argv: command line arguments passed on to inputs() for parsing
        (defaults to None, in which case sys.argv is used)
    :raises SyntaxError: if neither a deployment number nor beginning and
        ending dates of interest are specified
    :raises SystemExit: if the deployment dates are unavailable, the data
        request fails, or no data is returned
    """
    # setup the input arguments
    args = inputs(argv)
    site = args.site
    node = args.node
    sensor = args.sensor
    method = args.method
    stream = args.stream
    deploy = args.deploy
    start = args.start
    stop = args.stop

    # determine the start and stop times for the data request based on either the deployment number or user entered
    # beginning and ending dates. NOTE: the original test (`not deploy or (start and stop)`) rejected valid
    # date-range requests and *returned* the exception rather than raising it; both defects are fixed here.
    if not deploy and not (start and stop):
        raise SyntaxError('You must specify either a deployment number or beginning and end dates of interest.')

    if deploy:
        # Determine start and end dates based on the deployment number
        start, stop = get_deployment_dates(site, node, sensor, deploy)
        if not start or not stop:
            exit_text = ('Deployment dates are unavailable for %s-%s-%s, deployment %02d.' % (site, node, sensor,
                                                                                              deploy))
            raise SystemExit(exit_text)

    # Request the data for download
    r = m2m_request(site, node, sensor, method, stream, start, stop)
    if not r:
        exit_text = ('Request failed for %s-%s-%s. Check request.' % (site, node, sensor))
        raise SystemExit(exit_text)

    # Valid request, start downloading the data; restrict to the deployment of interest if one was given
    if deploy:
        phsen = m2m_collect(r, ('.*deployment%04d.*PHSEN.*\\.nc$' % deploy))
    else:
        phsen = m2m_collect(r, '.*PHSEN.*\\.nc$')

    if not phsen:
        exit_text = ('Data unavailable for %s-%s-%s. Check request.' % (site, node, sensor))
        raise SystemExit(exit_text)

    # clean-up and reorganize based on the data delivery method (imodem streams get their own processor)
    if method in ['telemetered', 'recovered_host']:
        if re.match('.*imodem.*', stream):
            phsen = phsen_imodem(phsen)
        else:
            phsen = phsen_datalogger(phsen)
    else:
        phsen = phsen_instrument(phsen)

    vocab = get_vocabulary(site, node, sensor)[0]
    phsen = update_dataset(phsen, vocab['maxdepth'])

    # save the data to disk, creating the output directory if needed
    out_file = os.path.abspath(args.outfile)
    if not os.path.exists(os.path.dirname(out_file)):
        os.makedirs(os.path.dirname(out_file))

    phsen.to_netcdf(out_file, mode='w', format='NETCDF4', engine='h5netcdf', encoding=ENCODINGS)
# Example #2 -- DOSTA
def main(argv=None):
    """Request, download and process DOSTA data, saving the result to disk
    as a NetCDF file.

    :param argv: command line arguments passed on to inputs() for parsing
        (defaults to None, in which case sys.argv is used)
    :raises SyntaxError: if neither a deployment number nor beginning and
        ending dates of interest are specified
    :raises SystemExit: if the sensor type is invalid, the deployment dates
        are unavailable, the data request fails, or no data is returned
    """
    args = inputs(argv)
    site = args.site
    node = args.node
    sensor = args.sensor
    method = args.method
    stream = args.stream
    deploy = args.deploy
    start = args.start
    stop = args.stop
    burst = args.burst
    sensor_type = args.sensor_type

    # determine the start and stop times for the data request based on either the deployment number or user entered
    # beginning and ending dates. NOTE: the original test (`not deploy or (start and stop)`) rejected valid
    # date-range requests and *returned* the exception rather than raising it; both defects are fixed here.
    if not deploy and not (start and stop):
        raise SyntaxError(
            'You must specify either a deployment number or beginning and end dates of interest.'
        )

    # validate the sensor type up-front so we fail fast, before requesting and downloading any data
    if sensor_type not in ['solo', 'ctdbp']:
        exit_text = 'You need to specify the type of DOSTA in order to process: solo or ctdbp'
        raise SystemExit(exit_text)

    if deploy:
        # Determine start and end dates based on the deployment number
        start, stop = get_deployment_dates(site, node, sensor, deploy)
        if not start or not stop:
            exit_text = (
                'Deployment dates are unavailable for %s-%s-%s, deployment %02d.'
                % (site, node, sensor, deploy))
            raise SystemExit(exit_text)

    # Request the data for download
    r = m2m_request(site, node, sensor, method, stream, start, stop)
    if not r:
        exit_text = ('Request failed for %s-%s-%s. Check request.' %
                     (site, node, sensor))
        raise SystemExit(exit_text)

    # Valid request, start downloading the data; restrict to the deployment of interest if one was given
    if deploy:
        dosta = m2m_collect(r, ('.*deployment%04d.*DOSTA.*\\.nc$' % deploy))
    else:
        dosta = m2m_collect(r, '.*DOSTA.*\\.nc$')

    if not dosta:
        exit_text = ('Data unavailable for %s-%s-%s. Check request.' %
                     (site, node, sensor))
        raise SystemExit(exit_text)

    # clean-up and reorganize based on the type and data delivery method
    if sensor_type == 'solo':
        dosta = dosta_datalogger(dosta, burst)
    else:  # sensor_type == 'ctdbp', guaranteed by the validation above
        if method in ['telemetered', 'recovered_host']:
            dosta = dosta_ctdbp_datalogger(dosta)
        else:
            dosta = dosta_ctdbp_instrument(dosta)

    vocab = get_vocabulary(site, node, sensor)[0]
    dosta = update_dataset(dosta, vocab['maxdepth'])

    # save the data to disk, creating the output directory if needed
    out_file = os.path.abspath(args.outfile)
    if not os.path.exists(os.path.dirname(out_file)):
        os.makedirs(os.path.dirname(out_file))

    dosta.to_netcdf(out_file,
                    mode='w',
                    format='NETCDF4',
                    engine='h5netcdf',
                    encoding=ENCODINGS)
# Example #3 -- FLORT
def main(argv=None):
    """Request, download and process FLORT data, saving the result to disk
    as a NetCDF file. Deployment-based requests are served from the Gold
    Copy THREDDS catalog; date-range requests go through the OOINet M2M API.

    :param argv: command line arguments passed on to inputs() for parsing
        (defaults to None, in which case sys.argv is used)
    :raises SyntaxError: if neither a deployment number nor beginning and
        ending dates of interest are specified
    :raises SystemExit: if the data request fails or no data is returned
    """
    args = inputs(argv)
    site = args.site
    node = args.node
    sensor = args.sensor
    method = args.method
    stream = args.stream
    deploy = args.deploy
    start = args.start
    stop = args.stop
    burst = args.burst

    # check if we are specifying a deployment or a specific date and time range.
    # NOTE: the original test (`not deploy or (start and stop)`) rejected valid date-range requests --
    # making the M2M branch below unreachable -- and *returned* the exception rather than raising it.
    if not deploy and not (start and stop):
        raise SyntaxError(
            'You must specify either a deployment number or beginning and end dates of interest.'
        )

    # if we are specifying a deployment number, then get the data from the Gold Copy THREDDS server
    if deploy:
        # download the data for the deployment
        flort = load_gc_thredds(site, node, sensor, method, stream,
                                ('.*deployment%04d.*FLORT.*\\.nc$' % deploy))

        # check to see if we downloaded any data
        if not flort:
            exit_text = (
                'Data unavailable for %s-%s-%s, %s, %s, deployment %d.' %
                (site, node, sensor, method, stream, deploy))
            raise SystemExit(exit_text)
    else:
        # otherwise, request the data for download from OOINet via the M2M API using the specified dates
        r = m2m_request(site, node, sensor, method, stream, start, stop)
        if not r:
            exit_text = (
                'Request failed for %s-%s-%s, %s, %s, from %s to %s.' %
                (site, node, sensor, method, stream, start, stop))
            raise SystemExit(exit_text)

        # Valid M2M request, start downloading the data
        flort = m2m_collect(r, '.*FLORT.*\\.nc$')

        # check to see if we downloaded any data
        if not flort:
            exit_text = (
                'Data unavailable for %s-%s-%s, %s, %s, from %s to %s.' %
                (site, node, sensor, method, stream, start, stop))
            raise SystemExit(exit_text)

    # clean-up and reorganize the data based on the mooring node hosting the FLORT
    if node == 'SP001':
        # this FLORT is part of a CSPP
        flort = flort_cspp(flort)
    elif node == 'WFP01':
        # this FLORT is part of a Wire-Following Profiler
        flort = flort_wfp(flort)
    elif node == 'SBD17':
        # this FLORT is connected to the CTDBP on an EA Inshore Surface Mooring
        flort = flort_instrument(flort)
        if not flort:
            # there was no data after removing all the 0's
            sys.exit()
    else:
        # this FLORT is stand-alone on one of the moorings
        flort = flort_datalogger(flort, burst)

    vocab = get_vocabulary(site, node, sensor)[0]
    flort = update_dataset(flort, vocab['maxdepth'])

    # save the data to disk, creating the output directory if needed
    out_file = os.path.abspath(args.outfile)
    if not os.path.exists(os.path.dirname(out_file)):
        os.makedirs(os.path.dirname(out_file))

    flort.to_netcdf(out_file,
                    mode='w',
                    format='NETCDF4',
                    engine='h5netcdf',
                    encoding=ENCODINGS)
def main(argv=None):
    """Request, download and process PCO2A data, saving the result to disk
    as a NetCDF file. Air and water streams are stored in separate NetCDF
    groups; an existing output file is appended to rather than overwritten.

    :param argv: command line arguments passed on to inputs() for parsing
        (defaults to None, in which case sys.argv is used)
    :raises SyntaxError: if neither a deployment number nor beginning and
        ending dates of interest are specified
    :raises SystemExit: if the deployment dates are unavailable, the data
        request fails, or no data is returned
    """
    # setup the input arguments
    args = inputs(argv)
    site = args.site
    node = args.node
    sensor = args.sensor
    method = args.method
    stream = args.stream
    deploy = args.deploy
    start = args.start
    stop = args.stop
    burst = args.burst

    # determine the start and stop times for the data request based on either the deployment number or user entered
    # beginning and ending dates. NOTE: the original test (`not deploy or (start and stop)`) rejected valid
    # date-range requests and *returned* the exception rather than raising it; both defects are fixed here.
    if not deploy and not (start and stop):
        raise SyntaxError(
            'You must specify either a deployment number or beginning and end dates of interest.'
        )

    if deploy:
        # Determine start and end dates based on the deployment number
        start, stop = get_deployment_dates(site, node, sensor, deploy)
        if not start or not stop:
            exit_text = (
                'Deployment dates are unavailable for %s-%s-%s, deployment %02d.'
                % (site, node, sensor, deploy))
            raise SystemExit(exit_text)

    # Request the data for download
    r = m2m_request(site, node, sensor, method, stream, start, stop)
    if not r:
        exit_text = ('Request failed for %s-%s-%s. Check request.' %
                     (site, node, sensor))
        raise SystemExit(exit_text)

    # Valid request, start downloading the data. The air and water streams share one download
    # pattern parameterized by the measurement medium, which also names the output NetCDF group.
    nc_group = 'air' if re.match(r'.*_air.*', stream) else 'water'
    if deploy:
        pco2a = m2m_collect(
            r, ('.*deployment%04d.*PCO2A.*%s.*\\.nc$' % (deploy, nc_group)))
    else:
        pco2a = m2m_collect(r, ('.*PCO2A.*%s.*\\.nc$' % nc_group))

    if not pco2a:
        exit_text = ('Data unavailable for %s-%s-%s. Check request.' %
                     (site, node, sensor))
        raise SystemExit(exit_text)

    # clean-up and reorganize
    pco2a = pco2a_datalogger(pco2a, burst)
    vocab = get_vocabulary(site, node, sensor)[0]
    pco2a = update_dataset(pco2a, vocab['maxdepth'])

    # save the data to disk, creating the output directory if needed
    out_file = os.path.abspath(args.outfile)
    if not os.path.exists(os.path.dirname(out_file)):
        os.makedirs(os.path.dirname(out_file))

    # append to an existing file (so air and water groups can coexist), otherwise create it
    nc_mode = 'a' if os.path.isfile(out_file) else 'w'
    pco2a.to_netcdf(out_file,
                    mode=nc_mode,
                    format='NETCDF4',
                    engine='h5netcdf',
                    encoding=ENCODINGS,
                    group=nc_group)
def main(argv=None):
    """Request, download and process PCO2W data (excluding blank runs),
    saving the result to disk as a NetCDF file. Deployment-based requests
    are served from the Gold Copy THREDDS catalog; date-range requests go
    through the OOINet M2M API.

    :param argv: command line arguments passed on to inputs() for parsing
        (defaults to None, in which case sys.argv is used)
    :raises SyntaxError: if neither a deployment number nor beginning and
        ending dates of interest are specified
    :raises SystemExit: if the data request fails or no data is returned
    """
    # setup the input arguments
    args = inputs(argv)
    site = args.site
    node = args.node
    sensor = args.sensor
    method = args.method
    stream = args.stream
    deploy = args.deploy
    start = args.start
    stop = args.stop

    # check if we are specifying a deployment or a specific date and time range.
    # NOTE: the original test (`not deploy or (start and stop)`) rejected valid date-range requests --
    # making the M2M branch below unreachable -- and *returned* the exception rather than raising it.
    if not deploy and not (start and stop):
        raise SyntaxError(
            'You must specify either a deployment number or beginning and end dates of interest.'
        )

    # if we are specifying a deployment number, then get the data from the Gold Copy THREDDS server
    if deploy:
        # download the data for the deployment (the negative lookahead excludes blank runs)
        pco2w = load_gc_thredds(
            site, node, sensor, method, stream,
            ('^(?!.*blank).*deployment%04d.*PCO2W.*\\.nc$' % deploy))

        # check to see if we downloaded any data
        if not pco2w:
            exit_text = (
                'Data unavailable for %s-%s-%s, %s, %s, deployment %d.' %
                (site, node, sensor, method, stream, deploy))
            raise SystemExit(exit_text)
    else:
        # otherwise, request the data for download from OOINet via the M2M API using the specified dates
        r = m2m_request(site, node, sensor, method, stream, start, stop)
        if not r:
            exit_text = (
                'Request failed for %s-%s-%s, %s, %s, from %s to %s.' %
                (site, node, sensor, method, stream, start, stop))
            raise SystemExit(exit_text)

        # Valid M2M request, start downloading the data (again excluding blank runs)
        pco2w = m2m_collect(r, '^(?!.*blank).*PCO2W.*\\.nc$')

        # check to see if we downloaded any data
        if not pco2w:
            exit_text = (
                'Data unavailable for %s-%s-%s, %s, %s, from %s to %s.' %
                (site, node, sensor, method, stream, start, stop))
            raise SystemExit(exit_text)

    # clean-up and reorganize based on the data delivery method
    if method in ['telemetered', 'recovered_host']:
        pco2w = pco2w_datalogger(pco2w)
    else:
        pco2w = pco2w_instrument(pco2w)

    vocab = get_vocabulary(site, node, sensor)[0]
    pco2w = update_dataset(pco2w, vocab['maxdepth'])

    # save the data to disk, creating the output directory if needed
    out_file = os.path.abspath(args.outfile)
    if not os.path.exists(os.path.dirname(out_file)):
        os.makedirs(os.path.dirname(out_file))

    pco2w.to_netcdf(out_file,
                    mode='w',
                    format='NETCDF4',
                    engine='h5netcdf',
                    encoding=ENCODINGS)
# Example #6 -- NUTNR
def main(argv=None):
    """Request, download and process NUTNR data, saving the result to disk
    as a NetCDF file. Only the suna_dcl_recovered stream is supported.

    :param argv: command line arguments passed on to inputs() for parsing
        (defaults to None, in which case sys.argv is used)
    :raises SyntaxError: if neither a deployment number nor beginning and
        ending dates of interest are specified
    :raises SystemExit: if the stream is unsupported, the deployment dates
        are unavailable, the data request fails, or no data is returned
    """
    args = inputs(argv)
    site = args.site
    node = args.node
    sensor = args.sensor
    method = args.method
    stream = args.stream
    deploy = args.deploy
    start = args.start
    stop = args.stop
    burst = args.burst

    # determine the start and stop times for the data request based on either the deployment number or user entered
    # beginning and ending dates. NOTE: the original test (`not deploy or (start and stop)`) rejected valid
    # date-range requests and *returned* the exception rather than raising it; both defects are fixed here.
    if not deploy and not (start and stop):
        raise SyntaxError(
            'You must specify either a deployment number or beginning and end dates of interest.'
        )

    # validate the requested stream before looking up deployment dates or downloading anything
    if stream not in ['suna_dcl_recovered']:
        exit_text = (
            'Currently the only stream supported is suna_dcl_recovered, you requested %s.'
            % stream)
        raise SystemExit(exit_text)

    if deploy:
        # Determine start and end dates based on the deployment number
        start, stop = get_deployment_dates(site, node, sensor, deploy)
        if not start or not stop:
            exit_text = (
                'Deployment dates are unavailable for %s-%s-%s, deployment %02d.'
                % (site, node, sensor, deploy))
            raise SystemExit(exit_text)

    # Request the data for download
    r = m2m_request(site, node, sensor, method, stream, start, stop)
    if not r:
        exit_text = ('Request failed for %s-%s-%s. Check request.' %
                     (site, node, sensor))
        raise SystemExit(exit_text)

    # Valid request, start downloading the data; restrict to the deployment of interest if one was given
    if deploy:
        nutnr = m2m_collect(r, ('.*deployment%04d.*NUTNR.*\\.nc$' % deploy))
    else:
        nutnr = m2m_collect(r, '.*NUTNR.*\\.nc$')

    if not nutnr:
        exit_text = ('Data unavailable for %s-%s-%s. Check request.' %
                     (site, node, sensor))
        raise SystemExit(exit_text)

    # clean-up and reorganize
    nutnr = nutnr_datalogger(nutnr, burst)
    vocab = get_vocabulary(site, node, sensor)[0]
    nutnr = update_dataset(nutnr, vocab['maxdepth'])

    # save the data to disk, creating the output directory if needed
    out_file = os.path.abspath(args.outfile)
    if not os.path.exists(os.path.dirname(out_file)):
        os.makedirs(os.path.dirname(out_file))

    nutnr.to_netcdf(out_file,
                    mode='w',
                    format='NETCDF4',
                    engine='h5netcdf',
                    encoding=ENCODINGS)