Example #1
def main(args):
    """Fetch and print the geoJSON track for the specified dataset_id from the IOOS Glider DAC"""

    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    dataset_id = args.dataset_id
    response = args.format

    # Fire up the GdacClient
    client = GdacClient()

    client.search_datasets(dataset_ids=dataset_id)
    if client.datasets.empty:
        logging.warning('Dataset not found: {:}'.format(dataset_id))
        return 1

    if response == 'json':
        track = client.get_dataset_track_geojson(dataset_id)
        sys.stdout.write('{:}\n'.format(json.dumps(track)))
    elif response == 'csv':
        track = client.get_dataset_profiles(dataset_id)
        sys.stdout.write('{:}\n'.format(track.to_csv()))

    return 0
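Each of these examples receives an argparse namespace. A minimal sketch of the wiring that could feed Example #1 (the flag names below are assumptions inferred from the attribute accesses in the function body, not the script's actual interface):

import argparse
import sys

def build_parser():
    # Hypothetical CLI wiring; the real script's options may differ
    arg_parser = argparse.ArgumentParser(description=main.__doc__)
    arg_parser.add_argument('dataset_id', help='ERDDAP dataset id to fetch')
    arg_parser.add_argument('-f', '--format', choices=['json', 'csv'],
                            default='json', help='response format')
    arg_parser.add_argument('-l', '--loglevel', default='warning',
                            choices=['debug', 'info', 'warning', 'error', 'critical'],
                            help='logging verbosity')
    return arg_parser

if __name__ == '__main__':
    sys.exit(main(build_parser().parse_args()))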
Example #2
def main(args):
    """Request and download a map of the profile positions for the specified dataset_id"""
    # Set up logger
    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    dataset_id = args.dataset_id
    img_path = args.directory
    img_type = args.img_type
    marker_color = args.color
    colorbar = args.colorbar
    zoom_level = args.zoom
    no_legend = args.no_legend
    debug = args.debug

    # Connect to the GDAC ERDDAP server
    client = GdacClient()
    # Fetch the dataset
    client.search_datasets(dataset_ids=dataset_id)
    if client.datasets.empty:
        logging.error('Dataset not found: {:}'.format(dataset_id))
        return 1

    # Create the plotter
    plotter = ErddapPlotter(client.server, response=img_type)

    # Configure the plot parameters
    plotter.set_y_range(ascending=False)
    plotter.set_colorbar(colorbar=colorbar)
    plotter.set_marker_color(marker_color)
    if zoom_level:
        plotter.set_zoom(zoom_level)
    if no_legend:
        plotter.set_legend_loc('Off')
        plotter.set_trim_pixels()

    map_url = plotter.build_image_request(dataset_id, 'longitude', 'latitude',
                                          'time')
    if marker_color:
        # A fixed marker color overrides the default time-based coloring
        map_url = plotter.build_image_request(dataset_id, 'longitude',
                                              'latitude')

    ext = img_type[-3:].lower()

    image_name = os.path.join(
        img_path, '{:}_track_map_{:}.{:}'.format(dataset_id, img_type, ext))

    if debug:
        logging.info('Image request: {:}'.format(map_url))
    else:
        logging.info('Requesting and downloading image {:}'.format(image_name))
        logging.debug('Image url: {:}'.format(map_url))
        img_path = plotter.download_image(map_url, image_name)
        if img_path:
            sys.stdout.write('{:}\n'.format(img_path))

    return 0
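ErddapPlotter.download_image is not shown on this page; a rough sketch of what such a helper might look like using requests (the function name and return contract are assumptions based on how it is called above):

import logging
import requests

def download_image(url, image_name, timeout=60):
    # Hypothetical stand-in: stream the ERDDAP image response to disk and
    # return the local path on success, None on failure
    try:
        r = requests.get(url, stream=True, timeout=timeout)
        r.raise_for_status()
    except requests.RequestException as e:
        logging.error('Image request failed: {:}'.format(e))
        return None
    with open(image_name, 'wb') as fid:
        for chunk in r.iter_content(chunk_size=8192):
            fid.write(chunk)
    return image_name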
Example #3
def main(args):
    """Fetch all profile times, positions and wmo_id from the IOOS Glider DAC and for the specified dataset id"""

    # Set up logger
    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    dataset_id = args.dataset_id
    response = args.format

    client = GdacClient()

    # Search for the specified dataset id
    client.search_datasets(dataset_ids=dataset_id)
    if client.datasets.empty:
        logging.warning('No dataset found for dataset_id: {:}'.format(dataset_id))
        return 1

    # Does the dataset have a wmo_id?
    if not client.datasets.iloc[0].wmo_id:
        logging.warning('Dataset {:} does not have a WMO id'.format(dataset_id))
        return 1

    osmc_client = DuoProfilesClient()
    osmc_client.dataset_id = args.osmc_dataset_id
    logging.info('Using OSMC dataset: {:}'.format(osmc_client.dataset_id))

    # Fetch observations
    obs = osmc_client.get_profiles_by_wmo_id(client.datasets.iloc[0].wmo_id, client.datasets.iloc[0].start_date,
                                             client.datasets.iloc[0].end_date, gps=args.gps)

    if obs.empty:
        return 1

    if response == 'json':
        sys.stdout.write('{:}\n'.format(obs.reset_index().to_json(orient='records')))
    elif response == 'csv':
        sys.stdout.write('{:}\n'.format(obs.to_csv()))

    return 0
Example #4
def main(args):
    """Generate observation calendars for all IOOS Glider DAC datasets that have reported within the last 7 days"""

    # Set up logger
    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    hours = args.hours
    start_time = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
    if args.start_time:
        start_time = parser.parse(args.start_time)
    end_time = parser.parse(args.end_time) if args.end_time else None
    project = args.project
    north = args.north
    south = args.south
    east = args.east
    west = args.west
    output_dir = args.outputdir
    debug = args.debug

    if not os.path.isdir(output_dir):
        logging.error(
            'Invalid output directory specified: {:}'.format(output_dir))
        return 1

    img_path = os.path.join(output_dir, project)
    if not debug and not os.path.isdir(img_path):
        try:
            os.mkdir(img_path)
        except OSError as e:
            logging.critical(e)
            return 1

    logging.info('Destination: {:}'.format(img_path))

    params = {
        'min_time': start_time.strftime('%Y-%m-%d'),
        'min_lat': south,
        'max_lat': north,
        'min_lon': west,
        'max_lon': east
    }

    if end_time:
        params['max_time'] = end_time.strftime('%Y-%m-%d')

    client = GdacClient()
    client.search_datasets(params=params)
    if client.datasets.empty:
        logging.warning('No datasets found matching the search criteria')
        return 0

    logging.info('{:} datasets found.'.format(client.datasets.shape[0]))
    if debug:
        logging.info('Debug switch set. No operations performed')
        return 0

    osmc_client = DuoProfilesClient()

    # Plot the DAC datasets and profiles calendars
    fig, ax = plt.subplots(3, 1, figsize=(11, 8.5))
    # Get deployments
    ymd = client.ymd_deployments_calendar
    plot_calendar(ymd.loc[(2020, 6):(2020, 12), :], ax=ax[0])

    # Get glider days
    ymd = client.ymd_glider_days_calendar
    plot_calendar(ymd.loc[(2020, 6):(2020, 12), :], ax=ax[1])

    # Get profiles
    dac_calendar = client.ymd_profiles_calendar
    plot_calendar(dac_calendar.loc[(2020, 6):(2020, 12), :],
                  ax=ax[2],
                  annot_kws={'fontsize': 8.})

    ax[0].set_title('IOOS DAC Real-Time Deployments')
    ax[1].set_title('IOOS DAC Real-Time Glider Days')
    ax[2].set_title('IOOS DAC Real-Time Profiles')

    fig.suptitle(
        'U.S. IOOS Glider Data Assembly Center Observation Report (As of: {:})'
        .format(datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%MZ')))
    plt.tight_layout()

    img_name = os.path.join(img_path, 'obs_calendars.png')
    logging.info('Writing calendars: {:}'.format(img_name))
    plt.savefig(img_name, dpi=300)
    plt.close()

    # Get the GTS obs calendar and plot along with DAC and the difference between the 2
    gts_calendar = osmc_client.get_ymd_obs_calendar(client.datasets)
    diff_calendar = dac_calendar - gts_calendar

    # Plot DAC, GTS and diff
    fig, ax = plt.subplots(3, 1, figsize=(11, 8.5))
    plot_calendar(dac_calendar.loc[(2020, 6):(2020, 12), :].fillna(0),
                  ax=ax[0],
                  annot_kws={'fontsize': 8.})
    plot_calendar(gts_calendar.loc[(2020, 6):(2020, 12), :].fillna(0),
                  ax=ax[1],
                  annot_kws={'fontsize': 8.})
    plot_calendar(diff_calendar.loc[(2020, 6):(2020, 12), :].fillna(0),
                  ax=ax[2],
                  cmap=cm.RdGy,
                  center=0,
                  annot_kws={'fontsize': 8.})

    ax[0].set_title('IOOS DAC Real-Time Profiles')
    ax[1].set_title('GTS Observations')
    ax[2].set_title('DAC Profiles - GTS Observations')

    img_name = os.path.join(img_path, 'dac_gts_obs_calendars.png')
    logging.info('Writing calendars: {:}'.format(img_name))
    plt.savefig(img_name, dpi=300)
    plt.close()

    # Plot the map of all of the tracks contained in client.datasets
    # NaturalEarth Shapefile Feature
    nef_scale = '10m'
    nef_category = 'cultural'
    nef_name = 'admin_0_countries'

    dt0 = parser.parse(params['min_time']).date()
    if end_time:
        dt1 = parser.parse(params['max_time']).date()
        profiles = client.daily_profile_positions.loc[
            (client.daily_profile_positions.date >= dt0)
            & (client.daily_profile_positions.date <= dt1)]
    else:
        profiles = client.daily_profile_positions.loc[
            client.daily_profile_positions.date >= dt0]

    # Bounding box/extent [w e s n]
    bbox = np.array([
        params['min_lon'], params['max_lon'], params['min_lat'],
        params['max_lat']
    ])

    xticker = mticker.AutoLocator()
    yticker = mticker.AutoLocator()
    xticks = xticker.tick_values(bbox[0], bbox[1])
    yticks = yticker.tick_values(bbox[2], bbox[3])

    map_fig, map_ax = plt.subplots(subplot_kw=dict(
        projection=ccrs.PlateCarree()))

    # lightblue ocean and tan land
    map_ax.background_patch.set_facecolor('lightblue')
    countries = cfeature.NaturalEarthFeature(category=nef_category,
                                             scale=nef_scale,
                                             facecolor='none',
                                             name=nef_name)
    map_ax.add_feature(countries,
                       linewidth=0.5,
                       edgecolor='black',
                       facecolor='tan')
    states = cfeature.NaturalEarthFeature(category=nef_category,
                                          scale=nef_scale,
                                          facecolor='none',
                                          name='admin_1_states_provinces')
    map_ax.add_feature(states, linewidth=0.5, edgecolor='black')

    map_ax.set_xticks(xticks)
    map_ax.xaxis.set_major_formatter(LONGITUDE_FORMATTER)
    map_ax.set_yticks(yticks)
    map_ax.yaxis.set_major_formatter(LATITUDE_FORMATTER)
    map_ax.set_extent(bbox, crs=ccrs.PlateCarree())

    calendars_path = os.path.join(img_path, 'dataset_calendars')
    if not os.path.isdir(calendars_path):
        try:
            logging.info(
                'Creating dataset calendars path: {:}'.format(calendars_path))
            os.mkdir(calendars_path)
        except OSError as e:
            logging.error(e)
            return 1

    gts_datasets = []
    for dataset_id, dataset in client.datasets.iterrows():

        dp = profiles.loc[profiles.dataset_id == dataset_id]

        if dp.empty:
            logging.warning('{:}: No profile GPS found'.format(dataset_id))
            continue

        # Plot the daily averaged track
        track = map_ax.plot(dp.longitude, dp.latitude, marker='None')

        # Get the individual dataset DAC profiles calendar
        dac_calendar = client.get_dataset_ymd_profiles_calendar(
            dataset_id).fillna(0)
        # Get the GTS obs calendar
        dataset_gts_obs = osmc_client.obs.loc[osmc_client.obs.dataset_id ==
                                              dataset_id]
        dataset['num_gts_obs'] = dataset_gts_obs.shape[0]
        gts_datasets.append(dataset)
        gts_calendar = gts_obs_to_ymd_calendar(dataset_gts_obs).fillna(0)
        if gts_calendar.empty:
            gts_calendar = dac_calendar.copy()
            gts_calendar[:] = 0.
        # Calculate the difference
        diff_calendar = dac_calendar - gts_calendar

        cal_fig, cal_ax = plt.subplots(3, 1, figsize=(11, 8.5))

        _ = plot_calendar(dac_calendar,
                          ax=cal_ax[0],
                          annot_kws={'fontsize': 8.})
        cal_ax[0].set_title('{:}: IOOS Glider DAC Profiles'.format(dataset_id))
        if not gts_calendar.empty:
            _ = plot_calendar(gts_calendar,
                              ax=cal_ax[1],
                              annot_kws={'fontsize': 8.})
            cal_ax[1].set_title('{:}: GTS Observations'.format(dataset_id))
        else:
            logging.warning(
                'No GTS Observations available for {:}'.format(dataset_id))
        _ = plot_calendar(diff_calendar,
                          ax=cal_ax[2],
                          cmap=cm.RdGy,
                          center=0,
                          annot_kws={'fontsize': 8.})
        cal_ax[2].set_title(
            '{:}: DAC Profiles - GTS Observations'.format(dataset_id))

        cal_fig.suptitle('As of: {:}'.format(
            datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%MZ')))

        dataset_calendar_img_path = os.path.join(
            calendars_path, 'obs_calendar_{:}.png'.format(dataset_id))
        logging.info('Writing {:}'.format(dataset_calendar_img_path))
        cal_fig.savefig(dataset_calendar_img_path, dpi=300)
        plt.close(cal_fig)

    map_fig.suptitle('IOOS DAC Real-Time Tracks', fontsize=12)
    map_fig.tight_layout()
    img_name = os.path.join(img_path, 'dac_obs_tracks.png')
    map_fig.savefig(img_name, dpi=300)
    plt.close(map_fig)

    datasets_meta = pd.concat(gts_datasets, axis=1).T
    info_path = os.path.join(img_path, 'info.json')
    logging.info('Writing dataset info: {:}'.format(info_path))
    try:
        with open(info_path, 'w') as fid:
            json.dump(datasets_meta.fillna('null').to_dict(orient='records'),
                      fid,
                      indent=4,
                      sort_keys=True,
                      default=str)
    except (IOError, OSError, ValueError) as e:
        logging.error(e)
        return 1

    return 0
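The calendar slicing above relies on a (year, month) row MultiIndex with one column per day of the month. A self-contained sketch of that slicing pattern on toy data (the exact calendar shape is an assumption):

import numpy as np
import pandas as pd

# Toy calendar: rows indexed by (year, month), one column per day
index = pd.MultiIndex.from_product([[2020], range(1, 13)],
                                   names=['year', 'month'])
calendar = pd.DataFrame(np.random.randint(0, 50, (12, 31)),
                        index=index,
                        columns=range(1, 32))

# Tuple-label slicing selects June through December 2020, matching the
# (2020, 6):(2020, 12) slices used above
subset = calendar.loc[(2020, 6):(2020, 12), :]
print(subset.shape)  # (7, 31)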
Example #5
def main(args):
    """Create observation calendar images for all IOOS Glider DAC datasets that have reported within the specified number of hours, along with the corresponding GTS observations"""
    # Set up logger
    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    output_dir = args.outputdir
    hours = args.hours
    today = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)

    client = GdacClient()
    client.search_datasets(params={'min_time': today.strftime('%Y-%m-%d')})

    if client.datasets.shape[0] == 0:
        logging.warning('No recent datasets found (>={:})'.format(
            today.strftime('%Y-%m-%d')))
        return 0

    osmc_client = DuoProfilesClient()

    # Create and write the totals images
    ax = client.plot_datasets_calendar('datasets')
    img_name = os.path.join(output_dir, 'active_datasets.png')
    logging.info('Writing: {:}'.format(img_name))
    ax.get_figure().savefig(img_name)
    plt.close()

    ax = client.plot_datasets_calendar('days')
    img_name = os.path.join(output_dir, 'active_glider_days.png')
    logging.info('Writing: {:}'.format(img_name))
    ax.get_figure().savefig(img_name)
    plt.close()

    ax = client.plot_datasets_calendar('profiles')
    img_name = os.path.join(output_dir, 'active_glider_profiles.png')
    logging.info('Writing: {:}'.format(img_name))
    ax.get_figure().savefig(img_name)
    plt.close()

    # GTS observations
    ax = osmc_client.plot_gts_obs_calendar(client.datasets,
                                           calendar_type='month')
    img_name = os.path.join(output_dir, 'active-gts-obs.png')
    logging.info('Writing: {:}'.format(img_name))
    ax.get_figure().savefig(img_name)
    plt.close()

    # Write the individual dataset profiles calendar
    for _, dataset in client.datasets.iterrows():

        # IOOS Glider DAC
        ax = client.plot_dataset_profiles_calendar(dataset.dataset_id)
        img_name = os.path.join(output_dir,
                                '{:}-profiles.png'.format(dataset.dataset_id))
        logging.info('Writing: {:}'.format(img_name))
        ax.get_figure().savefig(img_name)
        plt.close()

        # GTS observations
        ax = osmc_client.plot_gts_obs_calendar(dataset)
        img_name = os.path.join(output_dir,
                                '{:}-gts-obs.png'.format(dataset.dataset_id))
        logging.info('Writing: {:}'.format(img_name))
        ax.get_figure().savefig(img_name)
        plt.close()

    return 0
Example #6
def main(args):
    """Create catalogs for all real-time glider datasets located at the IOOS Glider Data Assembly Center"""

    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    if not os.path.isdir(args.outputdir):
        logging.error('Invalid destination path: {:}'.format(args.outputdir))
        return 1

    datasets_path = os.path.join(args.outputdir, 'datasets')
    if not os.path.isdir(datasets_path):
        logging.info('Creating datasets path: {:}'.format(datasets_path))
        try:
            os.mkdir(datasets_path)
        except OSError as e:
            logging.error('Error creating {:}: {:}'.format(datasets_path, e))
            return 1

    # Fetch the DAC registered deployments
    deployments = fetch_dac_catalog()
    if not deployments:
        return 1

    # Fire up the GdacClient
    client = GdacClient()

    dataset_ids = [dataset['name'] for dataset in deployments]
    if args.status == 'active':
        dataset_ids = [
            dataset['name'] for dataset in deployments
            if not dataset['completed']
        ]
    elif args.status == 'completed':
        dataset_ids = [
            dataset['name'] for dataset in deployments if dataset['completed']
        ]

    if not args.delayed:
        dataset_ids = [
            did for did in dataset_ids if not did.endswith('delayed')
        ]

    dataset_ids = sorted(dataset_ids)

    client.search_datasets(dataset_ids=dataset_ids)

    drop_columns = [
        'estimated_deploy_date', 'estimated_deploy_location', 'glider_name',
        'deployment_dir', 'title'
    ]
    catalog = []
    for dataset_id, dataset in client.datasets.iterrows():

        deployment = [d for d in deployments if d['name'] == dataset_id]
        if not deployment:
            logging.warning(
                'Deployment not registered at the DAC: {:}'.format(dataset_id))
            continue

        deployment = deployment[0]

        # chop off unnecessary decimal places
        dataset.lat_min = float(
            Decimal(dataset.lat_min).quantize(Decimal('0.001'),
                                              rounding=ROUND_HALF_DOWN))
        dataset.lat_max = float(
            Decimal(dataset.lat_max).quantize(Decimal('0.001'),
                                              rounding=ROUND_HALF_UP))
        dataset.lon_min = float(
            Decimal(dataset.lon_min).quantize(Decimal('0.001'),
                                              rounding=ROUND_HALF_DOWN))
        dataset.lon_max = float(
            Decimal(dataset.lon_max).quantize(Decimal('0.001'),
                                              rounding=ROUND_HALF_UP))
        dataset.deployment_lat = float(
            Decimal(dataset.deployment_lat).quantize(Decimal('0.001'),
                                                     rounding=ROUND_HALF_DOWN))
        dataset.deployment_lon = float(
            Decimal(dataset.deployment_lon).quantize(Decimal('0.001'),
                                                     rounding=ROUND_HALF_DOWN))

        # Update the deployment with the results from the GdacClient dataset
        deployment.update(dataset.fillna(False))

        # Chop off the end of the summary (guard against a missing marker)
        summary_end = deployment['summary'].find('\\n\\ncdm_data_type')
        if summary_end >= 0:
            deployment['summary'] = deployment['summary'][:summary_end - 1]

        for col in drop_columns:
            deployment.pop(col, None)

        # Make a copy of the deployment before removing the 'summary' field
        deployment_copy = deployment.copy()

        deployment.pop('summary', None)

        # Get the daily average profile GPS for this dataset
        daily_gps = client.daily_profile_positions.loc[
            client.daily_profile_positions.dataset_id == dataset_id]
        if daily_gps.empty:
            logging.warning(
                'Dataset contains no profile GPS positions: {:}'.format(
                    dataset_id))
            continue

        dataset_out_path = os.path.join(datasets_path, dataset_id)
        if not os.path.isdir(dataset_out_path):
            logging.info('Creating dataset path: {:}'.format(dataset_out_path))
            try:
                os.mkdir(dataset_out_path)
            except OSError as e:
                logging.error(e)
                continue

        # Create and write the daily averaged GPS position track
        track = latlon_to_geojson_track(daily_gps.latitude,
                                        daily_gps.longitude, daily_gps.date)
        track['properties'] = {'dataset_id': dataset_id}
        daily_track_json_path = os.path.join(dataset_out_path,
                                             'daily_track.json')
        try:
            with open(daily_track_json_path, 'w') as fid:
                json.dump(track, fid)
        except IOError as e:
            logging.error('Error writing daily track GPS {:}: {:}'.format(
                daily_track_json_path, e))
            continue

        # Create and write the detailed deployment summary
        deployment_json_path = os.path.join(dataset_out_path,
                                            'deployment.json')
        try:
            with open(deployment_json_path, 'w') as fid:
                json.dump(deployment_copy, fid, default=str, sort_keys=True)
        except IOError as e:
            logging.error('Error writing deployment summary {:}: {:}'.format(
                deployment_json_path, e))
            continue

        catalog.append(deployment)

    status_path = os.path.join(args.outputdir, '{:}.json'.format(args.status))
    try:
        with open(status_path, 'w') as fid:
            json.dump(sorted(catalog, key=itemgetter('end_date'),
                             reverse=True),
                      fid,
                      default=str,
                      sort_keys=True)
    except IOError as e:
        logging.error('Error writing status file {:}: {:}'.format(status_path, e))
        return 1

    return 0
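The Decimal quantizing above trims the bounding-box fields to three decimal places. ROUND_HALF_DOWN and ROUND_HALF_UP differ only on exact ties, as a quick worked example shows:

from decimal import Decimal, ROUND_HALF_DOWN, ROUND_HALF_UP

# Exact tie at the fourth decimal place: the two modes diverge
print(Decimal('41.2345').quantize(Decimal('0.001'), rounding=ROUND_HALF_DOWN))  # 41.234
print(Decimal('41.2345').quantize(Decimal('0.001'), rounding=ROUND_HALF_UP))    # 41.235

# Anything other than a tie rounds to nearest in both modes
print(Decimal('41.2346').quantize(Decimal('0.001'), rounding=ROUND_HALF_DOWN))  # 41.235
print(Decimal('41.2344').quantize(Decimal('0.001'), rounding=ROUND_HALF_UP))    # 41.234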
Example #7
def main(args):
    """Get IOOS Glider Data Assembly Center dataset metadata records for the specified dataset_id(s)"""

    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    dataset_ids = args.dataset_ids
    exclude_summaries = args.exclude_summaries
    response = args.format

    # Fetch the DAC registered deployments
    deployments = fetch_dac_catalog()
    if not deployments:
        return 1

    # Fire up the GdacClient
    client = GdacClient()

    client.search_datasets(dataset_ids=dataset_ids)
    if client.datasets.empty:
        for dataset_id in dataset_ids:
            logging.warning('Dataset not found: {:}'.format(dataset_id))
        return 1

    drop_columns = [
        'estimated_deploy_date', 'estimated_deploy_location', 'glider_name',
        'deployment_dir', 'title'
    ]

    catalog = []
    for dataset_id, dataset in client.datasets.iterrows():

        deployment = [d for d in deployments if d['name'] == dataset_id]
        if not deployment:
            logging.warning(
                'Deployment not registered at the DAC: {:}'.format(dataset_id))
            continue

        deployment = deployment[0]

        # Update the deployment with the results from the GdacClient dataset
        deployment.update(dataset.fillna(False))

        # Chop off the end of the summary (guard against a missing marker)
        summary_end = deployment['summary'].find('\\n\\ncdm_data_type')
        if summary_end >= 0:
            deployment['summary'] = deployment['summary'][:summary_end - 1]

        for col in drop_columns:
            deployment.pop(col, None)

        if exclude_summaries:
            deployment.pop('summary', None)

        catalog.append(deployment)

    if len(catalog) == 0:
        logging.warning('No dataset(s) found.')
        return 1

    datasets = pd.DataFrame(catalog).set_index('name')

    if response == 'json':
        sys.stdout.write('{:}\n'.format(datasets.to_json(orient='records')))
    elif response == 'csv':
        sys.stdout.write('{:}\n'.format(datasets.to_csv()))

    return 0
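The tail of this example turns the list of merged deployment dicts into a DataFrame keyed by name before serializing. The same pattern in isolation, with toy records:

import pandas as pd

# Toy records standing in for the merged deployment dicts built above
records = [
    {'name': 'dataset-a', 'wmo_id': '4801234', 'num_profiles': 512},
    {'name': 'dataset-b', 'wmo_id': '4805678', 'num_profiles': 380},
]
datasets = pd.DataFrame(records).set_index('name')
print(datasets.to_json(orient='records'))  # index dropped, one object per row
print(datasets.to_csv())                   # index kept as the first column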
Example #8
def main(args):
    """Request and download a time-series or profiles plot of the specified dataset variable for the specified dataset_id"""
    # Set up logger
    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    dataset_id = args.dataset_id
    img_path = args.directory
    img_type = args.img_type
    marker_color = args.color
    colorbar = args.colorbar
    profiles = args.profiles
    no_legend = args.no_legend
    debug = args.debug
    dataset_variable = args.dataset_variable
    plot_all = args.plot_all
    hours = args.hours
    start_date = args.start_date
    end_date = args.end_date

    # Connect to the GDAC ERDDAP server
    client = GdacClient()
    # Fetch the dataset
    client.search_datasets(dataset_ids=dataset_id)
    if client.datasets.empty:
        logging.error('Dataset not found: {:}'.format(dataset_id))
        return 1

    # Create the plotter
    plotter = ErddapPlotter(client.server, response=img_type)

    # Configure the plot parameters
    plotter.set_colorbar(colorbar=colorbar)
    plotter.set_marker_color(marker_color)
    plotter.set_y_range(min_val=0)
    if no_legend:
        plotter.set_legend_loc('Off')
        plotter.set_trim_pixels()

    # Set up time window
    if not plot_all:
        if not start_date and not end_date:
            plotter.add_constraint('time>=',
                                   'max(time)-{:}hours'.format(hours))
        else:
            if start_date:
                plotter.add_constraint('time>=', start_date)
            if end_date:
                plotter.add_constraint('time<=', end_date)

    ext = img_type[-3:].lower()

    img_url = plotter.build_image_request(dataset_id, 'time', 'depth',
                                          dataset_variable)
    image_name = os.path.join(
        img_path, '{:}_{:}_ts_{:}.{:}'.format(dataset_id, dataset_variable,
                                              img_type, ext))
    if profiles:
        img_url = plotter.build_image_request(dataset_id, dataset_variable,
                                              'depth', 'time')
        image_name = os.path.join(
            img_path,
            '{:}_{:}_profiles_{:}.{:}'.format(dataset_id, dataset_variable,
                                              img_type, ext))
        if marker_color:
            img_url = plotter.build_image_request(dataset_id, dataset_variable,
                                                  'depth')

    if debug:
        logging.info('Image request: {:}'.format(img_url))
    else:
        logging.info('Requesting and downloading image {:}'.format(image_name))
        logging.debug('Image url: {:}'.format(img_url))
        img_path = plotter.download_image(img_url, image_name)
        if img_path:
            sys.stdout.write('{:}\n'.format(img_path))

    return 0
Example #9
def main(args):
    """Search the IOOS Glider DAC and return the dataset ids for all datasets which have updated within the last 24
    hours"""

    # Set up logger
    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    hours = args.hours
    start_time = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
    if args.start_time:
        start_time = parser.parse(args.start_time)
    end_time = datetime.datetime.utcnow()
    if args.end_time:
        end_time = parser.parse(args.end_time)
    north = args.north
    south = args.south
    east = args.east
    west = args.west
    search_string = args.search_string
    response = args.format
    exclude_summaries = args.exclude_summaries
    debug = args.debug

    params = {
        'min_time': start_time.strftime('%Y-%m-%dT%H:%M'),
        'max_time': end_time.strftime('%Y-%m-%dT%H:%M'),
        'min_lat': south,
        'max_lat': north,
        'min_lon': west,
        'max_lon': east
    }

    client = GdacClient()

    if search_string:
        client.search_datasets(search_for=search_string, params=params)
    else:
        client.search_datasets(params=params)

    if client.datasets.empty:
        logging.warning('No datasets found matching the search criteria')
        return 1

    datasets = client.datasets
    if exclude_summaries:
        datasets = client.datasets.drop('summary', axis=1)

    if response == 'json':
        sys.stdout.write('{:}\n'.format(datasets.to_json(orient='records')))
    elif response == 'csv':
        sys.stdout.write('{:}\n'.format(datasets.to_csv()))
    else:
        columns = ['dataset_id']
        if args.timestamps:
            columns.append('start_date')
            columns.append('end_date')

        if args.wmoid:
            columns.append('wmo_id')

        sys.stdout.write('{:}\n'.format(
            datasets.reset_index()[columns].to_csv(index=False)))

    return 0
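The search window above defaults to the trailing number of hours and accepts explicit timestamps parsed with dateutil. The core of that logic in isolation:

import datetime
from dateutil import parser

hours = 24
# Default window: the last `hours` hours, ending now
end_time = datetime.datetime.utcnow()
start_time = end_time - datetime.timedelta(hours=hours)

# An explicit timestamp, if supplied, is parsed leniently by dateutil
start_time = parser.parse('2020-06-01 12:00')
print(start_time.strftime('%Y-%m-%dT%H:%M'))  # 2020-06-01T12:00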