Пример #1
0
    def test_from_dict(self):
        """Check that urnify builds the expected sensor URN from attribute dicts."""
        name = 'lwe_thickness_of_precipitation_amount'
        prefix = 'urn:ioos:sensor:axiom:foo:' + name

        # (attribute dict, expected URN)
        cases = [
            (dict(standard_name=name), prefix),
            (dict(standard_name=name, vertical_datum='NAVD88'),
             prefix + '#vertical_datum=navd88'),
            (dict(standard_name=name, vertical_datum='NAVD88', discriminant='2'),
             prefix + '-2#vertical_datum=navd88'),
            (dict(standard_name=name,
                  cell_methods='time: sum (interval: PT24H) time: mean'),
             prefix + '#cell_methods=time:mean,time:sum;interval=pt24h'),
            # Interval as a dict key (not inline with cell_methods)
            (dict(standard_name=name,
                  cell_methods='time: sum time: mean',
                  interval='pt24h'),
             prefix + '#cell_methods=time:mean,time:sum;interval=pt24h'),
            (dict(standard_name=name,
                  cell_methods='time: minimum within years time: mean over years'),
             prefix + '#cell_methods=time:mean_over_years,time:minimum_within_years'),
            (dict(standard_name=name,
                  cell_methods='time: variance (interval: PT1H comment: sampled instantaneously)'),
             prefix + '#cell_methods=time:variance;interval=pt1h'),
            (dict(standard_name=name,
                  cell_methods='time: variance time: mean (interval: PT1H comment: sampled instantaneously)'),
             prefix + '#cell_methods=time:mean,time:variance;interval=pt1h'),
            # Interval specified twice (inline and as a key)
            (dict(standard_name=name,
                  cell_methods='time: variance time: mean (interval: PT1H comment: sampled instantaneously)',
                  interval='PT1H'),
             prefix + '#cell_methods=time:mean,time:variance;interval=pt1h'),
            # Interval specified twice, plus a discriminant
            (dict(standard_name=name,
                  cell_methods='time: variance time: mean (interval: PT1H comment: sampled instantaneously)',
                  interval='PT1H',
                  discriminant='2'),
             prefix + '-2#cell_methods=time:mean,time:variance;interval=pt1h'),
        ]

        for attributes, expected in cases:
            assert urnify('axiom', 'foo', attributes) == expected
Пример #2
0
    def test_from_variable(self):
        """urnify should build sensor URNs directly from netCDF variable objects.

        Writes a small time-series file, adds variables carrying different
        attribute combinations, and checks the URN derived from each
        resulting netCDF variable.
        """
        filename = 'test_urn_from_variable.nc'
        times = [0, 1000, 2000, 3000, 4000, 5000]
        verticals = None
        ts = TimeSeries(output_directory=self.output_directory,
                        latitude=self.latitude,
                        longitude=self.longitude,
                        station_name=self.station_name,
                        global_attributes=self.global_attributes,
                        output_filename=filename,
                        times=times,
                        verticals=verticals)

        values = [20, 21, 22, 23, 24, 25]

        # Plain attribute becomes a '#key=value' suffix
        attrs = dict(standard_name='lwe_thickness_of_precipitation_amount',
                     vertical_datum='NAVD88')
        ts.add_variable('temperature', values=values, attributes=attrs)
        ts.ncd.sync()
        urn = urnify('axiom', 'foo', ts.ncd.variables['temperature'])
        assert urn == 'urn:ioos:sensor:axiom:foo:lwe_thickness_of_precipitation_amount#vertical_datum=navd88'

        # Inline interval/comment inside cell_methods
        attrs = dict(standard_name='lwe_thickness_of_precipitation_amount',
                     cell_methods='time: variance (interval: PT1H comment: sampled instantaneously)')
        ts.add_variable('temperature2', values=values, attributes=attrs)
        ts.ncd.sync()
        urn = urnify('axiom', 'foo', ts.ncd.variables['temperature2'])
        assert urn == 'urn:ioos:sensor:axiom:foo:lwe_thickness_of_precipitation_amount#cell_methods=time:variance;interval=pt1h'

        # Multiple cell_methods are sorted alphabetically in the URN
        attrs = dict(standard_name='lwe_thickness_of_precipitation_amount',
                     cell_methods='time: variance time: mean (interval: PT1H comment: sampled instantaneously)')
        ts.add_variable('temperature3', values=values, attributes=attrs)
        ts.ncd.sync()
        urn = urnify('axiom', 'foo', ts.ncd.variables['temperature3'])
        assert urn == 'urn:ioos:sensor:axiom:foo:lwe_thickness_of_precipitation_amount#cell_methods=time:mean,time:variance;interval=pt1h'

        # A discriminant is appended to the name with a '-'
        attrs = dict(standard_name='lwe_thickness_of_precipitation_amount',
                     cell_methods='time: variance time: mean (interval: PT1H comment: sampled instantaneously)',
                     discriminant='2')
        ts.add_variable('temperature4', values=values, attributes=attrs)
        ts.ncd.sync()
        urn = urnify('axiom', 'foo', ts.ncd.variables['temperature4'])
        assert urn == 'urn:ioos:sensor:axiom:foo:lwe_thickness_of_precipitation_amount-2#cell_methods=time:mean,time:variance;interval=pt1h'

        # Fix: release the netCDF file handle (the original left the
        # dataset open; the sibling version of this test closes it).
        ts.close()
Пример #3
0
    def test_from_variable(self):
        """Verify sensor URNs generated straight from netCDF variable objects."""
        ts = TimeSeries(output_directory=self.output_directory,
                        latitude=self.latitude,
                        longitude=self.longitude,
                        station_name=self.station_name,
                        global_attributes=self.global_attributes,
                        output_filename='test_urn_from_variable.nc',
                        times=[0, 1000, 2000, 3000, 4000, 5000],
                        verticals=None)

        std = 'lwe_thickness_of_precipitation_amount'
        root = 'urn:ioos:sensor:axiom:foo:' + std

        # (variable name, attributes to write, expected URN)
        cases = [
            ('temperature',
             dict(standard_name=std, vertical_datum='NAVD88'),
             root + '#vertical_datum=navd88'),
            ('temperature2',
             dict(standard_name=std,
                  cell_methods='time: variance (interval: PT1H comment: sampled instantaneously)'),
             root + '#cell_methods=time:variance;interval=pt1h'),
            ('temperature3',
             dict(standard_name=std,
                  cell_methods='time: variance time: mean (interval: PT1H comment: sampled instantaneously)'),
             root + '#cell_methods=time:mean,time:variance;interval=pt1h'),
            ('temperature4',
             dict(standard_name=std,
                  cell_methods='time: variance time: mean (interval: PT1H comment: sampled instantaneously)',
                  discriminant='2'),
             root + '-2#cell_methods=time:mean,time:variance;interval=pt1h'),
        ]

        for var_name, attributes, expected in cases:
            ts.add_variable(var_name,
                            values=[20, 21, 22, 23, 24, 25],
                            attributes=attributes)
            ts.ncd.sync()
            assert urnify('axiom', 'foo', ts.ncd.variables[var_name]) == expected

        ts.close()
Пример #4
0
    def test_instrumnet_metadata_variable(self):
        """A variable added with create_instrument_variable=True should get a
        companion instrument variable carrying URN metadata.

        NOTE(review): the 'instrumnet' typo in the method name is preserved
        on purpose — the name is the public test id.
        """
        filename = 'test_timeseries.nc'
        times = [0, 1000, 2000, 3000, 4000, 5000]
        verticals = None

        gats = copy(self.global_attributes)
        gats['naming_authority'] = 'pyaxiom'
        gats['geospatial_bounds_vertical_crs'] = 'NAVD88'

        ts = TimeSeries(output_directory=self.output_directory,
                        latitude=self.latitude,
                        longitude=self.longitude,
                        station_name=self.station_name,
                        global_attributes=gats,
                        output_filename=filename,
                        times=times,
                        verticals=verticals)

        values = [20, 21, 22, 23, 24, 25]
        attrs = dict(standard_name='sea_water_temperature')
        ts.add_variable('temperature', values=values, attributes=attrs, create_instrument_variable=True, sensor_vertical_datum='bar')

        nc = netCDF4.Dataset(os.path.join(self.output_directory, filename))
        assert nc is not None
        assert nc.geospatial_bounds_vertical_crs == 'NAVD88'  # First one set

        # The data variable points at its instrument variable by name
        datavar = nc.variables.get('temperature')
        instrument_var_name = datavar.instrument
        instvar = nc.variables[instrument_var_name]
        assert instvar.short_name == 'sea_water_temperature'
        assert instvar.ioos_code == urnify(gats['naming_authority'], gats['id'], attrs)

        # Fix: both file handles were leaked in the original
        nc.close()
        ts.close()
Пример #5
0
    def add_instrument_variable(self, variable_name):
        """Create a ``<variable_name>_instrument`` metadata variable and link it
        to the data variable via its ``instrument`` and ``ancillary_variables``
        attributes.

        Requires the global attributes ``id`` and ``naming_authority`` on the
        file; logs an error and returns without raising when preconditions
        are not met.
        """
        if variable_name not in self._nc.variables:
            # Bug fix: the placeholder was never formatted, so the message
            # previously logged a literal '{}' instead of the variable name.
            logger.error("Variable {} not found in file, cannot create instrument metadata variable".format(variable_name))
            return
        elif 'id' not in self._nc.ncattrs() or 'naming_authority' not in self._nc.ncattrs():
            logger.error("Global attributes 'id' and 'naming_authority' are required to create an instrument variable")
            return

        instr_var_name = "{}_instrument".format(variable_name)
        # Scalar int variable acting purely as an attribute container
        instrument = self._nc.createVariable(instr_var_name, "i4")

        # Derive the sensor URN from the data variable's attributes
        datavar = self._nc.variables[variable_name]
        vats = { k: getattr(datavar, k) for k in datavar.ncattrs() }
        instrument_urn = urnify(self._nc.naming_authority, self._nc.id, vats)

        inst_urn = IoosUrn.from_string(instrument_urn)
        instrument.long_name = 'Instrument measuring {} from {}'.format(inst_urn.component, inst_urn.label)
        instrument.ioos_code = instrument_urn
        instrument.short_name = inst_urn.component
        instrument.definition = "http://mmisw.org/ont/ioos/definition/sensorID"

        datavar.instrument = instr_var_name

        # Append the instrument to the ancillary variables
        av = getattr(datavar, 'ancillary_variables', '')
        av += ' {}'.format(instr_var_name)
        datavar.ancillary_variables = av.strip()

        self._nc.sync()
Пример #6
0
    def test_from_dict(self):
        """urnify should turn attribute dictionaries into IOOS sensor URNs."""
        std = 'lwe_thickness_of_precipitation_amount'
        root = 'urn:ioos:sensor:axiom:foo:' + std

        # Each entry: (attributes beyond standard_name, expected URN suffix)
        checks = [
            ({}, ''),
            ({'vertical_datum': 'NAVD88'}, '#vertical_datum=navd88'),
            ({'vertical_datum': 'NAVD88', 'discriminant': '2'},
             '-2#vertical_datum=navd88'),
            ({'cell_methods': 'time: sum (interval: PT24H) time: mean'},
             '#cell_methods=time:mean,time:sum;interval=pt24h'),
            # Interval as a dict key (not inline with cell_methods)
            ({'cell_methods': 'time: sum time: mean', 'interval': 'pt24h'},
             '#cell_methods=time:mean,time:sum;interval=pt24h'),
            ({'cell_methods': 'time: minimum within years time: mean over years'},
             '#cell_methods=time:mean_over_years,time:minimum_within_years'),
            ({'cell_methods': 'time: variance (interval: PT1H comment: sampled instantaneously)'},
             '#cell_methods=time:variance;interval=pt1h'),
            ({'cell_methods': 'time: variance time: mean (interval: PT1H comment: sampled instantaneously)'},
             '#cell_methods=time:mean,time:variance;interval=pt1h'),
            # Interval specified twice (inline and as a key)
            ({'cell_methods': 'time: variance time: mean (interval: PT1H comment: sampled instantaneously)',
              'interval': 'PT1H'},
             '#cell_methods=time:mean,time:variance;interval=pt1h'),
            # Interval specified twice, plus a discriminant
            ({'cell_methods': 'time: variance time: mean (interval: PT1H comment: sampled instantaneously)',
              'interval': 'PT1H',
              'discriminant': '2'},
             '-2#cell_methods=time:mean,time:variance;interval=pt1h'),
        ]

        for extras, suffix in checks:
            d = dict(standard_name=std, **extras)
            assert urnify('axiom', 'foo', d) == root + suffix
Пример #7
0
def main(output, station, datatype):
    """Convert raw CSV observations for *station* into netCDF time-series files.

    Reads every CSV under ``data/<datatype>`` next to this script, combines
    them into one DataFrame, and writes one netCDF file per mapped variable.

    :param output: output directory passed to TimeSeries.from_dataframe
    :param station: key into the module-level ``stations`` metadata dict
    :param datatype: one of 'met', 'waves', 'currents'
    :raises ValueError: for an unknown ``datatype``
    """
    if datatype == 'met':
        headers = met_header
        mapping = met_mapping
    elif datatype == 'waves':
        headers = waves_header
        mapping = waves_mapping
    elif datatype == 'currents':
        headers = currents_header
        mapping = currents_mapping
    else:
        # Fix: previously fell through with `headers`/`mapping` unbound,
        # producing a confusing NameError further down.
        raise ValueError("Unknown datatype: {!s}".format(datatype))

    def dp(*args):
        # Combine the year/month/day/hour/minute/second column values into
        # a datetime; unparseable rows become NaN and are dropped below.
        datestr = "".join([str(x) for x in args])
        try:
            return datetime.strptime(datestr, '%Y %m %d %H %M %S')
        except ValueError:
            return np.nan

    datapath = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'data', datatype))
    frames = []
    for csv_file in sorted(os.listdir(datapath)):
        f = os.path.join(datapath, csv_file)
        # NOTE(review): `date_parser` is deprecated and removed in pandas 2.x;
        # kept here for compatibility with the pandas version this project uses.
        cf = pd.read_csv(
            f,
            header=None,
            names=headers,
            parse_dates={
                'time': ['year', 'month', 'day', 'hour', 'minute', 'second']
            },
            date_parser=dp)
        cf.dropna(subset=['time'], inplace=True)
        frames.append(cf)

    # Fix: DataFrame.append was deprecated and removed in pandas 2.0, and
    # appending per file copies quadratically — concatenate once instead.
    df = pd.concat(frames)

    fillvalue = -9999.9
    # Station metadata
    stat_meta = stations[station]
    station_urn = IoosUrn(asset_type='station',
                          authority=global_attributes['naming_authority'],
                          label=stat_meta['title'])

    for var in df.columns:

        try:
            var_meta = mapping[var]
        except KeyError:
            logger.error(
                "Variable {!s} was not found in variable map!".format(var))
            continue

        sensor_urn = urnify(station_urn.authority, station_urn.label, var_meta)

        gas = copy(global_attributes)
        gas['keywords'] = var_meta['keywords']
        gas['title'] = stat_meta['title']
        gas['description'] = stat_meta['description']

        # These keys drive processing below but must not end up as
        # netCDF variable attributes.
        skip_variable_attributes = [
            'keywords', 'height_above_site', 'depth_below_surface',
            'add_offset'
        ]
        vas = {
            k: v
            for k, v in var_meta.items() if k not in skip_variable_attributes
        }

        if var_meta.get('height_above_site') and stat_meta.get('site_height'):
            # Convert to positive down
            df['depth'] = -1 * (stat_meta['site_height'] +
                                var_meta['height_above_site'])
        else:
            df['depth'] = var_meta.get('depth_below_surface', np.nan)

        if 'add_offset' in var_meta:
            df[var] = df[var] + var_meta['add_offset']

        output_filename = '{}_{}_{}.nc'.format(station_urn.label, datatype,
                                               var_meta['standard_name'])
        ts = TimeSeries.from_dataframe(
            df,
            output,
            output_filename,
            stat_meta['latitude'],
            stat_meta['longitude'],
            station_urn.urn,
            gas,
            var_meta["standard_name"],
            vas,
            sensor_vertical_datum=var_meta.get('vertical_datum'),
            fillvalue=fillvalue,
            data_column=var,
            vertical_axis_name='height',
            vertical_positive='down')
        ts.add_instrument_metadata(urn=sensor_urn)
Пример #8
0
def main(output_format, output, do_download, download_folder, filesubset=None):
    """Download (optionally) and convert USGS Sandy storm-surge text files.

    :param output_format: 'axiom' (one file per variable) or 'cf16'
                          (one file per station dataframe)
    :param output: output directory
    :param do_download: when True, mirror the .txt files from the USGS WAF
    :param download_folder: local folder holding the downloaded files
    :param filesubset: optional iterable of lower-cased site ids to restrict
                       processing to
    """
    if do_download is True:

        # Folder may already exist from a previous run
        try:
            os.makedirs(download_folder)
        except OSError:
            pass

        waf = 'http://ga.water.usgs.gov/flood/hurricane/sandy/datafiles/'

        r = requests.get(waf)
        soup = BeautifulSoup(r.text, "lxml")

        for link in soup.find_all('a'):

            # Skip non .txt files
            site_id, ext = os.path.splitext(link['href'])
            if ext != ".txt":
                continue

            if filesubset and site_id.lower() not in filesubset:
                # Skip this file!
                continue

            csv_link = waf + link['href']
            logger.info("Downloading '{}'".format(csv_link))
            d = requests.get(csv_link)
            try:
                d.raise_for_status()
            except requests.exceptions.HTTPError:
                logger.error("Could not download: {!s}, skipping. Status code: {!s}".format(csv_link, d.status_code))
                continue

            with open(os.path.join(download_folder, os.path.basename(csv_link)), 'wt') as f:
                f.write(d.text)

    # Yes, this uses lots of RAM, but we need to match up lon/lat positions later on.
    results = []
    for datafile in os.listdir(download_folder):

        site_id = os.path.splitext(os.path.basename(datafile))[0]

        if filesubset and site_id.lower() not in filesubset:
            # Skip this file!
            continue

        with open(os.path.join(download_folder, datafile)) as d:
            contents = d.read()
            r = None
            # Sniff the header line to decide which parser applies
            for line in contents.split("\n"):
                if "agency_cd" in line:
                    r = parse_type_1(output_format, site_id, contents, output)
                    break
                elif "date_time_GMT" in line:
                    r = parse_type_2(output_format, site_id, contents, output)
                    break
                else:
                    continue

            if r is None:
                logger.error('Could not process file: {}'.format(datafile))
            else:
                logger.info("Processed {}".format(datafile))
                results.append(r)

    # groupby requires its input sorted by the same key
    results = sorted(results, key=attrgetter('lon', 'lat'))
    gresults = groupby(results, attrgetter('lon', 'lat'))

    for (glon, glat), group in gresults:

        groups = [ x for x in list(group) if x ]

        # Strip off the variable type if need be
        gsite = groups[0].site
        if gsite[-2:] in ['WV', 'BP', 'WL']:
            gsite = gsite[:-2]

        for result in groups:

            gas = get_globals(glat, glon, result.z, result.name, gsite)
            station_urn = IoosUrn(asset_type='station',
                                  authority=gas['naming_authority'],
                                  label=gsite)

            if output_format == 'cf16':
                # If CF, a file for each result dataframe
                times = [ calendar.timegm(x.timetuple()) for x in result.df['time'] ]
                verticals = result.df['depth'].values
                output_filename = '{}.nc'.format(result.site)
                ts = TimeSeries(output, latitude=glat, longitude=glon, station_name=gsite, global_attributes=gas, output_filename=output_filename, times=times, verticals=verticals)

            for var in result.df.columns:
                if var in ['date_time_GMT', 'datetime', 'time', 'depth', 'tz_cd', 'site_no', 'agency_cd']:
                    continue

                try:
                    var_meta = copy(variable_map[var])
                except KeyError:
                    logger.error("Variable {!s} was not found in variable map!".format(var))
                    continue

                # Convert to floats
                result.df[var] = result.df[var].map(to_floats)
                if var_meta["units"].lower() in ["feet", "ft"]:
                    result.df[var] = result.df[var].apply(lambda x: None if pd.isnull(x) else x * 0.3048)
                    var_meta["units"] = "meters"
                elif var_meta["units"].lower() in ["psi"]:
                    result.df[var] = result.df[var].apply(lambda x: None if pd.isnull(x) else x * 68.9476)
                    var_meta["units"] = "mbar"
                elif var_meta["units"].lower() in ['millimeters of mercury']:
                    result.df[var] = result.df[var].apply(lambda x: None if pd.isnull(x) else x * 1.33322)
                    var_meta["units"] = "mbar"

                # Now put the fillvalue we want to be interpreted
                result.df.fillna(fillvalue, inplace=True)

                if output_format == 'axiom':
                    # If Axiom, a file for each variable
                    output_directory = os.path.join(output, gsite)
                    output_filename = '{}_{}.nc'.format(result.site, var_meta['standard_name'])
                    ts = TimeSeries.from_dataframe(result.df, output_directory, output_filename, glat, glon, station_urn.urn, gas, var_meta["standard_name"], var_meta, sensor_vertical_datum='NAVD88', fillvalue=fillvalue, data_column=var, vertical_axis_name='height')
                    sensor_urn = urnify(station_urn.authority, station_urn.label, var_meta)
                    ts.add_instrument_metadata(urn=sensor_urn)
                elif output_format == 'cf16':
                    # If CF, add variable to existing TimeSeries.
                    # Variable names may not start with a digit; prefix them.
                    try:
                        int(var[0])
                        variable_name = 'v_{}'.format(var)
                    except (ValueError, IndexError):
                        # Fix: was `except BaseException`, which also swallowed
                        # KeyboardInterrupt/SystemExit. Only the conversion
                        # failures are expected here.
                        variable_name = var
                    ts.add_variable(variable_name, values=result.df[var].values, attributes=var_meta, fillvalue=fillvalue, sensor_vertical_datum='NAVD88')
Пример #9
0
def main(output_format, output, do_download, download_folder, filesubset=None):
    """Download (optionally) and convert USGS Sandy storm-surge text files.

    :param output_format: 'axiom' (one file per variable) or 'cf16'
                          (one file per station dataframe)
    :param output: output directory
    :param do_download: when True, mirror the .txt files from the USGS WAF
    :param download_folder: local folder holding the downloaded files
    :param filesubset: optional iterable of lower-cased site ids to restrict
                       processing to
    """
    if do_download is True:

        # Folder may already exist from a previous run
        try:
            os.makedirs(download_folder)
        except OSError:
            pass

        waf = 'http://ga.water.usgs.gov/flood/hurricane/sandy/datafiles/'

        r = requests.get(waf)
        soup = BeautifulSoup(r.text, "lxml")

        for link in soup.find_all('a'):

            # Skip non .txt files
            site_id, ext = os.path.splitext(link['href'])
            if ext != ".txt":
                continue

            if filesubset and site_id.lower() not in filesubset:
                # Skip this file!
                continue

            csv_link = waf + link['href']
            logger.info("Downloading '{}'".format(csv_link))
            d = requests.get(csv_link)
            try:
                d.raise_for_status()
            except requests.exceptions.HTTPError:
                logger.error(
                    "Could not download: {!s}, skipping. Status code: {!s}".
                    format(csv_link, d.status_code))
                continue

            with open(
                    os.path.join(download_folder, os.path.basename(csv_link)),
                    'wt') as f:
                f.write(d.text)

    # Yes, this uses lots of RAM, but we need to match up lon/lat positions later on.
    results = []
    for datafile in os.listdir(download_folder):

        site_id = os.path.splitext(os.path.basename(datafile))[0]

        if filesubset and site_id.lower() not in filesubset:
            # Skip this file!
            continue

        with open(os.path.join(download_folder, datafile)) as d:
            contents = d.read()
            r = None
            # Sniff the header line to decide which parser applies
            for line in contents.split("\n"):
                if "agency_cd" in line:
                    r = parse_type_1(output_format, site_id, contents, output)
                    break
                elif "date_time_GMT" in line:
                    r = parse_type_2(output_format, site_id, contents, output)
                    break
                else:
                    continue

            if r is None:
                logger.error('Could not process file: {}'.format(datafile))
            else:
                logger.info("Processed {}".format(datafile))
                results.append(r)

    # groupby requires its input sorted by the same key
    results = sorted(results, key=attrgetter('lon', 'lat'))
    gresults = groupby(results, attrgetter('lon', 'lat'))

    for (glon, glat), group in gresults:

        groups = [x for x in list(group) if x]

        # Strip off the variable type if need be
        gsite = groups[0].site
        if gsite[-2:] in ['WV', 'BP', 'WL']:
            gsite = gsite[:-2]

        for result in groups:

            gas = get_globals(glat, glon, result.z, result.name, gsite)
            station_urn = IoosUrn(asset_type='station',
                                  authority=gas['naming_authority'],
                                  label=gsite)

            if output_format == 'cf16':
                # If CF, a file for each result dataframe
                times = [
                    calendar.timegm(x.timetuple()) for x in result.df['time']
                ]
                verticals = result.df['depth'].values
                output_filename = '{}.nc'.format(result.site)
                ts = TimeSeries(output,
                                latitude=glat,
                                longitude=glon,
                                station_name=gsite,
                                global_attributes=gas,
                                output_filename=output_filename,
                                times=times,
                                verticals=verticals)

            for var in result.df.columns:
                if var in [
                        'date_time_GMT', 'datetime', 'time', 'depth', 'tz_cd',
                        'site_no', 'agency_cd'
                ]:
                    continue

                try:
                    var_meta = copy(variable_map[var])
                except KeyError:
                    logger.error(
                        "Variable {!s} was not found in variable map!".format(
                            var))
                    continue

                # Convert to floats
                result.df[var] = result.df[var].map(to_floats)
                if var_meta["units"].lower() in ["feet", "ft"]:
                    result.df[var] = result.df[var].apply(
                        lambda x: None if pd.isnull(x) else x * 0.3048)
                    var_meta["units"] = "meters"
                elif var_meta["units"].lower() in ["psi"]:
                    result.df[var] = result.df[var].apply(
                        lambda x: None if pd.isnull(x) else x * 68.9476)
                    var_meta["units"] = "mbar"
                elif var_meta["units"].lower() in ['millimeters of mercury']:
                    result.df[var] = result.df[var].apply(
                        lambda x: None if pd.isnull(x) else x * 1.33322)
                    var_meta["units"] = "mbar"

                # Now put the fillvalue we want to be interpreted
                result.df.fillna(fillvalue, inplace=True)

                if output_format == 'axiom':
                    # If Axiom, a file for each variable
                    output_directory = os.path.join(output, gsite)
                    output_filename = '{}_{}.nc'.format(
                        result.site, var_meta['standard_name'])
                    ts = TimeSeries.from_dataframe(
                        result.df,
                        output_directory,
                        output_filename,
                        glat,
                        glon,
                        station_urn.urn,
                        gas,
                        var_meta["standard_name"],
                        var_meta,
                        sensor_vertical_datum='NAVD88',
                        fillvalue=fillvalue,
                        data_column=var,
                        vertical_axis_name='height')
                    sensor_urn = urnify(station_urn.authority,
                                        station_urn.label, var_meta)
                    ts.add_instrument_metadata(urn=sensor_urn)
                elif output_format == 'cf16':
                    # If CF, add variable to existing TimeSeries.
                    # Variable names may not start with a digit; prefix them.
                    try:
                        int(var[0])
                        variable_name = 'v_{}'.format(var)
                    except (ValueError, IndexError):
                        # Fix: was `except BaseException`, which also swallowed
                        # KeyboardInterrupt/SystemExit. Only the conversion
                        # failures are expected here.
                        variable_name = var
                    ts.add_variable(variable_name,
                                    values=result.df[var].values,
                                    attributes=var_meta,
                                    fillvalue=fillvalue,
                                    sensor_vertical_datum='NAVD88')