Example 1
def read(dfc, fileobj):
    """Read an Exchange CTD CSV file and split it into per-cast files.

    :param dfc: DataFileCollection to populate with the split casts.
    :param fileobj: open file object containing the CSV CTD data.
    :return: the return value of the underlying ``frcsv.read`` call.
    """
    dfile = DataFile()
    retval = frcsv.read(dfile, fileobj, 'ctd')
    split_dfc = split_on_cast(dfile)
    dfc.files = split_dfc.files

    # Promote per-cast header columns to file globals. The first row's
    # value is used, since these columns are constant within a cast.
    global_headers = [
        'EXPOCODE', 'STNNBR', 'CASTNO', '_DATETIME', 'LATITUDE', 'LONGITUDE',
        'DEPTH'
    ]
    for dfile in dfc.files:
        for header in global_headers:
            value = dfile[header][0]
            # isinstance, not type(...) == int, so int subclasses are
            # stringified the same way.
            if isinstance(value, int):
                value = str(value)
            dfile.globals[header] = value
            del dfile[header]

        # Arbitrarily set SECT_ID to blank
        dfile.globals['SECT_ID'] = ''

        # Take largest depth value and set as bottom depth.
        # NOTE(review): the DEPTH column was deleted in the loop above, so
        # this lookup presumably always raises KeyError and falls through to
        # the Unesco estimate from maximum pressure -- confirm intent.
        try:
            depth = max(dfile['DEPTH'])
        except KeyError:
            depth = depth_unesco(max(dfile['CTDPRS']),
                                 dfile.globals['LATITUDE'])
        dfile.globals['DEPTH'] = ceil(depth)

    return retval
Example 2
def read(self, fileobj):
    """How to read an LDEO ASEP file.

    Parses the single metadata header line, skips forward to the '&'
    comment line and the '@' parameter-key line, then reads whitespace
    separated data rows. The sentinel value '-9' is treated as missing.
    """
    line1 = _getline(fileobj)

    dtype_shipcode, stn, cast, lat, lon, date, yday, time, cruise_id = \
        line1.split()

    # First character encodes the data type; the remainder is the ship code.
    dtype = dtype_shipcode[0]

    if not is_datatype_ctd(dtype):
        log.error(u'Unable to read non-CTD ASEP files at the moment.')
        return

    shipcode = dtype_shipcode[1:]
    # FIXME this is not really the EXPOCODE
    self.globals['EXPOCODE'] = cruise_id
    # FIXME this is not really the SECT_ID
    self.globals['SECT_ID'] = cruise_id
    self.globals['STNNBR'] = str(int(stn))
    self.globals['CASTNO'] = cast
    self.globals['LATITUDE'] = lat
    self.globals['LONGITUDE'] = lon
    self.globals['_DATETIME'] = datetime.strptime(date + time, '%Y/%m/%d%H:%M')
    self.globals['header'] = '#' + cruise_id

    line2 = _getline(fileobj)
    while line2[0] != '&':
        log.warn(u'Ignoring line not preceded by &: {0!r}'.format(line2))
        line2 = _getline(fileobj)

    self.globals['header'] += "\n#" + line2 + "\n"

    line3 = _getline(fileobj)
    while line3[0] != '@':
        # BUGFIX: report the line actually being skipped (was line2).
        log.warn(u'Ignoring line not preceded by @: {0!r}'.format(line3))
        line3 = _getline(fileobj)

    # Map the short ASEP parameter keys to canonical parameter names.
    param_keys = line3[1:].split()
    parameters = [CTD_PARAM_MAP.get(key, None) for key in param_keys]
    cols = self.create_columns(parameters)
    for line in fileobj:
        for col, val in zip(cols, line.split()):
            if val == '-9':
                val = None
            col.append(_decimal(val))

    # rewrite every data column to be the same sigfigs
    for col in self.columns.values():
        decplaces = col.decimal_places()
        col.values = [pad_decimal(val, decplaces) for val in col.values]

    # Bottom depth is estimated from the deepest pressure, when present.
    if 'pr' in param_keys:
        pressures = cols[param_keys.index('pr')].values
        lat = _decimal(self.globals['LATITUDE'])
        depth = int(depth_unesco(pressures[-1], lat))
        self.globals['DEPTH'] = depth

    self.check_and_replace_parameters()
Example 3
def read(self, handle):
    """How to read a CTD WOCE Egee file.

    Parses the fixed header (cruise line, station/cast line, date line,
    coordinate line), then delegates the data section to
    ``woce.read_data_egee``.
    """
    # Egee1
    egee = handle.readline().strip()
    # STRNBR CASTNO NO RECORDS
    line_cast_id = handle.readline()
    # Raw string so the \s escapes are real regex tokens, not (invalid)
    # string escapes.
    match_cast_id = re.match(r'\s*STRNBR(.*)CASTNO(.*)NO.RECORDS(.*)',
                             line_cast_id)
    try:
        cast_id = [xxx.strip() for xxx in match_cast_id.groups()]
        self.globals['STNNBR'] = cast_id[0]
        self.globals['CASTNO'] = cast_id[1]
        # Parsed but currently unused beyond validating the header.
        num_records = int(cast_id[2])
    except AttributeError:
        log.error(u'Unable to read station cast and number of records.')
    except TypeError:
        log.warn(u'Unable to determine number of data records.')
    # DATE
    line_date = handle.readline()
    try:
        line_date = line_date.split(':', 1)[1].strip()
        dtime = datetime.strptime(line_date, '%b %d %Y %H:%M:%S')
        self.globals['_DATETIME'] = dtime
    except IndexError:
        log.warn(u'Unable to determine date.')
    # blank
    handle.readline()
    # LATITUDE LONGITUDE
    line_coord = handle.readline()
    match_coord = re.match(r'\s*LATITUDE:(.*)LONGITUDE:(.*)', line_coord)
    try:
        coord = [xxx.strip() for xxx in match_coord.groups()]
        lat_coords = coord[0].split()
        lng_coords = coord[1].split()
        self.globals['LATITUDE'] = woce.woce_lat_to_dec_lat(lat_coords)
        self.globals['LONGITUDE'] = woce.woce_lng_to_dec_lng(lng_coords)
    except AttributeError:
        log.error(u'Unable to read coordinates')

    parameters_line = handle.readline()
    units_line = handle.readline()
    asterisk_line = handle.readline()

    woce.read_data_egee(self, handle, parameters_line, units_line,
                        asterisk_line)

    self.check_and_replace_parameters()

    self.globals['EXPOCODE'] = egee
    self.globals['SECT_ID'] = 'EGEE'
    # Bottom depth from the deepest pressure only; no need to convert
    # every pressure just to take the last element.
    self.globals['DEPTH'] = int(
        depth_unesco(self['CTDPRS'].values[-1], self.globals['LATITUDE'])
        or 0)
Example 4
 def test_depth_unesco(self):
     """Smoke-test depth_unesco at pressure 1, latitude 0."""
     # print() call form works identically on Python 2 and 3 for a
     # single argument (the original Py2 print statement does not).
     print(depth.depth_unesco(1, 0))
Example 5
                      'algorithm probably oscillates.'.format(lat))
            raise err
        try:
            sal_tmp_pres = zip(salt.values, temp.values, pres.values)
            density_series = [depth.density(*args) for args in sal_tmp_pres]
            if None in density_series:
                raise ValueError(
                    u'Cannot perform depth integration with missing data points'
                )
            depths = depth.depth(localgrav, pres.values, density_series)
            return ('sverdrup', depths)
        except (AttributeError, IndexError, ValueError):
            pass
        try:
            log.info(u'Falling back from depth integration to Unesco method.')
            depths = [depth.depth_unesco(pres, lat) for pres in pres.values]
            return ('unesco1983', depths)
        except AttributeError:
            raise ValueError(
                u'Cannot convert non-existant pressures to depths.')


class DataFileCollection(object):
    """A collection of DataFiles.

       Represents data files that are really composed of many sub-files
       (e.g. Exchange CTD files).
    """
    def __init__(self, allow_contrived=False):
        # Ordered list of member DataFiles; starts empty.
        self.files = []
        # Whether contrived (synthesized) identifiers are acceptable.
        self.allow_contrived = allow_contrived