Example #1
    def __call__(self, environ, start_response):
        path_info = environ.get('PATH_INFO') or environ.get('SCRIPT_NAME', '')
        path_info = path_info.lstrip('/')
        path, type_ = path_info.rsplit('.', 1)
        # For DAS, we need to clear the constraint expression to get the
        # full dataset.
        if type_ == 'das':
            environ['QUERY_STRING'] = ''

        # Add some vars to the environment.
        environ['pydap.path'] = path
        environ['pydap.response'] = type_
        environ['pydap.ce'] = parse_qs(environ.get('QUERY_STRING', ''))
        environ['pydap.logger'] = logging.getLogger('pydap')
        environ['pydap.headers'] = []  # additional headers

        try:
            # Build the dataset using the proper subclass method and
            # pass it to a response (DAS/DDS/etc.) builder.
            dataset = self.parse_constraints(environ)
            response = self.response_map[type_]
            responder = response(dataset)
            return responder(environ, start_response)
        except HTTPException, exc:
            # Some responses (like HTML) raise an HTTPRedirect when the
            # form is posted, so we need this here.
            return exc(environ, start_response)
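The handler above is itself a complete WSGI application: __call__ takes the standard (environ, start_response) pair, derives the requested response type from the path extension, and dispatches to the matching response class. The snippet below is a small self-contained illustration of that parsing step, using a made-up path.

# Illustration of the path parsing done at the top of __call__:
# everything after the last dot selects the response (dds, das, html, ...).
path_info = '/data/coads.nc.dds'.lstrip('/')
path, type_ = path_info.rsplit('.', 1)
print path    # data/coads.nc
print type_   # dds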
Example #2
    def parse_constraints(self, environ):
        dataset = copy.deepcopy(self.dataset)

        projection, selection = parse_qs(environ.get('QUERY_STRING', ''))
        list_of_var = []
        if projection:
            for var in projection:
                var_name = var[len(var)-1][0]
                if var_name in list_of_var:
                    continue
                list_of_var.append(var_name)
                if var_name in ['lat', 'lon']:
                    if var_name == 'lon':
                        dataset[var_name] = BaseType(name=var_name,
                                                     data=self.lons,
                                                     shape=self.lons.shape,
                                                     dimensions=('lon',),
                                                     type=self.lons.dtype.char)
                    elif var_name == 'lat':
                        dataset[var_name] = BaseType(name=var_name,
                                                     data=self.lats,
                                                     shape=self.lats.shape,
                                                     dimensions=('lat',),
                                                     type=self.lats.dtype.char)
                else:
                    data = self.get_data_for_parameter(var_name, None)
                    dataset[var_name] = BaseType(name=var_name,
                                                 data=data,
                                                 shape=data.shape,
                                                 dimensions=('lat', 'lon'),
                                                 type=data.dtype.char)

        return constrain(dataset, environ.get('QUERY_STRING', ''))
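Note that parse_qs here is pydap's DAP-aware constraint parser, not the urlparse helper: it returns a projection (a list of variable chains, each chain a list of (name, slices) tuples, which is why the loop reads var[len(var)-1][0]) and a selection (a list of condition strings). A rough sketch of that split, assuming the pydap 2.x import path pydap.lib.parse_qs:

# Illustration only; the exact slice objects depend on pydap's hyperslab parsing.
from pydap.lib import parse_qs   # assumed location of the helper used above

projection, selection = parse_qs('lat,tasmax[0:10][0:10]&tasmax>300')
# projection -> [[('lat', ())], [('tasmax', (<slice for 0:10>, <slice for 0:10>))]]
# selection  -> ['tasmax>300']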
Example #3
    def parse_constraints(self, environ):
        projection, selection = parse_qs(environ.get('QUERY_STRING', ''))
        if projection:
            try:
                if self.filepath.endswith('gz'):
                    file = gzip.open(self.filepath, 'rb')
                else:
                    file = open(self.filepath, "rb")
                bytes_read = np.frombuffer(file.read(), np.uint8)
                file.close()
            except Exception, exc:
                message = 'Unable to open file %s: %s' % (self.filepath, exc)
                raise OpenFileError(message)

            for var in projection:
                var_name = var[len(var)-1][0]
                slices = var[len(var)-1][1]

                if var_name in ['lat', 'lon', 'part_of_day']:
                    if var_name == 'lon':
                        if len(slices):
                            lon_slice = slices[0]
                        else:
                            lon_slice = slice(0, 1440, 1)
                        self.dataset['lon'].data = self.read_variable_lon(lon_slice)
                    elif var_name == 'lat':
                        if len(slices):
                            lat_slice = slices[0]
                        else:
                            lat_slice = slice(0, 720, 1)
                        self.dataset['lat'].data = self.read_variable_lat(lat_slice)
                    elif var_name == 'part_of_day':
                        if len(slices):
                            part_slice = slices[0]
                        else:
                            part_slice = slice(0, 2, 1)
                        self.dataset['part_of_day'].data = self.read_variable_part(part_slice)
                else:
                    for variable in self.variables:
                        if variable.name == var_name:
                            slices = var[len(var)-1][1]
                            if len(slices) != 3:
                                slices = [slice(0, 1440, 1), slice(0, 720, 1), slice(0, 2, 1)]
                                # raise ValueError('Cannot obtain slices for %s. '
                                #                  'Should be 3 slices, but %d found' % (var_name, len(slices)))
                            print 'retrieving %s' % var_name, slices
                            index = 0
                            for i in range(len(self.variables)):
                                if self.variables[i].name == variable.name:
                                    index = i

                            self.dataset[variable.name]['lon'].data = self.read_variable_lon(slices[0])
                            self.dataset[variable.name]['lat'].data = self.read_variable_lat(slices[1])
                            self.dataset[variable.name]['part_of_day'].data = self.read_variable_part(slices[2])

                            self.dataset[variable.name][variable.name].data = self.read_variable_data(bytes_read, index, slices)
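The file-loading block at the top of this handler is worth reading on its own: it pulls the whole file, optionally gzip-compressed, into one flat numpy uint8 buffer and leaves the per-variable decoding to read_variable_data(). A small self-contained sketch of that step, with a helper name of my own choosing:

# Hedged sketch of the byte-loading step used above; load_raw_bytes is a
# hypothetical helper, not part of the handler.
import gzip
import numpy as np

def load_raw_bytes(filepath):
    opener = gzip.open if filepath.endswith('gz') else open
    f = opener(filepath, 'rb')
    try:
        # The whole file becomes one flat uint8 array.
        return np.frombuffer(f.read(), np.uint8)
    finally:
        f.close()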
Example #4
def open_url(url):
    """
    Open a given dataset URL, trying different response methods. 

    The function checks the stub DDX method, and falls back to the
    DDS+DAS responses. It can be easily extended for other representations
    like JSON.

    The URL should point to the dataset, omitting any response extensions
    like ``.dds``. Username and password can be passed in the URL like::

        http://user:password@example.com:port/path

    They will be transmitted as plaintext if the server supports only
    Basic authentication, so be careful. For Digest authentication this
    is safe.

    The URL can point directly to an Opendap dataset, or it can contain
    any number of constraint expressions (selections/projections)::

        http://example.com/dataset?var1,var2&var3>10

    You can also specify a cache directory, a timeout and a proxy using
    the global variables from ``pydap.lib``::

        >>> import pydap.lib
        >>> pydap.lib.TIMEOUT = 60  # seconds
        >>> pydap.lib.CACHE = '.cache'
        >>> import httplib2
        >>> from pydap.util import socks
        >>> pydap.lib.PROXY = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP, 'localhost', 8000)

    """
    for response in [_ddx, _ddsdas]:
        dataset = response(url)
        if dataset: break
    else:
        raise ClientError("Unable to open dataset.")

    # Remove any projections from the url, leaving selections.
    scheme, netloc, path, query, fragment = urlsplit(url)
    projection, selection = parse_qs(query)
    url = urlunsplit(
            (scheme, netloc, path, '&'.join(selection), fragment))

    # Set data to a Proxy object for BaseType and SequenceType. These
    # variables can then be sliced to retrieve the data on-the-fly.
    for var in walk(dataset, BaseType):
        var.data = ArrayProxy(var.id, url, var.shape)
    for var in walk(dataset, SequenceType):
        var.data = SequenceProxy(var.id, url)

    # Set server-side functions.
    dataset.functions = Functions(url)

    # Apply the corresponding slices.
    projection = fix_shn(projection, dataset)
    for var in projection:
        target = dataset
        while var:
            token, slice_ = var.pop(0)
            target = target[token]
            if slice_ and isinstance(target.data, VariableProxy):
                shape = getattr(target, 'shape', (sys.maxint,))
                target.data._slice = fix_slice(slice_, shape)

    return dataset
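This is the usual client entry point. A minimal usage sketch, assuming the function is exposed as pydap.client.open_url and that the URL points at a live OPeNDAP server:

# Minimal client-side usage; the URL and variable name are placeholders.
from pydap.client import open_url

dataset = open_url('http://example.com/dataset')   # no .dds/.das extension
print dataset.keys()                               # variables are lazy proxies
# Slicing a variable makes its ArrayProxy fetch only that hyperslab:
# sst = dataset['sst'][0:10, 0:10]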
Example #5
def constrain(dataset, ce):
    """
    A constraint expression applier.

        >>> dataset = DatasetType(name='test')
        >>> dataset['seq'] = SequenceType(name='seq')
        >>> dataset['seq']['index'] = BaseType(name='index', type=Int32)
        >>> dataset['seq']['temperature'] = BaseType(name='temperature', type=Float32)
        >>> dataset['seq']['site'] = BaseType(name='site', type=String)

        >>> dataset['seq'].data = [
        ...         (10, 17.2, 'Diamont_St'),
        ...         (11, 15.1, 'Blacktail_Loop'),
        ...         (12, 15.3, 'Platinum_St'),
        ...         (13, 15.1, 'Kodiak_Trail')]
        >>> for struct_ in dataset.seq:
        ...     print struct_.data
        (10, 17.2, 'Diamont_St')
        (11, 15.1, 'Blacktail_Loop')
        (12, 15.3, 'Platinum_St')
        (13, 15.1, 'Kodiak_Trail')

        >>> dataset2 = constrain(dataset, 'seq.index>11')
        >>> for struct_ in dataset2.seq:
        ...     print struct_.data
        (12, 15.3, 'Platinum_St')
        (13, 15.1, 'Kodiak_Trail')
        >>> dataset2 = constrain(dataset, 'seq.index>11&seq.temperature<15.2')
        >>> for struct_ in dataset2.seq:
        ...     print struct_.data
        (13, 15.1, 'Kodiak_Trail')

        >>> dataset.clear()
        >>> dataset['casts'] = SequenceType(name='casts')
        >>> dataset['casts']['lat'] = BaseType(name='lat', type=Float32)
        >>> dataset['casts']['lon'] = BaseType(name='lon', type=Float32)
        >>> dataset['casts']['time'] = BaseType(name='time', type=Float64)
        >>> dataset['casts']['profile'] = SequenceType(name='profile')
        >>> dataset['casts']['profile']['t'] = BaseType(name='t', type=Float32)
        >>> dataset['casts']['profile']['s'] = BaseType(name='s', type=Float32)
        >>> dataset['casts']['profile']['p'] = BaseType(name='p', type=Float32)
        >>> dataset['casts'].data = [
        ...         (-10.0, 290.0, 1.0, [(21.0, 35.0, 100.0), (20.5, 34.9, 200.0), (19.0, 33.0, 300.0)]),
        ...         (-11.0, 295.0, 2.0, [(22.0, 35.5, 100.0), (21.0, 35.4, 200.0), (20.0, 33.5, 300.0), (19.0, 33.0, 500.0)])]
        >>> dataset2 = constrain(dataset, 'casts.lat>-11')
        >>> for struct_ in dataset2.casts:
        ...     print struct_.data
        (-10.0, 290.0, 1.0, array([[21.0, 35.0, 100.0],
               [20.5, 34.9, 200.0],
               [19.0, 33.0, 300.0]], dtype=object))

    Filtering is guaranteed to work only in outer sequences, but not in inner
    sequences like this::

        >>> dataset2 = constrain(dataset, 'casts.profile.p>100')
        >>> for struct_ in dataset2.casts:
        ...     print struct_.data
        (-10.0, 290.0, 1.0, array([[21.0, 35.0, 100.0],
               [20.5, 34.9, 200.0],
               [19.0, 33.0, 300.0]], dtype=object))
        (-11.0, 295.0, 2.0, array([[22.0, 35.5, 100.0],
               [21.0, 35.4, 200.0],
               [20.0, 33.5, 300.0],
               [19.0, 33.0, 500.0]], dtype=object))
        
    Instead, inner sequences have to be filtered inside a loop::

        >>> for struct_ in dataset.casts:
        ...     for profile in struct_.profile[ struct_.profile.p > 100 ]:
        ...         print profile.data
        (20.5, 34.9, 200.0)
        (19.0, 33.0, 300.0)
        (21.0, 35.4, 200.0)
        (20.0, 33.5, 300.0)
        (19.0, 33.0, 500.0)

    """
    projection, selection = parse_qs(ce)
    projection = projection or [[(key, ())] for key in dataset.keys()]
    projection = fix_shn(projection, dataset)

    # Make a copy of the dataset.
    filtered = copy.deepcopy(dataset)

    # Filter sequences.
    for seq in walk(filtered, SequenceType):
        if seq._nesting_level == 1:
            filter_ = []
            # Check only selections that apply to the direct children of this sequence
            # (ie, skip children from nested sequences).
            for cond in [
                cond for cond in selection if re.match("%s\.[^\.]+(<=|<|>=|>|=|!=)" % re.escape(seq.id), cond)
            ]:
                id_, op, other = parse_selection(cond, dataset)
                filter_.append(op(id_, other))
            if filter_:
                seq.data = seq[reduce(lambda c1, c2: c1 & c2, filter_)].data

    # Create a new empty dataset to build it up.
    new_ = DatasetType(name=filtered.name, attributes=filtered.attributes.copy())

    for var in projection:
        target, template = new_, filtered
        while var:
            name, slice_ = var.pop(0)
            candidate = copy.deepcopy(template[name])
            if slice_:
                if isinstance(candidate, SequenceType):
                    candidate = candidate[slice_[0]]
                elif isinstance(candidate, BaseType):
                    candidate.data = candidate[slice_]
                    candidate.shape = candidate.data.shape
                else:
                    candidate = candidate[slice_]

            if isinstance(candidate, StructureType):
                if var:
                    # Convert degenerate grids into structures.
                    if isinstance(candidate, GridType):
                        candidate.__class__ = StructureType
                    candidate.clear()
                if name not in target or not var:
                    target[name] = candidate
                target, template = target[name], template[name]
            else:
                target[name] = candidate

    return new_
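One behaviour worth calling out: when the constraint expression carries no projection, the line projection = projection or [[(key, ())] for key in dataset.keys()] projects every top-level variable, so an empty CE returns a copy of the whole dataset. A small self-contained sketch of this, assuming the model classes import from pydap.model as in pydap 2.x and that constrain is the function defined above:

# Hedged sketch of the default-projection behaviour of constrain().
import numpy as np
from pydap.model import DatasetType, BaseType

ds = DatasetType(name='tiny')
ds['x'] = BaseType(name='x', data=np.arange(3), shape=(3,))
ds['y'] = BaseType(name='y', data=np.arange(3), shape=(3,))

print constrain(ds, '').keys()    # ['x', 'y'] -- empty CE keeps everything
print constrain(ds, 'y').keys()   # ['y']      -- a projection keeps only 'y'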