def __init__(self, filepath):
    """Open an HDF5 file read-only and keep the handle on ``self.fp``.

    Raises:
        OpenFileError: if the file cannot be opened.
    """
    BaseHandler.__init__(self)
    try:
        self.fp = h5py.File(filepath, 'r')
    except Exception as exc:
        # Python 3 fix: ``except Exception, exc`` is Python 2-only syntax.
        message = 'Unable to open file %s: %s' % (filepath, exc)
        raise OpenFileError(message) from exc
def __init__(self, filepath):
    """Open a NetCDF file and keep the handle on ``self.fp``.

    Raises:
        OpenFileError: if the file cannot be opened.
    """
    BaseHandler.__init__(self)
    try:
        self.fp = netcdf_file(filepath)
    except Exception as exc:
        # Python 3 fix: ``except Exception, exc`` is Python 2-only syntax.
        message = "Unable to open file %s: %s" % (filepath, exc)
        raise OpenFileError(message) from exc
def __init__(self, filepath):
    """Open a CSV file and read the variable names from its header row.

    Raises:
        OpenFileError: if the file cannot be opened or parsed.
    """
    BaseHandler.__init__(self)
    try:
        # newline='' is the csv module's documented open() mode; it replaces
        # the Python 2 'Ur' universal-newline mode (removed in Python 3.11).
        with open(filepath, 'r', newline='') as fp:
            reader = csv.reader(fp, quoting=csv.QUOTE_NONNUMERIC)
            vars = next(reader)  # Python 3: reader.next() no longer exists
    except Exception as exc:
        message = 'Unable to open file {filepath}: {exc}'.format(
            filepath=filepath, exc=exc)
        raise OpenFileError(message) from exc
def __init__(self, filepath):
    """Open a CSV file and read its header of variable names.

    Raises:
        OpenFileError: if the file cannot be opened or parsed.
    """
    BaseHandler.__init__(self)
    try:
        # newline='' is the csv module's documented open() mode; it replaces
        # the Python 2 'Ur' universal-newline mode (removed in Python 3.11).
        with open(filepath, 'r', newline='') as fp:
            reader = csv.reader(fp, quoting=csv.QUOTE_NONNUMERIC)
            vars = next(reader)  # Python 3: reader.next() no longer exists
    except Exception as exc:
        message = 'Unable to open file {filepath}: {exc}'.format(
            filepath=filepath, exc=exc)
        raise OpenFileError(message) from exc
def __init__(self, filepath):
    """Prepare dataset from a YAML configuration file.

    Raises:
        OpenFileError: if the configuration cannot be read.
    """
    BaseHandler.__init__(self)
    # open the YAML file and parse configuration
    try:
        # BUG FIX: the file used to be opened twice — the handle from the
        # ``with`` block was immediately clobbered by a second, leaked open().
        with open(filepath, 'r') as fp:
            # safe_load never executes arbitrary YAML tags; plain yaml.load
            # without a Loader is also an error in PyYAML >= 6.
            config = yaml.safe_load(fp)
    except Exception as exc:
        message = 'Unable to open file {filepath}: {exc}'.format(
            filepath=filepath, exc=exc)
        raise OpenFileError(message) from exc
def setUp(self):
    """Create the WSGI app and keep local and remote views of the data."""
    self.app = BaseHandler(SimpleSequence)
    self.local = SimpleSequence.cast.data
    self.remote = DAPHandler(
        "http://localhost:8001/", self.app).dataset.cast.data
def test_no_ce(self):
    """An empty constraint expression leaves the sequence data untouched."""
    expected = np.rec.fromrecords(
        D1.Drifters.data.tolist(), names=D1.Drifters.keys())
    projection, selection = parse_ce('')
    parsed = BaseHandler(D1).parse(projection, selection)
    np.testing.assert_array_equal(expected, parsed.Drifters.data)
def setUp(self):
    """Build a dataset whose variable name needs quoting, plus a WSGI app."""
    ds = DatasetType("test")
    ds["foo["] = BaseType("foo[", np.array(1))
    self.dataset = ds
    self.app = BaseHandler(self.dataset)
def setUp(self):
    """Build a dataset with a dotted variable name and wrap it in an app."""
    ds = DatasetType("test")
    ds["a.b"] = BaseType("a.b", np.array(1))
    self.app = BaseHandler(ds)
# Checks the full DAS rendering of SimpleStructure (nested attribute listing).
# NOTE(review): line kept verbatim — the expected DAS text embeds newlines
# that were lost in this collapsed view and cannot be reconstructed safely.
def test_body(self): """Test the generated DAS response.""" app = App(BaseHandler(SimpleStructure)) res = app.get('/.das') self.assertEqual( res.text, """Attributes { types { String key "value"; nested { String string "bar"; Int32 list 42, 43; Int32 array 1; Float64 float 1000; } b { } i32 { } ui32 { } i16 { } ui16 { } f32 { } f64 { } s { } u { } } } """)
def setUp(self):
    """Create the WSGI app and a proxy onto the remote "byte" array."""
    self.app = BaseHandler(SimpleArray)
    self.data = BaseProxy(
        "http://localhost:8001/", "byte", np.dtype("b"), (5,),
        application=self.app)
def __init__(self, filepath):
    """Open a NetCDF file and build the corresponding DAP dataset.

    Grids (with their maps) and bare dimension variables are exposed;
    an unlimited dimension, when present, is advertised via DODS_EXTRA.

    Raises:
        OpenFileError: if the file cannot be opened or parsed.
    """
    BaseHandler.__init__(self)
    self.filepath = filepath
    try:
        with netcdf_file(self.filepath, 'r') as source:
            # Advertise the file's mtime so clients can cache responses.
            self.additional_headers.append(
                ('Last-modified',
                 (formatdate(
                     time.mktime(
                         time.localtime(os.stat(filepath)[ST_MTIME]))))))

            # shortcuts
            vars = source.variables
            dims = source.dimensions

            # build dataset
            name = os.path.split(filepath)[1]
            self.dataset = DatasetType(
                name, attributes=dict(NC_GLOBAL=attrs(source)))
            # A None-sized dimension marks the (single) unlimited dimension.
            for dim in dims:
                if dims[dim] is None:
                    self.dataset.attributes['DODS_EXTRA'] = {
                        'Unlimited_Dimension': dim,
                    }
                    break

            # add grids
            grids = [var for var in vars if var not in dims]
            for grid in grids:
                self.dataset[grid] = GridType(grid, attrs(vars[grid]))
                # add array
                self.dataset[grid][grid] = BaseType(
                    grid, LazyVariable(source, grid, grid, self.filepath),
                    vars[grid].dimensions, attrs(vars[grid]))
                # add maps
                for dim in vars[grid].dimensions:
                    self.dataset[grid][dim] = BaseType(
                        dim, vars[dim][:], None, attrs(vars[dim]))

            # add dims
            for dim in dims:
                self.dataset[dim] = BaseType(
                    dim, vars[dim][:], None, attrs(vars[dim]))
    except Exception as exc:
        # BUG FIX: a stray bare ``raise`` used to sit here, re-raising the
        # original exception and making the OpenFileError below unreachable.
        message = 'Unable to open file %s: %s' % (filepath, exc)
        raise OpenFileError(message) from exc
def setUp(self):
    """Create the WSGI app and a scalar proxy for the "short" variable."""
    self.app = BaseHandler(SimpleArray)
    self.data = BaseProxy(
        "http://localhost:8001/", "short", np.dtype(">i"), (),
        application=self.app)
def __init__(self, filepath):
    """Load the YAML configuration and build the dataset model.

    A ``None`` path yields an empty configuration and no dataset.

    Raises:
        OpenFileError: if the configuration file cannot be read.
    """
    BaseHandler.__init__(self)
    # Guard clause: no path means an empty handler.
    if filepath is None:
        self.config = {}
        self.dataset = None
        return
    try:
        with open(filepath, 'r') as stream:
            loaded = yaml.load(stream, Loader=yaml.FullLoader)
    except Exception as exc:
        raise OpenFileError(
            'Unable to open file {filepath}: {exc}'.format(
                filepath=filepath, exc=exc))
    self.config = loaded
    self.dataset = dataset_model(loaded)
def test_filtering(self):
    """A selection in the constraint expression filters the sequence."""
    records = np.rec.fromrecords(
        D1.Drifters.data.tolist(), names=D1.Drifters.keys())
    expected = records[records['longitude'] < 999]
    projection, selection = parse_ce('Drifters.longitude<999')
    parsed = BaseHandler(D1).parse(projection, selection)
    np.testing.assert_array_equal(expected, parsed.Drifters.data)
def setUp(self):
    """Serve a small unicode array and fetch it back through DAP."""
    ds = DatasetType("test")
    self.original_data = np.array(
        [["This ", "is "], ["a ", "test"]], dtype='<U5')
    ds["s"] = BaseType("s", self.original_data)
    self.app = BaseHandler(ds)
    self.data = DAPHandler("http://localhost:8001/", self.app).dataset.s
def setUp(self):
    """Serve a short string array and build a proxy for it."""
    ds = DatasetType("test")
    ds["s"] = BaseType("s", np.array(["one", "two", "three"]))
    self.app = BaseHandler(ds)
    self.data = BaseProxy(
        "http://localhost:8001/", "s", np.dtype("|S5"), (3,),
        application=self.app)
def __init__(self, filepath):
    """Prepare dataset.

    The `__init__` method of handlers is responsible for preparing the
    dataset for incoming requests.

    Raises:
        OpenFileError: if the YAML configuration cannot be read.
    """
    BaseHandler.__init__(self)
    # open the YAML file and parse configuration
    try:
        # BUG FIX: the file used to be opened twice — the handle from the
        # ``with`` block was immediately clobbered by a second, leaked open().
        with open(filepath, 'r') as fp:
            # safe_load never executes arbitrary YAML tags; plain yaml.load
            # without a Loader is also an error in PyYAML >= 6.
            config = yaml.safe_load(fp)
    except Exception as exc:
        message = 'Unable to open file {filepath}: {exc}'.format(
            filepath=filepath, exc=exc)
        raise OpenFileError(message) from exc
def test_environ_loader_with_template(self):
    """A Jinja2 environment passed through the WSGI environ is honoured."""
    env = Environment(loader=DictLoader({'html.html': 'global'}))
    handler = BaseHandler(VerySimpleSequence)
    request = Request.blank('/.html')
    request.environ["pydap.jinja2.environment"] = env
    response = request.get_response(handler)
    self.assertEqual(response.text, "global")
def setUp(self):
    """Serve a scalar bytes variable and build a proxy for it."""
    ds = DatasetType("test")
    ds["s"] = BaseType("s", np.array("This is a test", dtype='S'))
    self.app = BaseHandler(ds)
    self.data = BaseProxy(
        "http://localhost:8001/", "s", np.dtype("|S14"), (),
        application=self.app)
def test_environ_loader_without_template(self):
    """An environ-supplied environment with no loader falls back gracefully."""
    env = Environment()
    self.assertIsNone(env.loader)
    handler = BaseHandler(VerySimpleSequence)
    request = Request.blank('/.html')
    request.environ["pydap.jinja2.environment"] = env
    response = request.get_response(handler)
    self.assertNotEqual(response.text, "global")
def ssf_app():
    """Build a WSGI app that serves SimpleGrid with server-side functions.

    Server-side function calls are evaluated lazily: arbitrary names map to
    remote calls, and the resulting dataset is only evaluated when
    ``__getitem__`` or ``__getattr__`` fires, so nested calls run once.
    """
    from pydap.wsgi.ssf import ServerSideFunctions
    handler = BaseHandler(SimpleGrid)
    return ServerSideFunctions(handler)
# Calls mean(x) in the projection and checks the ASCII response.
# NOTE(review): line kept verbatim — the expected response text embeds
# newlines lost in this collapsed view.
def test_projection(self): """Test a simple function call on a projection.""" app = TestApp(ServerSideFunctions(BaseHandler(SimpleGrid))) res = app.get("/.asc?mean(x)") self.assertEqual(res.text, """Dataset { Float64 x; } SimpleGrid; --------------------------------------------- x 1 """)
# Nested server-side calls: mean of mean over axis 0, checked via ASCII output.
# NOTE(review): line kept verbatim — the expected response text embeds
# newlines lost in this collapsed view.
def test_nested_projection(self): """Test a nested function call.""" app = TestApp(ServerSideFunctions(BaseHandler(SimpleGrid))) res = app.get("/.asc?mean(mean(SimpleGrid.SimpleGrid,0),0)") self.assertEqual(res.text, """Dataset { Float64 SimpleGrid; } SimpleGrid; --------------------------------------------- SimpleGrid 2.5 """)
def test_das(self):
    """DAS requests must ignore server-side function calls."""
    # create a simple app
    client = App(ServerSideFunctions(BaseHandler(SimpleSequence)))
    # The DAS response never evaluates functions, so this must not raise.
    client.get("/.das?non_existing_function(sequence)")
    # The DDS response does evaluate them, so the unknown name must fail.
    with self.assertRaises(KeyError):
        client.get("/.dds?non_existing_function(sequence)")
def test_open(sequence_type_data):
    """Data served over LocalTestServerSSL round-trips unchanged."""
    TestDataset = DatasetType('Test')
    TestDataset['sequence'] = sequence_type_data
    with LocalTestServerSSL(BaseHandler(TestDataset)) as server:
        remote = open_url(server.url)
        fetched = list(remote['sequence'])
        np.testing.assert_array_equal(
            np.array(fetched, dtype=sequence_type_data.data.dtype),
            np.array(sequence_type_data.data[:],
                     dtype=sequence_type_data.data.dtype))
# When a function result clashes with a projected name, the projected
# variable wins. NOTE(review): line kept verbatim — the expected response
# text embeds newlines lost in this collapsed view.
def test_projection_clash(self): """Test a function call creating a variable with a conflicting name.""" app = TestApp(ServerSideFunctions(BaseHandler(SimpleGrid))) res = app.get("/.asc?mean(x),x") self.assertEqual(res.text, """Dataset { Int32 x[x = 3]; } SimpleGrid; --------------------------------------------- x [0] 0 [1] 1 [2] 2 """)
def test_timeout(sequence_type_data):
    """Test that timeout works properly.

    A latency-injecting wrapper guarantees every request takes >= 0.1 s, so
    a 1e-5 s timeout must trip for open_url, open_dods, sequence proxies
    and base proxies alike.
    """
    TestDataset = DatasetType('Test')
    TestDataset['sequence'] = sequence_type_data
    TestDataset['byte'] = BaseType('byte', 0)
    application = BaseHandler(TestDataset)

    # Explictly add latency on the devel server
    # to guarantee that it timeouts
    def wrap_mocker(func):
        def mock_add_latency(*args, **kwargs):
            time.sleep(1e-1)
            return func(*args, **kwargs)
        return mock_add_latency

    application = wrap_mocker(application)

    with LocalTestServer(application) as server:
        url = ("http://0.0.0.0:%s/" % server.port)

        # test open_url: a generous (default) timeout succeeds, a tiny one fails
        assert open_url(url) == TestDataset
        with pytest.raises(HTTPError) as e:
            open_url(url, timeout=1e-5)
        assert 'Timeout' in str(e)

        # test open_dods
        with pytest.raises(HTTPError):
            open_dods(url + '.dods?sequence', timeout=1e-5)
        assert 'Timeout' in str(e)

        # test sequenceproxy
        dataset = open_url(url)
        seq = dataset['sequence']
        assert isinstance(seq.data, SequenceProxy)
        # Change the timeout of the sequence proxy:
        seq.data.timeout = 1e-5
        with pytest.raises(HTTPError) as e:
            next(seq.iterdata())
        assert 'Timeout' in str(e)

        # test baseproxy:
        dat = dataset['byte']
        assert isinstance(dat.data, BaseProxy)
        # Change the timeout of the baseprox proxy:
        dat.data.timeout = 1e-5
        with pytest.raises(HTTPError) as e:
            dat[:]
        assert 'Timeout' in str(e)
def test_no_parsed_response(self):
    """Middleware that drops the parsed dataset breaks server-side functions.

    pydap returns WSGI responses that carry the "parsed" dataset so
    middleware can manipulate it; Accumulator discards it, so only plain
    requests keep working.
    """
    client = App(ServerSideFunctions(Accumulator(BaseHandler(SimpleGrid))))
    # A plain request still works even though server-side functions are
    # not working in the WSGI pipeline.
    client.get("/.dds")
    # Without the parsed dataset the function call cannot be evaluated.
    with self.assertRaises(ServerError):
        client.get("/.dds?mean(x)")
# Checks the full .dods response for NestedSequence: the DDS header plus the
# exact XDR byte stream for a nested sequence. NOTE(review): line kept
# verbatim — the expected DDS text embeds newlines lost in this collapsed
# view, and the byte-string payload must stay byte-exact.
def test_body(self): """Test response body.""" app = App(BaseHandler(NestedSequence)) res = app.get("/.dods") dds, xdrdata = res.body.split(b'\nData:\n', 1) dds = dds.decode('ascii') self.assertEqual( dds, """Dataset { Sequence { Int32 lat; Int32 lon; Int32 elev; Sequence { Int32 time; Int32 slp; Int32 wind; } time_series; } location; } NestedSequence;""") self.assertEqual( xdrdata, START_OF_SEQUENCE + b"\x00\x00\x00\x01" b"\x00\x00\x00\x01" b"\x00\x00\x00\x01" + START_OF_SEQUENCE + b"\x00\x00\x00\n" b"\x00\x00\x00\x0b" b"\x00\x00\x00\x0c" + START_OF_SEQUENCE + b"\x00\x00\x00\x15" b"\x00\x00\x00\x16" b"\x00\x00\x00\x17" + END_OF_SEQUENCE + START_OF_SEQUENCE + b"\x00\x00\x00\x02" b"\x00\x00\x00\x04" b"\x00\x00\x00\x04" + START_OF_SEQUENCE + b"\x00\x00\x00\x0f" b"\x00\x00\x00\x10" b"\x00\x00\x00\x11" + END_OF_SEQUENCE + START_OF_SEQUENCE + b"\x00\x00\x00\x03" b"\x00\x00\x00\x06" b"\x00\x00\x00\t" + START_OF_SEQUENCE + b"\x00\x00\x00\x04" b"\x00\x00\x00\x08" b"\x00\x00\x00\x10" + START_OF_SEQUENCE + b"\x00\x00\x00\x1f" b"\x00\x00\x00 " b"\x00\x00\x00!" + START_OF_SEQUENCE + b"\x00\x00\x00)" b"\x00\x00\x00*" b"\x00\x00\x00+" + START_OF_SEQUENCE + b"\x00\x00\x003" b"\x00\x00\x004" b"\x00\x00\x005" + START_OF_SEQUENCE + b"\x00\x00\x00=" b"\x00\x00\x00>" b"\x00\x00\x00?" + END_OF_SEQUENCE + END_OF_SEQUENCE)
# Checks the DDS rendering of SimpleGrid (grid plus standalone axis arrays).
# NOTE(review): line kept verbatim — the expected DDS text embeds newlines
# lost in this collapsed view.
def test_body(self): """Test the generated DDS response.""" app = TestApp(BaseHandler(SimpleGrid)) res = app.get('/.dds') self.assertEqual( res.text, """Dataset { Grid { Array: Int32 SimpleGrid[y = 2][x = 3]; Maps: Int32 x[x = 3]; Int32 y[y = 2]; } SimpleGrid; Int32 x[x = 3]; Int32 y[y = 2]; } SimpleGrid; """)
# Checks the ASCII (.asc) rendering of SimpleGrid: DDS header followed by
# the dumped grid, map and axis values. NOTE(review): line kept verbatim —
# the expected text embeds newlines lost in this collapsed view.
def test_body(self): """Test the generated ASCII response.""" app = App(BaseHandler(SimpleGrid)) res = app.get('/.asc') self.assertEqual( res.text, """Dataset { Grid { Array: Int32 SimpleGrid[y = 2][x = 3]; Maps: Int32 x[x = 3]; Int32 y[y = 2]; } SimpleGrid; Int32 x[x = 3]; Int32 y[y = 2]; } SimpleGrid; --------------------------------------------- SimpleGrid.SimpleGrid [0][0] 0 [0][1] 1 [0][2] 2 [1][0] 3 [1][1] 4 [1][2] 5 SimpleGrid.x [0] 0 [1] 1 [2] 2 SimpleGrid.y [0] 0 [1] 1 x [0] 0 [1] 1 [2] 2 y [0] 0 [1] 1 """)
# Checks the DAS rendering of FaultyGrid (attributes with unusual types
# still render). NOTE(review): line kept verbatim — the expected text
# embeds newlines lost in this collapsed view.
def test_body(self): """Test the generated DAS response.""" app = App(BaseHandler(FaultyGrid)) res = app.get('/.das') self.assertEqual( res.text, """Attributes { String description "A faulty grid for testing."; FaultyGrid { } x { String axis "X"; Int32 code 1; } y { String axis "Y"; } } """)
# Checks the DAS rendering of SimpleGrid (axis and unit attributes).
# NOTE(review): line kept verbatim — the expected text embeds newlines
# lost in this collapsed view.
def test_body(self): """Test the generated DAS response.""" app = App(BaseHandler(SimpleGrid)) res = app.get('/.das') self.assertEqual( res.text, """Attributes { String description "A simple grid for testing."; SimpleGrid { } x { String axis "X"; String units "degrees_east"; } y { String axis "Y"; String units "degrees_north"; } } """)
def __init__(self, filepath):
    """Remember the target path and set up a fresh attribute cache."""
    BaseHandler.__init__(self)
    self.cache = FileAttributeCache()
    self.filepath = filepath
def __init__(self, dataset=None):
    """Initialise the handler and advertise a debugging response header."""
    BaseHandler.__init__(self, dataset)
    self.additional_headers = [("X-debug", "True")]
def __init__(self, filepath):
    """Build a DAP dataset describing an SSMIS product file.

    The temporal resolution is inferred from the file name, SSMI global
    attributes are attached, and one grid per geophysical variable (wind
    speed, vapor, cloud, rain and — for daily files — time) is created
    over the lon/lat/part_of_day axes.
    """
    BaseHandler.__init__(self)
    self.filepath = filepath
    self.filename = os.path.split(filepath)[1]

    # Infer the temporal resolution from the file name; self.daily and
    # self.day_3 are presumably regexes defined on the class — not visible
    # here, TODO confirm.
    temporal_resolution = "Monthly"
    if 'weeks' in filepath:
        temporal_resolution = "Weekly"
    elif self.daily.match(self.filename):
        temporal_resolution = "Daily"
    elif self.day_3.match(self.filename):
        temporal_resolution = "3-Day"

    self.dataset = DatasetType(name=self.filename, attributes={
        "SSMI_GLOBAL": {
            "CONVENTIONS": "COARDS",
            "short_name": "SSMIS",
            "long_name": "Special Sensor Microwave Image Sounder",
            "producer_agency": "Remote Sensing Systems",
            "product_version": "Version-7",
            "spatial_resolution": "0.25 degree",
            "temporal_resolution": temporal_resolution,
            "instrument": "SSMIS",
            "original_filename": self.filename,
        }
    })

    # Only daily files (not weekly composites) carry a time variable.
    # Idiom fix: 'weeks' not in filepath (was ``not 'weeks' in filepath``).
    time_variable = False
    if self.daily.match(self.filename) and 'weeks' not in filepath:
        time_variable = True

    _dim = ('lon', 'lat', 'part_of_day')
    _shape = (1440, 720, 2)
    _type = UInt16

    self.variables = []
    if time_variable:
        self.variables.append(BaseType(
            name='time', data=None, shape=_shape, dimensions=_dim,
            type=_type, attributes={
                'long_name': 'Time',
                'add_offset': 0,
                'scale_factor': 6,
                '_FillValue': 254,
                'units': 'minutes',
                'coordinates': 'lon lat'
            }))
    self.variables.append(BaseType(
        name='wspd', data=None, shape=_shape, dimensions=_dim,
        type=_type, attributes={
            'long_name': '10 meter Surface Wind Speed',
            'add_offset': 0,
            'scale_factor': 0.2,
            '_FillValue': 254,
            'units': 'm/sec',
            'coordinates': 'lon lat'
        }))
    self.variables.append(BaseType(
        name='vapor', data=None, shape=_shape, dimensions=_dim,
        type=_type, attributes={
            'long_name': 'Atmospheric Water Vapor',
            'add_offset': 0,
            'scale_factor': 0.3,
            '_FillValue': 254,
            'units': 'mm',
            'coordinates': 'lon lat'
        }))
    self.variables.append(BaseType(
        name='cloud', data=None, shape=_shape, dimensions=_dim,
        type=_type, attributes={
            'long_name': 'Cloud liquid Water',
            'add_offset': -0.05,
            'scale_factor': 0.01,
            '_FillValue': 254,
            'units': 'mm',
            'coordinates': 'lon lat'
        }))
    self.variables.append(BaseType(
        name='rain', data=None, shape=_shape, dimensions=_dim,
        type=_type, attributes={
            'long_name': 'Rain Rate',
            'add_offset': 0,
            'scale_factor': 0.1,
            '_FillValue': 254,
            'units': 'mm/hr',
            'coordinates': 'lon lat'
        }))

    # Coordinate variables shared by every grid.
    lonVar = BaseType(
        name='lon', data=None, shape=(1440,), dimensions=('lon',),
        type=Float32, attributes={
            'long_name': 'longitude',
            'valid_range': '-180, 180',
            'units': 'degrees_east'
        })
    latVar = BaseType(
        name='lat', data=None, shape=(720,), dimensions=('lat',),
        type=Float32, attributes={
            'long_name': 'latitude',
            'valid_range': '-90, 90',
            'units': 'degrees_north'
        })
    partVar = BaseType(
        name='part_of_day', data=None, shape=(2,),
        dimensions=('part_of_day',), type=UInt16, attributes={
            'long_name': 'part_of_day',
            'valid_range': '0, 1',
            'units': 'part_of_day'
        })

    self.dataset['lon'] = lonVar
    self.dataset['lat'] = latVar
    self.dataset['part_of_day'] = partVar

    # Wrap each variable in a grid with deep copies of the coordinate maps
    # (copies keep the per-grid maps independent of the top-level axes).
    for variable in self.variables:
        g = GridType(name=variable.name)
        g[variable.name] = variable
        g['lon'] = lonVar.__deepcopy__()
        g['lat'] = latVar.__deepcopy__()
        g['part_of_day'] = partVar.__deepcopy__()
        g.attributes = variable.attributes
        self.dataset[variable.name] = g