def __init__(self, filepath):
    """Open *filepath* as an HDF5 file and keep the handle on the instance.

    Raises:
        OpenFileError: if h5py cannot open the file.
    """
    BaseHandler.__init__(self)
    try:
        self.fp = h5py.File(filepath, 'r')
    except Exception as exc:  # `as` form: Py3-compatible, matches the other handlers
        message = 'Unable to open file %s: %s' % (filepath, exc)
        raise OpenFileError(message)
def __init__(self, filepath):
    """Open *filepath* as a NetCDF file and keep the handle on the instance.

    Raises:
        OpenFileError: if the file cannot be opened.
    """
    BaseHandler.__init__(self)
    try:
        self.fp = netcdf_file(filepath)
    except Exception as exc:  # `as` form: Py3-compatible, matches the other handlers
        message = "Unable to open file %s: %s" % (filepath, exc)
        raise OpenFileError(message)
def __init__(self, filepath):
    """Open the CSV file and read its header row of variable names.

    Raises:
        OpenFileError: if the file cannot be opened or parsed.
    """
    BaseHandler.__init__(self)
    try:
        # newline='' is the csv module's documented open mode; it replaces
        # the 'Ur' universal-newlines flag removed in Python 3.11.
        with open(filepath, 'r', newline='') as fp:
            reader = csv.reader(fp, quoting=csv.QUOTE_NONNUMERIC)
            # built-in next() instead of Py2-only reader.next(); renamed so
            # the local no longer shadows the builtin `vars`
            var_names = next(reader)
    except Exception as exc:
        message = 'Unable to open file {filepath}: {exc}'.format(
            filepath=filepath, exc=exc)
        raise OpenFileError(message)
def __init__(self, filepath):
    """Open the CSV file and read its header row of variable names.

    Raises:
        OpenFileError: if the file cannot be opened or parsed.
    """
    BaseHandler.__init__(self)
    try:
        # newline='' is the csv module's documented open mode; it replaces
        # the 'Ur' universal-newlines flag removed in Python 3.11.
        with open(filepath, 'r', newline='') as fp:
            reader = csv.reader(fp, quoting=csv.QUOTE_NONNUMERIC)
            # built-in next() instead of Py2-only reader.next(); renamed so
            # the local no longer shadows the builtin `vars`
            var_names = next(reader)
    except Exception as exc:
        message = 'Unable to open file {filepath}: {exc}'.format(
            filepath=filepath, exc=exc)
        raise OpenFileError(message)
def __init__(self, filepath):
    """Prepare dataset.

    Raises:
        OpenFileError: if the configuration file cannot be opened/parsed.
    """
    BaseHandler.__init__(self)
    # open the YAML file and parse configuration
    try:
        with open(filepath, 'Ur') as fp:
            # BUG FIX: the original re-opened filepath here, leaking a second
            # file handle and shadowing the context-managed one; parse the
            # handle the `with` already gave us.
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input — consider yaml.safe_load, as the newer handler
            # variant in this file does with FullLoader.
            config = yaml.load(fp)
    except Exception as exc:
        message = 'Unable to open file {filepath}: {exc}'.format(
            filepath=filepath, exc=exc)
        raise OpenFileError(message)
def __init__(self, filepath):
    """Parse the YAML configuration at *filepath* and build the dataset.

    A ``None`` filepath yields an empty config and no dataset.

    Raises:
        OpenFileError: if the file cannot be opened or parsed.
    """
    BaseHandler.__init__(self)
    if filepath is None:
        self.config = {}
        self.dataset = None
    else:
        try:
            # renamed local: `file` shadowed the builtin of the same name
            with open(filepath, 'r') as fp:
                config = yaml.load(fp, Loader=yaml.FullLoader)
        except Exception as exc:
            raise OpenFileError(
                'Unable to open file {filepath}: {exc}'.format(
                    filepath=filepath, exc=exc))
        self.config = config
        self.dataset = dataset_model(config)
def __init__(self, filepath):
    """Open *filepath* as a NetCDF file and build the DAP dataset model.

    Raises:
        OpenFileError: if the file cannot be opened or parsed.
    """
    BaseHandler.__init__(self)
    self.filepath = filepath
    try:
        with netcdf_file(self.filepath, 'r') as source:
            # advertise the file's mtime so clients can cache responses
            self.additional_headers.append(
                ('Last-modified',
                 (formatdate(
                     time.mktime(time.localtime(
                         os.stat(filepath)[ST_MTIME]))))))

            # shortcuts
            vars = source.variables
            dims = source.dimensions

            # build dataset
            name = os.path.split(filepath)[1]
            self.dataset = DatasetType(
                name, attributes=dict(NC_GLOBAL=attrs(source)))
            for dim in dims:
                # a dimension mapped to None is the unlimited one (NetCDF
                # allows at most one, hence the break)
                if dims[dim] is None:
                    self.dataset.attributes['DODS_EXTRA'] = {
                        'Unlimited_Dimension': dim,
                    }
                    break

            # add grids: every variable that is not itself a dimension
            grids = [var for var in vars if var not in dims]
            for grid in grids:
                self.dataset[grid] = GridType(grid, attrs(vars[grid]))
                # add array (data is loaded lazily on access)
                self.dataset[grid][grid] = BaseType(
                    grid, LazyVariable(source, grid, grid, self.filepath),
                    vars[grid].dimensions, attrs(vars[grid]))
                # add maps
                for dim in vars[grid].dimensions:
                    self.dataset[grid][dim] = BaseType(
                        dim, vars[dim][:], None, attrs(vars[dim]))

            # add dims
            for dim in dims:
                self.dataset[dim] = BaseType(
                    dim, vars[dim][:], None, attrs(vars[dim]))
    except Exception as exc:
        # BUG FIX: a stray bare `raise` here made the lines below unreachable
        # dead code; raise the intended handler error instead.
        message = 'Unable to open file %s: %s' % (filepath, exc)
        raise OpenFileError(message)
def __init__(self, filepath):
    """
    Prepare dataset.

    The `__init__` method of handlers is responsible for preparing the
    dataset for incoming requests.

    Raises:
        OpenFileError: if the configuration file cannot be opened/parsed.
    """
    BaseHandler.__init__(self)
    # open the YAML file and parse configuration
    try:
        with open(filepath, 'Ur') as fp:
            # BUG FIX: the original re-opened filepath here, leaking a second
            # file handle and shadowing the context-managed one; parse the
            # handle the `with` already gave us.
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input — consider yaml.safe_load.
            config = yaml.load(fp)
    except Exception as exc:
        message = 'Unable to open file {filepath}: {exc}'.format(
            filepath=filepath, exc=exc)
        raise OpenFileError(message)
def __init__(self, dataset=None):
    """Wrap the optional dataset and advertise a debug response header."""
    BaseHandler.__init__(self, dataset)
    # single diagnostic header sent with every response
    debug_header = ("X-debug", "True")
    self.additional_headers = [debug_header]
def __getattr__(self, name):
    """Build and cache ``self.dataset`` lazily on first access.

    Any attribute other than ``dataset`` fails as usual.
    """
    if name == 'dataset':
        BaseHandler.__init__(self)
        self.dataset = make_dataset(self.filepath)
        return self.dataset
    raise AttributeError
def __init__(self, dataset=None):
    """Initialize the handler and expose the X-debug marker header."""
    BaseHandler.__init__(self, dataset)
    self.additional_headers = [("X-debug", "True")]
def __init__(self, filepath):
    """Remember the target path and set up a per-file attribute cache."""
    BaseHandler.__init__(self)
    # independent assignments; cache first, then the path it will serve
    self.cache = FileAttributeCache()
    self.filepath = filepath
def __init__(self, filepath):
    """Build a static SSMI/SSMIS dataset model for *filepath*.

    Derives the temporal resolution from the filename, declares the global
    attributes, and constructs one grid per geophysical variable (plus an
    optional time variable) over fixed lon/lat/part_of_day axes.
    """
    BaseHandler.__init__(self)
    self.filepath = filepath
    self.filename = os.path.split(filepath)[1]
    # temporal resolution is encoded in the path/filename; Monthly is the
    # fallback when no pattern matches
    temporal_resolution = "Monthly"
    if 'weeks' in filepath:
        temporal_resolution = "Weekly"
    elif self.daily.match(self.filename):
        # `self.daily` / `self.day_3` are precompiled patterns defined on the
        # class — presumably filename regexes; not visible in this chunk
        temporal_resolution = "Daily"
    elif self.day_3.match(self.filename):
        temporal_resolution = "3-Day"
    # dataset-level (global) attributes
    self.dataset = DatasetType(name=self.filename, attributes={
        "SSMI_GLOBAL" : {
            "CONVENTIONS" : "COARDS",
            "short_name" : "SSMIS",
            "long_name" : "Special Sensor Microwave Image Sounder",
            "producer_agency" : "Remote Sensing Systems",
            "product_version" : "Version-7",
            "spatial_resolution" : "0.25 degree",
            "temporal_resolution" : temporal_resolution,
            "instrument" : "SSMIS",
            "original_filename" : self.filename,
        }
    })
    # only daily (non-weekly) products carry an observation-time variable
    time_variable = False
    if self.daily.match(self.filename) and not 'weeks' in filepath:
        time_variable = True
    # shared geometry for all gridded variables: 0.25-degree global grid,
    # two parts of day
    _dim = ('lon', 'lat', 'part_of_day')
    _shape = (1440, 720, 2)
    _type = UInt16
    self.variables = []
    if time_variable:
        self.variables.append(BaseType(
            name='time', data=None, shape=_shape, dimensions=_dim,
            type=_type, attributes={
                'long_name' : 'Time',
                'add_offset' : 0,
                'scale_factor' : 6,
                '_FillValue' : 254,
                'units' : 'minutes',
                'coordinates': 'lon lat'
            }))
    # each entry below is a packed-integer variable; real value is
    # raw * scale_factor + add_offset, with 254 marking missing data
    self.variables.append(BaseType(
        name='wspd', data=None, shape=_shape, dimensions=_dim,
        type=_type, attributes={
            'long_name' : '10 meter Surface Wind Speed',
            'add_offset' : 0,
            'scale_factor' : 0.2,
            '_FillValue' : 254,
            'units' : 'm/sec',
            'coordinates': 'lon lat'
        }))
    self.variables.append(BaseType(
        name='vapor', data=None, shape=_shape, dimensions=_dim,
        type=_type, attributes=({
            'long_name' : 'Atmospheric Water Vapor',
            'add_offset' : 0,
            'scale_factor' : 0.3,
            '_FillValue' : 254,
            'units' : 'mm',
            'coordinates': 'lon lat'
        })))
    self.variables.append(BaseType(
        name='cloud', data=None, shape=_shape, dimensions=_dim,
        type=_type, attributes=({
            'long_name' : 'Cloud liquid Water',
            'add_offset' : -0.05,
            'scale_factor' : 0.01,
            '_FillValue' : 254,
            'units' : 'mm',
            'coordinates': 'lon lat'
        })))
    self.variables.append(BaseType(
        name='rain', data=None, shape=_shape, dimensions=_dim,
        type=_type, attributes=({
            'long_name' : 'Rain Rate',
            'add_offset' : 0,
            'scale_factor' : 0.1,
            '_FillValue' : 254,
            'units' : 'mm/hr',
            'coordinates': 'lon lat'
        })))
    # coordinate axes: stored unpacked as floats (scale/offset left disabled)
    lonVar = BaseType(
        name='lon', data=None, shape=(1440,), dimensions=('lon',),
        type=Float32, attributes=({
            'long_name' : 'longitude',
            # 'add_offset' : 0,
            # 'scale_factor' : 1,
            'valid_range' : '-180, 180',
            'units' : 'degrees_east'
        }))
    latVar = BaseType(
        name='lat', data=None, shape=(720,), dimensions=('lat',),
        type=Float32, attributes=({
            'long_name' : 'latitude',
            # 'add_offset' : 0,
            # 'scale_factor' : 1,
            'valid_range' : '-90, 90',
            'units' : 'degrees_north'
        }))
    partVar = BaseType(
        name='part_of_day', data=None, shape=(2,), dimensions=('part_of_day',),
        type=UInt16, attributes=({
            'long_name' : 'part_of_day',
            # 'add_offset' : 0,
            # 'scale_factor' : 1,
            'valid_range' : '0, 1',
            'units' : 'part_of_day'
        }))
    self.dataset['lon'] = lonVar
    self.dataset['lat'] = latVar
    self.dataset['part_of_day'] = partVar
    # wrap each variable in a grid together with copies of its map axes
    for variable in self.variables:
        # print variable.name
        g = GridType(name=variable.name)
        g[variable.name] = variable
        # each grid gets its own copy of the axes so per-grid mutation is safe
        # (direct __deepcopy__() call, without a memo dict — project-specific)
        g['lon'] = lonVar.__deepcopy__()
        g['lat'] = latVar.__deepcopy__()
        g['part_of_day'] = partVar.__deepcopy__()
        g.attributes = variable.attributes
        self.dataset[variable.name] = g