def test_not_tuple(self):
    """``combine_slices`` must reject arguments that are not tuples."""
    bad = 0
    good = (0,)
    # Each argument position is validated independently.
    with self.assertRaises(TypeError):
        combine_slices(bad, good)
    with self.assertRaises(TypeError):
        combine_slices(good, bad)
def test_not_tuple(self):
    """The function fails when one of the slices is not a tuple.

    Fix: dropped the unused ``combined = ...`` bindings — when the call
    is expected to raise, no return value is ever produced, so binding
    it is dead code and misleading (flake8 F841).
    """
    slice1 = 0
    slice2 = (0,)
    with self.assertRaises(TypeError):
        combine_slices(slice1, slice2)
    with self.assertRaises(TypeError):
        combine_slices(slice2, slice1)
def __getitem__(self, index):
    # Merge the proxy's stored slice with the requested index, then
    # download the variable over DAP and unpack the response.
    slice_ = combine_slices(self._slice, fix_slice(index, self.shape))
    scheme, netloc, path, query, fragment = urlsplit(self.url)
    # NOTE(review): ``self.id`` is interpolated into the URL unquoted
    # here; other variants of this proxy percent-quote it — confirm ids
    # never contain reserved URL characters.
    url = urlunsplit((
        scheme, netloc, path + '.dods',
        self.id + hyperslab(slice_) + '&' + query, fragment))
    resp, data = request(url)
    # A DODS response is the DDS text and the XDR payload separated by
    # a "Data:" marker line.
    dds, xdrdata = data.split('\nData:\n', 1)
    dataset = DDSParser(dds).parse()
    data = data2 = DapUnpacker(xdrdata, dataset).getvalue()
    # Retrieve the data from any parent structure(s).
    for var in walk(dataset):
        if type(var) in (StructureType, DatasetType):
            data = data[0]
        elif var.id == self.id:
            return data
    # Some old servers return the wrong response. :-/
    # I found a server that would return an array to a request
    # for an array inside a grid (instead of a structure with
    # the array); this will take care of it.
    for var in walk(dataset):
        if type(var) in (StructureType, DatasetType):
            data2 = data2[0]
        elif self.id.endswith(var.id):
            return data2
def __getitem__(self, key):
    """Return a clone narrowed by *key*: a child name, a list of
    columns, a constraint expression, or a slice."""
    out = self.clone()
    if isinstance(key, basestring):
        # Descend into a single named child.
        out.id = '{id}.{child}'.format(id=self.id, child=key)
        def pick_one(descr):
            by_name = dict((d[0], d) for d in descr)
            return by_name[key]
        out.descr = apply_to_list(pick_one, out.descr)
    elif isinstance(key, list):
        # Keep only the requested columns, in the requested order.
        def pick_many(descr):
            by_name = dict((d[0], d) for d in descr)
            return [by_name[name] for name in key]
        out.descr = apply_to_list(pick_many, out.descr)
    elif isinstance(key, ConstraintExpression):
        # Accumulate server-side selection clauses.
        out.selection.extend(str(key).split('&'))
    else:
        # Slice the data; a bare int becomes a single-element slice.
        if isinstance(key, int):
            key = slice(key, key + 1)
        out.slice = combine_slices(self.slice, (key,))
    return out
def __getitem__(self, key):
    """Build and return a narrowed clone of this object.

    *key* may be a child name (string), a list of column names, a
    ``ConstraintExpression``, or anything usable as a slice.
    """
    out = self.clone()
    if isinstance(key, basestring):
        # A string selects one child by name.
        out.id = '{id}.{child}'.format(id=self.id, child=key)
        def select_child(descr):
            lookup = dict((entry[0], entry) for entry in descr)
            return lookup[key]
        out.descr = apply_to_list(select_child, out.descr)
    elif isinstance(key, list):
        # A list selects several columns, preserving the given order.
        def select_children(descr):
            lookup = dict((entry[0], entry) for entry in descr)
            return [lookup[name] for name in key]
        out.descr = apply_to_list(select_children, out.descr)
    elif isinstance(key, ConstraintExpression):
        # Constraints are collected for server-side filtering.
        out.selection.extend(str(key).split('&'))
    else:
        # Everything else slices the data rows.
        if isinstance(key, int):
            key = slice(key, key + 1)
        out.slice = combine_slices(self.slice, (key,))
    return out
def __getitem__(self, index):
    # Combine the stored slice with the new request and build the
    # ``.dods`` download URL for this variable (id percent-quoted).
    slice_ = combine_slices(self._slice, fix_slice(index, self.shape))
    scheme, netloc, path, query, fragment = urlsplit(self.url)
    url = urlunsplit((
        scheme, netloc, path + '.dods',
        quote(self.id) + hyperslab(slice_) + '&' + query, fragment))
    resp, data = request(url)
    # Response body: DDS text, a "Data:" separator line, then XDR bytes.
    dds, xdrdata = data.split('\nData:\n', 1)
    dataset = DDSParser(dds).parse()
    data = data2 = DapUnpacker(xdrdata, dataset).getvalue()
    # Retrieve the data from any parent structure(s).
    for var in walk(dataset):
        if type(var) in (StructureType, DatasetType):
            data = data[0]
        elif var.id == self.id:
            return data
    # Some old servers return the wrong response. :-/
    # I found a server that would return an array to a request
    # for an array inside a grid (instead of a structure with
    # the array); this will take care of it.
    for var in walk(dataset):
        if type(var) in (StructureType, DatasetType):
            data2 = data2[0]
        elif self.id.endswith(var.id):
            return data2
def __getitem__(self, index):
    """Download and unpack the hyperslab of this array selected by *index*.

    Fixes: the result shape is computed with ceiling division and
    coerced to ``int`` (a step that does not evenly divide the extent
    still yields an element — e.g. ``slice(0, 5, 2)`` selects 3 values,
    not 2), ``size`` is an ``int`` usable for slicing, and HTTP errors
    fail fast instead of mis-parsing an error page.
    """
    # build download url
    index = combine_slices(self.slice, fix_slice(index, self.shape))
    scheme, netloc, path, query, fragment = urlsplit(self.baseurl)
    url = urlunsplit(
        (scheme, netloc, path + '.dods',
         self.id + hyperslab(index) + '&' + query,
         fragment)).rstrip('&')
    # download and unpack data
    r = requests.get(url)
    r.raise_for_status()
    dds, data = r.content.split('\nData:\n', 1)
    if self.shape:
        # skip size packing
        if self.dtype.char == 'S':
            data = data[4:]
        else:
            data = data[8:]
    # calculate array size (ceiling division, int-valued)
    shape = tuple(
        int(np.ceil((s.stop - s.start) / float(s.step))) for s in index)
    size = int(np.prod(shape))
    if self.dtype == np.byte:
        return np.fromstring(data[:size], 'B')
    elif self.dtype.char == 'S':
        out = []
        for word in range(size):
            n = np.fromstring(data[:4], '>I')  # read length
            data = data[4:]
            out.append(data[:n])
            data = data[n + (-n % 4):]
        return np.array(out, 'S')
    else:
        return np.fromstring(data, self.dtype).reshape(shape)
def test_integer(self):
    """Test slices that are just integers.

    Fix: dropped the unused ``x = np.arange(10)`` fixture — the
    assertion only inspects the combined slice and never indexes an
    array (flake8 F841).
    """
    slice1 = (0,)
    slice2 = (1,)
    combined = combine_slices(slice1, slice2)
    self.assertEqual(combined, (slice(1, 1, 1),))
def test_all_values(self):
    """Combining two fully-specified slices keeps the inner window."""
    data = np.arange(20)
    outer = (slice(0, 8),)
    inner = (slice(5, 6),)
    result = combine_slices(outer, inner)
    self.assertEqual(result, (slice(5, 6, 1),))
    # Applying the combined slice once must equal slicing twice.
    np.testing.assert_array_equal(data[result], data[outer][inner])
def test_second_stop_none(self):
    """An open-ended second slice is clipped to the first slice's stop."""
    data = np.arange(10)
    outer = (slice(0, 8),)
    inner = (slice(5, None),)
    result = combine_slices(outer, inner)
    self.assertEqual(result, (slice(5, 8, 1),))
    # Combined slicing must match sequential slicing.
    np.testing.assert_array_equal(data[result], data[outer][inner])
def test_stops_none(self):
    """Two open-ended slices combine into an open-ended slice."""
    data = np.arange(10)
    outer = (slice(0, None),)
    inner = (slice(5, None),)
    result = combine_slices(outer, inner)
    self.assertEqual(result, (slice(5, None, 1),))
    # Combined slicing must match sequential slicing.
    np.testing.assert_array_equal(data[result], data[outer][inner])
def test_all_values(self):
    """When both slices give explicit start/stop, the result is the
    intersection window with unit step."""
    values = np.arange(20)
    first = (slice(0, 8),)
    second = (slice(5, 6),)
    merged = combine_slices(first, second)
    self.assertEqual(merged, (slice(5, 6, 1),))
    np.testing.assert_array_equal(values[merged], values[first][second])
def test_second_stop_none(self):
    """A ``None`` stop in the second slice inherits the first's stop."""
    values = np.arange(10)
    first = (slice(0, 8),)
    second = (slice(5, None),)
    merged = combine_slices(first, second)
    self.assertEqual(merged, (slice(5, 8, 1),))
    np.testing.assert_array_equal(values[merged], values[first][second])
def test_stops_none(self):
    """When neither slice bounds its stop, the result stays unbounded."""
    values = np.arange(10)
    first = (slice(0, None),)
    second = (slice(5, None),)
    merged = combine_slices(first, second)
    self.assertEqual(merged, (slice(5, None, 1),))
    np.testing.assert_array_equal(values[merged], values[first][second])
def __getitem__(self, index):
    """Download, unpack and return the hyperslab selected by *index*.

    Fixes: ``np.asscalar`` (removed in NumPy >= 1.23) is replaced by
    ``int(np.frombuffer(...)[0])``, and ``np.fromstring`` (deprecated
    for binary input) is replaced by ``np.frombuffer`` plus an explicit
    ``.copy()`` so the returned arrays remain writable.
    """
    # build download url
    index = combine_slices(self.slice, fix_slice(index, self.shape))
    scheme, netloc, path, query, fragment = urlsplit(self.baseurl)
    url = urlunsplit(
        (scheme, netloc, path + ".dods", quote(self.id) + hyperslab(index) + "&" + query, fragment)
    ).rstrip("&")
    # download and unpack data
    logger.info("Fetching URL: %s" % url)
    r = GET(url, self.application, self.session)
    raise_for_status(r)
    dds, data = r.body.split(b"\nData:\n", 1)
    dds = dds.decode(r.content_encoding or "ascii")
    if self.shape:
        # skip size packing
        if self.dtype.char in "SU":
            data = data[4:]
        else:
            data = data[8:]
    # calculate array size (ceiling division: a partial step still
    # yields one element)
    shape = tuple(int(np.ceil((s.stop - s.start) / float(s.step))) for s in index)
    size = int(np.prod(shape))
    if self.dtype == np.byte:
        return np.frombuffer(data[:size], "B").reshape(shape).copy()
    elif self.dtype.char in "SU":
        out = []
        for word in range(size):
            # each string is prefixed with its big-endian uint32 length
            # and padded to a 4-byte boundary
            n = int(np.frombuffer(data[:4], ">I")[0])  # read length
            data = data[4:]
            out.append(data[:n])
            data = data[n + (-n % 4) :]
        return np.array([text_type(x.decode("ascii")) for x in out], "S").reshape(shape)
    else:
        try:
            return np.frombuffer(data, self.dtype).reshape(shape).copy()
        except ValueError as e:
            if str(e) == "total size of new array must be unchanged":
                # server-side failure.
                # it is expected that the user should be mindful of this:
                raise RuntimeError(
                    (
                        "variable {0} could not be properly "
                        "retrieved. To avoid this "
                        "error consider using open_url(..., "
                        "output_grid=False)."
                    ).format(quote(self.id))
                )
            else:
                raise
def __getitem__(self, index):
    """Download, unpack and return the hyperslab selected by *index*.

    Fixes: ``np.asscalar`` (removed in NumPy >= 1.23) is replaced by
    ``int(np.frombuffer(...)[0])``, and ``np.fromstring`` (deprecated
    for binary input) is replaced by ``np.frombuffer`` plus an explicit
    ``.copy()`` so the returned arrays remain writable.
    """
    # build download url
    index = combine_slices(self.slice, fix_slice(index, self.shape))
    scheme, netloc, path, query, fragment = urlsplit(self.baseurl)
    url = urlunsplit((
        scheme, netloc, path + '.dods',
        quote(self.id) + hyperslab(index) + '&' + query,
        fragment)).rstrip('&')
    # download and unpack data
    logger.info("Fetching URL: %s" % url)
    r = GET(url, self.application, self.session)
    raise_for_status(r)
    dds, data = r.body.split(b'\nData:\n', 1)
    dds = dds.decode(r.content_encoding or 'ascii')
    if self.shape:
        # skip size packing
        if self.dtype.char in 'SU':
            data = data[4:]
        else:
            data = data[8:]
    # calculate array size (ceiling division: a partial step still
    # yields one element)
    shape = tuple(
        int(np.ceil((s.stop-s.start)/float(s.step))) for s in index)
    size = int(np.prod(shape))
    if self.dtype == np.byte:
        return np.frombuffer(data[:size], 'B').reshape(shape).copy()
    elif self.dtype.char in 'SU':
        out = []
        for word in range(size):
            # each string is prefixed with its big-endian uint32 length
            # and padded to a 4-byte boundary
            n = int(np.frombuffer(data[:4], '>I')[0])  # read length
            data = data[4:]
            out.append(data[:n])
            data = data[n + (-n % 4):]
        return np.array([text_type(x.decode('ascii'))
                         for x in out], 'S').reshape(shape)
    else:
        try:
            return np.frombuffer(data, self.dtype).reshape(shape).copy()
        except ValueError as e:
            if str(e) == 'total size of new array must be unchanged':
                # server-side failure.
                # it is expected that the user should be mindful of this:
                raise RuntimeError(
                    ('variable {0} could not be properly '
                     'retrieved. To avoid this '
                     'error consider using open_url(..., '
                     'output_grid=False).').format(quote(self.id)))
            else:
                raise
def __getitem__(self, index):
    """Download data for all the tiles containing the request.

    ``self.index`` records which cells are already cached, so only the
    complement of the request is fetched from the parent proxy.
    """
    slice_ = combine_slices(self._slice, fix_slice(index, self.shape))
    requested = self.parse_request(slice_)
    # Determine, under the read lock, which requested cells are missing.
    with self.lock.readlock:
        needed = requested & ~self.index[:]
    # update cache with needed data
    with self.lock.writelock:
        for tile in self.get_tiles(needed):
            self.cache[tile] = super(CachingArrayProxy, self).__getitem__(tile)
        # update index with newly requested data
        self.index[:] = self.index[:] | needed
    return self.cache[slice_]
def __getitem__(self, index):
    """Download and unpack the hyperslab selected by *index*.

    Fixes: the byte and string branches now ``.reshape(shape)`` their
    result so every branch returns the requested hyperslab shape
    (matching the generic dtype branch), and the per-string length is
    converted to a plain ``int`` instead of slicing with a one-element
    array.
    """
    # build download url
    index = combine_slices(self.slice, fix_slice(index, self.shape))
    scheme, netloc, path, query, fragment = urlsplit(self.baseurl)
    url = urlunsplit((
        scheme, netloc, path + '.dods',
        quote(self.id) + hyperslab(index) + '&' + query,
        fragment)).rstrip('&')
    # download and unpack data
    logger.info("Fetching URL: %s" % url)
    r = GET(url, self.application, self.session)
    raise_for_status(r)
    dds, data = r.body.split(b'\nData:\n', 1)
    dds = dds.decode(r.content_encoding or 'ascii')
    if self.shape:
        # skip size packing
        if self.dtype.char in 'SU':
            data = data[4:]
        else:
            data = data[8:]
    # calculate array size
    shape = tuple(
        int(np.ceil((s.stop-s.start)/float(s.step))) for s in index)
    size = int(np.prod(shape))
    if self.dtype == np.byte:
        return np.fromstring(data[:size], 'B').reshape(shape)
    elif self.dtype.char in 'SU':
        out = []
        for word in range(size):
            # strings are length-prefixed (big-endian uint32) and
            # padded to a 4-byte boundary
            n = int(np.fromstring(data[:4], '>I')[0])  # read length
            data = data[4:]
            out.append(data[:n])
            data = data[n + (-n % 4):]
        return np.array([
            text_type(x.decode('ascii')) for x in out], 'S').reshape(shape)
    else:
        return np.fromstring(data, self.dtype).reshape(shape)
def __getitem__(self, key):
    # Return a deep copy narrowed by *key*: a ConstraintExpression adds
    # a server-side selection, a string selects a child variable, a
    # tuple sets the projected children, and anything else is applied
    # as a slice.
    out = copy.deepcopy(self)
    if isinstance(key, ConstraintExpression):
        scheme, netloc, path, query, fragment = urlsplit(self.url)
        # Fold the selection into the URL's query string.
        out.url = urlunsplit((
            scheme, netloc, path, str(key & query), fragment))
        if out._slice != (slice(None),):
            warnings.warn('Selection %s will be applied before projection "%s".' % (
                key, hyperslab(out._slice)))
    elif isinstance(key, basestring):
        out._slice = (slice(None),)
        out.children = ()
        parent = self.id
        if ',' in parent:
            # Multiple projections: keep only the first one's parent path.
            parent = parent.split(',', 1)[0].rsplit('.', 1)[0]
        out.id = '%s%s.%s' % (parent, hyperslab(self._slice), key)
    elif isinstance(key, tuple):
        out.children = key[:]
    else:
        # ``sys.maxint`` (Python 2) stands in for an unknown axis length.
        out._slice = combine_slices(self._slice, fix_slice(key, (sys.maxint,)))
    return out
def __getitem__(self, key):
    """Return a clone restricted by *key* (child name, column list,
    constraint expression, or slice)."""
    out = self.clone()
    if isinstance(key, basestring):
        # Select a single child column.
        out.id = '{id}.{child}'.format(id=self.id, child=key)
        out.cols = key
    elif isinstance(key, list):
        # Restrict to the requested columns.
        out.cols = tuple(key)
    elif isinstance(key, ConstraintExpression):
        # Accumulate server-side selection clauses.
        out.selection.extend(str(key).split('&'))
    else:
        # Slice the rows; a bare int selects a single row.
        if isinstance(key, int):
            key = slice(key, key + 1)
        out.slice = combine_slices(self.slice, (key,))
    return out
def __getitem__(self, index):
    """Download and unpack the hyperslab of this array selected by *index*.

    Fix: the result shape is computed with ceiling division and coerced
    to ``int`` — a step that does not evenly divide the extent still
    yields an element (e.g. ``slice(0, 5, 2)`` selects 3 values, not
    2), and ``size`` must be an ``int`` to be usable for slicing.
    """
    # build download url
    index = combine_slices(self.slice, fix_slice(index, self.shape))
    scheme, netloc, path, query, fragment = urlsplit(self.baseurl)
    url = urlunsplit((
        scheme, netloc, path + '.dods',
        self.id + hyperslab(index) + '&' + query,
        fragment)).rstrip('&')
    # download and unpack data
    r = requests.get(url)
    r.raise_for_status()
    dds, data = r.content.split('\nData:\n', 1)
    if self.shape:
        # skip size packing
        if self.dtype.char == 'S':
            data = data[4:]
        else:
            data = data[8:]
    # calculate array size (ceiling division, int-valued)
    shape = tuple(
        int(np.ceil((s.stop - s.start) / float(s.step))) for s in index)
    size = int(np.prod(shape))
    if self.dtype == np.byte:
        return np.fromstring(data[:size], 'B')
    elif self.dtype.char == 'S':
        out = []
        for word in range(size):
            n = np.fromstring(data[:4], '>I')  # read length
            data = data[4:]
            out.append(data[:n])
            data = data[n + (-n % 4):]
        return np.array(out, 'S')
    else:
        return np.fromstring(data, self.dtype).reshape(shape)
def __getitem__(self, key):
    """Return a new object representing a subset of the data."""
    out = copy.copy(self)
    if isinstance(key, string_types):
        # descend into a named child
        out.template = out.template[key]
    elif isinstance(key, list):
        # restrict to the requested columns
        out.sub_children = True
        out.template._keys = key
    elif isinstance(key, ConstraintExpression):
        # accumulate server-side selections
        out.selection.extend(str(key).split('&'))
    else:
        # slice the data; ints become single-element slices
        if isinstance(key, int):
            key = slice(key, key + 1)
        out.slice = combine_slices(self.slice, (key,))
    return out
def __getitem__(self, key):
    # Return a deep copy narrowed by *key*: a ConstraintExpression adds
    # a server-side selection, a string selects a child variable, a
    # tuple sets the projected children, and anything else is applied
    # as a slice.
    out = copy.deepcopy(self)
    if isinstance(key, ConstraintExpression):
        scheme, netloc, path, query, fragment = urlsplit(self.url)
        # Fold the selection into the URL's query string.
        out.url = urlunsplit(
            (scheme, netloc, path, str(key & query), fragment))
        if out._slice != (slice(None), ):
            warnings.warn(
                'Selection %s will be applied before projection "%s".'
                % (key, hyperslab(out._slice)))
    elif isinstance(key, basestring):
        out._slice = (slice(None), )
        out.children = ()
        parent = self.id
        if ',' in parent:
            # Multiple projections: keep only the first one's parent path.
            parent = parent.split(',', 1)[0].rsplit('.', 1)[0]
        out.id = '%s%s.%s' % (parent, hyperslab(self._slice), key)
    elif isinstance(key, tuple):
        out.children = key[:]
    else:
        # ``sys.maxint`` (Python 2) stands in for an unknown axis length.
        out._slice = combine_slices(self._slice,
                                    fix_slice(key, (sys.maxint, )))
    return out
def __getitem__(self, key):
    """Return a new object representing a subset of the data."""
    out = copy.copy(self)
    if isinstance(key, string_types):
        # A string descends into the named child of the template.
        out.template = out.template[key]
    elif isinstance(key, list):
        # A list restricts the template to the given columns.
        out.sub_children = True
        out.template._keys = key
    elif isinstance(key, ConstraintExpression):
        # A constraint adds server-side selection clauses.
        out.selection.extend(str(key).split("&"))
    else:
        # Anything else slices the data rows.
        if isinstance(key, int):
            key = slice(key, key + 1)
        out.slice = combine_slices(self.slice, (key,))
    return out
def test_integer(self):
    """Two integer indices combine into a unit-step slice."""
    first = (0,)
    second = (1,)
    result = combine_slices(first, second)
    self.assertEqual(result, (slice(1, 1, 1),))