コード例 #1
0
    def from_hdf5(cls, filename, dataset_name=None):
        r"""Read a dataset from an hdf5 file and convert it into a YTArray.

        Parameters
        ----------
        filename: string
            The filename of the hdf5 file.

        dataset_name: string
            The name of the dataset to read from.  If the dataset has a units
            attribute, attempt to infer units as well.

        """
        import h5py
        from yt.extern.six.moves import cPickle as pickle

        if dataset_name is None:
            dataset_name = 'array_data'

        # Open read-only inside a context manager: the original called
        # f.close() only on the success path, leaking the handle on error,
        # and relied on h5py's (deprecated) default open mode.
        with h5py.File(filename, 'r') as f:
            dataset = f[dataset_name]
            data = dataset[:]
            units = dataset.attrs.get('units', '')
            if 'unit_registry' in dataset.attrs.keys():
                # NOTE(review): unpickling file contents -- only safe for
                # trusted data files.
                unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring())
            else:
                unit_lut = None
        registry = UnitRegistry(lut=unit_lut, add_default_symbols=False)
        return cls(data, units, registry=registry)
コード例 #2
0
    def from_hdf5(cls, filename, dataset_name=None):
        r"""Build a YTArray from a dataset stored in an hdf5 file.

        Parameters
        ----------
        filename: string
            Path to the hdf5 file to read.

        dataset_name: string
            Name of the dataset to load.  When the dataset carries a units
            attribute, those units (and any pickled unit registry) are
            applied to the result.

        """
        import h5py
        from yt.extern.six.moves import cPickle as pickle

        name = "array_data" if dataset_name is None else dataset_name

        handle = h5py.File(filename)
        ds = handle[name]
        values = ds[:]
        unit_str = ds.attrs.get("units", "")
        # A pickled unit-registry lookup table may be stored alongside the data.
        pickled_lut = ds.attrs.get("unit_registry")
        lut = None if pickled_lut is None else pickle.loads(pickled_lut.tostring())
        handle.close()
        reg = UnitRegistry(lut=lut, add_default_symbols=False)
        return cls(values, unit_str, registry=reg)
コード例 #3
0
 def load_object(self, name):
     """
     Load and return an object from the data_file using the Pickle protocol,
     under the name *name* on the node /Objects.
     """
     obj = self.get_data("/Objects", name)
     if obj is None:
         return
     # NOTE(review): unpickling stored data -- only safe for trusted files.
     obj = cPickle.loads(obj.value)
     if iterable(obj) and len(obj) == 2:
         # Stored as a pair; keep just the object, not the ds.
         obj = obj[1]
     if hasattr(obj, '_fix_pickle'):
         obj._fix_pickle()
     return obj
コード例 #4
0
 def load_object(self, name):
     """
     Retrieve the pickled object stored under *name* on the node /Objects
     in the data_file, unpickle it, and return it (or None if absent).
     """
     raw = self.get_data("/Objects", name)
     if raw is None:
         return
     unpickled = cPickle.loads(raw.value)
     if iterable(unpickled) and len(unpickled) == 2:
         # Just the object, not the ds
         unpickled = unpickled[1]
     if hasattr(unpickled, '_fix_pickle'):
         unpickled._fix_pickle()
     return unpickled
コード例 #5
0
ファイル: framework.py プロジェクト: NeilZaim/yt
 def get(self, ds_name, default=None):
     """
     Return the stored reference answer for *ds_name*, downloading and
     caching it on first use.  Returns *default* when no reference answer
     set is configured.  Raises YTNoOldAnswer when the answer is missing
     from the store, YTCloudError when the download repeatedly fails.
     """
     if self.reference_name is None:
         return default
     if ds_name in self.cache:
         return self.cache[ds_name]
     url = _url_path.format(self.reference_name, ds_name)
     try:
         resp = urllib.request.urlopen(url)
     except urllib.error.HTTPError as err:
         # Chain the HTTP error so the underlying status is not lost.
         raise YTNoOldAnswer(url) from err
     try:
         for _ in range(3):
             try:
                 data = resp.read()
             except Exception:
                 # Transient read failure; back off briefly and retry.
                 time.sleep(0.01)
             else:
                 # We were successful
                 break
         else:
             # Raise error if all tries were unsuccessful
             raise YTCloudError(url)
     finally:
         # Always release the HTTP connection, even when the reads fail
         # (the original leaked it on the YTCloudError path).
         resp.close()
     # This is dangerous, but we have a controlled S3 environment
     rv = cPickle.loads(data)
     self.cache[ds_name] = rv
     return rv
コード例 #6
0
 def get(self, ds_name, default=None):
     """
     Return the stored reference answer for *ds_name*, downloading and
     caching it on first use.  Returns *default* when no reference answer
     set is configured.  Raises YTNoOldAnswer when the answer is missing
     from the store, YTCloudError when the download repeatedly fails.
     """
     if self.reference_name is None:
         return default
     if ds_name in self.cache:
         return self.cache[ds_name]
     url = _url_path.format(self.reference_name, ds_name)
     try:
         resp = urllib.request.urlopen(url)
     except urllib.error.HTTPError as ex:
         # The original bound `ex` but never used it; chain it so the
         # underlying HTTP status is preserved.
         raise YTNoOldAnswer(url) from ex
     try:
         for _ in range(3):
             try:
                 data = resp.read()
             # Was a bare `except:`, which would also swallow
             # KeyboardInterrupt/SystemExit; narrow it.
             except Exception:
                 time.sleep(0.01)
             else:
                 # We were successful
                 break
         else:
             # Raise error if all tries were unsuccessful
             raise YTCloudError(url)
     finally:
         # Always release the HTTP connection, even when the reads fail.
         resp.close()
     # This is dangerous, but we have a controlled S3 environment
     rv = cPickle.loads(data)
     self.cache[ds_name] = rv
     return rv