def yt_dataset(self, value):
    """Attach a yt dataset, verifying its on-disk hash against any stored one.

    Parameters
    ----------
    value : yt dataset
        The dataset to attach. ``None`` means "nothing to set" and is a no-op.

    Raises
    ------
    IOError
        If *value* does not look like a yt dataset, or if the file's hash
        differs from the previously stored ``self.hash``.
    """
    # BUGFIX: the original tested `value == 0`; the no-op sentinel used by
    # callers (and by the sibling variants of this setter) is None, which
    # would otherwise fall through and raise a misleading IOError.
    if value is None:
        return
    if not hasattr(value, 'dataset_type'):
        raise IOError('not a yt dataset?')
    infile = '%s/%s' % (value.fullpath, value.basename)
    self.skip_hash_check = False
    if hasattr(self, 'hash'):
        # Hashes read back from disk may arrive as numpy bytes; normalize.
        if isinstance(self.hash, np.bytes_):
            self.hash = self.hash.decode('utf8')
        # Avoid shadowing the builtin `hash`; compare directly.
        if get_hash(infile) != self.hash:
            raise IOError('hash mismatch!')
    else:
        # First-time assignment: record the hash for future verification.
        self.hash = get_hash(infile)
    # Assign exactly once (the original repeated this three times).
    self._ds = value
    self._ds_type = DatasetType(self._ds)
    self._assign_simulation_attributes()
def yt_dataset(self, value):
    """Attach a yt dataset after verifying its content hash.

    Parameters
    ----------
    value : yt dataset
        The dataset to attach. ``None`` is a no-op.

    Raises
    ------
    ValueError
        If *value* does not look like a yt dataset.
    RuntimeError
        If the file's hash does not match the stored ``self.hash``.
    """
    if value is None:
        return
    if not hasattr(value, 'dataset_type'):
        raise ValueError('not a yt dataset?')
    # Renamed from `hash` to avoid shadowing the builtin.
    file_hash = get_hash(os.path.join(value.fullpath, value.basename))
    if file_hash != self.hash:
        raise RuntimeError('hash mismatch!')
    # Assign exactly once (the original assigned in the else branch and
    # then again unconditionally).
    self._ds = value
    self._ds_type = DatasetType(self._ds)
def yt_dataset(self, value):
    """Attach a yt dataset, optionally skipping the content-hash check.

    Parameters
    ----------
    value : yt dataset
        The dataset to attach. ``None`` is a no-op.

    Raises
    ------
    ValueError
        If *value* does not look like a yt dataset.
    RuntimeError
        If the file's hash does not match the stored ``self.hash``.
    """
    if value is None:
        return
    if not hasattr(value, 'dataset_type'):
        raise ValueError('not a yt dataset?')
    # When the caller opted out of verification, or no hash was ever stored,
    # reuse the stored value so the comparison below trivially passes.
    # Renamed from `hash` to avoid shadowing the builtin.
    if self.skip_hash_check or self.hash is None:
        file_hash = self.hash
    else:
        file_hash = get_hash(os.path.join(value.fullpath, value.basename))
    if file_hash != self.hash:
        raise RuntimeError('hash mismatch!')
    self._ds = value
    self._ds_type = DatasetType(self._ds)
def yt_dataset(self, value):
    """Attach a yt dataset after verifying its content hash.

    Parameters
    ----------
    value : yt dataset
        The dataset to attach. ``None`` is a no-op.

    Raises
    ------
    IOError
        If *value* does not look like a yt dataset, or if the file's hash
        differs from the stored ``self.hash``.
    """
    if value is None:
        return
    if not hasattr(value, 'dataset_type'):
        raise IOError('not a yt dataset?')
    infile = '%s/%s' % (value.fullpath, value.basename)
    # Compare directly instead of binding to a local named `hash`
    # (which shadows the builtin).
    if get_hash(infile) != self.hash:
        raise IOError('hash mismatch!')
    # Assign exactly once (the original assigned in the else branch and
    # then again unconditionally).
    self._ds = value
    self._ds_type = DatasetType(self._ds)